5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_list
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
80 elif f['architecture'] == 'source' and f["type"] == 'unreadable':
81 utils.warn('unreadable source file (will continue and hope for the best)')
85 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
87 # Validate the override type
88 type_id = get_override_type(file_type, session)
90 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
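# Illustrative use (sketch; the entry dict below is made up): a file entry
# that already carries a "dbtype" short-circuits the regex checks:
#
#   entry = {"dbtype": "udeb", "type": "udeb", "architecture": "amd64"}
#   file_type = get_type(entry, session)    # "udeb", once the override
#                                           # type has been validated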
94 ################################################################################
96 # Determine what parts in a .changes are NEW
98 def determine_new(filename, changes, files, warn=1, session=None, dsc=None, new=None):
100 Determine what parts in a C{changes} file are NEW.
103 @param filename: changes filename
105 @type changes: Upload.Pkg.changes dict
106 @param changes: Changes dictionary
108 @type files: Upload.Pkg.files dict
109 @param files: Files dictionary
112 @param warn: Warn if overrides are added for (old)stable
114 @type dsc: Upload.Pkg.dsc dict
115 @param dsc: (optional); Dsc dictionary
118 @param new: new packages as returned by a previous call to this function, but override information may have changed
121 @return: dictionary of NEW components.
124 # TODO: This should all use the database instead of parsing the changes
130 dbchg = get_dbchange(filename, session)
132 print "Warning: cannot find changes file in database; won't check byhand"
134 # Try to get the Package-List field from an included .dsc file (if possible).
136 for package, entry in build_package_list(dsc, session).items():
137 if package not in new:
140 # Build up a list of potentially new things
141 for name, f in files.items():
142 # Keep a record of byhand elements
143 if f["section"] == "byhand":
148 priority = f["priority"]
149 section = f["section"]
150 file_type = get_type(f, session)
151 component = f["component"]
153 if file_type == "dsc":
156 if not new.has_key(pkg):
158 new[pkg]["priority"] = priority
159 new[pkg]["section"] = section
160 new[pkg]["type"] = file_type
161 new[pkg]["component"] = component
162 new[pkg]["files"] = []
164 old_type = new[pkg]["type"]
165 if old_type != file_type:
166 # source gets trumped by deb or udeb
167 if old_type == "dsc":
168 new[pkg]["priority"] = priority
169 new[pkg]["section"] = section
170 new[pkg]["type"] = file_type
171 new[pkg]["component"] = component
173 new[pkg]["files"].append(name)
175 if f.has_key("othercomponents"):
176 new[pkg]["othercomponents"] = f["othercomponents"]
178 # Fix up the list of target suites
180 for suite in changes["suite"].keys():
181 oldsuite = get_suite(suite, session)
183 print "WARNING: Invalid suite %s found" % suite
186 if oldsuite.overridesuite:
187 newsuite = get_suite(oldsuite.overridesuite, session)
190 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191 oldsuite.overridesuite, suite)
192 del changes["suite"][suite]
193 changes["suite"][oldsuite.overridesuite] = 1
195 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
196 oldsuite.overridesuite, suite)
198 # Check for unprocessed byhand files
199 if dbchg is not None:
200 for b in byhand.keys():
201 # Find the file entry in the database
203 for f in dbchg.files:
206 # If it's processed, we can ignore it
212 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b
214 # Check for new stuff
215 for suite in changes["suite"].keys():
216 for pkg in new.keys():
217 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
219 for file_entry in new[pkg]["files"]:
220 if files[file_entry].has_key("new"):
221 del files[file_entry]["new"]
225 for s in ['stable', 'oldstable']:
226 if changes["suite"].has_key(s):
227 print "WARNING: overrides will be added for %s!" % s
228 for pkg in new.keys():
229 if new[pkg].has_key("othercomponents"):
230 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
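# Illustrative call (hypothetical 'upload' object with a loaded .changes):
# collect the NEW parts of a parsed upload and see where they would go:
#
#   new = determine_new(upload.pkg.changes_file, upload.pkg.changes,
#                       upload.pkg.files, warn=0, session=session,
#                       dsc=upload.pkg.dsc)
#   for pkg in new.keys():
#       print "%s -> %s/%s" % (pkg, new[pkg]["component"], new[pkg]["section"])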
234 ################################################################################
236 def check_valid(overrides, session):
237 """Check if section and priority for new overrides exist in database.
239 Additionally performs sanity checks:
240 - debian-installer packages have to be udeb (or source)
241 - non-debian-installer packages cannot be udeb
243 @type overrides: list of dict
244 @param overrides: list of overrides to check. The overrides need
245 to be given in the form of a dict with the following keys:
247 - package: package name
251 - type: type of requested override ('dsc', 'deb' or 'udeb')
253 All values are strings.
256 @return: C{True} if all overrides are valid, C{False} if there is any
262 if session.query(Priority).filter_by(priority=o['priority']).first() is None:
264 if session.query(Section).filter_by(section=o['section']).first() is None:
266 if get_mapped_component(o['component'], session) is None:
268 if o['type'] not in ('dsc', 'deb', 'udeb'):
269 raise Exception('Unknown override type {0}'.format(o['type']))
270 if o['type'] == 'udeb' and o['section'] != 'debian-installer':
272 if o['section'] == 'debian-installer' and o['type'] not in ('dsc', 'udeb'):
274 all_valid = all_valid and o['valid']
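# Example of the override shape check_valid() expects (values made up):
#
#   overrides = [{
#       'package':   'dak',
#       'priority':  'optional',
#       'section':   'admin',
#       'component': 'main',
#       'type':      'deb',
#   }]
#   if not check_valid(overrides, session):
#       pass  # at least one entry had an unknown priority/section/component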
277 ###############################################################################
279 # Used by Upload.check_timestamps
280 class TarTime(object):
281 def __init__(self, future_cutoff, past_cutoff):
283 self.future_cutoff = future_cutoff
284 self.past_cutoff = past_cutoff
287 self.future_files = {}
288 self.ancient_files = {}
290 def callback(self, member, data):
291 if member.mtime > self.future_cutoff:
292 self.future_files[member.name] = member.mtime
293 if member.mtime < self.past_cutoff:
294 self.ancient_files[member.name] = member.mtime
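# Sketch of how this class is driven (mirrors check_timestamps() below):
#
#   tar = TarTime(future_cutoff, past_cutoff)
#   deb = apt_inst.DebFile(filename)
#   deb.control.go(tar.callback)    # go() calls back once per tar member
#   if tar.future_files or tar.ancient_files:
#       pass  # reject: timestamps outside the allowed window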
296 ###############################################################################
298 def prod_maintainer(notes, upload):
300 changes = upload.changes
302 # Here we prepare an editor and get them ready to prod...
303 (fd, temp_filename) = utils.temp_filename()
304 temp_file = os.fdopen(fd, 'w')
306 temp_file.write(note.comment)
308 editor = os.environ.get("EDITOR", "vi")
311 os.system("%s %s" % (editor, temp_filename))
312 temp_fh = utils.open_file(temp_filename)
313 prod_message = "".join(temp_fh.readlines())
315 print "Prod message:"
316 print utils.prefix_multi_line_string(prod_message, " ", include_blank_lines=1)
317 prompt = "[P]rod, Edit, Abandon, Quit ?"
319 while prompt.find(answer) == -1:
320 answer = utils.our_raw_input(prompt)
321 m = re_default_answer.search(prompt)
324 answer = answer[:1].upper()
325 os.unlink(temp_filename)
331 # Otherwise, do the prodding...
332 user_email_address = utils.whoami() + " <%s>" % (
333 cnf["Dinstall::MyAdminAddress"])
335 changed_by = changes.changedby or changes.maintainer
336 maintainer = changes.maintainer
337 maintainer_to = utils.mail_addresses_for_upload(maintainer, changed_by, changes.fingerprint)
340 '__SOURCE__': upload.changes.source,
341 '__CHANGES_FILENAME__': upload.changes.changesname,
342 '__MAINTAINER_TO__': ", ".join(maintainer_to),
345 Subst["__FROM_ADDRESS__"] = user_email_address
346 Subst["__PROD_MESSAGE__"] = prod_message
347 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
349 prod_mail_message = utils.TemplateSubst(
350 Subst, cnf["Dir::Templates"] + "/process-new.prod")
353 utils.send_mail(prod_mail_message)
355 print "Sent prodding message"
357 ################################################################################
359 def edit_note(note, upload, session, trainee=False):
360 # Write the current data to a temporary file
361 (fd, temp_filename) = utils.temp_filename()
362 editor = os.environ.get("EDITOR", "vi")
365 os.system("%s %s" % (editor, temp_filename))
366 temp_file = utils.open_file(temp_filename)
367 newnote = temp_file.read().rstrip()
370 print utils.prefix_multi_line_string(newnote, " ")
371 prompt = "[D]one, Edit, Abandon, Quit ?"
373 while prompt.find(answer) == -1:
374 answer = utils.our_raw_input(prompt)
375 m = re_default_answer.search(prompt)
378 answer = answer[:1].upper()
379 os.unlink(temp_filename)
386 comment = NewComment()
387 comment.package = upload.changes.source
388 comment.version = upload.changes.version
389 comment.comment = newnote
390 comment.author = utils.whoami()
391 comment.trainee = trainee
395 ###############################################################################
397 # FIXME: Should move into the database
398 # suite names DMs can upload to
399 dm_suites = ['unstable', 'experimental', 'squeeze-backports']
401 def get_newest_source(source, session):
402 'returns the newest DBSource object in dm_suites'
403 ## the most recent version of the package uploaded to unstable or
404 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
405 ## section of its control file
406 q = session.query(DBSource).filter_by(source = source). \
407 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
408 order_by(desc('source.version'))
411 def get_suite_version_by_source(source, session):
412 'returns a list of tuples (suite_name, version) for source package'
413 q = session.query(Suite.suite_name, DBSource.version). \
414 join(Suite.sources).filter_by(source = source)
417 def get_source_by_package_and_suite(package, suite_name, session):
419 returns a DBSource query filtered by DBBinary.package and this package's
422 return session.query(DBSource). \
423 join(DBSource.binaries).filter_by(package = package). \
424 join(DBBinary.suites).filter_by(suite_name = suite_name)
426 def get_suite_version_by_package(package, arch_string, session):
428 returns a list of tuples (suite_name, version) for binary package and
431 return session.query(Suite.suite_name, DBBinary.version). \
432 join(Suite.binaries).filter_by(package = package). \
433 join(DBBinary.architecture). \
434 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
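# Example use of the query helpers above (hypothetical package names):
#
#   src = get_newest_source('hello', session)
#   if src is not None:
#       print "%s %s" % (src.source, src.version)
#   for suite_name, version in get_suite_version_by_source('hello', session):
#       print "%s: %s" % (suite_name, version)
#   for suite_name, version in get_suite_version_by_package('libhello0',
#                                                           'amd64', session):
#       print "%s: %s" % (suite_name, version)
#
# get_newest_source() is assumed to return None if the source is absent
# from all dm_suites.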
436 class Upload(object):
438 Everything that has to do with processing an upload.
446 ###########################################################################
449 """ Reset a number of internal variables."""
451 # Initialize the substitution template map
454 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
455 if cnf.has_key("Dinstall::BugServer"):
456 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
457 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
458 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
464 self.later_check_files = []
468 def package_info(self):
470 Format various messages from this Upload to send to the maintainer.
474 ('Reject Reasons', self.rejects),
475 ('Warnings', self.warnings),
476 ('Notes', self.notes),
480 for title, messages in msgs:
482 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
487 ###########################################################################
488 def update_subst(self):
489 """ Set up the per-package template substitution mappings """
493 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
494 if not self.pkg.changes.has_key("architecture") or not \
495 isinstance(self.pkg.changes["architecture"], dict):
496 self.pkg.changes["architecture"] = { "Unknown" : "" }
498 # and maintainer2047 may not exist.
499 if not self.pkg.changes.has_key("maintainer2047"):
500 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
502 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
503 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
504 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
506 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
507 if self.pkg.changes["architecture"].has_key("source") and \
508 self.pkg.changes["changedby822"] != "" and \
509 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
511 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
512 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
513 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
515 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
516 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
517 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
519 # Process policy doesn't set the fingerprint field and I don't want to make it
520 # do it for now as I don't want to have to deal with the case where we accepted
521 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
522 # the meantime so the package will be remarked as rejectable. Urgh.
523 # TODO: Fix this properly
524 if self.pkg.changes.has_key('fingerprint'):
525 session = DBConn().session()
526 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
527 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
528 if self.pkg.changes.has_key("sponsoremail"):
529 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
532 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
533 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
535 # Apply any global override of the Maintainer field
536 if cnf.get("Dinstall::OverrideMaintainer"):
537 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
538 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
540 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
541 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
542 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
543 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
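# The map built above feeds utils.TemplateSubst(); given a template
# fragment such as (sketch, not one of the real template files):
#
#   From: __MAINTAINER_FROM__
#   To: __MAINTAINER_TO__
#   Subject: Processed __SOURCE__ __VERSION__
#
# each __KEY__ placeholder is replaced with self.Subst["__KEY__"].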
545 ###########################################################################
546 def load_changes(self, filename):
548 Load a changes file and set up a dictionary around it. Also checks for mandatory
551 @type filename: string
552 @param filename: Changes filename, full path.
555 @return: whether the changes file was valid or not. We may want to
556 reject even if this is True (see what gets put in self.rejects).
557 This is simply to prevent us even trying things later which will
558 fail because we couldn't properly parse the file.
561 self.pkg.changes_file = filename
563 # Parse the .changes field into a dictionary
565 self.pkg.changes.update(parse_changes(filename))
566 except CantOpenError:
567 self.rejects.append("%s: can't read file." % (filename))
569 except ParseChangesError as line:
570 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
572 except ChangesUnicodeError:
573 self.rejects.append("%s: changes file not proper utf-8" % (filename))
576 # Parse the Files field from the .changes into another dictionary
578 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
579 except ParseChangesError as line:
580 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
582 except UnknownFormatError as format:
583 self.rejects.append("%s: unknown format '%s'." % (filename, format))
586 # Check for mandatory fields
587 for i in ("distribution", "source", "binary", "architecture",
588 "version", "maintainer", "files", "changes", "description"):
589 if not self.pkg.changes.has_key(i):
590 # Avoid undefined errors later
591 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
594 # Strip a source version in brackets from the source field
595 if re_strip_srcver.search(self.pkg.changes["source"]):
596 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
598 # Ensure the source field is a valid package name.
599 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
600 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
602 # Split multi-value fields into a lower-level dictionary
603 for i in ("architecture", "distribution", "binary", "closes"):
604 o = self.pkg.changes.get(i, "")
606 del self.pkg.changes[i]
608 self.pkg.changes[i] = {}
611 self.pkg.changes[i][j] = 1
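# After this split, multi-value fields are flat dictionaries keyed by
# value, e.g. (illustrative):
#
#   self.pkg.changes["architecture"] == {"source": 1, "amd64": 1}
#   self.pkg.changes["distribution"] == {"unstable": 1}
#
# which is why later code tests membership with has_key()/keys().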
613 # Fix the Maintainer: field to be RFC822/2047 compatible
615 (self.pkg.changes["maintainer822"],
616 self.pkg.changes["maintainer2047"],
617 self.pkg.changes["maintainername"],
618 self.pkg.changes["maintaineremail"]) = \
619 fix_maintainer(self.pkg.changes["maintainer"])
620 except ParseMaintError as msg:
621 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
622 % (filename, self.pkg.changes["maintainer"], msg))
624 # ...likewise for the Changed-By: field if it exists.
626 (self.pkg.changes["changedby822"],
627 self.pkg.changes["changedby2047"],
628 self.pkg.changes["changedbyname"],
629 self.pkg.changes["changedbyemail"]) = \
630 fix_maintainer(self.pkg.changes.get("changed-by", ""))
631 except ParseMaintError as msg:
632 self.pkg.changes["changedby822"] = ""
633 self.pkg.changes["changedby2047"] = ""
634 self.pkg.changes["changedbyname"] = ""
635 self.pkg.changes["changedbyemail"] = ""
637 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
638 % (filename, self.pkg.changes["changed-by"], msg))
640 # Ensure all the values in Closes: are numbers
641 if self.pkg.changes.has_key("closes"):
642 for i in self.pkg.changes["closes"].keys():
643 if re_isanum.match(i) is None:
644 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
646 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
647 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
648 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
650 # Check the .changes is non-empty
651 if not self.pkg.files:
652 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
655 # Changes was syntactically valid even if we'll reject
658 ###########################################################################
660 def check_distributions(self):
661 "Check and map the Distribution field"
665 # Handle suite mappings
666 for m in Cnf.value_list("SuiteMappings"):
669 if mtype == "map" or mtype == "silent-map":
670 (source, dest) = args[1:3]
671 if self.pkg.changes["distribution"].has_key(source):
672 del self.pkg.changes["distribution"][source]
673 self.pkg.changes["distribution"][dest] = 1
674 if mtype != "silent-map":
675 self.notes.append("Mapping %s to %s." % (source, dest))
676 if self.pkg.changes.has_key("distribution-version"):
677 if self.pkg.changes["distribution-version"].has_key(source):
678 self.pkg.changes["distribution-version"][source]=dest
679 elif mtype == "map-unreleased":
680 (source, dest) = args[1:3]
681 if self.pkg.changes["distribution"].has_key(source):
682 for arch in self.pkg.changes["architecture"].keys():
683 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
684 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
685 del self.pkg.changes["distribution"][source]
686 self.pkg.changes["distribution"][dest] = 1
688 elif mtype == "ignore":
690 if self.pkg.changes["distribution"].has_key(suite):
691 del self.pkg.changes["distribution"][suite]
692 self.warnings.append("Ignoring %s as a target suite." % (suite))
693 elif mtype == "reject":
695 if self.pkg.changes["distribution"].has_key(suite):
696 self.rejects.append("Uploads to %s are not accepted." % (suite))
697 elif mtype == "propup-version":
698 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
700 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
701 if self.pkg.changes["distribution"].has_key(args[1]):
702 self.pkg.changes.setdefault("distribution-version", {})
703 for suite in args[2:]:
704 self.pkg.changes["distribution-version"][suite] = suite
706 # Ensure there is (still) a target distribution
707 if len(self.pkg.changes["distribution"].keys()) < 1:
708 self.rejects.append("No valid distribution remaining.")
710 # Ensure target distributions exist
711 for suite in self.pkg.changes["distribution"].keys():
712 if not get_suite(suite.lower()):
713 self.rejects.append("Unknown distribution `%s'." % (suite))
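# Example SuiteMappings entries as they might appear in the dak
# configuration (illustrative values; the real dak.conf is authoritative):
#
#   SuiteMappings
#   {
#     "map stable proposed-updates";
#     "silent-map oldstable-security oldstable";
#     "ignore unreleased";
#     "reject experimental-security";
#     "propup-version testing-security unstable";
#   };
#
# Each entry is split on whitespace and the first token selects the
# mtype branch above.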
715 ###########################################################################
717 def binary_file_checks(self, f, session):
719 entry = self.pkg.files[f]
721 # Extract package control information
722 deb_file = utils.open_file(f)
724 control = apt_pkg.TagSection(utils.deb_extract_control(deb_file))
726 self.rejects.append("%s: deb_extract_control() raised %s." % (f, sys.exc_info()[0]))
728 # Can't continue, none of the checks on control would work.
733 # Check for mandatory fields
734 for field in [ "Package", "Architecture", "Version", "Description" ]:
735 if field not in control:
737 self.rejects.append("%s: No %s field in control." % (f, field))
740 # Ensure the package name matches the one given in the .changes
741 if not self.pkg.changes["binary"].has_key(control.find("Package", "")):
742 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.find("Package", "")))
744 # Validate the package field
745 package = control["Package"]
746 if not re_valid_pkg_name.match(package):
747 self.rejects.append("%s: invalid package name '%s'." % (f, package))
749 # Validate the version field
750 version = control["Version"]
751 if not re_valid_version.match(version):
752 self.rejects.append("%s: invalid version number '%s'." % (f, version))
754 # Ensure the architecture of the .deb is one we know about.
755 default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
756 architecture = control["Architecture"]
757 upload_suite = self.pkg.changes["distribution"].keys()[0]
759 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
760 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
761 self.rejects.append("Unknown architecture '%s'." % (architecture))
763 # Ensure the architecture of the .deb is one of the ones
764 # listed in the .changes.
765 if not self.pkg.changes["architecture"].has_key(architecture):
766 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
768 # Sanity-check the Depends field
769 depends = control.find("Depends")
771 self.rejects.append("%s: Depends field is empty." % (f))
773 # Sanity-check the Provides field
774 provides = control.find("Provides")
775 if provides is not None:
776 provide = re_spacestrip.sub('', provides)
778 self.rejects.append("%s: Provides field is empty." % (f))
779 prov_list = provide.split(",")
780 for prov in prov_list:
781 if not re_valid_pkg_name.match(prov):
782 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
784 # If there is a Built-Using field, we need to check we can find the
785 # exact source version
786 built_using = control.find("Built-Using")
787 if built_using is not None:
789 entry["built-using"] = []
790 for dep in apt_pkg.parse_depends(built_using):
791 bu_s, bu_v, bu_e = dep[0]
792 # Check that it's an exact match dependency and we have
793 # some form of version
794 if bu_e != "=" or len(bu_v) < 1:
795 self.rejects.append("%s: Built-Using contains non-strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
797 # Find the source id for this version
798 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
800 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
802 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
804 except ValueError as e:
805 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
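# apt_pkg.parse_depends() returns a list of or-groups, each a list of
# (package, version, relation) tuples; an exact relationship such as
# "Built-Using: gcc-4.6 (= 4.6.1-1)" (made-up value) parses to
# [[("gcc-4.6", "4.6.1-1", "=")]], hence the dep[0] unpacking above.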
808 # Check the section & priority match those given in the .changes (non-fatal)
809 if control.find("Section") and entry["section"] != "" \
810 and entry["section"] != control.find("Section"):
811 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
812 (f, control.find("Section", ""), entry["section"]))
813 if control.find("Priority") and entry["priority"] != "" \
814 and entry["priority"] != control.find("Priority"):
815 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
816 (f, control.find("Priority", ""), entry["priority"]))
818 entry["package"] = package
819 entry["architecture"] = architecture
820 entry["version"] = version
821 entry["maintainer"] = control.find("Maintainer", "")
823 if f.endswith(".udeb"):
824 self.pkg.files[f]["dbtype"] = "udeb"
825 elif f.endswith(".deb"):
826 self.pkg.files[f]["dbtype"] = "deb"
828 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
830 entry["source"] = control.find("Source", entry["package"])
832 # Get the source version
833 source = entry["source"]
836 if source.find("(") != -1:
837 m = re_extract_src_version.match(source)
839 source_version = m.group(2)
841 if not source_version:
842 source_version = self.pkg.files[f]["version"]
844 entry["source package"] = source
845 entry["source version"] = source_version
847 # Ensure the filename matches the contents of the .deb
848 m = re_isadeb.match(f)
851 file_package = m.group(1)
852 if entry["package"] != file_package:
853 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
854 (f, file_package, entry["dbtype"], entry["package"]))
855 epochless_version = re_no_epoch.sub('', control.find("Version"))
858 file_version = m.group(2)
859 if epochless_version != file_version:
860 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
861 (f, file_version, entry["dbtype"], epochless_version))
864 file_architecture = m.group(3)
865 if entry["architecture"] != file_architecture:
866 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
867 (f, file_architecture, entry["dbtype"], entry["architecture"]))
869 # Check for existing source
870 source_version = entry["source version"]
871 source_package = entry["source package"]
872 if self.pkg.changes["architecture"].has_key("source"):
873 if source_version != self.pkg.changes["version"]:
874 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
875 (source_version, f, self.pkg.changes["version"]))
877 # Check in the SQL database
878 if not source_exists(source_package, source_version, suites = \
879 self.pkg.changes["distribution"].keys(), session = session):
880 # Check in one of the other directories
881 source_epochless_version = re_no_epoch.sub('', source_version)
882 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
884 byhand_dir = get_policy_queue('byhand', session).path
885 new_dir = get_policy_queue('new', session).path
887 if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
889 elif os.path.exists(os.path.join(new_dir, dsc_filename)):
892 dsc_file_exists = False
893 # TODO: Don't hardcode this list: use all relevant queues
894 # The question is how to determine what is relevant
895 for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
896 queue = get_policy_queue(queue_name, session)
898 if os.path.exists(os.path.join(queue.path, dsc_filename)):
899 dsc_file_exists = True
902 if not dsc_file_exists:
903 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
905 # Check the version and for file overwrites
906 self.check_binary_against_db(f, session)
908 def source_file_checks(self, f, session):
909 entry = self.pkg.files[f]
911 m = re_issource.match(f)
915 entry["package"] = m.group(1)
916 entry["version"] = m.group(2)
917 entry["type"] = m.group(3)
919 # Ensure the source package name matches the Source field in the .changes
920 if self.pkg.changes["source"] != entry["package"]:
921 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
923 # Ensure the source version matches the version in the .changes file
924 if re_is_orig_source.match(f):
925 changes_version = self.pkg.changes["chopversion2"]
927 changes_version = self.pkg.changes["chopversion"]
929 if changes_version != entry["version"]:
930 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
932 # Ensure the .changes lists source in the Architecture field
933 if not self.pkg.changes["architecture"].has_key("source"):
934 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
936 # Check the signature of a .dsc file
937 if entry["type"] == "dsc":
938 # check_signature returns either:
939 # (None, [list, of, rejects]) or (signature, [])
940 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
942 self.rejects.append(j)
944 entry["architecture"] = "source"
946 def per_suite_file_checks(self, f, suite, session):
948 entry = self.pkg.files[f]
951 if entry.has_key("byhand"):
954 # Check we have fields we need to do these checks
956 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
957 if not entry.has_key(m):
958 self.rejects.append("file '%s' does not have field %s set" % (f, m))
964 # Handle component mappings
965 for m in cnf.value_list("ComponentMappings"):
966 (source, dest) = m.split()
967 if entry["component"] == source:
968 entry["original component"] = source
969 entry["component"] = dest
971 # Ensure the component is valid for the target suite
972 if entry["component"] not in get_component_names(session):
973 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
976 # Validate the component
977 if not get_component(entry["component"], session):
978 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
981 # See if the package is NEW
982 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
985 # Validate the priority
986 if entry["priority"].find('/') != -1:
987 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
989 # Determine the location
990 location = cnf["Dir::Pool"]
991 l = get_location(location, entry["component"], session=session)
993 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
994 entry["location id"] = -1
996 entry["location id"] = l.location_id
998 # Check the md5sum & size against existing files (if any)
999 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1001 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1002 entry["size"], entry["md5sum"], entry["location id"])
1005 self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
1006 elif found is False and poolfile is not None:
1007 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1009 if poolfile is None:
1010 entry["files id"] = None
1012 entry["files id"] = poolfile.file_id
1014 # Check for packages that have moved from one component to another
1015 entry['suite'] = suite
1016 arch_list = [entry["architecture"], 'all']
1017 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1018 [suite], arch_list = arch_list, session = session)
1019 if component is not None:
1020 entry["othercomponents"] = component
1022 def check_files(self, action=True):
1023 file_keys = self.pkg.files.keys()
1029 os.chdir(self.pkg.directory)
1031 ret = holding.copy_to_holding(f)
1033 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1037 # check we already know the changes file
1038 # [NB: this check must be done post-suite mapping]
1039 base_filename = os.path.basename(self.pkg.changes_file)
1041 session = DBConn().session()
1044 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1045 # if in the pool or in a queue other than unchecked, reject
1046 if (dbc.in_queue is None) \
1047 or (dbc.in_queue is not None
1048 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1049 self.rejects.append("%s file already known to dak" % base_filename)
1050 except NoResultFound as e:
1054 has_binaries = False
1057 for f, entry in self.pkg.files.items():
1058 # Ensure the file does not already exist in one of the accepted directories
1059 # TODO: Dynamically generate this list
1060 for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
1061 queue = get_policy_queue(queue_name, session)
1062 if queue and os.path.exists(os.path.join(queue.path, f)):
1063 self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))
1065 if not re_taint_free.match(f):
1066 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1068 # Check the file is readable
1069 if os.access(f, os.R_OK) == 0:
1070 # When running in -n, copy_to_holding() won't have
1071 # generated the reject_message, so we need to.
1073 if os.path.exists(f):
1074 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1076 # Don't directly reject, mark to check later to deal with orig's
1077 # we can find in the pool
1078 self.later_check_files.append(f)
1079 entry["type"] = "unreadable"
1082 # If it's byhand skip remaining checks
1083 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1085 entry["type"] = "byhand"
1087 # Checks for a binary package...
1088 elif re_isadeb.match(f):
1090 entry["type"] = "deb"
1092 # This routine appends to self.rejects/warnings as appropriate
1093 self.binary_file_checks(f, session)
1095 # Checks for a source package...
1096 elif re_issource.match(f):
1099 # This routine appends to self.rejects/warnings as appropriate
1100 self.source_file_checks(f, session)
1102 # Not a binary or source package? Assume byhand...
1105 entry["type"] = "byhand"
1107 # Per-suite file checks
1108 entry["oldfiles"] = {}
1109 for suite in self.pkg.changes["distribution"].keys():
1110 self.per_suite_file_checks(f, suite, session)
1114 # If the .changes file says it has source, it must have source.
1115 if self.pkg.changes["architecture"].has_key("source"):
1117 self.rejects.append("no source found and Architecture line in changes mentions source.")
1119 if (not has_binaries) and (not cnf.find_b("Dinstall::AllowSourceOnlyUploads")):
1120 self.rejects.append("source-only uploads are not supported.")
1122 ###########################################################################
1124 def __dsc_filename(self):
1126 Returns: (Status, Dsc_Filename)
1128 Status: Boolean; True when there was no error, False otherwise
1129 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1134 for name, entry in self.pkg.files.items():
1135 if entry.has_key("type") and entry["type"] == "dsc":
1137 return False, "cannot process a .changes file with multiple .dsc's."
1141 if not dsc_filename:
1142 return False, "source uploads must contain a dsc file"
1144 return True, dsc_filename
1146 def load_dsc(self, action=True, signing_rules=1):
1148 Find and load the dsc from self.pkg.files into self.dsc
1150 Returns: (Status, Reason)
1152 Status: Boolean; True when there was no error, False otherwise
1153 Reason: String; When Status is False this describes the error
1157 (status, dsc_filename) = self.__dsc_filename()
1159 # If status is false, dsc_filename has the reason
1160 return False, dsc_filename
1163 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1164 except CantOpenError:
1166 return False, "%s: can't read file." % (dsc_filename)
1167 except ParseChangesError as line:
1168 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1169 except InvalidDscError as line:
1170 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1171 except ChangesUnicodeError:
1172 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1176 ###########################################################################
1178 def check_dsc(self, action=True, session=None):
1179 """Returns bool indicating whether or not the source changes are valid"""
1180 # Ensure there is source to check
1181 if not self.pkg.changes["architecture"].has_key("source"):
1185 session = DBConn().session()
1187 (status, reason) = self.load_dsc(action=action)
1189 self.rejects.append(reason)
1191 (status, dsc_filename) = self.__dsc_filename()
1193 # If status is false, dsc_filename has the reason
1194 self.rejects.append(dsc_filename)
1197 # Build up the file list of files mentioned by the .dsc
1199 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1200 except NoFilesFieldError:
1201 self.rejects.append("%s: no Files: field." % (dsc_filename))
1203 except UnknownFormatError as format:
1204 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1206 except ParseChangesError as line:
1207 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1210 # Enforce mandatory fields
1211 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1212 if not self.pkg.dsc.has_key(i):
1213 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1216 # Validate the source and version fields
1217 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1218 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1219 if not re_valid_version.match(self.pkg.dsc["version"]):
1220 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1222 # Only a limited list of source formats are allowed in each suite
1223 for dist in self.pkg.changes["distribution"].keys():
1224 suite = get_suite(dist, session=session)
1226 self.rejects.append("%s: cannot find suite %s when checking source formats" % (dsc_filename, dist))
1228 allowed = [ x.format_name for x in suite.srcformats ]
1229 if self.pkg.dsc["format"] not in allowed:
1230 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1232 # Validate the Maintainer field
1234 # We ignore the return value
1235 fix_maintainer(self.pkg.dsc["maintainer"])
1236 except ParseMaintError as msg:
1237 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1238 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1240 # Validate the build-depends field(s)
1241 for field_name in [ "build-depends", "build-depends-indep" ]:
1242 field = self.pkg.dsc.get(field_name)
1244 # Have apt try to parse them...
1246 apt_pkg.parse_src_depends(field)
1248 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1250 # Ensure the version number in the .dsc matches the version number in the .changes
1251 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1252 changes_version = self.pkg.files[dsc_filename]["version"]
1254 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1255 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1257 # Ensure the Files field contain only what's expected
1258 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1260 # Ensure source is newer than existing source in target suites
1261 session = DBConn().session()
1262 self.check_source_against_db(dsc_filename, session)
1263 self.check_dsc_against_db(dsc_filename, session)
1265 dbchg = get_dbchange(self.pkg.changes_file, session)
1267 # Finally, check if we're missing any files
1268 for f in self.later_check_files:
1270 # If we have a dbchg object, check whether we've already processed this file
1273 for pf in dbchg.files:
1274 if pf.filename == f and pf.processed:
1275 self.notes.append('%s was already processed so we can go ahead' % f)
1277 del self.pkg.files[f]
1279 self.rejects.append("Could not find file %s referenced in changes" % f)
1283 return (len(self.rejects) == 0)
1285 ###########################################################################
1287 def get_changelog_versions(self, source_dir):
1288 """Extracts the source package and (optionally) grabs the
1289 version history out of debian/changelog for the BTS."""
1293 # Find the .dsc (again)
1295 for f in self.pkg.files.keys():
1296 if self.pkg.files[f]["type"] == "dsc":
1299 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1300 if not dsc_filename:
1303 # Create a symlink mirror of the source files in our temporary directory
1304 for f in self.pkg.files.keys():
1305 m = re_issource.match(f)
1307 src = os.path.join(source_dir, f)
1308 # If a file is missing for whatever reason, give up.
1309 if not os.path.exists(src):
1312 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1313 self.pkg.orig_files[f].has_key("path"):
1315 dest = os.path.join(os.getcwd(), f)
1316 os.symlink(src, dest)
1318 # If the orig files are not a part of the upload, create symlinks to the
1320 for orig_file in self.pkg.orig_files.keys():
1321 if not self.pkg.orig_files[orig_file].has_key("path"):
1323 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1324 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1326 # Extract the source
1328 unpacked = UnpackedSource(dsc_filename)
1329 except Exception as e:
1330 self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
1333 if not cnf.find("Dir::BTSVersionTrack"):
1336 # Get the upstream version
1337 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1338 if re_strip_revision.search(upstr_version):
1339 upstr_version = re_strip_revision.sub('', upstr_version)
1341 # Ensure the changelog file exists
1342 changelog_file = unpacked.get_changelog_file()
1343 if changelog_file is None:
1344 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1347 # Parse the changelog
1348 self.pkg.dsc["bts changelog"] = ""
1349 for line in changelog_file.readlines():
1350 m = re_changelog_versions.match(line)
1352 self.pkg.dsc["bts changelog"] += line
1353 changelog_file.close()
1356 # Check we found at least one revision in the changelog
1357 if not self.pkg.dsc["bts changelog"]:
1358 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
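# Sketch of the result for a hypothetical package: given changelog entries
# for 2.0-2 and 2.0-1, "bts changelog" ends up holding just the heading
# lines, e.g.
#
#   hello (2.0-2) unstable; urgency=low
#   hello (2.0-1) unstable; urgency=low
#
# assuming re_changelog_versions matches standard changelog headings.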
1360 def check_source(self):
1362 # a) there's no source
1363 if not self.pkg.changes["architecture"].has_key("source"):
1366 tmpdir = utils.temp_dirname()
1368 # Move into the temporary directory
1372 # Get the changelog version history
1373 self.get_changelog_versions(cwd)
1375 # Move back and cleanup the temporary tree
1379 shutil.rmtree(tmpdir)
1380 except OSError as e:
1381 if e.errno != errno.EACCES:
1383 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1385 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1386 # We probably have u-r or u-w directories so chmod everything
1388 cmd = "chmod -R u+rwx %s" % (tmpdir)
1389 result = os.system(cmd)
1391 utils.fubar("'%s' failed with result %s." % (cmd, result))
1392 shutil.rmtree(tmpdir)
1393 except Exception as e:
1394 print "removal of temporary source tree failed (%s)" % e
1395 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1397 ###########################################################################
1398 def ensure_hashes(self):
1399 # Make sure we recognise the format of the Files: field in the .changes
1400 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1401 if len(format) == 2:
1402 format = int(format[0]), int(format[1])
1404 format = int(float(format[0])), 0
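# e.g. "Format: 1.8" yields (1, 8), while a bare "Format: 2" takes the
# else branch and yields (2, 0).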
1406 # We need to deal with the original changes blob, as the fields we need
1407 # might not be in the changes dict serialised into the .dak anymore.
1408 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1410 # Copy the checksums over to the current changes dict. This will keep
1411 # the existing modifications to it intact.
1412 for field in orig_changes:
1413 if field.startswith('checksums-'):
1414 self.pkg.changes[field] = orig_changes[field]
1416 # Check for unsupported hashes
1417 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1418 self.rejects.append(j)
1420 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1421 self.rejects.append(j)
1423 # We have to calculate the hash ourselves if the changes format predates the
1424 # version in which the hash appeared, rather than requiring it in the changes file
1425 for hashname, hashfunc, version in utils.known_hashes:
1426 # TODO: Move _ensure_changes_hash into this class
1427 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1428 self.rejects.append(j)
1429 if "source" in self.pkg.changes["architecture"]:
1430 # TODO: Move _ensure_dsc_hash into this class
1431 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1432 self.rejects.append(j)
1434 def check_hashes(self):
1435 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1436 self.rejects.append(m)
1438 for m in utils.check_size(".changes", self.pkg.files):
1439 self.rejects.append(m)
1441 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1442 self.rejects.append(m)
1444 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1445 self.rejects.append(m)
1447 self.ensure_hashes()
1449 ###########################################################################
1451 def ensure_orig(self, target_dir='.', session=None):
1453 Ensures that all orig files mentioned in the changes file are present
1454 in target_dir. If they do not exist, they are symlinked into place.
1456 A list containing the symlinks that were created is returned (so they
1463 for filename, entry in self.pkg.dsc_files.iteritems():
1464 if not re_is_orig_source.match(filename):
1465 # File is not an orig; ignore
1468 if os.path.exists(filename):
1469 # File exists, no need to continue
1472 def symlink_if_valid(path):
1473 f = utils.open_file(path)
1474 md5sum = apt_pkg.md5sum(f)
1477 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1478 expected = (int(entry['size']), entry['md5sum'])
1480 if fingerprint != expected:
1483 dest = os.path.join(target_dir, filename)
1485 os.symlink(path, dest)
1486 symlinked.append(dest)
1492 session_ = DBConn().session()
1497 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1498 poolfile_path = os.path.join(
1499 poolfile.location.path, poolfile.filename
1502 if symlink_if_valid(poolfile_path):
1512 # Look in some other queues for the file
1513 queue_names = ['new', 'byhand',
1514 'proposedupdates', 'oldproposedupdates',
1515 'embargoed', 'unembargoed']
1517 for queue_name in queue_names:
1518 queue = get_policy_queue(queue_name, session)
1522 queuefile_path = os.path.join(queue.path, filename)
1524 if not os.path.exists(queuefile_path):
1525 # Does not exist in this queue
1528 if symlink_if_valid(queuefile_path):
1533 ###########################################################################
1535 def check_lintian(self):
1537 Extends self.rejects by checking the output of lintian against tags
1538 specified in Dinstall::LintianTags.
1543 # Don't reject binary uploads
1544 if not self.pkg.changes['architecture'].has_key('source'):
1547 # Only check some distributions
1548 for dist in ('unstable', 'experimental'):
1549 if dist in self.pkg.changes['distribution']:
1554 # If we do not have a tagfile, don't do anything
1555 tagfile = cnf.get("Dinstall::LintianTags")
1559 # Parse the yaml file
1560 sourcefile = open(tagfile, 'r')
1561 sourcecontent = sourcefile.read()
1565 lintiantags = yaml.safe_load(sourcecontent)['lintian']
1566 except yaml.YAMLError as msg:
1567 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1570 # Try and find all orig mentioned in the .dsc
1571 symlinked = self.ensure_orig()
1573 # Set up the input file for lintian
1574 fd, temp_filename = utils.temp_filename()
1575 temptagfile = os.fdopen(fd, 'w')
1576 for tags in lintiantags.values():
1577 temptagfile.writelines(['%s\n' % x for x in tags])
1581 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1582 (temp_filename, self.pkg.changes_file)
1584 result, output = commands.getstatusoutput(cmd)
1586 # Remove our tempfile and any symlinks we created
1587 os.unlink(temp_filename)
1589 for symlink in symlinked:
1593 utils.warn("lintian failed for %s [return code: %s]." % \
1594 (self.pkg.changes_file, result))
1595 utils.warn(utils.prefix_multi_line_string(output, \
1596 " [possible output:] "))
1601 [self.pkg.changes_file, "check_lintian"] + list(txt)
1605 parsed_tags = parse_lintian_output(output)
1606 self.rejects.extend(
1607 generate_reject_messages(parsed_tags, lintiantags, log=log)
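# The tag file is expected to be YAML of roughly this shape (sketch; the
# category names are illustrative):
#
#   lintian:
#     nonfatal:
#       - some-warning-tag
#     fatal:
#       - some-error-tag
#
# lintiantags.values() above flattens the per-category lists into the
# temporary --tags-from-file input.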
1610 ###########################################################################
1611 def check_urgency(self):
1613 if self.pkg.changes["architecture"].has_key("source"):
1614 if not self.pkg.changes.has_key("urgency"):
1615 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1616 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1617 if self.pkg.changes["urgency"] not in cnf.value_list("Urgency::Valid"):
1618 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1619 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1620 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1622 ###########################################################################
1624 # Sanity check the time stamps of files inside debs.
1625 # [Files in the near future cause ugly warnings and extreme time
1626 # travel can cause errors on extraction]
1628 def check_timestamps(self):
1631 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1632 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1633 tar = TarTime(future_cutoff, past_cutoff)
1635 for filename, entry in self.pkg.files.items():
1636 if entry["type"] == "deb":
1639 deb = apt_inst.DebFile(filename)
1640 deb.control.go(tar.callback)
1642 future_files = tar.future_files.keys()
1644 num_future_files = len(future_files)
1645 future_file = future_files[0]
1646 future_date = tar.future_files[future_file]
1647 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1648 % (filename, num_future_files, future_file, time.ctime(future_date)))
1650 ancient_files = tar.ancient_files.keys()
1652 num_ancient_files = len(ancient_files)
1653 ancient_file = ancient_files[0]
1654 ancient_date = tar.ancient_files[ancient_file]
1655 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1656 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1658 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1660 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1661 for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
1662 if not self.pkg.changes.has_key(key):
1664 uid_email = '@'.join(uid_email.split('@')[:2])
1665 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1667 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1673 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1674 debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
1675 if uid_email not in debian_emails:
1677 uid_email = debian_emails[0]
1678 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1679 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1680 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1681 self.pkg.changes["sponsoremail"] = uid_email
1686 ###########################################################################
1687 # check_signed_by_key checks
1688 ###########################################################################
1690 def check_signed_by_key(self):
1691 """Ensure the .changes is signed by an authorized uploader."""
1692 session = DBConn().session()
1694 # First of all we check that the person has proper upload permissions
1695 # and that this upload isn't blocked
1696 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1699 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1702 # TODO: Check that import-keyring adds UIDs properly
1704 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1707 # Check that the fingerprint which uploaded has permission to do so
1708 self.check_upload_permissions(fpr, session)
1710 # Check that this package is not in a transition
1711 self.check_transition(session)
1716 def check_upload_permissions(self, fpr, session):
1717 # Check any one-off upload blocks
1718 self.check_upload_blocks(fpr, session)
1720 # If the source_acl is None, source is never allowed
1721 if fpr.source_acl is None:
1722 if self.pkg.changes["architecture"].has_key("source"):
1723 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1724 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1725 self.rejects.append(rej)
1727 # Do DM as a special case
1728 # DM is a special case unfortunately, so we check it first
1729 # (keys with no source access get more access than DMs in one
1730 # way; DMs can only upload for their packages whether source
1731 # or binary, whereas keys with no access might be able to
1732 # upload some binaries)
1733 elif fpr.source_acl.access_level == 'dm':
1734 self.check_dm_upload(fpr, session)
1736 # If not a DM, we allow full upload rights
1737 uid_email = "%s@debian.org" % (fpr.uid.uid)
1738 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1741 # Check binary upload permissions
1742 # By this point we know that DMs can't have got here unless they
1743 # are allowed to deal with the package concerned so just apply
1745 if fpr.binary_acl.access_level == 'full':
1748 # Otherwise we're in the map case
1749 tmparches = self.pkg.changes["architecture"].copy()
1750 tmparches.pop('source', None)
1752 for bam in fpr.binary_acl_map:
1753 tmparches.pop(bam.architecture.arch_string, None)
1755 if len(tmparches.keys()) > 0:
1756 if fpr.binary_reject:
1757 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1758 if len(tmparches.keys()) == 1:
1759 rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1761 rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1762 self.rejects.append(rej)
1764 # TODO: This is where we'll implement reject vs throw away binaries later
1765 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1766 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1767 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1768 self.rejects.append(rej)
1771 def check_upload_blocks(self, fpr, session):
1772 """Check whether any upload blocks apply to this source, source
1773 version, uid / fpr combination"""
1775 def block_rej_template(fb):
1776 rej = 'Manual upload block in place for package %s' % fb.source
1777 if fb.version is not None:
1778 rej += ', version %s' % fb.version
1781 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1782 # version is None if the block applies to all versions
1783 if fb.version is None or fb.version == self.pkg.changes['version']:
1784 # Check both fpr and uid - either is enough to cause a reject
1785 if fb.fpr is not None:
1786 if fb.fpr.fingerprint == fpr.fingerprint:
1787 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1788 if fb.uid is not None:
1789 if fb.uid == fpr.uid:
1790 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1793 def check_dm_upload(self, fpr, session):
1794 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1795 ## none of the uploaded packages are NEW
1797 for f in self.pkg.files.keys():
1798 if self.pkg.files[f].has_key("byhand"):
1799 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1801 if self.pkg.files[f].has_key("new"):
1802 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1808 r = get_newest_source(self.pkg.changes["source"], session)
1811 rej = "Could not find existing source package %s in the DM allowed suites and this is a DM upload" % self.pkg.changes["source"]
1812 self.rejects.append(rej)
1815 if not r.dm_upload_allowed:
1816 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1817 self.rejects.append(rej)
1820 ## the Maintainer: field of the uploaded .changes file corresponds with
1821 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1823 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1824 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1826 ## the most recent version of the package uploaded to unstable or
1827 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1828 ## non-developer maintainers cannot NMU or hijack packages)
1830 # uploader includes the maintainer
1832 for uploader in r.uploaders:
1833 (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1834 # Eww - I hope we never have two people with the same name in Debian
1835 if email == fpr.uid.uid or name == fpr.uid.name:
1840 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1843 ## none of the packages are being taken over from other source packages
1844 for b in self.pkg.changes["binary"].keys():
1845 for suite in self.pkg.changes["distribution"].keys():
1846 for s in get_source_by_package_and_suite(b, suite, session):
1847 if s.source != self.pkg.changes["source"]:
1848 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1852 def check_transition(self, session):
1855 sourcepkg = self.pkg.changes["source"]
1857 # No sourceful upload -> no need to do anything else, direct return
1858 # We also only check uploads to unstable, not experimental or those
1859 # going to some proposed-updates queue
1860 if "source" not in self.pkg.changes["architecture"] or \
1861 "unstable" not in self.pkg.changes["distribution"]:
1864 # Also only check if there is a file defined (and existent) with
1866 transpath = cnf.get("Dinstall::ReleaseTransitions", "")
1867 if transpath == "" or not os.path.exists(transpath):
1870 # Parse the yaml file
1871 sourcefile = file(transpath, 'r')
1872 sourcecontent = sourcefile.read()
1874 transitions = yaml.safe_load(sourcecontent)
1875 except yaml.YAMLError as msg:
1876 # This shouldn't happen, there is a wrapper to edit the file which
1877 # checks it, but we prefer to be safe rather than end up rejecting
1879 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
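# For reference, a single transition in the YAML file is assumed to
# look roughly like this (key names match the lookups below; the key
# carrying the awaited version is assumed to be "new"):
#
#   apt:
#     reason: "apt 0.9 transition"
#     source: apt
#     new: 0.9.1
#     rm: "Some Release-Team Member"
#     packages:
#       - apt
#       - python-apt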
1882 # Now look through all defined transitions
1883 for trans in transitions:
1884 t = transitions[trans]
1885 source = t["source"]
1888 # Will be None if nothing is in testing.
1889 current = get_source_in_suite(source, "testing", session)
1890 if current is not None:
1891 compare = apt_pkg.version_compare(current.version, expected)
1893 if current is None or compare < 0:
1894 # This is still valid: the current version in testing is older than
1895 # the version we are waiting for, or there is none in testing yet
1897 # Check if the source we look at is affected by this.
1898 if sourcepkg in t['packages']:
1899 # The source is affected, let's reject it.
1901 rejectmsg = "%s: part of the %s transition.\n\n" % (
1904 if current is not None:
1905 currentlymsg = "at version %s" % (current.version)
1907 currentlymsg = "not present in testing"
1909 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1911 rejectmsg += "\n".join(textwrap.wrap("""Your package
1912 is part of a testing transition designed to get %s migrated (it is
1913 currently %s, we need version %s). This transition is managed by the
1914 Release Team, and %s is the Release-Team member responsible for it.
1915 Please mail debian-release@lists.debian.org or contact %s directly if you
1916 need further assistance. You might want to upload to experimental until this
1917 transition is done."""
1918 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1920 self.rejects.append(rejectmsg)
1923 ###########################################################################
1924 # End check_signed_by_key checks
1925 ###########################################################################
1927 def build_summaries(self):
1928 """ Build a summary of changes the upload introduces. """
1930 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1932 short_summary = summary
1934 # This is for direport's benefit...
1935 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1937 summary += "\n\nChanges:\n" + f
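# re_fdnic is assumed to match the blank lines in the changelog text;
# rewriting them as "\n .\n" keeps them intact in mail bodies, e.g.
#   "fix foo\n\nfix bar"  ->  "fix foo\n .\nfix bar"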
1939 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1941 summary += self.announce(short_summary, 0)
1943 return (summary, short_summary)
1945 ###########################################################################
1947 def close_bugs(self, summary, action):
1949 Send mail to close bugs as instructed by the closes field in the changes file.
1950 Also add a line to summary if any work was done.
1952 @type summary: string
1953 @param summary: summary text, as given by L{build_summaries}
1956 @param action: If set to false, no real action will be taken.
1959 @return: summary. If action was taken, extended by the list of closed bugs.
1963 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1965 bugs = self.pkg.changes["closes"].keys()
1971 summary += "Closing bugs: "
1973 summary += "%s " % (bug)
1976 self.Subst["__BUG_NUMBER__"] = bug
1977 if self.pkg.changes["distribution"].has_key("stable"):
1978 self.Subst["__STABLE_WARNING__"] = """
1979 Note that this package is not part of the released stable Debian
1980 distribution. It may have dependencies on other unreleased software,
1981 or other instabilities. Please take care if you wish to install it.
1982 The update will eventually make its way into the next released Debian
1985 self.Subst["__STABLE_WARNING__"] = ""
1986 mail_message = utils.TemplateSubst(self.Subst, template)
1987 utils.send_mail(mail_message)
1989 # Clear up after ourselves
1990 del self.Subst["__BUG_NUMBER__"]
1991 del self.Subst["__STABLE_WARNING__"]
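# utils.TemplateSubst (used above) is assumed to do plain placeholder
# substitution: each __KEY__ token in the template file is replaced by
# self.Subst["__KEY__"], so a template line like
#   "This is an automatic notification: closing bug #__BUG_NUMBER__."
# is rendered once per closed bug.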
1993 if action and self.logger:
1994 self.logger.log(["closing bugs"] + bugs)
2000 ###########################################################################
2002 def announce(self, short_summary, action):
2004 Send an announce mail about a new upload.
2006 @type short_summary: string
2007 @param short_summary: Short summary text to include in the mail
2010 @param action: If set to false, no real action will be taken.
2013 @return: Textstring about action taken.
2019 # Skip all of this if not sending mail to avoid confusing people
2020 if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
2023 # Only do announcements for source uploads with a recent dpkg-dev installed
2024 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2025 self.pkg.changes["architecture"].has_key("source"):
2028 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2033 # Get a unique list of target lists
2034 for dist in self.pkg.changes["distribution"].keys():
2035 suite = get_suite(dist)
2036 if suite is None: continue
2037 for tgt in suite.announce:
2040 self.Subst["__SHORT_SUMMARY__"] = short_summary
2042 for announce_list in lists_todo.keys():
2043 summary += "Announcing to %s\n" % (announce_list)
2047 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2048 if cnf.get("Dinstall::TrackingServer") and \
2049 self.pkg.changes["architecture"].has_key("source"):
2050 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2051 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2053 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2054 utils.send_mail(mail_message)
2056 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2058 if cnf.find_b("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2059 summary = self.close_bugs(summary, action)
2061 del self.Subst["__SHORT_SUMMARY__"]
2065 ###########################################################################
2067 def accept (self, summary, short_summary, session=None):
2071 This moves all files referenced from the .changes into the pool,
2072 sends the accepted mail, announces to lists, closes bugs and
2073 also checks for override disparities. If enabled it will write out
2074 the version history for the BTS Version Tracking and will finally call
2077 @type summary: string
2078 @param summary: Summary text
2080 @type short_summary: string
2081 @param short_summary: Short summary
2085 stats = SummaryStats()
2088 self.logger.log(["installing changes", self.pkg.changes_file])
2093 # Add the .dsc file to the DB first
2094 for newfile, entry in self.pkg.files.items():
2095 if entry["type"] == "dsc":
2096 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2100 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2101 for newfile, entry in self.pkg.files.items():
2102 if entry["type"] == "deb":
2103 b, pf = add_deb_to_db(self, newfile, session)
2105 poolfiles.append(pf)
2107 # If this is a sourceful diff only upload that is moving
2108 # cross-component we need to copy the .orig files into the new
2109 # component too for the same reasons as above.
2110 # XXX: mhy: I think this should be in add_dsc_to_db
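# Worked example for the block below: "foo" moves from non-free to
# main via a sourceful diff-only upload. Its orig tarball already
# lives in the pool as
#   pool/non-free/f/foo/foo_1.0.orig.tar.gz
# so it is copied (not moved) to the new component,
#   pool/main/f/foo/foo_1.0.orig.tar.gz
# with utils.poolify() providing the new "pool/main/f/foo/" prefix.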
2111 if self.pkg.changes["architecture"].has_key("source"):
2112 for orig_file in self.pkg.orig_files.keys():
2113 if not self.pkg.orig_files[orig_file].has_key("id"):
2114 continue # Skip if it's not in the pool
2115 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2116 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2117 continue # Skip if the location didn't change
2120 oldf = get_poolfile_by_id(orig_file_id, session)
2121 old_filename = os.path.join(oldf.location.path, oldf.filename)
2122 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2123 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2125 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2127 # TODO: Care about size/md5sum collisions etc
2128 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2130 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2132 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2133 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2137 # Don't reference the old file from this changes
2139 if p.file_id == oldf.file_id:
2142 poolfiles.append(newf)
2144 # Fix up the DSC references
2147 for df in source.srcfiles:
2148 if df.poolfile.file_id == oldf.file_id:
2149 # Add a new DSC entry and mark the old one for deletion
2150 # Don't do it in the loop so we don't change the thing we're iterating over
2152 newdscf.source_id = source.source_id
2153 newdscf.poolfile_id = newf.file_id
2154 session.add(newdscf)
2164 # Make sure that our source object is up-to-date
2165 session.expire(source)
2167 # Add changelog information to the database
2168 self.store_changelog()
2170 # Install the files into the pool
2171 for newfile, entry in self.pkg.files.items():
2172 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2173 utils.move(newfile, destination)
2174 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2175 stats.accept_bytes += float(entry["size"])
2177 # Copy the .changes file across for suites which need it.
2178 copy_changes = dict([(x.copychanges, '')
2179 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2180 if x.copychanges is not None])
2182 for dest in copy_changes.keys():
2183 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2185 # We're done - commit the database changes
2187 # Our SQL session will automatically start a new transaction after
2190 # Now ensure that the metadata has been added
2191 # This has to be done after we copy the files into the pool
2192 # For source if we have it:
2193 if self.pkg.changes["architecture"].has_key("source"):
2194 import_metadata_into_db(source, session)
2196 # Now for any of our binaries
2198 import_metadata_into_db(b, session)
2202 # Move the .changes into the 'done' directory
2203 ye, mo, da = time.gmtime()[0:3]
2204 donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2205 if not os.path.isdir(donedir):
2206 os.makedirs(donedir)
2208 utils.move(self.pkg.changes_file,
2209 os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2211 if self.pkg.changes["architecture"].has_key("source"):
2212 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2215 self.Subst["__SUMMARY__"] = summary
2216 mail_message = utils.TemplateSubst(self.Subst,
2217 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2218 utils.send_mail(mail_message)
2219 self.announce(short_summary, 1)
2221 ## Helper stuff for DebBugs Version Tracking
2222 if cnf.find("Dir::BTSVersionTrack"):
2223 if self.pkg.changes["architecture"].has_key("source"):
2224 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2225 version_history = os.fdopen(fd, 'w')
2226 version_history.write(self.pkg.dsc["bts changelog"])
2227 version_history.close()
2228 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2229 self.pkg.changes_file[:-8]+".versions")
2230 os.rename(temp_filename, filename)
2231 os.chmod(filename, 0o644)
2233 # Write out the binary -> source mapping.
2234 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2235 debinfo = os.fdopen(fd, 'w')
2236 for name, entry in sorted(self.pkg.files.items()):
2237 if entry["type"] == "deb":
2238 line = " ".join([entry["package"], entry["version"],
2239 entry["architecture"], entry["source package"],
2240 entry["source version"]])
2241 debinfo.write(line+"\n")
2243 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2244 self.pkg.changes_file[:-8]+".debinfo")
2245 os.rename(temp_filename, filename)
2246 os.chmod(filename, 0o644)
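# The .debinfo file written above thus carries one line per binary,
# for instance (hypothetical upload):
#   foo 1.2-1 amd64 foo 1.2-1
# i.e. package, version, architecture, source package, source version.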
2250 # Set up our copy queues (e.g. buildd queues)
2251 for suite_name in self.pkg.changes["distribution"].keys():
2252 suite = get_suite(suite_name, session)
2253 for q in suite.copy_queues:
2255 q.add_file_from_pool(f)
2260 stats.accept_count += 1
2262 def check_override(self):
2264 Checks override entries for validity. Mails "Override disparity" warnings,
2265 if that feature is enabled.
2267 Abandons the check if
2268 - override disparity checks are disabled
2269 - mail sending is disabled
2274 # Abandon the check if override disparity checks have been disabled
2275 if not cnf.find_b("Dinstall::OverrideDisparityCheck"):
2278 summary = self.pkg.check_override()
2283 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2286 self.Subst["__SUMMARY__"] = summary
2287 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2288 utils.send_mail(mail_message)
2289 del self.Subst["__SUMMARY__"]
2291 ###########################################################################
2293 def remove(self, from_dir=None):
2295 Used (for instance) in p-u to remove the package from unchecked
2297 Also removes the package from the holding area.
2299 if from_dir is None:
2300 from_dir = self.pkg.directory
2303 for f in self.pkg.files.keys():
2304 os.unlink(os.path.join(from_dir, f))
2305 if os.path.exists(os.path.join(h.holding_dir, f)):
2306 os.unlink(os.path.join(h.holding_dir, f))
2308 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2309 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2310 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2312 ###########################################################################
2314 def move_to_queue (self, queue):
2316 Move files to a destination queue using the permissions in the table
2319 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2320 queue.path, perms=int(queue.change_perms, 8))
2321 for f in self.pkg.files.keys():
2322 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2324 ###########################################################################
2326 def force_reject(self, reject_files):
2328 Forcefully move files from the current directory to the
2329 reject directory. If any file already exists in the reject
2330 directory it will be moved to the morgue to make way for
2333 @type reject_files: dict
2334 @param reject_files: file dictionary
2340 for file_entry in reject_files:
2341 # Skip any files which don't exist or which we don't have permission to copy.
2342 if os.access(file_entry, os.R_OK) == 0:
2345 dest_file = os.path.join(cnf["Dir::Reject"], file_entry)
2348 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
2349 except OSError as e:
2350 # File exists? Let's find a new name by adding a number
2351 if e.errno == errno.EEXIST:
2353 dest_file = utils.find_next_free(dest_file, 255)
2354 except NoFreeFilenameError:
2355 # Something's either gone badly Pete Tong, or
2356 # someone is trying to exploit us.
2357 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
2360 # Make sure we really got it
2362 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2363 except OSError as e:
2365 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2369 # If we got here, we own the destination file, so we can
2370 # safely overwrite it.
2371 utils.move(file_entry, dest_file, 1, perms=0o660)
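# Design note: opening with O_CREAT|O_EXCL first (and only then
# overwriting) means we atomically create and therefore own the
# destination name, guarding against races with other processes and
# against symlink trickery in the reject directory.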
2374 ###########################################################################
2375 def do_reject (self, manual=0, reject_message="", notes=""):
2377 Reject an upload. If called without a reject message or C{manual} is
2378 true, spawn an editor so the user can write one.
2381 @param manual: manual or automated rejection
2383 @type reject_message: string
2384 @param reject_message: A reject message
2389 # If we weren't given a manual rejection message, spawn an
2390 # editor so the user can add one in...
2391 if manual and not reject_message:
2392 (fd, temp_filename) = utils.temp_filename()
2393 temp_file = os.fdopen(fd, 'w')
2396 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2397 % (note.author, note.version, note.notedate, note.comment))
2399 editor = os.environ.get("EDITOR","vi")
2401 while answer == 'E':
2402 os.system("%s %s" % (editor, temp_filename))
2403 temp_fh = utils.open_file(temp_filename)
2404 reject_message = "".join(temp_fh.readlines())
2406 print "Reject message:"
2407 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2408 prompt = "[R]eject, Edit, Abandon, Quit ?"
2410 while prompt.find(answer) == -1:
2411 answer = utils.our_raw_input(prompt)
2412 m = re_default_answer.search(prompt)
2415 answer = answer[:1].upper()
2416 os.unlink(temp_filename)
2422 print "Rejecting.\n"
2426 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2427 reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)
2428 changesfile = os.path.join(cnf["Dir::Reject"], self.pkg.changes_file)
2430 # Move all the files into the reject directory
2431 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2432 self.force_reject(reject_files)
2434 # Change permissions of the .changes file to be world readable
2436 os.chmod(changesfile, os.stat(changesfile).st_mode | stat.S_IROTH)
2437 except OSError as (errno, strerror):
2438 # Ignore 'Operation not permitted' error.
2442 # If we fail here someone is probably trying to exploit the race
2443 # so let's just raise an exception ...
2444 if os.path.exists(reason_filename):
2445 os.unlink(reason_filename)
2446 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2448 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2452 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2453 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2454 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2455 os.write(reason_fd, reject_message)
2456 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2458 # Build up the rejection email
2459 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2460 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2461 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2462 self.Subst["__REJECT_MESSAGE__"] = ""
2463 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2464 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2465 # Write the rejection email out as the <foo>.reason file
2466 os.write(reason_fd, reject_mail_message)
2468 del self.Subst["__REJECTOR_ADDRESS__"]
2469 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2470 del self.Subst["__CC__"]
2474 # Send the rejection mail
2475 utils.send_mail(reject_mail_message)
2478 self.logger.log(["rejected", self.pkg.changes_file])
2480 stats = SummaryStats()
2481 stats.reject_count += 1
2484 ################################################################################
2485 def in_override_p(self, package, component, suite, binary_type, filename, session):
2487 Check if a package already has override entries in the DB
2489 @type package: string
2490 @param package: package name
2492 @type component: string
2493 @param component: component name
2496 @param suite: suite name
2498 @type binary_type: string
2499 @param binary_type: type of the package
2501 @type filename: string
2502 @param filename: filename we check
2504 @return: the database result. But no one cares anyway.
2510 if binary_type == "": # must be source
2513 file_type = binary_type
2515 # Override suite name; used for example with proposed-updates
2516 oldsuite = get_suite(suite, session)
2517 if oldsuite is not None and oldsuite.overridesuite:
2518 suite = oldsuite.overridesuite
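# e.g. a proposed-updates suite typically sets overridesuite to the
# matching stable suite, so its uploads are checked against stable's
# override entries.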
2520 result = get_override(package, suite, component, file_type, session)
2522 # If checking for a source package fall back on the binary override type
2523 if file_type == "dsc" and len(result) < 1:
2524 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2526 # Remember the section and priority so we can check them later if appropriate
2529 self.pkg.files[filename]["override section"] = result.section.section
2530 self.pkg.files[filename]["override priority"] = result.priority.priority
2535 ################################################################################
2536 def get_anyversion(self, sv_list, suite):
2539 @param sv_list: list of (suite, version) tuples to check
2542 @param suite: suite name
2548 anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2549 for (s, v) in sv_list:
2550 if s in [ x.lower() for x in anysuite ]:
2551 if not anyversion or apt_pkg.version_compare(anyversion, v) <= 0:
2556 ################################################################################
2558 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2561 @param sv_list: list of (suite, version) tuples to check
2563 @type filename: string
2564 @param filename: name of the file being checked (used in reject messages)
2566 @type new_version: string
2567 @param new_version: version of the package being uploaded
2569 Ensure versions are newer than existing packages in target
2570 suites and that cross-suite version checking rules as
2571 set out in the conf file are satisfied.
2576 # Check versions for each target suite
2577 for target_suite in self.pkg.changes["distribution"].keys():
2578 # Check we can find the target suite
2579 ts = get_suite(target_suite)
2581 self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
2584 must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2585 must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2587 # Enforce "must be newer than target suite" even if conffile omits it
2588 if target_suite not in must_be_newer_than:
2589 must_be_newer_than.append(target_suite)
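# Example of the rule below (apt_pkg.version_compare returns <0, 0 or
# >0 for older, equal, newer): a sourceful upload of foo 1.0-1 aimed
# at unstable is rejected if unstable (or any other suite in
# must_be_newer_than) already has foo at 1.0-1 or higher, i.e.
# whenever vercmp < 1.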
2591 for (suite, existent_version) in sv_list:
2592 vercmp = apt_pkg.version_compare(new_version, existent_version)
2594 if suite in must_be_newer_than and sourceful and vercmp < 1:
2595 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2597 if suite in must_be_older_than and vercmp > -1:
2600 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2601 # we really use the other suite, ignoring the conflicting one ...
2602 addsuite = self.pkg.changes["distribution-version"][suite]
2604 add_version = self.get_anyversion(sv_list, addsuite)
2605 target_version = self.get_anyversion(sv_list, target_suite)
2608 # not add_version can only happen if we map to a suite
2609 # that doesn't enhance the suite we're propup'ing from.
2610 # so "propup-ver x a b c; map a d" is a problem only if
2611 # d doesn't enhance a.
2613 # i think we could always propagate in this case, rather
2614 # than complaining. either way, this isn't a REJECT issue
2616 # And - we really should complain to the dorks who configured dak
2617 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2618 self.pkg.changes.setdefault("propdistribution", {})
2619 self.pkg.changes["propdistribution"][addsuite] = 1
2621 elif not target_version:
2622 # not target_version is true when the package is NEW
2623 # we could just stick with the "...old version..." REJECT
2624 # for this, I think.
2625 self.rejects.append("Won't propogate NEW packages.")
2626 elif apt_pkg.version_compare(new_version, add_version) < 0:
2627 # propagation would be redundant. no need to reject though.
2628 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2630 elif apt_pkg.version_compare(new_version, add_version) > 0 and \
2631 apt_pkg.version_compare(add_version, target_version) >= 0:
2633 self.warnings.append("Propogating upload to %s" % (addsuite))
2634 self.pkg.changes.setdefault("propdistribution", {})
2635 self.pkg.changes["propdistribution"][addsuite] = 1
2639 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2641 ################################################################################
2642 def check_binary_against_db(self, filename, session):
2643 # Ensure version is sane
2644 self.cross_suite_version_check( \
2645 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2646 self.pkg.files[filename]["architecture"], session),
2647 filename, self.pkg.files[filename]["version"], sourceful=False)
2649 # Check for any existing copies of the file
2650 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2651 q = q.filter_by(version=self.pkg.files[filename]["version"])
2652 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2655 self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2657 ################################################################################
2659 def check_source_against_db(self, filename, session):
2660 source = self.pkg.dsc.get("source")
2661 version = self.pkg.dsc.get("version")
2663 # Ensure version is sane
2664 self.cross_suite_version_check( \
2665 get_suite_version_by_source(source, session), filename, version,
2668 ################################################################################
2669 def check_dsc_against_db(self, filename, session):
2672 @warning: NB: this function can remove entries from the 'files' index [if
2673 the orig tarball is a duplicate of the one in the archive]; if
2674 you're iterating over 'files' and call this function as part of
2675 the loop, be sure to add a check to the top of the loop to
2676 ensure you haven't just tried to dereference the deleted entry.
2681 self.pkg.orig_files = {} # XXX: do we need to clear it?
2682 orig_files = self.pkg.orig_files
2684 # Try and find all files mentioned in the .dsc. This has
2685 # to work harder to cope with the multiple possible
2686 # locations of an .orig.tar.gz.
2687 # The ordering on the select is needed to pick the newest orig
2688 # when it exists in multiple places.
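# Concretely: for a foo_1.0-2.dsc referencing foo_1.0.orig.tar.gz, the
# tarball may be (a) part of this upload, (b) already in the pool, or
# (c) waiting in a policy queue; each location is probed in turn below
# and md5sum/size are verified against the .dsc entry.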
2689 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2691 if self.pkg.files.has_key(dsc_name):
2692 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2693 actual_size = int(self.pkg.files[dsc_name]["size"])
2694 found = "%s in incoming" % (dsc_name)
2696 # Check the file does not already exist in the archive
2697 ql = get_poolfile_like_name(dsc_name, session)
2699 # Strip out anything that isn't '%s' or '/%s$'
2701 if not i.filename.endswith(dsc_name):
2704 # "[dak] has not broken them. [dak] has fixed a
2705 # brokenness. Your crappy hack exploited a bug in
2708 # "(Come on! I thought it was always obvious that
2709 # one just doesn't release different files with
2710 # the same name and version.)"
2711 # -- ajk@ on d-devel@l.d.o
2714 # Ignore exact matches for .orig.tar.gz
2716 if re_is_orig_source.match(dsc_name):
2718 if self.pkg.files.has_key(dsc_name) and \
2719 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2720 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2721 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2722 # TODO: Don't delete the entry, just mark it as not needed
2723 # This would fix the stupidity of changing something we often iterate over
2724 # whilst we're doing it
2725 del self.pkg.files[dsc_name]
2726 dsc_entry["files id"] = i.file_id
2727 if not orig_files.has_key(dsc_name):
2728 orig_files[dsc_name] = {}
2729 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2732 # Don't bitch that we couldn't find this file later
2734 self.later_check_files.remove(dsc_name)
2740 self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2742 elif re_is_orig_source.match(dsc_name):
2744 ql = get_poolfile_like_name(dsc_name, session)
2746 # Strip out anything that isn't '%s' or '/%s$'
2747 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2749 if not i.filename.endswith(dsc_name):
2753 # Unfortunately, we may get more than one match here if,
2754 # for example, the package was in potato but had an -sa
2755 # upload in woody. So we need to choose the right one.
2757 # default to something sane in case we don't match any or have only one
2762 old_file = os.path.join(i.location.path, i.filename)
2763 old_file_fh = utils.open_file(old_file)
2764 actual_md5 = apt_pkg.md5sum(old_file_fh)
2766 actual_size = os.stat(old_file)[stat.ST_SIZE]
2767 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2770 old_file = os.path.join(i.location.path, i.filename)
2771 old_file_fh = utils.open_file(old_file)
2772 actual_md5 = apt_pkg.md5sum(old_file_fh)
2774 actual_size = os.stat(old_file)[stat.ST_SIZE]
2776 suite_type = x.location.archive_type
2777 # need this for updating dsc_files in install()
2778 dsc_entry["files id"] = x.file_id
2779 # See install() in process-accepted...
2780 if not orig_files.has_key(dsc_name):
2781 orig_files[dsc_name] = {}
2782 orig_files[dsc_name]["id"] = x.file_id
2783 orig_files[dsc_name]["path"] = old_file
2784 orig_files[dsc_name]["location"] = x.location.location_id
2786 # TODO: Determine queue list dynamically
2787 # Not there? Check the queue directories...
2788 for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
2789 queue = get_policy_queue(queue_name, session)
2793 in_otherdir = os.path.join(queue.path, dsc_name)
2795 if os.path.exists(in_otherdir):
2796 in_otherdir_fh = utils.open_file(in_otherdir)
2797 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2798 in_otherdir_fh.close()
2799 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2801 if not orig_files.has_key(dsc_name):
2802 orig_files[dsc_name] = {}
2803 orig_files[dsc_name]["path"] = in_otherdir
2806 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2809 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2811 if actual_md5 != dsc_entry["md5sum"]:
2812 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2813 if actual_size != int(dsc_entry["size"]):
2814 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2816 ################################################################################
2817 # This is used by process-new and process-holding to recheck a changes file
2818 # at the time we're running. It mainly wraps various other internal functions
2819 # and is similar to accepted_checks - these should probably be tidied up
2821 def recheck(self, session):
2823 for f in self.pkg.files.keys():
2824 # The .orig.tar.gz can disappear out from under us if it's a
2825 # duplicate of one in the archive.
2826 if not self.pkg.files.has_key(f):
2829 entry = self.pkg.files[f]
2831 # Check that the source still exists
2832 if entry["type"] == "deb":
2833 source_version = entry["source version"]
2834 source_package = entry["source package"]
2835 if not self.pkg.changes["architecture"].has_key("source") \
2836 and not source_exists(source_package, source_version, \
2837 suites = self.pkg.changes["distribution"].keys(), session = session):
2838 source_epochless_version = re_no_epoch.sub('', source_version)
2839 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2841 for queue_name in ["embargoed", "unembargoed", "newstage"]:
2842 queue = get_policy_queue(queue_name, session)
2843 if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
2846 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2848 # Version and file overwrite checks
2849 if entry["type"] == "deb":
2850 self.check_binary_against_db(f, session)
2851 elif entry["type"] == "dsc":
2852 self.check_source_against_db(f, session)
2853 self.check_dsc_against_db(f, session)
2855 ################################################################################
2856 def accepted_checks(self, overwrite_checks, session):
2857 # Recheck anything that relies on the database, since that's not
2858 # frozen between accept and our run time when called from p-a.
2860 # overwrite_checks is set to False when installing to stable/oldstable
2865 # Find the .dsc (again)
2867 for f in self.pkg.files.keys():
2868 if self.pkg.files[f]["type"] == "dsc":
2871 for checkfile in self.pkg.files.keys():
2872 # The .orig.tar.gz can disappear out from under us if it's a
2873 # duplicate of one in the archive.
2874 if not self.pkg.files.has_key(checkfile):
2877 entry = self.pkg.files[checkfile]
2879 # Check that the source still exists
2880 if entry["type"] == "deb":
2881 source_version = entry["source version"]
2882 source_package = entry["source package"]
2883 if not self.pkg.changes["architecture"].has_key("source") \
2884 and not source_exists(source_package, source_version, \
2885 suites = self.pkg.changes["distribution"].keys(), \
2887 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2889 # Version and file overwrite checks
2890 if overwrite_checks:
2891 if entry["type"] == "deb":
2892 self.check_binary_against_db(checkfile, session)
2893 elif entry["type"] == "dsc":
2894 self.check_source_against_db(checkfile, session)
2895 self.check_dsc_against_db(dsc_filename, session)
2897 # propagate in case it is in the override tables:
2898 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2899 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2900 propogate[suite] = 1
2902 nopropogate[suite] = 1
2904 for suite in propogate.keys():
2905 if suite in nopropogate:
2907 self.pkg.changes["distribution"][suite] = 1
2909 for checkfile in self.pkg.files.keys():
2910 # Check the package is still in the override tables
entry = self.pkg.files[checkfile]  # rebind entry to this file; the name was left stale by the loop above
2911 for suite in self.pkg.changes["distribution"].keys():
2912 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2913 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2915 ################################################################################
2916 # If any file of an upload has a recent mtime then chances are good
2917 # the file is still being uploaded.
2919 def upload_too_new(self):
2922 # Move back to the original directory to get accurate time stamps
2924 os.chdir(self.pkg.directory)
2925 file_list = self.pkg.files.keys()
2926 file_list.extend(self.pkg.dsc_files.keys())
2927 file_list.append(self.pkg.changes_file)
2930 last_modified = time.time() - os.path.getmtime(f)
2931 if last_modified < int(cnf["Dinstall::SkipTime"]):
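# Example: with Dinstall::SkipTime set to 300, any file modified less
# than five minutes ago marks the whole upload as possibly still being
# transferred, and it is skipped on this run.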
2940 def store_changelog(self):
2942 # Skip binary-only upload if it is not a bin-NMU
2943 if not self.pkg.changes['architecture'].has_key('source'):
2944 from daklib.regexes import re_bin_only_nmu
2945 if not re_bin_only_nmu.search(self.pkg.changes['version']):
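# re_bin_only_nmu is assumed to match the +bN binNMU suffix, so a
# binary-only upload versioned "1.2-3+b1" still gets its changelog
# stored below, while a plain binary-only "1.2-3" returns early.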
2948 session = DBConn().session()
2950 # Check if upload already has a changelog entry
2951 query = """SELECT changelog_id FROM changes WHERE source = :source
2952 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2953 if session.execute(query, {'source': self.pkg.changes['source'], \
2954 'version': self.pkg.changes['version'], \
2955 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2959 # Add current changelog text into changelogs_text table, return created ID
2960 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2961 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2963 # Link ID to the upload available in changes table
2964 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2965 AND version = :version AND architecture = :architecture"""
2966 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2967 'version': self.pkg.changes['version'], \
2968 'architecture': " ".join(self.pkg.changes['architecture'].keys())})