5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_list
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
80 elif f['architecture'] == 'source' and f["type"] == 'unreadable':
81 utils.warn('unreadable source file (will continue and hope for the best)')
85 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
87 # Validate the override type
88 type_id = get_override_type(file_type, session)
90 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
94 ################################################################################
96 # Determine what parts in a .changes are NEW
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
100 Determine what parts in a C{changes} file are NEW.
103 @param filename: changes filename
105 @type changes: Upload.Pkg.changes dict
106 @param changes: Changes dictionary
108 @type files: Upload.Pkg.files dict
109 @param files: Files dictionary
112 @param warn: Warn if overrides are added for (old)stable
114 @type dsc: Upload.Pkg.dsc dict
115 @param dsc: (optional); Dsc dictionary
118 @param new: new packages as returned by a previous call to this function, but override information may have changed
121 @return: dictionary of NEW components.
124 # TODO: This should all use the database instead of parsing the changes
130 dbchg = get_dbchange(filename, session)
132 print "Warning: cannot find changes file in database; won't check byhand"
134 # Try to get the Package-Set field from an included .dsc file (if possible).
136 for package, entry in build_package_list(dsc, session).items():
137 if package not in new:
140 # Build up a list of potentially new things
141 for name, f in files.items():
142 # Keep a record of byhand elements
143 if f["section"] == "byhand":
148 priority = f["priority"]
149 section = f["section"]
150 file_type = get_type(f, session)
151 component = f["component"]
153 if file_type == "dsc":
156 if not new.has_key(pkg):
158 new[pkg]["priority"] = priority
159 new[pkg]["section"] = section
160 new[pkg]["type"] = file_type
161 new[pkg]["component"] = component
162 new[pkg]["files"] = []
164 old_type = new[pkg]["type"]
165 if old_type != file_type:
166 # source gets trumped by deb or udeb
167 if old_type == "dsc":
168 new[pkg]["priority"] = priority
169 new[pkg]["section"] = section
170 new[pkg]["type"] = file_type
171 new[pkg]["component"] = component
173 new[pkg]["files"].append(name)
175 if f.has_key("othercomponents"):
176 new[pkg]["othercomponents"] = f["othercomponents"]
178 # Fix up the list of target suites
180 for suite in changes["suite"].keys():
181 oldsuite = get_suite(suite, session)
183 print "WARNING: Invalid suite %s found" % suite
186 if oldsuite.overridesuite:
187 newsuite = get_suite(oldsuite.overridesuite, session)
190 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191 oldsuite.overridesuite, suite)
192 del changes["suite"][suite]
193 changes["suite"][oldsuite.overridesuite] = 1
195 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
196 oldsuite.overridesuite, suite)
198 # Check for unprocessed byhand files
199 if dbchg is not None:
200 for b in byhand.keys():
201 # Find the file entry in the database
203 for f in dbchg.files:
206 # If it's processed, we can ignore it
212 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)
214 # Check for new stuff
215 for suite in changes["suite"].keys():
216 for pkg in new.keys():
217 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
219 for file_entry in new[pkg]["files"]:
220 if files[file_entry].has_key("new"):
221 del files[file_entry]["new"]
225 for s in ['stable', 'oldstable']:
226 if changes["suite"].has_key(s):
227 print "WARNING: overrides will be added for %s!" % s
228 for pkg in new.keys():
229 if new[pkg].has_key("othercomponents"):
230 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
234 ################################################################################
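# Illustrative sketch (not part of the original module): the ``new`` dict
# built by determine_new() above and consumed by check_valid() below is keyed
# on package name; the keys mirror the assignments in determine_new(), while
# the concrete values here are made-up examples.
#
#   new = {
#       "dak": {
#           "priority": "optional",
#           "section": "admin",
#           "type": "deb",
#           "component": "main",
#           "files": ["dak_1.0-1_amd64.deb"],
#       },
#   }
#
# check_valid() then fills in "section id" and "priority id" (-1 when the
# section or priority is unknown to the database).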
236 def check_valid(new, session = None):
238 Check if section and priority for NEW packages exist in database.
239 Additionally does sanity checks:
240 - debian-installer packages have to be udeb (or source)
241 - non-debian-installer packages cannot be udeb
242 - source priority can only be assigned to dsc file types
245 @param new: Dict of new packages with their section, priority and type.
248 for pkg in new.keys():
249 section_name = new[pkg]["section"]
250 priority_name = new[pkg]["priority"]
251 file_type = new[pkg]["type"]
253 section = get_section(section_name, session)
255 new[pkg]["section id"] = -1
257 new[pkg]["section id"] = section.section_id
259 priority = get_priority(priority_name, session)
261 new[pkg]["priority id"] = -1
263 new[pkg]["priority id"] = priority.priority_id
266 di = section_name.find("debian-installer") != -1
268 # If d-i, we must be udeb and vice-versa
269 if (di and file_type not in ("udeb", "dsc")) or \
270 (not di and file_type == "udeb"):
271 new[pkg]["section id"] = -1
273 # If dsc we need to be source and vice-versa
274 if (priority_name == "source" and file_type != "dsc") or \
275 (priority_name != "source" and file_type == "dsc"):
276 new[pkg]["priority id"] = -1
278 ###############################################################################
280 # Used by Upload.check_timestamps
281 class TarTime(object):
282 def __init__(self, future_cutoff, past_cutoff):
284 self.future_cutoff = future_cutoff
285 self.past_cutoff = past_cutoff
288 self.future_files = {}
289 self.ancient_files = {}
291 def callback(self, member, data):
292 if member.mtime > self.future_cutoff:
293 self.future_files[member.name] = member.mtime
294 if member.mtime < self.past_cutoff:
295 self.ancient_files[member.name] = member.mtime
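# Illustrative sketch (not part of the original module): TarTime is meant to
# be driven by apt_inst's tar visitor, as Upload.check_timestamps() does
# further down.  The cut-off values and filename below are made-up examples.
#
#   import time
#   import apt_inst
#
#   tar = TarTime(future_cutoff=time.time() + 86400, past_cutoff=0)
#   deb = apt_inst.DebFile("example_1.0-1_amd64.deb")
#   deb.control.go(tar.callback)
#   # tar.future_files / tar.ancient_files now map member names to mtimes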
297 ###############################################################################
299 def prod_maintainer(notes, upload):
302 # Here we prepare an editor and get them ready to prod...
303 (fd, temp_filename) = utils.temp_filename()
304 temp_file = os.fdopen(fd, 'w')
306 temp_file.write(note.comment)
308 editor = os.environ.get("EDITOR","vi")
311 os.system("%s %s" % (editor, temp_filename))
312 temp_fh = utils.open_file(temp_filename)
313 prod_message = "".join(temp_fh.readlines())
315 print "Prod message:"
316 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
317 prompt = "[P]rod, Edit, Abandon, Quit ?"
319 while prompt.find(answer) == -1:
320 answer = utils.our_raw_input(prompt)
321 m = re_default_answer.search(prompt)
324 answer = answer[:1].upper()
325 os.unlink(temp_filename)
331 # Otherwise, do the prodding...
332 user_email_address = utils.whoami() + " <%s>" % (
333 cnf["Dinstall::MyAdminAddress"])
337 Subst["__FROM_ADDRESS__"] = user_email_address
338 Subst["__PROD_MESSAGE__"] = prod_message
339 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
341 prod_mail_message = utils.TemplateSubst(
342 Subst,cnf["Dir::Templates"]+"/process-new.prod")
345 utils.send_mail(prod_mail_message)
347 print "Sent prodding message"
349 ################################################################################
351 def edit_note(note, upload, session, trainee=False):
352 # Write the current data to a temporary file
353 (fd, temp_filename) = utils.temp_filename()
354 editor = os.environ.get("EDITOR","vi")
357 os.system("%s %s" % (editor, temp_filename))
358 temp_file = utils.open_file(temp_filename)
359 newnote = temp_file.read().rstrip()
362 print utils.prefix_multi_line_string(newnote," ")
363 prompt = "[D]one, Edit, Abandon, Quit ?"
365 while prompt.find(answer) == -1:
366 answer = utils.our_raw_input(prompt)
367 m = re_default_answer.search(prompt)
370 answer = answer[:1].upper()
371 os.unlink(temp_filename)
378 comment = NewComment()
379 comment.package = upload.pkg.changes["source"]
380 comment.version = upload.pkg.changes["version"]
381 comment.comment = newnote
382 comment.author = utils.whoami()
383 comment.trainee = trainee
387 ###############################################################################
389 # FIXME: Should move into the database
390 # suite names DMs can upload to
391 dm_suites = ['unstable', 'experimental', 'squeeze-backports']
393 def get_newest_source(source, session):
394 'returns the newest DBSource object in dm_suites'
395 ## the most recent version of the package uploaded to unstable or
396 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
397 ## section of its control file
398 q = session.query(DBSource).filter_by(source = source). \
399 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
400 order_by(desc('source.version'))
403 def get_suite_version_by_source(source, session):
404 'returns a list of tuples (suite_name, version) for source package'
405 q = session.query(Suite.suite_name, DBSource.version). \
406 join(Suite.sources).filter_by(source = source)
409 def get_source_by_package_and_suite(package, suite_name, session):
411 returns a DBSource query filtered by DBBinary.package and this package's
414 return session.query(DBSource). \
415 join(DBSource.binaries).filter_by(package = package). \
416 join(DBBinary.suites).filter_by(suite_name = suite_name)
418 def get_suite_version_by_package(package, arch_string, session):
420 returns a list of tuples (suite_name, version) for binary package and
423 return session.query(Suite.suite_name, DBBinary.version). \
424 join(Suite.binaries).filter_by(package = package). \
425 join(DBBinary.architecture). \
426 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
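# Illustrative sketch (not part of the original module): typical use of the
# query helpers above.  Package, suite and architecture names are made-up
# examples.
#
#   session = DBConn().session()
#   newest = get_newest_source("dak", session)
#   suite_versions = get_suite_version_by_source("dak", session)
#   binary_versions = get_suite_version_by_package("dak", "amd64", session)
#   hijack_candidates = get_source_by_package_and_suite("dak", "unstable", session).all()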
428 class Upload(object):
430 Everything that has to do with processing an upload.
438 ###########################################################################
441 """ Reset a number of internal variables."""
443 # Initialize the substitution template map
446 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
447 if cnf.has_key("Dinstall::BugServer"):
448 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
449 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
450 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
456 self.later_check_files = []
460 def package_info(self):
462 Format various messages from this Upload to send to the maintainer.
466 ('Reject Reasons', self.rejects),
467 ('Warnings', self.warnings),
468 ('Notes', self.notes),
472 for title, messages in msgs:
474 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
479 ###########################################################################
480 def update_subst(self):
481 """ Set up the per-package template substitution mappings """
485 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
486 if not self.pkg.changes.has_key("architecture") or not \
487 isinstance(self.pkg.changes["architecture"], dict):
488 self.pkg.changes["architecture"] = { "Unknown" : "" }
490 # and maintainer2047 may not exist.
491 if not self.pkg.changes.has_key("maintainer2047"):
492 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
494 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
495 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
496 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
498 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
499 if self.pkg.changes["architecture"].has_key("source") and \
500 self.pkg.changes["changedby822"] != "" and \
501 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
503 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
504 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
505 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
507 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
508 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
509 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
511 # Process policy doesn't set the fingerprint field and I don't want to make it
512 # do it for now as I don't want to have to deal with the case where we accepted
513 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
514 # the meantime so the package will be remarked as rejectable. Urgh.
515 # TODO: Fix this properly
516 if self.pkg.changes.has_key('fingerprint'):
517 session = DBConn().session()
518 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
519 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
520 if self.pkg.changes.has_key("sponsoremail"):
521 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
524 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
525 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
527 # Apply any global override of the Maintainer field
528 if cnf.get("Dinstall::OverrideMaintainer"):
529 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
530 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
532 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
533 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
534 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
535 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
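# Illustrative sketch (not part of the original module): the self.Subst map
# filled in above is later expanded into mail templates with
# utils.TemplateSubst(); the template name below is only an example.
#
#   mail_message = utils.TemplateSubst(
#       self.Subst, cnf["Dir::Templates"] + "/process-unchecked.announce")
#   utils.send_mail(mail_message)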
537 ###########################################################################
538 def load_changes(self, filename):
540 Load a changes file and set up a dictionary around it. Also checks for mandatory
543 @type filename: string
544 @param filename: Changes filename, full path.
547 @return: whether the changes file was valid or not. We may want to
548 reject even if this is True (see what gets put in self.rejects).
549 This is simply to prevent us even trying things later which will
550 fail because we couldn't properly parse the file.
553 self.pkg.changes_file = filename
555 # Parse the .changes field into a dictionary
557 self.pkg.changes.update(parse_changes(filename))
558 except CantOpenError:
559 self.rejects.append("%s: can't read file." % (filename))
561 except ParseChangesError as line:
562 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
564 except ChangesUnicodeError:
565 self.rejects.append("%s: changes file not proper utf-8" % (filename))
568 # Parse the Files field from the .changes into another dictionary
570 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
571 except ParseChangesError as line:
572 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
574 except UnknownFormatError as format:
575 self.rejects.append("%s: unknown format '%s'." % (filename, format))
578 # Check for mandatory fields
579 for i in ("distribution", "source", "binary", "architecture",
580 "version", "maintainer", "files", "changes", "description"):
581 if not self.pkg.changes.has_key(i):
582 # Avoid undefined errors later
583 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
586 # Strip a source version in brackets from the source field
587 if re_strip_srcver.search(self.pkg.changes["source"]):
588 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
590 # Ensure the source field is a valid package name.
591 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
592 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
594 # Split multi-value fields into a lower-level dictionary
595 for i in ("architecture", "distribution", "binary", "closes"):
596 o = self.pkg.changes.get(i, "")
598 del self.pkg.changes[i]
600 self.pkg.changes[i] = {}
603 self.pkg.changes[i][j] = 1
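# Illustrative example (not part of the original module): after the split
# above, a field such as
#
#   Architecture: source amd64
#
# ends up as self.pkg.changes["architecture"] == {"source": 1, "amd64": 1},
# and likewise for Distribution, Binary and Closes.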
605 # Fix the Maintainer: field to be RFC822/2047 compatible
607 (self.pkg.changes["maintainer822"],
608 self.pkg.changes["maintainer2047"],
609 self.pkg.changes["maintainername"],
610 self.pkg.changes["maintaineremail"]) = \
611 fix_maintainer (self.pkg.changes["maintainer"])
612 except ParseMaintError as msg:
613 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
614 % (filename, self.pkg.changes["maintainer"], msg))
616 # ...likewise for the Changed-By: field if it exists.
618 (self.pkg.changes["changedby822"],
619 self.pkg.changes["changedby2047"],
620 self.pkg.changes["changedbyname"],
621 self.pkg.changes["changedbyemail"]) = \
622 fix_maintainer (self.pkg.changes.get("changed-by", ""))
623 except ParseMaintError as msg:
624 self.pkg.changes["changedby822"] = ""
625 self.pkg.changes["changedby2047"] = ""
626 self.pkg.changes["changedbyname"] = ""
627 self.pkg.changes["changedbyemail"] = ""
629 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
630 % (filename, self.pkg.changes["changed-by"], msg))
632 # Ensure all the values in Closes: are numbers
633 if self.pkg.changes.has_key("closes"):
634 for i in self.pkg.changes["closes"].keys():
635 if re_isanum.match (i) == None:
636 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
638 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
639 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
640 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
642 # Check the .changes is non-empty
643 if not self.pkg.files:
644 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
647 # Changes was syntactically valid even if we'll reject
650 ###########################################################################
652 def check_distributions(self):
653 "Check and map the Distribution field"
657 # Handle suite mappings
658 for m in Cnf.value_list("SuiteMappings"):
661 if mtype == "map" or mtype == "silent-map":
662 (source, dest) = args[1:3]
663 if self.pkg.changes["distribution"].has_key(source):
664 del self.pkg.changes["distribution"][source]
665 self.pkg.changes["distribution"][dest] = 1
666 if mtype != "silent-map":
667 self.notes.append("Mapping %s to %s." % (source, dest))
668 if self.pkg.changes.has_key("distribution-version"):
669 if self.pkg.changes["distribution-version"].has_key(source):
670 self.pkg.changes["distribution-version"][source]=dest
671 elif mtype == "map-unreleased":
672 (source, dest) = args[1:3]
673 if self.pkg.changes["distribution"].has_key(source):
674 for arch in self.pkg.changes["architecture"].keys():
675 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
676 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
677 del self.pkg.changes["distribution"][source]
678 self.pkg.changes["distribution"][dest] = 1
680 elif mtype == "ignore":
682 if self.pkg.changes["distribution"].has_key(suite):
683 del self.pkg.changes["distribution"][suite]
684 self.warnings.append("Ignoring %s as a target suite." % (suite))
685 elif mtype == "reject":
687 if self.pkg.changes["distribution"].has_key(suite):
688 self.rejects.append("Uploads to %s are not accepted." % (suite))
689 elif mtype == "propup-version":
690 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
692 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
693 if self.pkg.changes["distribution"].has_key(args[1]):
694 self.pkg.changes.setdefault("distribution-version", {})
695 for suite in args[2:]:
696 self.pkg.changes["distribution-version"][suite] = suite
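# Illustrative examples (not part of the original configuration): entries in
# the SuiteMappings list are strings of the form "<type> <args...>", e.g.
#
#   "map stable proposed-updates"
#   "silent-map stable-security stable"
#   "ignore oldstable"
#   "reject experimental-security"
#   "map-unreleased unstable experimental"
#   "propup-version testing-proposed-updates stable"
#
# The suite names above are only examples; the parsing is exactly the branch
# on mtype performed above.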
698 # Ensure there is (still) a target distribution
699 if len(self.pkg.changes["distribution"].keys()) < 1:
700 self.rejects.append("No valid distribution remaining.")
702 # Ensure target distributions exist
703 for suite in self.pkg.changes["distribution"].keys():
704 if not get_suite(suite.lower()):
705 self.rejects.append("Unknown distribution `%s'." % (suite))
707 ###########################################################################
709 def binary_file_checks(self, f, session):
711 entry = self.pkg.files[f]
713 # Extract package control information
714 deb_file = utils.open_file(f)
716 control = apt_pkg.TagSection(utils.deb_extract_control(deb_file))
718 self.rejects.append("%s: deb_extract_control() raised %s." % (f, sys.exc_info()[0]))
720 # Can't continue, none of the checks on control would work.
725 # Check for mandatory fields
726 for field in [ "Package", "Architecture", "Version", "Description" ]:
727 if field not in control:
729 self.rejects.append("%s: No %s field in control." % (f, field))
732 # Ensure the package name matches the one give in the .changes
733 if not self.pkg.changes["binary"].has_key(control.find("Package", "")):
734 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.find("Package", "")))
736 # Validate the package field
737 package = control["Package"]
738 if not re_valid_pkg_name.match(package):
739 self.rejects.append("%s: invalid package name '%s'." % (f, package))
741 # Validate the version field
742 version = control["Version"]
743 if not re_valid_version.match(version):
744 self.rejects.append("%s: invalid version number '%s'." % (f, version))
746 # Ensure the architecture of the .deb is one we know about.
747 default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
748 architecture = control["Architecture"]
749 upload_suite = self.pkg.changes["distribution"].keys()[0]
751 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
752 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
753 self.rejects.append("Unknown architecture '%s'." % (architecture))
755 # Ensure the architecture of the .deb is one of the ones
756 # listed in the .changes.
757 if not self.pkg.changes["architecture"].has_key(architecture):
758 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
760 # Sanity-check the Depends field
761 depends = control.find("Depends")
763 self.rejects.append("%s: Depends field is empty." % (f))
765 # Sanity-check the Provides field
766 provides = control.find("Provides")
767 if provides is not None:
768 provide = re_spacestrip.sub('', provides)
770 self.rejects.append("%s: Provides field is empty." % (f))
771 prov_list = provide.split(",")
772 for prov in prov_list:
773 if not re_valid_pkg_name.match(prov):
774 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
776 # If there is a Built-Using field, we need to check we can find the
777 # exact source version
778 built_using = control.find("Built-Using")
779 if built_using is not None:
781 entry["built-using"] = []
782 for dep in apt_pkg.parse_depends(built_using):
783 bu_s, bu_v, bu_e = dep[0]
784 # Check that it's an exact match dependency and we have
785 # some form of version
786 if bu_e != "=" or len(bu_v) < 1:
787 self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
789 # Find the source id for this version
790 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
792 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
794 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
796 except ValueError as e:
797 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
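# Illustrative example (not part of the original module): for a control field
# such as
#
#   Built-Using: gcc-4.7 (= 4.7.2-5), binutils (= 2.22-8)
#
# apt_pkg.parse_depends() yields one group per clause, e.g.
# [("gcc-4.7", "4.7.2-5", "=")], which the loop above unpacks as
# (bu_s, bu_v, bu_e) from dep[0].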
800 # Check the section & priority match those given in the .changes (non-fatal)
801 if control.find("Section") and entry["section"] != "" \
802 and entry["section"] != control.find("Section"):
803 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
804 (f, control.find("Section", ""), entry["section"]))
805 if control.find("Priority") and entry["priority"] != "" \
806 and entry["priority"] != control.find("Priority"):
807 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
808 (f, control.find("Priority", ""), entry["priority"]))
810 entry["package"] = package
811 entry["architecture"] = architecture
812 entry["version"] = version
813 entry["maintainer"] = control.find("Maintainer", "")
815 if f.endswith(".udeb"):
816 self.pkg.files[f]["dbtype"] = "udeb"
817 elif f.endswith(".deb"):
818 self.pkg.files[f]["dbtype"] = "deb"
820 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
822 entry["source"] = control.find("Source", entry["package"])
824 # Get the source version
825 source = entry["source"]
828 if source.find("(") != -1:
829 m = re_extract_src_version.match(source)
831 source_version = m.group(2)
833 if not source_version:
834 source_version = self.pkg.files[f]["version"]
836 entry["source package"] = source
837 entry["source version"] = source_version
839 # Ensure the filename matches the contents of the .deb
840 m = re_isadeb.match(f)
843 file_package = m.group(1)
844 if entry["package"] != file_package:
845 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
846 (f, file_package, entry["dbtype"], entry["package"]))
847 epochless_version = re_no_epoch.sub('', control.find("Version"))
850 file_version = m.group(2)
851 if epochless_version != file_version:
852 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
853 (f, file_version, entry["dbtype"], epochless_version))
856 file_architecture = m.group(3)
857 if entry["architecture"] != file_architecture:
858 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
859 (f, file_architecture, entry["dbtype"], entry["architecture"]))
861 # Check for existent source
862 source_version = entry["source version"]
863 source_package = entry["source package"]
864 if self.pkg.changes["architecture"].has_key("source"):
865 if source_version != self.pkg.changes["version"]:
866 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
867 (source_version, f, self.pkg.changes["version"]))
869 # Check in the SQL database
870 if not source_exists(source_package, source_version, suites = \
871 self.pkg.changes["distribution"].keys(), session = session):
872 # Check in one of the other directories
873 source_epochless_version = re_no_epoch.sub('', source_version)
874 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
876 byhand_dir = get_policy_queue('byhand', session).path
877 new_dir = get_policy_queue('new', session).path
879 if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
881 elif os.path.exists(os.path.join(new_dir, dsc_filename)):
884 dsc_file_exists = False
885 # TODO: Don't hardcode this list: use all relevant queues
886 # The question is how to determine what is relevant
887 for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
888 queue = get_policy_queue(queue_name, session)
890 if os.path.exists(os.path.join(queue.path, dsc_filename)):
891 dsc_file_exists = True
894 if not dsc_file_exists:
895 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
897 # Check the version and for file overwrites
898 self.check_binary_against_db(f, session)
900 def source_file_checks(self, f, session):
901 entry = self.pkg.files[f]
903 m = re_issource.match(f)
907 entry["package"] = m.group(1)
908 entry["version"] = m.group(2)
909 entry["type"] = m.group(3)
911 # Ensure the source package name matches the Source field in the .changes
912 if self.pkg.changes["source"] != entry["package"]:
913 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
915 # Ensure the source version matches the version in the .changes file
916 if re_is_orig_source.match(f):
917 changes_version = self.pkg.changes["chopversion2"]
919 changes_version = self.pkg.changes["chopversion"]
921 if changes_version != entry["version"]:
922 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
924 # Ensure the .changes lists source in the Architecture field
925 if not self.pkg.changes["architecture"].has_key("source"):
926 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
928 # Check the signature of a .dsc file
929 if entry["type"] == "dsc":
930 # check_signature returns either:
931 # (None, [list, of, rejects]) or (signature, [])
932 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
934 self.rejects.append(j)
936 entry["architecture"] = "source"
938 def per_suite_file_checks(self, f, suite, session):
940 entry = self.pkg.files[f]
943 if entry.has_key("byhand"):
946 # Check we have fields we need to do these checks
948 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
949 if not entry.has_key(m):
950 self.rejects.append("file '%s' does not have field %s set" % (f, m))
956 # Handle component mappings
957 for m in cnf.value_list("ComponentMappings"):
958 (source, dest) = m.split()
959 if entry["component"] == source:
960 entry["original component"] = source
961 entry["component"] = dest
963 # Ensure the component is valid for the target suite
964 if entry["component"] not in get_component_names(session):
965 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
968 # Validate the component
969 if not get_component(entry["component"], session):
970 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
973 # See if the package is NEW
974 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
977 # Validate the priority
978 if entry["priority"].find('/') != -1:
979 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
981 # Determine the location
982 location = cnf["Dir::Pool"]
983 l = get_location(location, entry["component"], session=session)
985 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
986 entry["location id"] = -1
988 entry["location id"] = l.location_id
990 # Check the md5sum & size against existing files (if any)
991 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
993 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
994 entry["size"], entry["md5sum"], entry["location id"])
997 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
998 elif found is False and poolfile is not None:
999 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1001 if poolfile is None:
1002 entry["files id"] = None
1004 entry["files id"] = poolfile.file_id
1006 # Check for packages that have moved from one component to another
1007 entry['suite'] = suite
1008 arch_list = [entry["architecture"], 'all']
1009 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1010 [suite], arch_list = arch_list, session = session)
1011 if component is not None:
1012 entry["othercomponents"] = component
1014 def check_files(self, action=True):
1015 file_keys = self.pkg.files.keys()
1021 os.chdir(self.pkg.directory)
1023 ret = holding.copy_to_holding(f)
1025 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1029 # check we already know the changes file
1030 # [NB: this check must be done post-suite mapping]
1031 base_filename = os.path.basename(self.pkg.changes_file)
1033 session = DBConn().session()
1036 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1037 # if in the pool or in a queue other than unchecked, reject
1038 if (dbc.in_queue is None) \
1039 or (dbc.in_queue is not None
1040 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1041 self.rejects.append("%s file already known to dak" % base_filename)
1042 except NoResultFound as e:
1046 has_binaries = False
1049 for f, entry in self.pkg.files.items():
1050 # Ensure the file does not already exist in one of the accepted directories
1051 # TODO: Dynamically generate this list
1052 for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
1053 queue = get_policy_queue(queue_name, session)
1054 if queue and os.path.exists(os.path.join(queue.path, f)):
1055 self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))
1057 if not re_taint_free.match(f):
1058 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1060 # Check the file is readable
1061 if os.access(f, os.R_OK) == 0:
1062 # When running in -n, copy_to_holding() won't have
1063 # generated the reject_message, so we need to.
1065 if os.path.exists(f):
1066 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1068 # Don't directly reject, mark to check later to deal with orig's
1069 # we can find in the pool
1070 self.later_check_files.append(f)
1071 entry["type"] = "unreadable"
1074 # If it's byhand skip remaining checks
1075 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1077 entry["type"] = "byhand"
1079 # Checks for a binary package...
1080 elif re_isadeb.match(f):
1082 entry["type"] = "deb"
1084 # This routine appends to self.rejects/warnings as appropriate
1085 self.binary_file_checks(f, session)
1087 # Checks for a source package...
1088 elif re_issource.match(f):
1091 # This routine appends to self.rejects/warnings as appropriate
1092 self.source_file_checks(f, session)
1094 # Not a binary or source package? Assume byhand...
1097 entry["type"] = "byhand"
1099 # Per-suite file checks
1100 entry["oldfiles"] = {}
1101 for suite in self.pkg.changes["distribution"].keys():
1102 self.per_suite_file_checks(f, suite, session)
1106 # If the .changes file says it has source, it must have source.
1107 if self.pkg.changes["architecture"].has_key("source"):
1109 self.rejects.append("no source found and Architecture line in changes mentions source.")
1111 if (not has_binaries) and (not cnf.find_b("Dinstall::AllowSourceOnlyUploads")):
1112 self.rejects.append("source only uploads are not supported.")
1114 ###########################################################################
1116 def __dsc_filename(self):
1118 Returns: (Status, Dsc_Filename)
1120 Status: Boolean; True when there was no error, False otherwise
1121 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1126 for name, entry in self.pkg.files.items():
1127 if entry.has_key("type") and entry["type"] == "dsc":
1129 return False, "cannot process a .changes file with multiple .dsc's."
1133 if not dsc_filename:
1134 return False, "source uploads must contain a dsc file"
1136 return True, dsc_filename
1138 def load_dsc(self, action=True, signing_rules=1):
1140 Find and load the dsc from self.pkg.files into self.dsc
1142 Returns: (Status, Reason)
1144 Status: Boolean; True when there was no error, False otherwise
1145 Reason: String; When Status is False this describes the error
1149 (status, dsc_filename) = self.__dsc_filename()
1151 # If status is false, dsc_filename has the reason
1152 return False, dsc_filename
1155 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1156 except CantOpenError:
1158 return False, "%s: can't read file." % (dsc_filename)
1159 except ParseChangesError as line:
1160 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1161 except InvalidDscError as line:
1162 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1163 except ChangesUnicodeError:
1164 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1168 ###########################################################################
1170 def check_dsc(self, action=True, session=None):
1171 """Returns bool indicating whether or not the source changes are valid"""
1172 # Ensure there is source to check
1173 if not self.pkg.changes["architecture"].has_key("source"):
1177 session = DBConn().session()
1179 (status, reason) = self.load_dsc(action=action)
1181 self.rejects.append(reason)
1183 (status, dsc_filename) = self.__dsc_filename()
1185 # If status is false, dsc_filename has the reason
1186 self.rejects.append(dsc_filename)
1189 # Build up the file list of files mentioned by the .dsc
1191 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1192 except NoFilesFieldError:
1193 self.rejects.append("%s: no Files: field." % (dsc_filename))
1195 except UnknownFormatError as format:
1196 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1198 except ParseChangesError as line:
1199 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1202 # Enforce mandatory fields
1203 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1204 if not self.pkg.dsc.has_key(i):
1205 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1208 # Validate the source and version fields
1209 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1210 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1211 if not re_valid_version.match(self.pkg.dsc["version"]):
1212 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1214 # Only a limited list of source formats are allowed in each suite
1215 for dist in self.pkg.changes["distribution"].keys():
1216 suite = get_suite(dist, session=session)
1218 self.rejects.append("%s: cannot find suite %s when checking source formats" % (dsc_filename, dist))
1220 allowed = [ x.format_name for x in suite.srcformats ]
1221 if self.pkg.dsc["format"] not in allowed:
1222 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1224 # Validate the Maintainer field
1226 # We ignore the return value
1227 fix_maintainer(self.pkg.dsc["maintainer"])
1228 except ParseMaintError as msg:
1229 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1230 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1232 # Validate the build-depends field(s)
1233 for field_name in [ "build-depends", "build-depends-indep" ]:
1234 field = self.pkg.dsc.get(field_name)
1236 # Have apt try to parse them...
1238 apt_pkg.parse_src_depends(field)
1240 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1242 # Ensure the version number in the .dsc matches the version number in the .changes
1243 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1244 changes_version = self.pkg.files[dsc_filename]["version"]
1246 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1247 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1249 # Ensure the Files field contain only what's expected
1250 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1252 # Ensure source is newer than existing source in target suites
1253 session = DBConn().session()
1254 self.check_source_against_db(dsc_filename, session)
1255 self.check_dsc_against_db(dsc_filename, session)
1257 dbchg = get_dbchange(self.pkg.changes_file, session)
1259 # Finally, check if we're missing any files
1260 for f in self.later_check_files:
1262 # Check if we've already processed this file if we have a dbchg object
1265 for pf in dbchg.files:
1266 if pf.filename == f and pf.processed:
1267 self.notes.append('%s was already processed so we can go ahead' % f)
1269 del self.pkg.files[f]
1271 self.rejects.append("Could not find file %s references in changes" % f)
1275 return (len(self.rejects) == 0)
1277 ###########################################################################
1279 def get_changelog_versions(self, source_dir):
1280 """Extracts a the source package and (optionally) grabs the
1281 version history out of debian/changelog for the BTS."""
1285 # Find the .dsc (again)
1287 for f in self.pkg.files.keys():
1288 if self.pkg.files[f]["type"] == "dsc":
1291 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1292 if not dsc_filename:
1295 # Create a symlink mirror of the source files in our temporary directory
1296 for f in self.pkg.files.keys():
1297 m = re_issource.match(f)
1299 src = os.path.join(source_dir, f)
1300 # If a file is missing for whatever reason, give up.
1301 if not os.path.exists(src):
1304 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1305 self.pkg.orig_files[f].has_key("path"):
1307 dest = os.path.join(os.getcwd(), f)
1308 os.symlink(src, dest)
1310 # If the orig files are not a part of the upload, create symlinks to the
1312 for orig_file in self.pkg.orig_files.keys():
1313 if not self.pkg.orig_files[orig_file].has_key("path"):
1315 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1316 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1318 # Extract the source
1320 unpacked = UnpackedSource(dsc_filename)
1321 except Exception as e:
1322 self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
1325 if not cnf.find("Dir::BTSVersionTrack"):
1328 # Get the upstream version
1329 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1330 if re_strip_revision.search(upstr_version):
1331 upstr_version = re_strip_revision.sub('', upstr_version)
1333 # Ensure the changelog file exists
1334 changelog_file = unpacked.get_changelog_file()
1335 if changelog_file is None:
1336 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1339 # Parse the changelog
1340 self.pkg.dsc["bts changelog"] = ""
1341 for line in changelog_file.readlines():
1342 m = re_changelog_versions.match(line)
1344 self.pkg.dsc["bts changelog"] += line
1345 changelog_file.close()
1348 # Check we found at least one revision in the changelog
1349 if not self.pkg.dsc["bts changelog"]:
1350 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1352 def check_source(self):
1354 # a) there's no source
1355 if not self.pkg.changes["architecture"].has_key("source"):
1358 tmpdir = utils.temp_dirname()
1360 # Move into the temporary directory
1364 # Get the changelog version history
1365 self.get_changelog_versions(cwd)
1367 # Move back and cleanup the temporary tree
1371 shutil.rmtree(tmpdir)
1372 except OSError as e:
1373 if e.errno != errno.EACCES:
1375 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1377 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1378 # We probably have u-r or u-w directories so chmod everything
1380 cmd = "chmod -R u+rwx %s" % (tmpdir)
1381 result = os.system(cmd)
1383 utils.fubar("'%s' failed with result %s." % (cmd, result))
1384 shutil.rmtree(tmpdir)
1385 except Exception as e:
1386 print "removal of temporary source tree failed (%s)" % e
1387 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1389 ###########################################################################
1390 def ensure_hashes(self):
1391 # Make sure we recognise the format of the Files: field in the .changes
1392 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1393 if len(format) == 2:
1394 format = int(format[0]), int(format[1])
1396 format = int(float(format[0])), 0
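# Illustrative example (not part of the original module): "Format: 1.8"
# yields format == (1, 8) here, while a value without a minor part
# (e.g. "2") falls back to (2, 0) via the else branch above.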
1398 # We need to deal with the original changes blob, as the fields we need
1399 # might not be in the changes dict serialised into the .dak anymore.
1400 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1402 # Copy the checksums over to the current changes dict. This will keep
1403 # the existing modifications to it intact.
1404 for field in orig_changes:
1405 if field.startswith('checksums-'):
1406 self.pkg.changes[field] = orig_changes[field]
1408 # Check for unsupported hashes
1409 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1410 self.rejects.append(j)
1412 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1413 self.rejects.append(j)
1415 # We have to calculate the hash if we have an earlier changes version than
1416 # the hash appears in rather than require it exist in the changes file
1417 for hashname, hashfunc, version in utils.known_hashes:
1418 # TODO: Move _ensure_changes_hash into this class
1419 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1420 self.rejects.append(j)
1421 if "source" in self.pkg.changes["architecture"]:
1422 # TODO: Move _ensure_dsc_hash into this class
1423 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1424 self.rejects.append(j)
1426 def check_hashes(self):
1427 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1428 self.rejects.append(m)
1430 for m in utils.check_size(".changes", self.pkg.files):
1431 self.rejects.append(m)
1433 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1434 self.rejects.append(m)
1436 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1437 self.rejects.append(m)
1439 self.ensure_hashes()
1441 ###########################################################################
1443 def ensure_orig(self, target_dir='.', session=None):
1445 Ensures that all orig files mentioned in the changes file are present
1446 in target_dir. If they do not exist, they are symlinked into place.
1448 A list containing the symlinks that were created is returned (so they
1455 for filename, entry in self.pkg.dsc_files.iteritems():
1456 if not re_is_orig_source.match(filename):
1457 # File is not an orig; ignore
1460 if os.path.exists(filename):
1461 # File exists, no need to continue
1464 def symlink_if_valid(path):
1465 f = utils.open_file(path)
1466 md5sum = apt_pkg.md5sum(f)
1469 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1470 expected = (int(entry['size']), entry['md5sum'])
1472 if fingerprint != expected:
1475 dest = os.path.join(target_dir, filename)
1477 os.symlink(path, dest)
1478 symlinked.append(dest)
1484 session_ = DBConn().session()
1489 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1490 poolfile_path = os.path.join(
1491 poolfile.location.path, poolfile.filename
1494 if symlink_if_valid(poolfile_path):
1504 # Look in some other queues for the file
1505 queue_names = ['new', 'byhand',
1506 'proposedupdates', 'oldproposedupdates',
1507 'embargoed', 'unembargoed']
1509 for queue_name in queue_names:
1510 queue = get_policy_queue(queue_name, session)
1514 queuefile_path = os.path.join(queue.path, filename)
1516 if not os.path.exists(queuefile_path):
1517 # Does not exist in this queue
1520 if symlink_if_valid(queuefile_path):
1525 ###########################################################################
1527 def check_lintian(self):
1529 Extends self.rejects by checking the output of lintian against tags
1530 specified in Dinstall::LintianTags.
1535 # Don't reject binary uploads
1536 if not self.pkg.changes['architecture'].has_key('source'):
1539 # Only check some distributions
1540 for dist in ('unstable', 'experimental'):
1541 if dist in self.pkg.changes['distribution']:
1546 # If we do not have a tagfile, don't do anything
1547 tagfile = cnf.get("Dinstall::LintianTags")
1551 # Parse the yaml file
1552 sourcefile = file(tagfile, 'r')
1553 sourcecontent = sourcefile.read()
1557 lintiantags = yaml.load(sourcecontent)['lintian']
1558 except yaml.YAMLError as msg:
1559 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
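# Illustrative sketch (not part of the original configuration): the file
# named by Dinstall::LintianTags is YAML with a top-level "lintian" key
# mapping group names to lists of tags, roughly:
#
#   lintian:
#     fatal:
#       - binary-in-etc
#     nonfatal:
#       - debian-control-file-uses-obsolete-national-encoding
#
# Group names and tags above are examples only; generate_reject_messages()
# combines the parsed structure with lintian's output further down.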
1562 # Try and find all orig mentioned in the .dsc
1563 symlinked = self.ensure_orig()
1565 # Setup the input file for lintian
1566 fd, temp_filename = utils.temp_filename()
1567 temptagfile = os.fdopen(fd, 'w')
1568 for tags in lintiantags.values():
1569 temptagfile.writelines(['%s\n' % x for x in tags])
1573 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1574 (temp_filename, self.pkg.changes_file)
1576 result, output = commands.getstatusoutput(cmd)
1578 # Remove our tempfile and any symlinks we created
1579 os.unlink(temp_filename)
1581 for symlink in symlinked:
1585 utils.warn("lintian failed for %s [return code: %s]." % \
1586 (self.pkg.changes_file, result))
1587 utils.warn(utils.prefix_multi_line_string(output, \
1588 " [possible output:] "))
1593 [self.pkg.changes_file, "check_lintian"] + list(txt)
1597 parsed_tags = parse_lintian_output(output)
1598 self.rejects.extend(
1599 generate_reject_messages(parsed_tags, lintiantags, log=log)
1602 ###########################################################################
1603 def check_urgency(self):
1605 if self.pkg.changes["architecture"].has_key("source"):
1606 if not self.pkg.changes.has_key("urgency"):
1607 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1608 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1609 if self.pkg.changes["urgency"] not in cnf.value_list("Urgency::Valid"):
1610 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1611 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1612 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1614 ###########################################################################
1616 # Sanity check the time stamps of files inside debs.
1617 # [Files in the near future cause ugly warnings and extreme time
1618 # travel can cause errors on extraction]
1620 def check_timestamps(self):
1623 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1624 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
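# Illustrative example (not part of the original configuration): with, say,
# Dinstall::FutureTimeTravelGrace "28800" (8 hours) and
# Dinstall::PastCutoffYear "1975", members dated more than 8 hours into the
# future or before 1975-01-01 are reported below.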
1625 tar = TarTime(future_cutoff, past_cutoff)
1627 for filename, entry in self.pkg.files.items():
1628 if entry["type"] == "deb":
1631 deb = apt_inst.DebFile(filename)
1632 deb.control.go(tar.callback)
1634 future_files = tar.future_files.keys()
1636 num_future_files = len(future_files)
1637 future_file = future_files[0]
1638 future_date = tar.future_files[future_file]
1639 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1640 % (filename, num_future_files, future_file, time.ctime(future_date)))
1642 ancient_files = tar.ancient_files.keys()
1644 num_ancient_files = len(ancient_files)
1645 ancient_file = ancient_files[0]
1646 ancient_date = tar.ancient_files[ancient_file]
1647 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1648 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1650 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1652 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1653 for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
1654 if not self.pkg.changes.has_key(key):
1656 uid_email = '@'.join(uid_email.split('@')[:2])
1657 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1659 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1665 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1666 debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
1667 if uid_email not in debian_emails:
1669 uid_email = debian_emails[0]
1670 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1671 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1672 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1673 self.pkg.changes["sponsoremail"] = uid_email
1678 ###########################################################################
1679 # check_signed_by_key checks
1680 ###########################################################################
1682 def check_signed_by_key(self):
1683 """Ensure the .changes is signed by an authorized uploader."""
1684 session = DBConn().session()
1686 # First of all we check that the person has proper upload permissions
1687 # and that this upload isn't blocked
1688 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1691 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1694 # TODO: Check that import-keyring adds UIDs properly
1696 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1699 # Check that the fingerprint which uploaded has permission to do so
1700 self.check_upload_permissions(fpr, session)
1702 # Check that this package is not in a transition
1703 self.check_transition(session)
1708 def check_upload_permissions(self, fpr, session):
1709 # Check any one-off upload blocks
1710 self.check_upload_blocks(fpr, session)
1712 # If the source_acl is None, source is never allowed
1713 if fpr.source_acl is None:
1714 if self.pkg.changes["architecture"].has_key("source"):
1715 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1716 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1717 self.rejects.append(rej)
1719 # Do DM as a special case
1720 # DM is a special case unfortunately, so we check it first
1721 # (keys with no source access get more access than DMs in one
1722 # way; DMs can only upload for their packages whether source
1723 # or binary, whereas keys with no access might be able to
1724 # upload some binaries)
1725 elif fpr.source_acl.access_level == 'dm':
1726 self.check_dm_upload(fpr, session)
1728 # If not a DM, we allow full upload rights
1729 uid_email = "%s@debian.org" % (fpr.uid.uid)
1730 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1733 # Check binary upload permissions
1734 # By this point we know that DMs can't have got here unless they
1735 # are allowed to deal with the package concerned so just apply
1737 if fpr.binary_acl.access_level == 'full':
1740 # Otherwise we're in the map case
1741 tmparches = self.pkg.changes["architecture"].copy()
1742 tmparches.pop('source', None)
1744 for bam in fpr.binary_acl_map:
1745 tmparches.pop(bam.architecture.arch_string, None)
1747 if len(tmparches.keys()) > 0:
1748 if fpr.binary_reject:
1749 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1750 if len(tmparches.keys()) == 1:
1751 rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1753 rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1754 self.rejects.append(rej)
1756 # TODO: This is where we'll implement reject vs throw away binaries later
1757 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1758 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1759 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1760 self.rejects.append(rej)
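# Sketch (an assumption, not dak code): the "map case" above is essentially set
# subtraction -- start from the architectures in the upload, drop every
# architecture the fingerprint's binary ACL map allows, and reject whatever is
# left over.  For example:
#
#   uploaded = {"amd64", "armel", "source"}
#   uploaded.discard("source")        # source access is handled separately
#   allowed  = {"amd64"}              # from fpr.binary_acl_map
#   leftover = uploaded - allowed     # {"armel"} -> reject (or throw away)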
1763 def check_upload_blocks(self, fpr, session):
1764 """Check whether any upload blocks apply to this source, source
1765 version, uid / fpr combination"""
1767 def block_rej_template(fb):
1768 rej = 'Manual upload block in place for package %s' % fb.source
1769 if fb.version is not None:
1770 rej += ', version %s' % fb.version
1773 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1774 # version is None if the block applies to all versions
1775 if fb.version is None or fb.version == self.pkg.changes['version']:
1776 # Check both fpr and uid - either is enough to cause a reject
1777 if fb.fpr is not None:
1778 if fb.fpr.fingerprint == fpr.fingerprint:
1779 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1780 if fb.uid is not None:
1781 if fb.uid == fpr.uid:
1782 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
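# For illustration only: an UploadBlock row, as consumed above, carries a source
# package name, an optional version, an optional fingerprint or uid, and a
# reason.  A hypothetical blocked upload would therefore be rejected with a
# message assembled by block_rej_template(), roughly:
#
#   "Manual upload block in place for package foo, version 1.2-3
#    for fingerprint ABCD...  Reason: awaiting security review"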
1785 def check_dm_upload(self, fpr, session):
1786 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1787 ## none of the uploaded packages are NEW
1789 for f in self.pkg.files.keys():
1790 if self.pkg.files[f].has_key("byhand"):
1791 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1793 if self.pkg.files[f].has_key("new"):
1794 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1800 r = get_newest_source(self.pkg.changes["source"], session)
1803 rej = "Could not find existing source package %s in the DM allowed suites and this is a DM upload" % self.pkg.changes["source"]
1804 self.rejects.append(rej)
1807 if not r.dm_upload_allowed:
1808 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1809 self.rejects.append(rej)
1812 ## the Maintainer: field of the uploaded .changes file corresponds with
1813 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1815 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1816 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1818 ## the most recent version of the package uploaded to unstable or
1819 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1820 ## non-developer maintainers cannot NMU or hijack packages)
1822 # uploader includes the maintainer
1824 for uploader in r.uploaders:
1825 (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1826 # Eww - I hope we never have two people with the same name in Debian
1827 if email == fpr.uid.uid or name == fpr.uid.name:
1832 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1835 ## none of the packages are being taken over from other source packages
1836 for b in self.pkg.changes["binary"].keys():
1837 for suite in self.pkg.changes["distribution"].keys():
1838 for s in get_source_by_package_and_suite(b, suite, session):
1839 if s.source != self.pkg.changes["source"]:
1840 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1844 def check_transition(self, session):
1847 sourcepkg = self.pkg.changes["source"]
1849 # No sourceful upload -> no need to do anything else, direct return
1850 # We only check unstable uploads, not experimental ones or those going to some
1851 # proposed-updates queue
1852 if "source" not in self.pkg.changes["architecture"] or \
1853 "unstable" not in self.pkg.changes["distribution"]:
1856 # Also only check if there is a file defined (and existent) with
1858 transpath = cnf.get("Dinstall::ReleaseTransitions", "")
1859 if transpath == "" or not os.path.exists(transpath):
1862 # Parse the yaml file
1863 sourcefile = file(transpath, 'r')
1864 sourcecontent = sourcefile.read()
1866 transitions = yaml.load(sourcecontent)
1867 except yaml.YAMLError as msg:
1868 # This shouldn't happen; there is a wrapper to edit the file which
1869 # checks it, but we prefer to be safe rather than end up rejecting
1871 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1874 # Now look through all defined transitions
1875 for trans in transitions:
1876 t = transitions[trans]
1877 source = t["source"]
1880 # Will be None if nothing is in testing.
1881 current = get_source_in_suite(source, "testing", session)
1882 if current is not None:
1883 compare = apt_pkg.version_compare(current.version, expected)
1885 if current is None or compare < 0:
1886 # This is still valid: the current version in testing is older than
1887 # the new version we are waiting for, or there is none in testing yet
1889 # Check if the source we look at is affected by this.
1890 if sourcepkg in t['packages']:
1891 # The source is affected; let's reject it.
1893 rejectmsg = "%s: part of the %s transition.\n\n" % (
1896 if current is not None:
1897 currentlymsg = "at version %s" % (current.version)
1899 currentlymsg = "not present in testing"
1901 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1903 rejectmsg += "\n".join(textwrap.wrap("""Your package
1904 is part of a testing transition designed to get %s migrated (it is
1905 currently %s, we need version %s). This transition is managed by the
1906 Release Team, and %s is the Release-Team member responsible for it.
1907 Please mail debian-release@lists.debian.org or contact %s directly if you
1908 need further assistance. You might want to upload to experimental until this
1909 transition is done."""
1910 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1912 self.rejects.append(rejectmsg)
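# Hedged example (not shipped with dak): judging only from the keys read in
# this method, an entry in the Dinstall::ReleaseTransitions YAML file looks
# roughly like the snippet below; the key holding the expected version is read
# into `expected` above, and its exact name is outside this excerpt.
#
#   ocaml:
#     source: ocaml
#     reason: "transition to the new OCaml ABI"
#     rm: "Some Release Team Member"
#     packages:
#       - ocaml
#       - some-ocaml-library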
1915 ###########################################################################
1916 # End check_signed_by_key checks
1917 ###########################################################################
1919 def build_summaries(self):
1920 """ Build a summary of changes the upload introduces. """
1922 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1924 short_summary = summary
1926 # This is for direport's benefit...
1927 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1929 summary += "\n\nChanges:\n" + f
1931 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1933 summary += self.announce(short_summary, 0)
1935 return (summary, short_summary)
1937 ###########################################################################
1939 def close_bugs(self, summary, action):
1941 Send mail to close bugs as instructed by the closes field in the changes file.
1942 Also add a line to summary if any work was done.
1944 @type summary: string
1945 @param summary: summary text, as given by L{build_summaries}
1948 @param action: If set to false, no real action will be taken.
1951 @return: summary. If action was taken, extended by the list of closed bugs.
1955 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1957 bugs = self.pkg.changes["closes"].keys()
1963 summary += "Closing bugs: "
1965 summary += "%s " % (bug)
1968 self.Subst["__BUG_NUMBER__"] = bug
1969 if self.pkg.changes["distribution"].has_key("stable"):
1970 self.Subst["__STABLE_WARNING__"] = """
1971 Note that this package is not part of the released stable Debian
1972 distribution. It may have dependencies on other unreleased software,
1973 or other instabilities. Please take care if you wish to install it.
1974 The update will eventually make its way into the next released Debian
1977 self.Subst["__STABLE_WARNING__"] = ""
1978 mail_message = utils.TemplateSubst(self.Subst, template)
1979 utils.send_mail(mail_message)
1981 # Clear up after ourselves
1982 del self.Subst["__BUG_NUMBER__"]
1983 del self.Subst["__STABLE_WARNING__"]
1985 if action and self.logger:
1986 self.logger.log(["closing bugs"] + bugs)
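# Sketch of the mail templating pattern used here and in the methods below
# (illustrative, with hypothetical values): self.Subst is a plain dict mapping
# __PLACEHOLDER__ keys to strings, utils.TemplateSubst() expands them into a
# template file, and utils.send_mail() sends the result.
#
#   subst = dict(self.Subst)              # e.g. {"__BUG_NUMBER__": "123456"}
#   body = utils.TemplateSubst(subst, template)
#   utils.send_mail(body)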
1992 ###########################################################################
1994 def announce(self, short_summary, action):
1996 Send an announce mail about a new upload.
1998 @type short_summary: string
1999 @param short_summary: Short summary text to include in the mail
2002 @param action: If set to false, no real action will be taken.
2005 @return: Text string describing the action taken.
2011 # Skip all of this if not sending mail to avoid confusing people
2012 if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
2015 # Only do announcements for source uploads with a recent dpkg-dev installed
2016 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2017 self.pkg.changes["architecture"].has_key("source"):
2020 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2025 # Get a unique list of target lists
2026 for dist in self.pkg.changes["distribution"].keys():
2027 suite = get_suite(dist)
2028 if suite is None: continue
2029 for tgt in suite.announce:
2032 self.Subst["__SHORT_SUMMARY__"] = short_summary
2034 for announce_list in lists_todo.keys():
2035 summary += "Announcing to %s\n" % (announce_list)
2039 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2040 if cnf.get("Dinstall::TrackingServer") and \
2041 self.pkg.changes["architecture"].has_key("source"):
2042 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2043 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2045 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2046 utils.send_mail(mail_message)
2048 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2050 if cnf.find_b("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2051 summary = self.close_bugs(summary, action)
2053 del self.Subst["__SHORT_SUMMARY__"]
2057 ###########################################################################
2059 def accept (self, summary, short_summary, session=None):
2063 This moves all files referenced from the .changes into the pool,
2064 sends the accepted mail, announces to lists, closes bugs and
2065 also checks for override disparities. If enabled, it will write out
2066 the version history for the BTS Version Tracking and will finally call
2069 @type summary: string
2070 @param summary: Summary text
2072 @type short_summary: string
2073 @param short_summary: Short summary
2077 stats = SummaryStats()
2080 self.logger.log(["installing changes", self.pkg.changes_file])
2085 # Add the .dsc file to the DB first
2086 for newfile, entry in self.pkg.files.items():
2087 if entry["type"] == "dsc":
2088 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2092 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2093 for newfile, entry in self.pkg.files.items():
2094 if entry["type"] == "deb":
2095 b, pf = add_deb_to_db(self, newfile, session)
2097 poolfiles.append(pf)
2099 # If this is a sourceful diff-only upload that is moving
2100 # cross-component, we need to copy the .orig files into the new
2101 # component too, for the same reasons as above.
2102 # XXX: mhy: I think this should be in add_dsc_to_db
2103 if self.pkg.changes["architecture"].has_key("source"):
2104 for orig_file in self.pkg.orig_files.keys():
2105 if not self.pkg.orig_files[orig_file].has_key("id"):
2106 continue # Skip if it's not in the pool
2107 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2108 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2109 continue # Skip if the location didn't change
2112 oldf = get_poolfile_by_id(orig_file_id, session)
2113 old_filename = os.path.join(oldf.location.path, oldf.filename)
2114 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2115 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2117 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2119 # TODO: Care about size/md5sum collisions etc
2120 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2122 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2124 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2125 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2129 # Don't reference the old file from this changes
2131 if p.file_id == oldf.file_id:
2134 poolfiles.append(newf)
2136 # Fix up the DSC references
2139 for df in source.srcfiles:
2140 if df.poolfile.file_id == oldf.file_id:
2141 # Add a new DSC entry and mark the old one for deletion
2142 # Don't do it in the loop so we don't change the thing we're iterating over
2144 newdscf.source_id = source.source_id
2145 newdscf.poolfile_id = newf.file_id
2146 session.add(newdscf)
2156 # Make sure that our source object is up-to-date
2157 session.expire(source)
2159 # Add changelog information to the database
2160 self.store_changelog()
2162 # Install the files into the pool
2163 for newfile, entry in self.pkg.files.items():
2164 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2165 utils.move(newfile, destination)
2166 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2167 stats.accept_bytes += float(entry["size"])
2169 # Copy the .changes file across for suites which need it.
2170 copy_changes = dict([(x.copychanges, '')
2171 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2172 if x.copychanges is not None])
2174 for dest in copy_changes.keys():
2175 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2177 # We're done - commit the database changes
2179 # Our SQL session will automatically start a new transaction after
2182 # Now ensure that the metadata has been added
2183 # This has to be done after we copy the files into the pool
2184 # For source if we have it:
2185 if self.pkg.changes["architecture"].has_key("source"):
2186 import_metadata_into_db(source, session)
2188 # Now for any of our binaries
2190 import_metadata_into_db(b, session)
2194 # Move the .changes into the 'done' directory
2195 ye, mo, da = time.gmtime()[0:3]
2196 donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2197 if not os.path.isdir(donedir):
2198 os.makedirs(donedir)
2200 utils.move(self.pkg.changes_file,
2201 os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2203 if self.pkg.changes["architecture"].has_key("source"):
2204 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2207 self.Subst["__SUMMARY__"] = summary
2208 mail_message = utils.TemplateSubst(self.Subst,
2209 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2210 utils.send_mail(mail_message)
2211 self.announce(short_summary, 1)
2213 ## Helper stuff for DebBugs Version Tracking
2214 if cnf.find("Dir::BTSVersionTrack"):
2215 if self.pkg.changes["architecture"].has_key("source"):
2216 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2217 version_history = os.fdopen(fd, 'w')
2218 version_history.write(self.pkg.dsc["bts changelog"])
2219 version_history.close()
2220 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2221 self.pkg.changes_file[:-8]+".versions")
2222 os.rename(temp_filename, filename)
2223 os.chmod(filename, 0o644)
2225 # Write out the binary -> source mapping.
2226 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2227 debinfo = os.fdopen(fd, 'w')
2228 for name, entry in sorted(self.pkg.files.items()):
2229 if entry["type"] == "deb":
2230 line = " ".join([entry["package"], entry["version"],
2231 entry["architecture"], entry["source package"],
2232 entry["source version"]])
2233 debinfo.write(line+"\n")
2235 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2236 self.pkg.changes_file[:-8]+".debinfo")
2237 os.rename(temp_filename, filename)
2238 os.chmod(filename, 0o644)
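# For reference: each line of the .debinfo file written above maps one binary
# back to its source, in the order "package version architecture
# source-package source-version", e.g. (hypothetical values):
#
#   dak 1.0-1 amd64 dak 1.0-1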
2242 # Set up our copy queues (e.g. buildd queues)
2243 for suite_name in self.pkg.changes["distribution"].keys():
2244 suite = get_suite(suite_name, session)
2245 for q in suite.copy_queues:
2247 q.add_file_from_pool(f)
2252 stats.accept_count += 1
2254 def check_override(self):
2256 Checks override entries for validity. Mails "Override disparity" warnings,
2257 if that feature is enabled.
2259 Abandons the check if
2260 - override disparity checks are disabled
2261 - mail sending is disabled
2266 # Abandon the check if override disparity checks have been disabled
2267 if not cnf.find_b("Dinstall::OverrideDisparityCheck"):
2270 summary = self.pkg.check_override()
2275 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2278 self.Subst["__SUMMARY__"] = summary
2279 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2280 utils.send_mail(mail_message)
2281 del self.Subst["__SUMMARY__"]
2283 ###########################################################################
2285 def remove(self, from_dir=None):
2287 Used (for instance) in p-u to remove the package from unchecked
2289 Also removes the package from the holding area.
2291 if from_dir is None:
2292 from_dir = self.pkg.directory
2295 for f in self.pkg.files.keys():
2296 os.unlink(os.path.join(from_dir, f))
2297 if os.path.exists(os.path.join(h.holding_dir, f)):
2298 os.unlink(os.path.join(h.holding_dir, f))
2300 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2301 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2302 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2304 ###########################################################################
2306 def move_to_queue (self, queue):
2308 Move files to a destination queue using the permissions in the table
2311 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2312 queue.path, perms=int(queue.change_perms, 8))
2313 for f in self.pkg.files.keys():
2314 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2316 ###########################################################################
2318 def force_reject(self, reject_files):
2320 Forcefully move files from the current directory to the
2321 reject directory. If any file already exists in the reject
2322 directory, it will be moved to the morgue to make way for
2325 @type reject_files: dict
2326 @param reject_files: file dictionary
2332 for file_entry in reject_files:
2333 # Skip any files which don't exist or which we don't have permission to copy.
2334 if not os.access(file_entry, os.R_OK):
2337 dest_file = os.path.join(cnf["Dir::Reject"], file_entry)
2340 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
2341 except OSError as e:
2342 # File exists? Let's find a new name by adding a number
2343 if e.errno == errno.EEXIST:
2345 dest_file = utils.find_next_free(dest_file, 255)
2346 except NoFreeFilenameError:
2347 # Something's either gone badly Pete Tong, or
2348 # someone is trying to exploit us.
2349 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
2352 # Make sure we really got it
2354 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2355 except OSError as e:
2357 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2361 # If we got here, we own the destination file, so we can
2362 # safely overwrite it.
2363 utils.move(file_entry, dest_file, 1, perms=0o660)
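# The pattern above is a deliberate anti-race idiom: open the destination with
# O_CREAT|O_EXCL so creation fails if the name is already taken, pick a new
# name with find_next_free() on EEXIST, and only then move the file in.  A
# minimal standalone sketch of the same idea (illustrative, not dak code):
#
#   import errno, os
#   try:
#       fd = os.open(dest, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
#   except OSError as e:
#       if e.errno == errno.EEXIST:
#           dest = utils.find_next_free(dest, 255)   # then retry the open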
2366 ###########################################################################
2367 def do_reject (self, manual=0, reject_message="", notes=""):
2369 Reject an upload. If called without a reject message or C{manual} is
2370 true, spawn an editor so the user can write one.
2373 @param manual: manual or automated rejection
2375 @type reject_message: string
2376 @param reject_message: A reject message
2381 # If we weren't given a manual rejection message, spawn an
2382 # editor so the user can add one in...
2383 if manual and not reject_message:
2384 (fd, temp_filename) = utils.temp_filename()
2385 temp_file = os.fdopen(fd, 'w')
2388 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2389 % (note.author, note.version, note.notedate, note.comment))
2391 editor = os.environ.get("EDITOR","vi")
2393 while answer == 'E':
2394 os.system("%s %s" % (editor, temp_filename))
2395 temp_fh = utils.open_file(temp_filename)
2396 reject_message = "".join(temp_fh.readlines())
2398 print "Reject message:"
2399 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2400 prompt = "[R]eject, Edit, Abandon, Quit ?"
2402 while prompt.find(answer) == -1:
2403 answer = utils.our_raw_input(prompt)
2404 m = re_default_answer.search(prompt)
2407 answer = answer[:1].upper()
2408 os.unlink(temp_filename)
2414 print "Rejecting.\n"
2418 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2419 reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)
2420 changesfile = os.path.join(cnf["Dir::Reject"], self.pkg.changes_file)
2422 # Move all the files into the reject directory
2423 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2424 self.force_reject(reject_files)
2426 # Change permissions of the .changes file to be world readable
2428 os.chmod(changesfile, os.stat(changesfile).st_mode | stat.S_IROTH)
2429 except OSError as (errno, strerror):
2430 # Ignore 'Operation not permitted' error.
2434 # If we fail here someone is probably trying to exploit the race
2435 # so let's just raise an exception ...
2436 if os.path.exists(reason_filename):
2437 os.unlink(reason_filename)
2438 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2440 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2444 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2445 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2446 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2447 os.write(reason_fd, reject_message)
2448 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2450 # Build up the rejection email
2451 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2452 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2453 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2454 self.Subst["__REJECT_MESSAGE__"] = ""
2455 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2456 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2457 # Write the rejection email out as the <foo>.reason file
2458 os.write(reason_fd, reject_mail_message)
2460 del self.Subst["__REJECTOR_ADDRESS__"]
2461 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2462 del self.Subst["__CC__"]
2466 # Send the rejection mail
2467 utils.send_mail(reject_mail_message)
2470 self.logger.log(["rejected", self.pkg.changes_file])
2472 stats = SummaryStats()
2473 stats.reject_count += 1
2476 ################################################################################
2477 def in_override_p(self, package, component, suite, binary_type, filename, session):
2479 Check if a package already has override entries in the DB
2481 @type package: string
2482 @param package: package name
2484 @type component: string
2485 @param component: name of the component
2488 @param suite: name of the suite
2490 @type binary_type: string
2491 @param binary_type: type of the package
2493 @type filename: string
2494 @param filename: filename we check
2496 @return: the database result. But no one cares anyway.
2502 if binary_type == "": # must be source
2505 file_type = binary_type
2507 # Override suite name; used for example with proposed-updates
2508 oldsuite = get_suite(suite, session)
2509 if oldsuite is not None and oldsuite.overridesuite:
2510 suite = oldsuite.overridesuite
2512 result = get_override(package, suite, component, file_type, session)
2514 # If checking for a source package, fall back on the binary override type
2515 if file_type == "dsc" and len(result) < 1:
2516 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2518 # Remember the section and priority so we can check them later if appropriate
2521 self.pkg.files[filename]["override section"] = result.section.section
2522 self.pkg.files[filename]["override priority"] = result.priority.priority
2527 ################################################################################
2528 def get_anyversion(self, sv_list, suite):
2531 @param sv_list: list of (suite, version) tuples to check
2534 @param suite: suite name
2540 anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2541 for (s, v) in sv_list:
2542 if s in [ x.lower() for x in anysuite ]:
2543 if not anyversion or apt_pkg.version_compare(anyversion, v) <= 0:
2548 ################################################################################
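# Note on apt_pkg.version_compare(a, b), as relied on above and below: it
# returns a negative value if a is older than b, zero if they are equal, and a
# positive value if a is newer, e.g. (illustrative):
#
#   import apt_pkg
#   apt_pkg.init_system()
#   apt_pkg.version_compare("1.0-1", "1.0-2")   # -> negative
#   apt_pkg.version_compare("2:1.0", "1.9")     # -> positive (epoch wins)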
2550 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2553 @param sv_list: list of (suite, version) tuples to check
2555 @type filename: string
2556 @param filename: filename of the file being checked (used in messages)
2558 @type new_version: string
2559 @param new_version: version of the upload being checked
2561 Ensure versions are newer than existing packages in target
2562 suites and that cross-suite version checking rules as
2563 set out in the conf file are satisfied.
2568 # Check versions for each target suite
2569 for target_suite in self.pkg.changes["distribution"].keys():
2570 # Check we can find the target suite
2571 ts = get_suite(target_suite)
2573 self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
2576 must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2577 must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2579 # Enforce "must be newer than target suite" even if conffile omits it
2580 if target_suite not in must_be_newer_than:
2581 must_be_newer_than.append(target_suite)
2583 for (suite, existent_version) in sv_list:
2584 vercmp = apt_pkg.version_compare(new_version, existent_version)
2586 if suite in must_be_newer_than and sourceful and vercmp < 1:
2587 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2589 if suite in must_be_older_than and vercmp > -1:
2592 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2593 # we really use the other suite, ignoring the conflicting one ...
2594 addsuite = self.pkg.changes["distribution-version"][suite]
2596 add_version = self.get_anyversion(sv_list, addsuite)
2597 target_version = self.get_anyversion(sv_list, target_suite)
2600 # not add_version can only happen if we map to a suite
2601 # that doesn't enhance the suite we're propup'ing from.
2602 # so "propup-ver x a b c; map a d" is a problem only if
2603 # d doesn't enhance a.
2605 # i think we could always propagate in this case, rather
2606 # than complaining. either way, this isn't a REJECT issue
2608 # And - we really should complain to the dorks who configured dak
2609 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2610 self.pkg.changes.setdefault("propdistribution", {})
2611 self.pkg.changes["propdistribution"][addsuite] = 1
2613 elif not target_version:
2614 # not target_version is true when the package is NEW
2615 # we could just stick with the "...old version..." REJECT
2616 # for this, I think.
2617 self.rejects.append("Won't propagate NEW packages.")
2618 elif apt_pkg.version_compare(new_version, add_version) < 0:
2619 # propagation would be redundant. no need to reject though.
2620 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2622 elif apt_pkg.version_compare(new_version, add_version) > 0 and \
2623 apt_pkg.version_compare(add_version, target_version) >= 0:
2625 self.warnings.append("Propagating upload to %s" % (addsuite))
2626 self.pkg.changes.setdefault("propdistribution", {})
2627 self.pkg.changes["propdistribution"][addsuite] = 1
2631 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
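# In short (a descriptive summary of the branches above, not new behaviour):
# when a "must be older than" suite conflicts, dak consults the
# distribution-version mapping recorded in the changes dict; the upload is
# either propagated to the mapped suite (noted in changes["propdistribution"]),
# downgraded to a warning, or rejected outright if no mapping saves it.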
2633 ################################################################################
2634 def check_binary_against_db(self, filename, session):
2635 # Ensure version is sane
2636 self.cross_suite_version_check( \
2637 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2638 self.pkg.files[filename]["architecture"], session),
2639 filename, self.pkg.files[filename]["version"], sourceful=False)
2641 # Check for any existing copies of the file
2642 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2643 q = q.filter_by(version=self.pkg.files[filename]["version"])
2644 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2647 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2649 ################################################################################
2651 def check_source_against_db(self, filename, session):
2652 source = self.pkg.dsc.get("source")
2653 version = self.pkg.dsc.get("version")
2655 # Ensure version is sane
2656 self.cross_suite_version_check( \
2657 get_suite_version_by_source(source, session), filename, version,
2660 ################################################################################
2661 def check_dsc_against_db(self, filename, session):
2664 @warning: NB: this function can remove entries from the 'files' index [if
2665 the orig tarball is a duplicate of the one in the archive]; if
2666 you're iterating over 'files' and call this function as part of
2667 the loop, be sure to add a check to the top of the loop to
2668 ensure you haven't just tried to dereference the deleted entry.
2673 self.pkg.orig_files = {} # XXX: do we need to clear it?
2674 orig_files = self.pkg.orig_files
2676 # Try and find all files mentioned in the .dsc. This has
2677 # to work harder to cope with the multiple possible
2678 # locations of an .orig.tar.gz.
2679 # The ordering on the select is needed to pick the newest orig
2680 # when it exists in multiple places.
2681 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2683 if self.pkg.files.has_key(dsc_name):
2684 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2685 actual_size = int(self.pkg.files[dsc_name]["size"])
2686 found = "%s in incoming" % (dsc_name)
2688 # Check the file does not already exist in the archive
2689 ql = get_poolfile_like_name(dsc_name, session)
2691 # Strip out anything that isn't '%s' or '/%s$'
2693 if not i.filename.endswith(dsc_name):
2696 # "[dak] has not broken them. [dak] has fixed a
2697 # brokenness. Your crappy hack exploited a bug in
2700 # "(Come on! I thought it was always obvious that
2701 # one just doesn't release different files with
2702 # the same name and version.)"
2703 # -- ajk@ on d-devel@l.d.o
2706 # Ignore exact matches for .orig.tar.gz
2708 if re_is_orig_source.match(dsc_name):
2710 if self.pkg.files.has_key(dsc_name) and \
2711 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2712 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2713 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2714 # TODO: Don't delete the entry, just mark it as not needed
2715 # This would fix the stupidity of changing something we often iterate over
2716 # whilst we're doing it
2717 del self.pkg.files[dsc_name]
2718 dsc_entry["files id"] = i.file_id
2719 if not orig_files.has_key(dsc_name):
2720 orig_files[dsc_name] = {}
2721 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2724 # Don't bitch that we couldn't find this file later
2726 self.later_check_files.remove(dsc_name)
2732 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2734 elif re_is_orig_source.match(dsc_name):
2736 ql = get_poolfile_like_name(dsc_name, session)
2738 # Strip out anything that isn't '%s' or '/%s$'
2739 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2741 if not i.filename.endswith(dsc_name):
2745 # Unfortunately, we may get more than one match here if,
2746 # for example, the package was in potato but had an -sa
2747 # upload in woody. So we need to choose the right one.
2749 # default to something sane in case we don't match any or have only one
2754 old_file = os.path.join(i.location.path, i.filename)
2755 old_file_fh = utils.open_file(old_file)
2756 actual_md5 = apt_pkg.md5sum(old_file_fh)
2758 actual_size = os.stat(old_file)[stat.ST_SIZE]
2759 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2762 old_file = os.path.join(i.location.path, i.filename)
2763 old_file_fh = utils.open_file(old_file)
2764 actual_md5 = apt_pkg.md5sum(old_file_fh)
2766 actual_size = os.stat(old_file)[stat.ST_SIZE]
2768 suite_type = x.location.archive_type
2769 # need this for updating dsc_files in install()
2770 dsc_entry["files id"] = x.file_id
2771 # See install() in process-accepted...
2772 if not orig_files.has_key(dsc_name):
2773 orig_files[dsc_name] = {}
2774 orig_files[dsc_name]["id"] = x.file_id
2775 orig_files[dsc_name]["path"] = old_file
2776 orig_files[dsc_name]["location"] = x.location.location_id
2778 # TODO: Determine queue list dynamically
2779 # Not there? Check the queue directories...
2780 for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
2781 queue = get_policy_queue(queue_name, session)
2785 in_otherdir = os.path.join(queue.path, dsc_name)
2787 if os.path.exists(in_otherdir):
2788 in_otherdir_fh = utils.open_file(in_otherdir)
2789 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2790 in_otherdir_fh.close()
2791 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2793 if not orig_files.has_key(dsc_name):
2794 orig_files[dsc_name] = {}
2795 orig_files[dsc_name]["path"] = in_otherdir
2798 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2801 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2803 if actual_md5 != dsc_entry["md5sum"]:
2804 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2805 if actual_size != int(dsc_entry["size"]):
2806 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2808 ################################################################################
2809 # This is used by process-new and process-holding to recheck a changes file
2810 # at the time we're running. It mainly wraps various other internal functions
2811 # and is similar to accepted_checks - these should probably be tidied up
2813 def recheck(self, session):
2815 for f in self.pkg.files.keys():
2816 # The .orig.tar.gz can disappear out from under us if it's a
2817 # duplicate of one in the archive.
2818 if not self.pkg.files.has_key(f):
2821 entry = self.pkg.files[f]
2823 # Check that the source still exists
2824 if entry["type"] == "deb":
2825 source_version = entry["source version"]
2826 source_package = entry["source package"]
2827 if not self.pkg.changes["architecture"].has_key("source") \
2828 and not source_exists(source_package, source_version, \
2829 suites = self.pkg.changes["distribution"].keys(), session = session):
2830 source_epochless_version = re_no_epoch.sub('', source_version)
2831 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2833 for queue_name in ["embargoed", "unembargoed", "newstage"]:
2834 queue = get_policy_queue(queue_name, session)
2835 if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
2838 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2840 # Version and file overwrite checks
2841 if entry["type"] == "deb":
2842 self.check_binary_against_db(f, session)
2843 elif entry["type"] == "dsc":
2844 self.check_source_against_db(f, session)
2845 self.check_dsc_against_db(f, session)
2847 ################################################################################
2848 def accepted_checks(self, overwrite_checks, session):
2849 # Recheck anything that relies on the database, since that's not
2850 # frozen between accept and our run time when called from p-a.
2852 # overwrite_checks is set to False when installing to stable/oldstable
2857 # Find the .dsc (again)
2859 for f in self.pkg.files.keys():
2860 if self.pkg.files[f]["type"] == "dsc":
2863 for checkfile in self.pkg.files.keys():
2864 # The .orig.tar.gz can disappear out from under us if it's a
2865 # duplicate of one in the archive.
2866 if not self.pkg.files.has_key(checkfile):
2869 entry = self.pkg.files[checkfile]
2871 # Check that the source still exists
2872 if entry["type"] == "deb":
2873 source_version = entry["source version"]
2874 source_package = entry["source package"]
2875 if not self.pkg.changes["architecture"].has_key("source") \
2876 and not source_exists(source_package, source_version, \
2877 suites = self.pkg.changes["distribution"].keys(), \
2879 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2881 # Version and file overwrite checks
2882 if overwrite_checks:
2883 if entry["type"] == "deb":
2884 self.check_binary_against_db(checkfile, session)
2885 elif entry["type"] == "dsc":
2886 self.check_source_against_db(checkfile, session)
2887 self.check_dsc_against_db(dsc_filename, session)
2889 # propagate in case it is in the override tables:
2890 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2891 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2892 propogate[suite] = 1
2894 nopropogate[suite] = 1
2896 for suite in propogate.keys():
2897 if suite in nopropogate:
2899 self.pkg.changes["distribution"][suite] = 1
2901 for checkfile in self.pkg.files.keys():
2902 # Check the package is still in the override tables
2903 for suite in self.pkg.changes["distribution"].keys():
2904 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype", ""), checkfile, session):
2905 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2907 ################################################################################
2908 # If any file of an upload has a recent mtime then chances are good
2909 # the file is still being uploaded.
2911 def upload_too_new(self):
2914 # Move back to the original directory to get accurate time stamps
2916 os.chdir(self.pkg.directory)
2917 file_list = self.pkg.files.keys()
2918 file_list.extend(self.pkg.dsc_files.keys())
2919 file_list.append(self.pkg.changes_file)
2922 last_modified = time.time()-os.path.getmtime(f)
2923 if last_modified < int(cnf["Dinstall::SkipTime"]):
2932 def store_changelog(self):
2934 # Skip binary-only upload if it is not a bin-NMU
2935 if not self.pkg.changes['architecture'].has_key('source'):
2936 from daklib.regexes import re_bin_only_nmu
2937 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2940 session = DBConn().session()
2942 # Check if upload already has a changelog entry
2943 query = """SELECT changelog_id FROM changes WHERE source = :source
2944 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2945 if session.execute(query, {'source': self.pkg.changes['source'], \
2946 'version': self.pkg.changes['version'], \
2947 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2951 # Add current changelog text into changelogs_text table, return created ID
2952 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2953 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2955 # Link ID to the upload available in changes table
2956 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2957 AND version = :version AND architecture = :architecture"""
2958 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2959 'version': self.pkg.changes['version'], \
2960 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
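# Implementation note (a sketch of the SQL flow above): the changelog text is
# inserted once into changelogs_text via INSERT ... RETURNING id, and the
# returned id is then linked to the matching row of the changes table; the
# earlier rowcount check returns early if that row already carries a
# changelog_id, so re-running the upload does not duplicate changelog text.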