"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
import errno
import os
import re
import stat
import sys
import time
import apt_inst
import apt_pkg
import commands
import shutil
import textwrap
import yaml

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import utils
from dak_exceptions import *
from changes import *
from dbconn import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages
from regexes import *

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Get the file type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
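
# Illustrative example (not part of dak): get_type() on typical file entries.
# The dicts below are hypothetical, minimal entries from a parsed .changes.
#
#     get_type({"dbtype": "udeb", "type": "udeb"}, session)  # -> "udeb"
#     get_type({"type": "dsc"}, session)                     # -> "dsc" (via re_source_ext)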
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                    oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
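
# Illustrative sketch (not part of dak): the shape of determine_new()'s result.
# A hypothetical upload with one NEW binary package might yield:
#
#     {"hello": {"priority": "optional", "section": "utils", "type": "deb",
#                "component": "main", "files": ["hello_1.0-1_amd64.deb"]}}
#
# Packages that already have an override entry in every target suite are
# removed from the dict before it is returned.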
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
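
# Illustrative example (not part of dak): the d-i/udeb sanity check above.
# A hypothetical entry with section "debian-installer" but type "deb" gets
# "section id" forced to -1, flagging the section/type combination as
# invalid for NEW processing.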
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
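
# Illustrative sketch (not part of dak): TarTime is driven as the callback of
# apt_inst.debExtract(), as Upload.check_timestamps() does below.  The cutoff
# values and .deb path here are hypothetical.
#
#     tar = TarTime(time.time() + 86400,
#                   time.mktime(time.strptime("1975", "%Y")))
#     apt_inst.debExtract(utils.open_file("some.deb"), tar.callback, "data.tar.gz")
#     print tar.future_files, tar.ancient_files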
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, "  ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        sys.exit(0)

    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
################################################################################

def edit_note(note, upload, session):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote, "  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = bool(Options["Trainee"])
    session.add(comment)
    session.commit()
###############################################################################

class Upload(object):
    """
    Everything that has to do with an upload being processed.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []
        self.later_check_files = []

        self.pkg.reset()
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
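
    # Illustrative note (not part of dak): the Subst map built above is applied
    # to mail templates via utils.TemplateSubst(), e.g.
    #
    #     mail_message = utils.TemplateSubst(self.Subst,
    #         cnf["Dir::Templates"] + "/process-unchecked.announce")
    #
    # where each __KEY__ token in the template is replaced by its value.
    # (The template name here is only an example.)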
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandatory
        fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
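
    # Illustrative example (not part of dak): after load_changes(), multi-value
    # fields are dicts keyed by value, e.g. a hypothetical
    #
    #     Architecture: source amd64
    #
    # line becomes self.pkg.changes["architecture"] == {"source": 1, "amd64": 1},
    # which is why later checks test has_key("source") rather than string matching.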
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
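
    # Illustrative example (not a verbatim copy of dak's config): SuiteMappings
    # entries are whitespace-separated words where the first word selects one
    # of the branches above, e.g. hypothetical lines in the style of
    #
    #     "map stable proposed-updates"
    #     "map-unreleased stable unstable"
    #     "propup-version stable-security testing testing-proposed-updates"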
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        # package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        # version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        # architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            self.rejects.append("%s: source file not recognised." % (f))
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
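
    # Illustrative summary (not part of dak): check_poolfile() above returns a
    # (found, poolfile) pair which this code interprets as:
    #
    #     (None,  _)        -> multiple matches; internal error
    #     (False, poolfile) -> a file exists but md5sum/size differ; reject
    #     (_,     None)     -> no existing file; "files id" stays None
    #     (True,  poolfile) -> identical file already known; reuse its file_id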
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
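
    # Illustrative example (not part of dak): the Format parsing in
    # ensure_hashes() above.  A .changes with "Format: 1.8" yields the tuple
    # (1, 8), and a bare "Format: 1" would yield (1, 0); the tuple is compared
    # against the changes version each hash field first appeared in.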
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed after the upload has been processed).
        """
        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1364 def check_lintian(self):
1366 Extends self.rejects by checking the output of lintian against tags
1367 specified in Dinstall::LintianTags.
1372 # Don't reject binary uploads
1373 if not self.pkg.changes['architecture'].has_key('source'):
1376 # Only check some distributions
1377 for dist in ('unstable', 'experimental'):
1378 if dist in self.pkg.changes['distribution']:
1383 # If we do not have a tagfile, don't do anything
1384 tagfile = cnf.get("Dinstall::LintianTags")
1388 # Parse the yaml file
1389 sourcefile = file(tagfile, 'r')
1390 sourcecontent = sourcefile.read()
1394 lintiantags = yaml.load(sourcecontent)['lintian']
1395 except yaml.YAMLError, msg:
1396 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1399 # Try and find all orig mentioned in the .dsc
1400 symlinked = self.ensure_orig()
1402 # Setup the input file for lintian
1403 fd, temp_filename = utils.temp_filename()
1404 temptagfile = os.fdopen(fd, 'w')
1405 for tags in lintiantags.values():
1406 temptagfile.writelines(['%s\n' % x for x in tags])
1410 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1411 (temp_filename, self.pkg.changes_file)
1413 result, output = commands.getstatusoutput(cmd)
1415 # Remove our tempfile and any symlinks we created
1416 os.unlink(temp_filename)
1418 for symlink in symlinked:
1422 utils.warn("lintian failed for %s [return code: %s]." % \
1423 (self.pkg.changes_file, result))
1424 utils.warn(utils.prefix_multi_line_string(output, \
1425 " [possible output:] "))
1430 [self.pkg.changes_file, "check_lintian"] + list(txt)
1434 parsed_tags = parse_lintian_output(output)
1435 self.rejects.extend(
1436 generate_reject_messages(parsed_tags, lintiantags, log=log)
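
    # Illustrative sketch (not a verbatim copy of dak's config): the file named
    # by Dinstall::LintianTags is YAML with tag names grouped under a top-level
    # "lintian" key, e.g. hypothetically:
    #
    #     lintian:
    #       nonfatal:
    #         - some-warning-tag
    #       fatal:
    #         - some-error-tag
    #
    # Every listed tag is passed to lintian via --tags-from-file above.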
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
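
    # Illustrative example (not part of dak): check_if_upload_is_sponsored()
    # returns False when the key owner's email or name matches Maintainer: or
    # Changed-By:, and True otherwise.  A hypothetical DD signing an upload
    # whose Maintainer: is someone else is treated as a sponsor, and
    # "sponsoremail" may be recorded for use in the mail templates.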
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
                return
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1694 def check_transition(self, session):
1697 sourcepkg = self.pkg.changes["source"]
1699 # No sourceful upload -> no need to do anything else, direct return
1700 # We also work with unstable uploads, not experimental or those going to some
1701 # proposed-updates queue
1702 if "source" not in self.pkg.changes["architecture"] or \
1703 "unstable" not in self.pkg.changes["distribution"]:
1706 # Also only check if there is a file defined (and existant) with
1708 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1709 if transpath == "" or not os.path.exists(transpath):
1712 # Parse the yaml file
1713 sourcefile = file(transpath, 'r')
1714 sourcecontent = sourcefile.read()
1716 transitions = yaml.load(sourcecontent)
1717 except yaml.YAMLError, msg:
1718 # This shouldn't happen, there is a wrapper to edit the file which
1719 # checks it, but we prefer to be safe than ending up rejecting
1721 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1724 # Now look through all defined transitions
1725 for trans in transitions:
1726 t = transitions[trans]
1727 source = t["source"]
1730 # Will be None if nothing is in testing.
1731 current = get_source_in_suite(source, "testing", session)
1732 if current is not None:
1733 compare = apt_pkg.VersionCompare(current.version, expected)
1735 if current is None or compare < 0:
1736 # This is still valid: the current version in testing is older than
1737 # the new version we are waiting for, or there is none in testing yet
1739 # Check if the source we look at is affected by this.
1740 if sourcepkg in t['packages']:
1741 # The source is affected, let's reject it.
1743 rejectmsg = "%s: part of the %s transition.\n\n" % (
1746 if current is not None:
1747 currentlymsg = "at version %s" % (current.version)
1749 currentlymsg = "not present in testing"
1751 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1753 rejectmsg += "\n".join(textwrap.wrap("""Your package
1754 is part of a testing transition designed to get %s migrated (it is
1755 currently %s, we need version %s). This transition is managed by the
1756 Release Team, and %s is the Release-Team member responsible for it.
1757 Please mail debian-release@lists.debian.org or contact %s directly if you
1758 need further assistance. You might want to upload to experimental until this
1759 transition is done."""
1760 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1762 self.rejects.append(rejectmsg)
1765 ###########################################################################
1766 # End check_signed_by_key checks
1767 ###########################################################################
1769 def build_summaries(self):
1770 """ Build a summary of changes the upload introduces. """
1772 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1774 short_summary = summary
1776 # This is for direport's benefit...
1777 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
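# Illustration (an assumption about re_fdnic, which lives in
# daklib.regexes and appears to match empty lines): blank lines in the
# changelog text become RFC822-style continuation lines, e.g.:
#
#   "fixed a\n\nfixed b"  ->  "fixed a\n .\nfixed b"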
1780 summary += "Changes: " + f
1782 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1784 summary += self.announce(short_summary, 0)
1786 return (summary, short_summary)
1788 ###########################################################################
1790 def close_bugs(self, summary, action):
1792 Send mail to close bugs as instructed by the closes field in the changes file.
1793 Also add a line to summary if any work was done.
1795 @type summary: string
1796 @param summary: summary text, as given by L{build_summaries}
1799 @param action: If set to false, no real action will be taken.
1802 @return: summary. If action was taken, extended by the list of closed bugs.
1806 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1808 bugs = self.pkg.changes["closes"].keys()
1814 summary += "Closing bugs: "
1816 summary += "%s " % (bug)
1819 self.Subst["__BUG_NUMBER__"] = bug
1820 if self.pkg.changes["distribution"].has_key("stable"):
1821 self.Subst["__STABLE_WARNING__"] = """
1822 Note that this package is not part of the released stable Debian
1823 distribution. It may have dependencies on other unreleased software,
1824 or other instabilities. Please take care if you wish to install it.
1825 The update will eventually make its way into the next released Debian
1828 self.Subst["__STABLE_WARNING__"] = ""
1829 mail_message = utils.TemplateSubst(self.Subst, template)
1830 utils.send_mail(mail_message)
1832 # Clear up after ourselves
1833 del self.Subst["__BUG_NUMBER__"]
1834 del self.Subst["__STABLE_WARNING__"]
1836 if action and self.logger:
1837 self.logger.log(["closing bugs"] + bugs)
1843 ###########################################################################
1845 def announce(self, short_summary, action):
1847 Send an announce mail about a new upload.
1849 @type short_summary: string
1850 @param short_summary: Short summary text to include in the mail
1853 @param action: If set to false, no real action will be taken.
1856 @return: Text string describing the action taken.
1861 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1863 # Only do announcements for source uploads with a recent dpkg-dev installed
1864 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1865 self.pkg.changes["architecture"].has_key("source"):
1871 self.Subst["__SHORT_SUMMARY__"] = short_summary
1873 for dist in self.pkg.changes["distribution"].keys():
1874 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1875 if announce_list == "" or lists_done.has_key(announce_list):
1878 lists_done[announce_list] = 1
1879 summary += "Announcing to %s\n" % (announce_list)
1883 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1884 if cnf.get("Dinstall::TrackingServer") and \
1885 self.pkg.changes["architecture"].has_key("source"):
1886 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1887 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1889 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1890 utils.send_mail(mail_message)
1892 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1894 if cnf.FindB("Dinstall::CloseBugs"):
1895 summary = self.close_bugs(summary, action)
1897 del self.Subst["__SHORT_SUMMARY__"]
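# The configuration consumed above, sketched with illustrative values
# (the keys are the ones actually read via cnf, the addresses are made up):
#
#   Suite::unstable::Announce "debian-devel-changes@lists.example.org";
#   Dinstall::TrackingServer "packages.qa.example.org";
#   Dinstall::CloseBugs "true";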
1901 ###########################################################################
1903 def accept (self, summary, short_summary, session=None):
1907 This moves all files referenced from the .changes into the pool,
1908 sends the accepted mail, announces to lists, closes bugs and
1909 also checks for override disparities. If enabled it will write out
1910 the version history for the BTS Version Tracking and will finally call
1913 @type summary: string
1914 @param summary: Summary text
1916 @type short_summary: string
1917 @param short_summary: Short summary
1921 stats = SummaryStats()
1924 self.logger.log(["installing changes", self.pkg.changes_file])
1928 # Add the .dsc file to the DB first
1929 for newfile, entry in self.pkg.files.items():
1930 if entry["type"] == "dsc":
1931 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1935 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1936 for newfile, entry in self.pkg.files.items():
1937 if entry["type"] == "deb":
1938 poolfiles.append(add_deb_to_db(self, newfile, session))
1940 # If this is a sourceful diff-only upload that is moving
1941 # cross-component we need to copy the .orig files into the new
1942 # component too for the same reasons as above.
1943 # XXX: mhy: I think this should be in add_dsc_to_db
1944 if self.pkg.changes["architecture"].has_key("source"):
1945 for orig_file in self.pkg.orig_files.keys():
1946 if not self.pkg.orig_files[orig_file].has_key("id"):
1947 continue # Skip if it's not in the pool
1948 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1949 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1950 continue # Skip if the location didn't change
1953 oldf = get_poolfile_by_id(orig_file_id, session)
1954 old_filename = os.path.join(oldf.location.path, oldf.filename)
1955 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1956 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1958 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1960 # TODO: Care about size/md5sum collisions etc
1961 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1963 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1965 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1966 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1970 # Don't reference the old file from this .changes
1972 if p.file_id == oldf.file_id:
1975 poolfiles.append(newf)
1977 # Fix up the DSC references
1980 for df in source.srcfiles:
1981 if df.poolfile.file_id == oldf.file_id:
1982 # Add a new DSC entry and mark the old one for deletion
1983 # Don't do it in the loop so we don't change the thing we're iterating over
1985 newdscf.source_id = source.source_id
1986 newdscf.poolfile_id = newf.file_id
1987 session.add(newdscf)
1997 # Make sure that our source object is up-to-date
1998 session.expire(source)
2000 # Add changelog information to the database
2001 self.store_changelog()
2003 # Install the files into the pool
2004 for newfile, entry in self.pkg.files.items():
2005 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2006 utils.move(newfile, destination)
2007 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2008 stats.accept_bytes += float(entry["size"])
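# For orientation: entry["pool name"] follows the usual Debian pool
# layout, so a main/hello upload lands under something like
# (illustrative path):
#
#   <Dir::Pool>/main/h/hello/hello_2.8-1_amd64.deb
#
# with the "lib" prefix special-cased (libfoo -> main/libf/libfoo/).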
2010 # Copy the .changes file across for suites which need it.
2012 for suite_name in self.pkg.changes["distribution"].keys():
2013 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
2014 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
2016 for dest in copy_changes.keys():
2017 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2019 # We're done - commit the database changes
2021 # Our SQL session will automatically start a new transaction after the commit
2024 # Move the .changes into the 'done' directory
2025 utils.move(self.pkg.changes_file,
2026 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2028 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2029 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2032 self.Subst["__SUMMARY__"] = summary
2033 mail_message = utils.TemplateSubst(self.Subst,
2034 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2035 utils.send_mail(mail_message)
2036 self.announce(short_summary, 1)
2038 ## Helper stuff for DebBugs Version Tracking
2039 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2040 if self.pkg.changes["architecture"].has_key("source"):
2041 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2042 version_history = os.fdopen(fd, 'w')
2043 version_history.write(self.pkg.dsc["bts changelog"])
2044 version_history.close()
2045 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2046 self.pkg.changes_file[:-8]+".versions")
2047 os.rename(temp_filename, filename)
2048 os.chmod(filename, 0644)
2050 # Write out the binary -> source mapping.
2051 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2052 debinfo = os.fdopen(fd, 'w')
2053 for name, entry in sorted(self.pkg.files.items()):
2054 if entry["type"] == "deb":
2055 line = " ".join([entry["package"], entry["version"],
2056 entry["architecture"], entry["source package"],
2057 entry["source version"]])
2058 debinfo.write(line+"\n")
2060 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2061 self.pkg.changes_file[:-8]+".debinfo")
2062 os.rename(temp_filename, filename)
2063 os.chmod(filename, 0644)
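# An illustrative .debinfo line as written above, one per binary
# (package, version, architecture, source package, source version):
#
#   hello 2.8-1 amd64 hello 2.8-1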
2067 # Set up our copy queues (e.g. buildd queues)
2068 for suite_name in self.pkg.changes["distribution"].keys():
2069 suite = get_suite(suite_name, session)
2070 for q in suite.copy_queues:
2072 q.add_file_from_pool(f)
2077 stats.accept_count += 1
2079 def check_override(self):
2081 Checks override entries for validity. Mails "Override disparity" warnings
2082 if that feature is enabled.
2084 Abandons the check if
2085 - override disparity checks are disabled
2086 - mail sending is disabled
2091 # Abandon the check if override disparity checks have been disabled
2092 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2095 summary = self.pkg.check_override()
2100 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2103 self.Subst["__SUMMARY__"] = summary
2104 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2105 utils.send_mail(mail_message)
2106 del self.Subst["__SUMMARY__"]
2108 ###########################################################################
2110 def remove(self, from_dir=None):
2112 Used (for instance) in p-u to remove the package from unchecked.
2114 Also removes the package from the holding area.
2116 if from_dir is None:
2117 from_dir = self.pkg.directory
2120 for f in self.pkg.files.keys():
2121 os.unlink(os.path.join(from_dir, f))
2122 if os.path.exists(os.path.join(h.holding_dir, f)):
2123 os.unlink(os.path.join(h.holding_dir, f))
2125 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2126 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2127 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2129 ###########################################################################
2131 def move_to_queue (self, queue):
2133 Move files to a destination queue using the permissions in the table
2136 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2137 queue.path, perms=int(queue.change_perms, 8))
2138 for f in self.pkg.files.keys():
2139 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
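# The perms columns hold the mode as an octal string, hence the
# int(..., 8) conversions above; e.g. (illustrative value):
#
#   int("0664", 8) == 0664   # == 436 decimal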
2141 ###########################################################################
2143 def force_reject(self, reject_files):
2145 Forcefully move files from the current directory to the
2146 reject directory. If any file already exists in the reject
2147 directory it will be moved to the morgue to make way for the new file.
2150 @type reject_files: dict
2151 @param reject_files: file dictionary
2157 for file_entry in reject_files:
2158 # Skip any files which don't exist or which we don't have permission to copy.
2159 if os.access(file_entry, os.R_OK) == 0:
2162 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2165 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2167 # File exists? Let's find a new name by adding a number
2168 if e.errno == errno.EEXIST:
2170 dest_file = utils.find_next_free(dest_file, 255)
2171 except NoFreeFilenameError:
2172 # Something's either gone badly Pete Tong, or
2173 # someone is trying to exploit us.
2174 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2177 # Make sure we really got it
2179 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2182 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2186 # If we got here, we own the destination file, so we can
2187 # safely overwrite it.
2188 utils.move(file_entry, dest_file, 1, perms=0660)
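# The O_CREAT|O_EXCL dance above is the standard way to atomically claim
# a filename: open() fails with EEXIST if someone got there first. A
# minimal standalone sketch of the same pattern (not dak code, made-up
# path):
#
#   import os, errno
#   try:
#       fd = os.open("/tmp/claim", os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
#   except OSError, e:
#       if e.errno != errno.EEXIST:
#           raise
#       # somebody else owns the name; pick another one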
2191 ###########################################################################
2192 def do_reject (self, manual=0, reject_message="", notes=""):
2194 Reject an upload. If called without a reject message or C{manual} is
2195 true, spawn an editor so the user can write one.
2198 @param manual: manual or automated rejection
2200 @type reject_message: string
2201 @param reject_message: A reject message
2206 # If we weren't given a manual rejection message, spawn an
2207 # editor so the user can add one in...
2208 if manual and not reject_message:
2209 (fd, temp_filename) = utils.temp_filename()
2210 temp_file = os.fdopen(fd, 'w')
2213 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2214 % (note.author, note.version, note.notedate, note.comment))
2216 editor = os.environ.get("EDITOR","vi")
2218 while answer == 'E':
2219 os.system("%s %s" % (editor, temp_filename))
2220 temp_fh = utils.open_file(temp_filename)
2221 reject_message = "".join(temp_fh.readlines())
2223 print "Reject message:"
2224 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2225 prompt = "[R]eject, Edit, Abandon, Quit ?"
2227 while prompt.find(answer) == -1:
2228 answer = utils.our_raw_input(prompt)
2229 m = re_default_answer.search(prompt)
2232 answer = answer[:1].upper()
2233 os.unlink(temp_filename)
2239 print "Rejecting.\n"
2243 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2244 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
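# [:-8] strips the ".changes" suffix, so e.g. (illustrative name):
#
#   hello_2.8-1_amd64.changes -> <Dir::Queue::Reject>/hello_2.8-1_amd64.reason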
2246 # Move all the files into the reject directory
2247 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2248 self.force_reject(reject_files)
2250 # If we fail here someone is probably trying to exploit the race
2251 # so let's just raise an exception ...
2252 if os.path.exists(reason_filename):
2253 os.unlink(reason_filename)
2254 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2256 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2260 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2261 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2262 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2263 os.write(reason_fd, reject_message)
2264 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2266 # Build up the rejection email
2267 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2268 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2269 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2270 self.Subst["__REJECT_MESSAGE__"] = ""
2271 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2272 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2273 # Write the rejection email out as the <foo>.reason file
2274 os.write(reason_fd, reject_mail_message)
2276 del self.Subst["__REJECTOR_ADDRESS__"]
2277 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2278 del self.Subst["__CC__"]
2282 # Send the rejection mail
2283 utils.send_mail(reject_mail_message)
2286 self.logger.log(["rejected", self.pkg.changes_file])
2290 ################################################################################
2291 def in_override_p(self, package, component, suite, binary_type, filename, session):
2293 Check if a package already has override entries in the DB
2295 @type package: string
2296 @param package: package name
2298 @type component: string
2299 @param component: database id of the component
2302 @param suite: database id of the suite
2304 @type binary_type: string
2305 @param binary_type: type of the package
2307 @type filename: string
2308 @param filename: filename we check
2310 @return: the database result. But no one cares anyway.
2316 if binary_type == "": # must be source
2319 file_type = binary_type
2321 # Override suite name; used for example with proposed-updates
2322 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2323 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2325 result = get_override(package, suite, component, file_type, session)
2327 # If checking for a source package fall back on the binary override type
2328 if file_type == "dsc" and len(result) < 1:
2329 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2331 # Remember the section and priority so we can check them later if appropriate
2334 self.pkg.files[filename]["override section"] = result.section.section
2335 self.pkg.files[filename]["override priority"] = result.priority.priority
2340 ################################################################################
2341 def get_anyversion(self, sv_list, suite):
2344 @param sv_list: list of (suite, version) tuples to check
2347 @param suite: suite name
2353 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2354 for (s, v) in sv_list:
2355 if s in [ x.lower() for x in anysuite ]:
2356 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2361 ################################################################################
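# Worked example for get_anyversion() above (illustrative data,
# apt-config syntax approximated): with
#
#   Suite::testing::VersionChecks::Enhances { "stable"; };
#   sv_list = [("stable", "1.0-1"), ("testing", "1.2-1"), ("unstable", "2.0-1")]
#
# get_anyversion(sv_list, "testing") considers the stable and testing
# entries only and returns "1.2-1"; the unstable entry is ignored.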
2363 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2366 @param sv_list: list of (suite, version) tuples to check
2368 @type filename: string
2369 @param filename: name of the file being checked (used in reject messages)
2371 @type new_version: string
2372 @param new_version: version of the uploaded package
2374 Ensure versions are newer than existing packages in target
2375 suites and that cross-suite version checking rules as
2376 set out in the conf file are satisfied.
2381 # Check versions for each target suite
2382 for target_suite in self.pkg.changes["distribution"].keys():
2383 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2384 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2386 # Enforce "must be newer than target suite" even if conffile omits it
2387 if target_suite not in must_be_newer_than:
2388 must_be_newer_than.append(target_suite)
2390 for (suite, existent_version) in sv_list:
2391 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2393 if suite in must_be_newer_than and sourceful and vercmp < 1:
2394 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2396 if suite in must_be_older_than and vercmp > -1:
2399 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2400 # we really use the other suite, ignoring the conflicting one ...
2401 addsuite = self.pkg.changes["distribution-version"][suite]
2403 add_version = self.get_anyversion(sv_list, addsuite)
2404 target_version = self.get_anyversion(sv_list, target_suite)
2407 # not add_version can only happen if we map to a suite
2408 # that doesn't enhance the suite we're propup'ing from.
2409 # so "propup-ver x a b c; map a d" is a problem only if
2410 # d doesn't enhance a.
2412 # i think we could always propagate in this case, rather
2413 # than complaining. either way, this isn't a REJECT issue
2415 # And - we really should complain to the dorks who configured dak
2416 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2417 self.pkg.changes.setdefault("propdistribution", {})
2418 self.pkg.changes["propdistribution"][addsuite] = 1
2420 elif not target_version:
2421 # not target_version is true when the package is NEW
2422 # we could just stick with the "...old version..." REJECT
2423 # for this, I think.
2424 self.rejects.append("Won't propagate NEW packages.")
2425 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2426 # Propagation would be redundant; no need to reject though.
2427 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2429 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2430 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2432 self.warnings.append("Propagating upload to %s" % (addsuite))
2433 self.pkg.changes.setdefault("propdistribution", {})
2434 self.pkg.changes["propdistribution"][addsuite] = 1
2438 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
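# Worked example (illustrative versions): a sourceful upload of 1.2-1
# targeted at unstable, where unstable already carries 1.2-2, gives
#
#   apt_pkg.VersionCompare("1.2-1", "1.2-2") < 1
#
# so the implicit "must be newer than the target suite" rule above
# rejects it. Likewise, with MustBeOlderThan { "unstable"; } on
# proposed-updates, an upload of 1.2-1 to proposed-updates while
# unstable has 1.0-1 trips the second branch (vercmp > -1) and is
# rejected unless a distribution-version mapping lets it be propagated.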
2440 ################################################################################
2441 def check_binary_against_db(self, filename, session):
2442 # Ensure version is sane
2443 q = session.query(BinAssociation)
2444 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2445 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2447 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2448 filename, self.pkg.files[filename]["version"], sourceful=False)
2450 # Check for any existing copies of the file
2451 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2452 q = q.filter_by(version=self.pkg.files[filename]["version"])
2453 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2456 self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2458 ################################################################################
2460 def check_source_against_db(self, filename, session):
2461 source = self.pkg.dsc.get("source")
2462 version = self.pkg.dsc.get("version")
2464 # Ensure version is sane
2465 q = session.query(SrcAssociation)
2466 q = q.join(DBSource).filter(DBSource.source==source)
2468 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2469 filename, version, sourceful=True)
2471 ################################################################################
2472 def check_dsc_against_db(self, filename, session):
2475 @warning: NB: this function can remove entries from the 'files' index [if
2476 the orig tarball is a duplicate of the one in the archive]; if
2477 you're iterating over 'files' and call this function as part of
2478 the loop, be sure to add a check to the top of the loop to
2479 ensure you haven't just tried to dereference the deleted entry.
2484 self.pkg.orig_files = {} # XXX: do we need to clear it?
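# The safe pattern the docstring warning asks for, as used by recheck()
# further down (sketch): iterate over a snapshot of the keys and
# re-check membership before dereferencing:
#
#   for f in self.pkg.files.keys():
#       if not self.pkg.files.has_key(f):
#           continue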
2485 orig_files = self.pkg.orig_files
2487 # Try and find all files mentioned in the .dsc. This has
2488 # to work harder to cope with the multiple possible
2489 # locations of an .orig.tar.gz.
2490 # The ordering on the select is needed to pick the newest orig
2491 # when it exists in multiple places.
2492 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2494 if self.pkg.files.has_key(dsc_name):
2495 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2496 actual_size = int(self.pkg.files[dsc_name]["size"])
2497 found = "%s in incoming" % (dsc_name)
2499 # Check the file does not already exist in the archive
2500 ql = get_poolfile_like_name(dsc_name, session)
2502 # Strip out anything that isn't '%s' or '/%s$'
2504 if not i.filename.endswith(dsc_name):
2507 # "[dak] has not broken them. [dak] has fixed a
2508 # brokenness. Your crappy hack exploited a bug in
2511 # "(Come on! I thought it was always obvious that
2512 # one just doesn't release different files with
2513 # the same name and version.)"
2514 # -- ajk@ on d-devel@l.d.o
2517 # Ignore exact matches for .orig.tar.gz
2519 if re_is_orig_source.match(dsc_name):
2521 if self.pkg.files.has_key(dsc_name) and \
2522 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2523 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2524 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2525 # TODO: Don't delete the entry, just mark it as not needed
2526 # This would fix the stupidity of changing something we often iterate over
2527 # whilst we're doing it
2528 del self.pkg.files[dsc_name]
2529 dsc_entry["files id"] = i.file_id
2530 if not orig_files.has_key(dsc_name):
2531 orig_files[dsc_name] = {}
2532 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2535 # Don't bitch that we couldn't find this file later
2537 self.later_check_files.remove(dsc_name)
2543 self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2545 elif re_is_orig_source.match(dsc_name):
2547 ql = get_poolfile_like_name(dsc_name, session)
2549 # Strip out anything that isn't '%s' or '/%s$'
2550 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2552 if not i.filename.endswith(dsc_name):
2556 # Unfortunately, we may get more than one match here if,
2557 # for example, the package was in potato but had an -sa
2558 # upload in woody. So we need to choose the right one.
2560 # default to something sane in case we don't match any or have only one
2565 old_file = os.path.join(i.location.path, i.filename)
2566 old_file_fh = utils.open_file(old_file)
2567 actual_md5 = apt_pkg.md5sum(old_file_fh)
2569 actual_size = os.stat(old_file)[stat.ST_SIZE]
2570 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2573 old_file = os.path.join(i.location.path, i.filename)
2574 old_file_fh = utils.open_file(old_file)
2575 actual_md5 = apt_pkg.md5sum(old_file_fh)
2577 actual_size = os.stat(old_file)[stat.ST_SIZE]
2579 suite_type = x.location.archive_type
2580 # need this for updating dsc_files in install()
2581 dsc_entry["files id"] = x.file_id
2582 # See install() in process-accepted...
2583 if not orig_files.has_key(dsc_name):
2584 orig_files[dsc_name] = {}
2585 orig_files[dsc_name]["id"] = x.file_id
2586 orig_files[dsc_name]["path"] = old_file
2587 orig_files[dsc_name]["location"] = x.location.location_id
2589 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2590 # Not there? Check the queue directories...
2591 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2592 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2594 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2595 if os.path.exists(in_otherdir):
2596 in_otherdir_fh = utils.open_file(in_otherdir)
2597 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2598 in_otherdir_fh.close()
2599 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2601 if not orig_files.has_key(dsc_name):
2602 orig_files[dsc_name] = {}
2603 orig_files[dsc_name]["path"] = in_otherdir
2606 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2609 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2611 if actual_md5 != dsc_entry["md5sum"]:
2612 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2613 if actual_size != int(dsc_entry["size"]):
2614 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2616 ################################################################################
2617 # This is used by process-new and process-holding to recheck a changes file
2618 # at the time we're running. It mainly wraps various other internal functions
2619 # and is similar to accepted_checks - these should probably be tidied up
2621 def recheck(self, session):
2623 for f in self.pkg.files.keys():
2624 # The .orig.tar.gz can disappear out from under us if it's a
2625 # duplicate of one in the archive.
2626 if not self.pkg.files.has_key(f):
2629 entry = self.pkg.files[f]
2631 # Check that the source still exists
2632 if entry["type"] == "deb":
2633 source_version = entry["source version"]
2634 source_package = entry["source package"]
2635 if not self.pkg.changes["architecture"].has_key("source") \
2636 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2637 source_epochless_version = re_no_epoch.sub('', source_version)
2638 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2640 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2641 if cnf.has_key("Dir::Queue::%s" % (q)):
2642 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2645 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2647 # Version and file overwrite checks
2648 if entry["type"] == "deb":
2649 self.check_binary_against_db(f, session)
2650 elif entry["type"] == "dsc":
2651 self.check_source_against_db(f, session)
2652 self.check_dsc_against_db(f, session)
2654 ################################################################################
2655 def accepted_checks(self, overwrite_checks, session):
2656 # Recheck anything that relies on the database, since that's not
2657 # frozen between accept and our run time when called from p-a.
2659 # overwrite_checks is set to False when installing to stable/oldstable
2664 # Find the .dsc (again)
2666 for f in self.pkg.files.keys():
2667 if self.pkg.files[f]["type"] == "dsc":
2670 for checkfile in self.pkg.files.keys():
2671 # The .orig.tar.gz can disappear out from under us if it's a
2672 # duplicate of one in the archive.
2673 if not self.pkg.files.has_key(checkfile):
2676 entry = self.pkg.files[checkfile]
2678 # Check that the source still exists
2679 if entry["type"] == "deb":
2680 source_version = entry["source version"]
2681 source_package = entry["source package"]
2682 if not self.pkg.changes["architecture"].has_key("source") \
2683 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2684 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2686 # Version and file overwrite checks
2687 if overwrite_checks:
2688 if entry["type"] == "deb":
2689 self.check_binary_against_db(checkfile, session)
2690 elif entry["type"] == "dsc":
2691 self.check_source_against_db(checkfile, session)
2692 self.check_dsc_against_db(dsc_filename, session)
2694 # Propagate in case it is in the override tables:
2695 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2696 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2697 propogate[suite] = 1
2699 nopropogate[suite] = 1
2701 for suite in propogate.keys():
2702 if suite in nopropogate:
2704 self.pkg.changes["distribution"][suite] = 1
2706 for checkfile in self.pkg.files.keys():
2707 # Check the package is still in the override tables
2708 for suite in self.pkg.changes["distribution"].keys():
2709 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2710 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2712 ################################################################################
2713 # If any file of an upload has a recent mtime then chances are good
2714 # the file is still being uploaded.
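# The threshold comes from configuration; an illustrative stanza
# (note the code reads it as a string and int()s it):
#
#   Dinstall::SkipTime "300";   // seconds; files younger than this are
#                               // assumed to still be in transit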
2716 def upload_too_new(self):
2719 # Move back to the original directory to get accurate time stamps
2721 os.chdir(self.pkg.directory)
2722 file_list = self.pkg.files.keys()
2723 file_list.extend(self.pkg.dsc_files.keys())
2724 file_list.append(self.pkg.changes_file)
2727 last_modified = time.time() - os.path.getmtime(f)
2728 if last_modified < int(cnf["Dinstall::SkipTime"]):
2737 def store_changelog(self):
2739 # Skip binary-only upload if it is not a bin-NMU
2740 if not self.pkg.changes['architecture'].has_key('source'):
2741 from daklib.regexes import re_bin_only_nmu
2742 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2745 session = DBConn().session()
2747 # Check if upload already has a changelog entry
2748 query = """SELECT changelog_id FROM changes WHERE source = :source
2749 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2750 if session.execute(query, {'source': self.pkg.changes['source'], \
2751 'version': self.pkg.changes['version'], \
2752 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2756 # Add current changelog text into changelogs_text table, return created ID
2757 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2758 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2760 # Link ID to the upload available in changes table
2761 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2762 AND version = :version AND architecture = :architecture"""
2763 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2764 'version': self.pkg.changes['version'], \
2765 'architecture': " ".join(self.pkg.changes['architecture'].keys())})