5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
82 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
84 # Validate the override type
85 type_id = get_override_type(file_type, session)
87 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
91 ################################################################################
93 # Determine what parts in a .changes are NEW
95 def determine_new(changes, files, warn=1, session = None):
97 Determine what parts in a C{changes} file are NEW.
99 @type changes: Upload.Pkg.changes dict
100 @param changes: Changes dictionary
102 @type files: Upload.Pkg.files dict
103 @param files: Files dictionary
106 @param warn: Warn if overrides are added for (old)stable
109 @return: dictionary of NEW components.
115 # Build up a list of potentially new things
116 for name, f in files.items():
117 # Keep a record of byhand elements
118 if f["section"] == "byhand":
122 priority = f["priority"]
123 section = f["section"]
124 file_type = get_type(f, session)
125 component = f["component"]
127 if file_type == "dsc":
130 if not new.has_key(pkg):
132 new[pkg]["priority"] = priority
133 new[pkg]["section"] = section
134 new[pkg]["type"] = file_type
135 new[pkg]["component"] = component
136 new[pkg]["files"] = []
138 old_type = new[pkg]["type"]
139 if old_type != file_type:
140 # source gets trumped by deb or udeb
141 if old_type == "dsc":
142 new[pkg]["priority"] = priority
143 new[pkg]["section"] = section
144 new[pkg]["type"] = file_type
145 new[pkg]["component"] = component
147 new[pkg]["files"].append(name)
149 if f.has_key("othercomponents"):
150 new[pkg]["othercomponents"] = f["othercomponents"]
152 # Fix up the list of target suites
154 for suite in changes["suite"].keys():
155 override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
157 (olderr, newerr) = (get_suite(suite, session) == None,
158 get_suite(override, session) == None)
160 (oinv, ninv) = ("", "")
161 if olderr: oinv = "invalid "
162 if newerr: ninv = "invalid "
163 print "warning: overriding %ssuite %s to %ssuite %s" % (
164 oinv, suite, ninv, override)
165 del changes["suite"][suite]
166 changes["suite"][override] = 1
168 for suite in changes["suite"].keys():
169 for pkg in new.keys():
170 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
172 for file_entry in new[pkg]["files"]:
173 if files[file_entry].has_key("new"):
174 del files[file_entry]["new"]
178 for s in ['stable', 'oldstable']:
179 if changes["suite"].has_key(s):
180 print "WARNING: overrides will be added for %s!" % s
181 for pkg in new.keys():
182 if new[pkg].has_key("othercomponents"):
183 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
187 ################################################################################
189 def check_valid(new, session = None):
191 Check if section and priority for NEW packages exist in database.
192 Additionally does sanity checks:
193 - debian-installer packages have to be udeb (or source)
194 - non-debian-installer packages cannot be udeb
195 - source priority can only be assigned to dsc file types
198 @param new: Dict of new packages with their section, priority and type.
201 for pkg in new.keys():
202 section_name = new[pkg]["section"]
203 priority_name = new[pkg]["priority"]
204 file_type = new[pkg]["type"]
206 section = get_section(section_name, session)
208 new[pkg]["section id"] = -1
210 new[pkg]["section id"] = section.section_id
212 priority = get_priority(priority_name, session)
214 new[pkg]["priority id"] = -1
216 new[pkg]["priority id"] = priority.priority_id
219 di = section_name.find("debian-installer") != -1
221 # If d-i, we must be udeb and vice-versa
222 if (di and file_type not in ("udeb", "dsc")) or \
223 (not di and file_type == "udeb"):
224 new[pkg]["section id"] = -1
226 # If dsc we need to be source and vice-versa
227 if (priority == "source" and file_type != "dsc") or \
228 (priority != "source" and file_type == "dsc"):
229 new[pkg]["priority id"] = -1
231 ###############################################################################
233 # Used by Upload.check_timestamps
234 class TarTime(object):
235 def __init__(self, future_cutoff, past_cutoff):
237 self.future_cutoff = future_cutoff
238 self.past_cutoff = past_cutoff
241 self.future_files = {}
242 self.ancient_files = {}
244 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
245 if MTime > self.future_cutoff:
246 self.future_files[Name] = MTime
247 if MTime < self.past_cutoff:
248 self.ancient_files[Name] = MTime
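# Illustrative sketch (not part of dak): how TarTime is wired up to apt_inst
# when sanity-checking timestamps inside a .deb (see Upload.check_timestamps
# below); the cutoff values here are examples only:
#
#   import time
#   tar = TarTime(future_cutoff=time.time() + 24 * 60 * 60,
#                 past_cutoff=time.mktime(time.strptime("1975", "%Y")))
#   deb_file = utils.open_file("example.deb")
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   print tar.future_files, tar.ancient_files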
250 ###############################################################################
252 def prod_maintainer(notes, upload):
255 # Here we prepare an editor and get them ready to prod...
256 (fd, temp_filename) = utils.temp_filename()
257 temp_file = os.fdopen(fd, 'w')
258 for note in notes:
259 temp_file.write(note.comment)
261 editor = os.environ.get("EDITOR","vi")
264 os.system("%s %s" % (editor, temp_filename))
265 temp_fh = utils.open_file(temp_filename)
266 prod_message = "".join(temp_fh.readlines())
268 print "Prod message:"
269 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
270 prompt = "[P]rod, Edit, Abandon, Quit ?"
272 while prompt.find(answer) == -1:
273 answer = utils.our_raw_input(prompt)
274 m = re_default_answer.search(prompt)
277 answer = answer[:1].upper()
278 os.unlink(temp_filename)
284 # Otherwise, do the prodding...
285 user_email_address = utils.whoami() + " <%s>" % (
286 cnf["Dinstall::MyAdminAddress"])
290 Subst["__FROM_ADDRESS__"] = user_email_address
291 Subst["__PROD_MESSAGE__"] = prod_message
292 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
294 prod_mail_message = utils.TemplateSubst(
295 Subst,cnf["Dir::Templates"]+"/process-new.prod")
298 utils.send_mail(prod_mail_message)
300 print "Sent prodding message"
302 ################################################################################
304 def edit_note(note, upload, session):
305 # Write the current data to a temporary file
306 (fd, temp_filename) = utils.temp_filename()
307 editor = os.environ.get("EDITOR","vi")
310 os.system("%s %s" % (editor, temp_filename))
311 temp_file = utils.open_file(temp_filename)
312 newnote = temp_file.read().rstrip()
315 print utils.prefix_multi_line_string(newnote," ")
316 prompt = "[D]one, Edit, Abandon, Quit ?"
318 while prompt.find(answer) == -1:
319 answer = utils.our_raw_input(prompt)
320 m = re_default_answer.search(prompt)
323 answer = answer[:1].upper()
324 os.unlink(temp_filename)
331 comment = NewComment()
332 comment.package = upload.pkg.changes["source"]
333 comment.version = upload.pkg.changes["version"]
334 comment.comment = newnote
335 comment.author = utils.whoami()
336 comment.trainee = bool(Options["Trainee"])
340 ###############################################################################
342 class Upload(object):
344 Everything that has to do with processing an upload.
352 ###########################################################################
355 """ Reset a number of internal variables."""
357 # Initialize the substitution template map
360 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
361 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
362 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
363 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
369 self.later_check_files = []
373 def package_info(self):
375 Format various messages from this Upload to send to the maintainer.
379 ('Reject Reasons', self.rejects),
380 ('Warnings', self.warnings),
381 ('Notes', self.notes),
385 for title, messages in msgs:
387 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
392 ###########################################################################
393 def update_subst(self):
394 """ Set up the per-package template substitution mappings """
398 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
399 if not self.pkg.changes.has_key("architecture") or not \
400 isinstance(self.pkg.changes["architecture"], dict):
401 self.pkg.changes["architecture"] = { "Unknown" : "" }
403 # and maintainer2047 may not exist.
404 if not self.pkg.changes.has_key("maintainer2047"):
405 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
407 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
408 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
409 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
411 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
412 if self.pkg.changes["architecture"].has_key("source") and \
413 self.pkg.changes["changedby822"] != "" and \
414 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
416 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
417 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
418 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
420 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
421 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
422 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
424 # Process policy doesn't set the fingerprint field and I don't want to make it
425 # do it for now as I don't want to have to deal with the case where we accepted
426 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
427 # the meantime so the package will be remarked as rejectable. Urgh.
428 # TODO: Fix this properly
429 if self.pkg.changes.has_key('fingerprint'):
430 session = DBConn().session()
431 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
432 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
433 if self.pkg.changes.has_key("sponsoremail"):
434 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
437 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
438 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
440 # Apply any global override of the Maintainer field
441 if cnf.get("Dinstall::OverrideMaintainer"):
442 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
443 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
445 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
446 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
447 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
448 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
450 ###########################################################################
451 def load_changes(self, filename):
453 Load a changes file and set up a dictionary around it. Also checks for mandatory
456 @type filename: string
457 @param filename: Changes filename, full path.
460 @return: whether the changes file was valid or not. We may want to
461 reject even if this is True (see what gets put in self.rejects).
462 This is simply to prevent us even trying things later which will
463 fail because we couldn't properly parse the file.
466 self.pkg.changes_file = filename
468 # Parse the .changes file into a dictionary
470 self.pkg.changes.update(parse_changes(filename))
471 except CantOpenError:
472 self.rejects.append("%s: can't read file." % (filename))
474 except ParseChangesError, line:
475 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
477 except ChangesUnicodeError:
478 self.rejects.append("%s: changes file not proper utf-8" % (filename))
481 # Parse the Files field from the .changes into another dictionary
483 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
484 except ParseChangesError, line:
485 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
487 except UnknownFormatError, format:
488 self.rejects.append("%s: unknown format '%s'." % (filename, format))
491 # Check for mandatory fields
492 for i in ("distribution", "source", "binary", "architecture",
493 "version", "maintainer", "files", "changes", "description"):
494 if not self.pkg.changes.has_key(i):
495 # Avoid undefined errors later
496 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
499 # Strip a source version in brackets from the source field
500 if re_strip_srcver.search(self.pkg.changes["source"]):
501 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
503 # Ensure the source field is a valid package name.
504 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
505 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
507 # Split multi-value fields into a lower-level dictionary
508 for i in ("architecture", "distribution", "binary", "closes"):
509 o = self.pkg.changes.get(i, "")
511 del self.pkg.changes[i]
513 self.pkg.changes[i] = {}
516 self.pkg.changes[i][j] = 1
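# Illustrative example (assumed field value): after the split above, a field
# such as "Architecture: source amd64 i386" becomes
#
#   self.pkg.changes["architecture"] == {"source": 1, "amd64": 1, "i386": 1}
#
# which is why later checks use has_key() membership tests on these fields.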
518 # Fix the Maintainer: field to be RFC822/2047 compatible
520 (self.pkg.changes["maintainer822"],
521 self.pkg.changes["maintainer2047"],
522 self.pkg.changes["maintainername"],
523 self.pkg.changes["maintaineremail"]) = \
524 fix_maintainer (self.pkg.changes["maintainer"])
525 except ParseMaintError, msg:
526 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
527 % (filename, self.pkg.changes["maintainer"], msg))
529 # ...likewise for the Changed-By: field if it exists.
531 (self.pkg.changes["changedby822"],
532 self.pkg.changes["changedby2047"],
533 self.pkg.changes["changedbyname"],
534 self.pkg.changes["changedbyemail"]) = \
535 fix_maintainer (self.pkg.changes.get("changed-by", ""))
536 except ParseMaintError, msg:
537 self.pkg.changes["changedby822"] = ""
538 self.pkg.changes["changedby2047"] = ""
539 self.pkg.changes["changedbyname"] = ""
540 self.pkg.changes["changedbyemail"] = ""
542 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
543 % (filename, self.pkg.changes["changed-by"], msg))
545 # Ensure all the values in Closes: are numbers
546 if self.pkg.changes.has_key("closes"):
547 for i in self.pkg.changes["closes"].keys():
548 if re_isanum.match (i) == None:
549 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
551 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
552 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
553 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
555 # Check the .changes is non-empty
556 if not self.pkg.files:
557 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
560 # Changes was syntactically valid even if we'll reject
563 ###########################################################################
565 def check_distributions(self):
566 "Check and map the Distribution field"
570 # Handle suite mappings
571 for m in Cnf.ValueList("SuiteMappings"):
574 if mtype == "map" or mtype == "silent-map":
575 (source, dest) = args[1:3]
576 if self.pkg.changes["distribution"].has_key(source):
577 del self.pkg.changes["distribution"][source]
578 self.pkg.changes["distribution"][dest] = 1
579 if mtype != "silent-map":
580 self.notes.append("Mapping %s to %s." % (source, dest))
581 if self.pkg.changes.has_key("distribution-version"):
582 if self.pkg.changes["distribution-version"].has_key(source):
583 self.pkg.changes["distribution-version"][source]=dest
584 elif mtype == "map-unreleased":
585 (source, dest) = args[1:3]
586 if self.pkg.changes["distribution"].has_key(source):
587 for arch in self.pkg.changes["architecture"].keys():
588 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
589 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
590 del self.pkg.changes["distribution"][source]
591 self.pkg.changes["distribution"][dest] = 1
593 elif mtype == "ignore":
595 if self.pkg.changes["distribution"].has_key(suite):
596 del self.pkg.changes["distribution"][suite]
597 self.warnings.append("Ignoring %s as a target suite." % (suite))
598 elif mtype == "reject":
600 if self.pkg.changes["distribution"].has_key(suite):
601 self.rejects.append("Uploads to %s are not accepted." % (suite))
602 elif mtype == "propup-version":
603 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
605 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
606 if self.pkg.changes["distribution"].has_key(args[1]):
607 self.pkg.changes.setdefault("distribution-version", {})
608 for suite in args[2:]:
609 self.pkg.changes["distribution-version"][suite] = suite
611 # Ensure there is (still) a target distribution
612 if len(self.pkg.changes["distribution"].keys()) < 1:
613 self.rejects.append("No valid distribution remaining.")
615 # Ensure target distributions exist
616 for suite in self.pkg.changes["distribution"].keys():
617 if not Cnf.has_key("Suite::%s" % (suite)):
618 self.rejects.append("Unknown distribution `%s'." % (suite))
620 ###########################################################################
622 def binary_file_checks(self, f, session):
624 entry = self.pkg.files[f]
626 # Extract package control information
627 deb_file = utils.open_file(f)
629 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
631 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
633 # Can't continue, none of the checks on control would work.
636 # Check for mandatory "Description:"
639 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
641 self.rejects.append("%s: Missing Description in binary package" % (f))
646 # Check for mandatory fields
647 for field in [ "Package", "Architecture", "Version" ]:
648 if control.Find(field) == None:
650 self.rejects.append("%s: No %s field in control." % (f, field))
653 # Ensure the package name matches the one given in the .changes
654 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
655 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
657 # Validate the package field
658 package = control.Find("Package")
659 if not re_valid_pkg_name.match(package):
660 self.rejects.append("%s: invalid package name '%s'." % (f, package))
662 # Validate the version field
663 version = control.Find("Version")
664 if not re_valid_version.match(version):
665 self.rejects.append("%s: invalid version number '%s'." % (f, version))
667 # Ensure the architecture of the .deb is one we know about.
668 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
669 architecture = control.Find("Architecture")
670 upload_suite = self.pkg.changes["distribution"].keys()[0]
672 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
673 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
674 self.rejects.append("Unknown architecture '%s'." % (architecture))
676 # Ensure the architecture of the .deb is one of the ones
677 # listed in the .changes.
678 if not self.pkg.changes["architecture"].has_key(architecture):
679 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
681 # Sanity-check the Depends field
682 depends = control.Find("Depends")
684 self.rejects.append("%s: Depends field is empty." % (f))
686 # Sanity-check the Provides field
687 provides = control.Find("Provides")
689 provide = re_spacestrip.sub('', provides)
691 self.rejects.append("%s: Provides field is empty." % (f))
692 prov_list = provide.split(",")
693 for prov in prov_list:
694 if not re_valid_pkg_name.match(prov):
695 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
697 # Check the section & priority match those given in the .changes (non-fatal)
698 if control.Find("Section") and entry["section"] != "" \
699 and entry["section"] != control.Find("Section"):
700 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
701 (f, control.Find("Section", ""), entry["section"]))
702 if control.Find("Priority") and entry["priority"] != "" \
703 and entry["priority"] != control.Find("Priority"):
704 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
705 (f, control.Find("Priority", ""), entry["priority"]))
707 entry["package"] = package
708 entry["architecture"] = architecture
709 entry["version"] = version
710 entry["maintainer"] = control.Find("Maintainer", "")
712 if f.endswith(".udeb"):
713 self.pkg.files[f]["dbtype"] = "udeb"
714 elif f.endswith(".deb"):
715 self.pkg.files[f]["dbtype"] = "deb"
717 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
719 entry["source"] = control.Find("Source", entry["package"])
721 # Get the source version
722 source = entry["source"]
725 if source.find("(") != -1:
726 m = re_extract_src_version.match(source)
728 source_version = m.group(2)
730 if not source_version:
731 source_version = self.pkg.files[f]["version"]
733 entry["source package"] = source
734 entry["source version"] = source_version
736 # Ensure the filename matches the contents of the .deb
737 m = re_isadeb.match(f)
740 file_package = m.group(1)
741 if entry["package"] != file_package:
742 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
743 (f, file_package, entry["dbtype"], entry["package"]))
744 epochless_version = re_no_epoch.sub('', control.Find("Version"))
747 file_version = m.group(2)
748 if epochless_version != file_version:
749 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
750 (f, file_version, entry["dbtype"], epochless_version))
753 file_architecture = m.group(3)
754 if entry["architecture"] != file_architecture:
755 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
756 (f, file_architecture, entry["dbtype"], entry["architecture"]))
758 # Check that a corresponding source package exists
759 source_version = entry["source version"]
760 source_package = entry["source package"]
761 if self.pkg.changes["architecture"].has_key("source"):
762 if source_version != self.pkg.changes["version"]:
763 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
764 (source_version, f, self.pkg.changes["version"]))
766 # Check in the SQL database
767 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
768 # Check in one of the other directories
769 source_epochless_version = re_no_epoch.sub('', source_version)
770 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
771 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
773 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
776 dsc_file_exists = False
777 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
778 if cnf.has_key("Dir::Queue::%s" % (myq)):
779 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
780 dsc_file_exists = True
783 if not dsc_file_exists:
784 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
786 # Check the version and for file overwrites
787 self.check_binary_against_db(f, session)
789 # Temporarily disable contents generation until we change the table storage layout
792 #if len(b.rejects) > 0:
793 # for j in b.rejects:
794 # self.rejects.append(j)
796 def source_file_checks(self, f, session):
797 entry = self.pkg.files[f]
799 m = re_issource.match(f)
803 entry["package"] = m.group(1)
804 entry["version"] = m.group(2)
805 entry["type"] = m.group(3)
807 # Ensure the source package name matches the Source field in the .changes
808 if self.pkg.changes["source"] != entry["package"]:
809 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
811 # Ensure the source version matches the version in the .changes file
812 if re_is_orig_source.match(f):
813 changes_version = self.pkg.changes["chopversion2"]
815 changes_version = self.pkg.changes["chopversion"]
817 if changes_version != entry["version"]:
818 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
820 # Ensure the .changes lists source in the Architecture field
821 if not self.pkg.changes["architecture"].has_key("source"):
822 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
824 # Check the signature of a .dsc file
825 if entry["type"] == "dsc":
826 # check_signature returns either:
827 # (None, [list, of, rejects]) or (signature, [])
828 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
830 self.rejects.append(j)
832 entry["architecture"] = "source"
834 def per_suite_file_checks(self, f, suite, session):
836 entry = self.pkg.files[f]
839 if entry.has_key("byhand"):
842 # Check we have fields we need to do these checks
844 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
845 if not entry.has_key(m):
846 self.rejects.append("file '%s' does not have field %s set" % (f, m))
852 # Handle component mappings
853 for m in cnf.ValueList("ComponentMappings"):
854 (source, dest) = m.split()
855 if entry["component"] == source:
856 entry["original component"] = source
857 entry["component"] = dest
859 # Ensure the component is valid for the target suite
860 if cnf.has_key("Suite:%s::Components" % (suite)) and \
861 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
862 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
865 # Validate the component
866 if not get_component(entry["component"], session):
867 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
870 # See if the package is NEW
871 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
874 # Validate the priority
875 if entry["priority"].find('/') != -1:
876 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
878 # Determine the location
879 location = cnf["Dir::Pool"]
880 l = get_location(location, entry["component"], session=session)
882 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
883 entry["location id"] = -1
885 entry["location id"] = l.location_id
887 # Check the md5sum & size against existing files (if any)
888 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
890 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
891 entry["size"], entry["md5sum"], entry["location id"])
894 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
895 elif found is False and poolfile is not None:
896 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
899 entry["files id"] = None
901 entry["files id"] = poolfile.file_id
903 # Check for packages that have moved from one component to another
904 entry['suite'] = suite
905 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
907 entry["othercomponents"] = res.fetchone()[0]
909 def check_files(self, action=True):
910 file_keys = self.pkg.files.keys()
916 os.chdir(self.pkg.directory)
918 ret = holding.copy_to_holding(f)
920 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
924 # check whether we already know this changes file
925 # [NB: this check must be done post-suite mapping]
926 base_filename = os.path.basename(self.pkg.changes_file)
928 session = DBConn().session()
931 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
932 # if in the pool or in a queue other than unchecked, reject
933 if (dbc.in_queue is None) \
934 or (dbc.in_queue is not None
935 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
936 self.rejects.append("%s file already known to dak" % base_filename)
937 except NoResultFound, e:
944 for f, entry in self.pkg.files.items():
945 # Ensure the file does not already exist in one of the accepted directories
946 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
947 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
948 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
949 self.rejects.append("%s file already exists in the %s directory." % (f, d))
951 if not re_taint_free.match(f):
952 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
954 # Check the file is readable
955 if os.access(f, os.R_OK) == 0:
956 # When running in -n, copy_to_holding() won't have
957 # generated the reject_message, so we need to.
959 if os.path.exists(f):
960 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
962 # Don't reject directly; mark it to be checked later, to handle orig files
963 # we may find in the pool
964 self.later_check_files.append(f)
965 entry["type"] = "unreadable"
968 # If it's byhand skip remaining checks
969 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
971 entry["type"] = "byhand"
973 # Checks for a binary package...
974 elif re_isadeb.match(f):
976 entry["type"] = "deb"
978 # This routine appends to self.rejects/warnings as appropriate
979 self.binary_file_checks(f, session)
981 # Checks for a source package...
982 elif re_issource.match(f):
985 # This routine appends to self.rejects/warnings as appropriate
986 self.source_file_checks(f, session)
988 # Not a binary or source package? Assume byhand...
991 entry["type"] = "byhand"
993 # Per-suite file checks
994 entry["oldfiles"] = {}
995 for suite in self.pkg.changes["distribution"].keys():
996 self.per_suite_file_checks(f, suite, session)
1000 # If the .changes file says it has source, it must have source.
1001 if self.pkg.changes["architecture"].has_key("source"):
1003 self.rejects.append("no source found and Architecture line in changes mention source.")
1005 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1006 self.rejects.append("source only uploads are not supported.")
1008 ###########################################################################
1009 def check_dsc(self, action=True, session=None):
1010 """Returns bool indicating whether or not the source changes are valid"""
1011 # Ensure there is source to check
1012 if not self.pkg.changes["architecture"].has_key("source"):
1017 for f, entry in self.pkg.files.items():
1018 if entry["type"] == "dsc":
1020 self.rejects.append("can not process a .changes file with multiple .dsc's.")
1025 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1026 if not dsc_filename:
1027 self.rejects.append("source uploads must contain a dsc file")
1030 # Parse the .dsc file
1032 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1033 except CantOpenError:
1034 # if not -n copy_to_holding() will have done this for us...
1036 self.rejects.append("%s: can't read file." % (dsc_filename))
1037 except ParseChangesError, line:
1038 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1039 except InvalidDscError, line:
1040 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1041 except ChangesUnicodeError:
1042 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1044 # Build up the list of files mentioned by the .dsc
1046 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1047 except NoFilesFieldError:
1048 self.rejects.append("%s: no Files: field." % (dsc_filename))
1050 except UnknownFormatError, format:
1051 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1053 except ParseChangesError, line:
1054 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1057 # Enforce mandatory fields
1058 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1059 if not self.pkg.dsc.has_key(i):
1060 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1063 # Validate the source and version fields
1064 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1065 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1066 if not re_valid_version.match(self.pkg.dsc["version"]):
1067 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1069 # Only a limited list of source formats are allowed in each suite
1070 for dist in self.pkg.changes["distribution"].keys():
1071 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1072 if self.pkg.dsc["format"] not in allowed:
1073 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1075 # Validate the Maintainer field
1077 # We ignore the return value
1078 fix_maintainer(self.pkg.dsc["maintainer"])
1079 except ParseMaintError, msg:
1080 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1081 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1083 # Validate the build-depends field(s)
1084 for field_name in [ "build-depends", "build-depends-indep" ]:
1085 field = self.pkg.dsc.get(field_name)
1087 # Have apt try to parse them...
1089 apt_pkg.ParseSrcDepends(field)
1091 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1093 # Ensure the version number in the .dsc matches the version number in the .changes
1094 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1095 changes_version = self.pkg.files[dsc_filename]["version"]
1097 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1098 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1100 # Ensure the Files field contains only what's expected
1101 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1103 # Ensure source is newer than existing source in target suites
1104 session = DBConn().session()
1105 self.check_source_against_db(dsc_filename, session)
1106 self.check_dsc_against_db(dsc_filename, session)
1109 # Finally, check if we're missing any files
1110 for f in self.later_check_files:
1111 self.rejects.append("Could not find file %s references in changes" % f)
1115 ###########################################################################
1117 def get_changelog_versions(self, source_dir):
1118 """Extracts a the source package and (optionally) grabs the
1119 version history out of debian/changelog for the BTS."""
1123 # Find the .dsc (again)
1125 for f in self.pkg.files.keys():
1126 if self.pkg.files[f]["type"] == "dsc":
1129 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1130 if not dsc_filename:
1133 # Create a symlink mirror of the source files in our temporary directory
1134 for f in self.pkg.files.keys():
1135 m = re_issource.match(f)
1137 src = os.path.join(source_dir, f)
1138 # If a file is missing for whatever reason, give up.
1139 if not os.path.exists(src):
1142 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1143 self.pkg.orig_files[f].has_key("path"):
1145 dest = os.path.join(os.getcwd(), f)
1146 os.symlink(src, dest)
1148 # If the orig files are not a part of the upload, create symlinks to the
1150 for orig_file in self.pkg.orig_files.keys():
1151 if not self.pkg.orig_files[orig_file].has_key("path"):
1153 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1154 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1156 # Extract the source
1157 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1158 (result, output) = commands.getstatusoutput(cmd)
1160 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1161 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1164 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1167 # Get the upstream version
1168 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1169 if re_strip_revision.search(upstr_version):
1170 upstr_version = re_strip_revision.sub('', upstr_version)
1172 # Ensure the changelog file exists
1173 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1174 if not os.path.exists(changelog_filename):
1175 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1178 # Parse the changelog
1179 self.pkg.dsc["bts changelog"] = ""
1180 changelog_file = utils.open_file(changelog_filename)
1181 for line in changelog_file.readlines():
1182 m = re_changelog_versions.match(line)
1184 self.pkg.dsc["bts changelog"] += line
1185 changelog_file.close()
1187 # Check we found at least one revision in the changelog
1188 if not self.pkg.dsc["bts changelog"]:
1189 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1191 def check_source(self):
1193 # a) there's no source
1194 if not self.pkg.changes["architecture"].has_key("source"):
1197 tmpdir = utils.temp_dirname()
1199 # Move into the temporary directory
1203 # Get the changelog version history
1204 self.get_changelog_versions(cwd)
1206 # Move back and cleanup the temporary tree
1210 shutil.rmtree(tmpdir)
1212 if e.errno != errno.EACCES:
1214 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1216 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1217 # We probably have u-r or u-w directories so chmod everything
1219 cmd = "chmod -R u+rwx %s" % (tmpdir)
1220 result = os.system(cmd)
1222 utils.fubar("'%s' failed with result %s." % (cmd, result))
1223 shutil.rmtree(tmpdir)
1224 except Exception, e:
1225 print "foobar2 (%s)" % e
1226 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1228 ###########################################################################
1229 def ensure_hashes(self):
1230 # Make sure we recognise the format of the Files: field in the .changes
1231 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1232 if len(format) == 2:
1233 format = int(format[0]), int(format[1])
1235 format = int(float(format[0])), 0
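# Illustrative example (assumed field values): "Format: 1.8" parses to
# format == (1, 8); an old-style "Format: 1" has no minor component, so the
# fallback above turns it into (1, 0).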
1237 # We need to deal with the original changes blob, as the fields we need
1238 # might not be in the changes dict serialised into the .dak anymore.
1239 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1241 # Copy the checksums over to the current changes dict. This will keep
1242 # the existing modifications to it intact.
1243 for field in orig_changes:
1244 if field.startswith('checksums-'):
1245 self.pkg.changes[field] = orig_changes[field]
1247 # Check for unsupported hashes
1248 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1249 self.rejects.append(j)
1251 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1252 self.rejects.append(j)
1254 # We have to calculate the hash ourselves if the .changes format predates the
1255 # version in which that hash appeared, rather than requiring it in the .changes file
1256 for hashname, hashfunc, version in utils.known_hashes:
1257 # TODO: Move _ensure_changes_hash into this class
1258 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1259 self.rejects.append(j)
1260 if "source" in self.pkg.changes["architecture"]:
1261 # TODO: Move _ensure_dsc_hash into this class
1262 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1263 self.rejects.append(j)
1265 def check_hashes(self):
1266 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1267 self.rejects.append(m)
1269 for m in utils.check_size(".changes", self.pkg.files):
1270 self.rejects.append(m)
1272 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1273 self.rejects.append(m)
1275 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1276 self.rejects.append(m)
1278 self.ensure_hashes()
1280 ###########################################################################
1282 def ensure_orig(self, target_dir='.', session=None):
1284 Ensures that all orig files mentioned in the changes file are present
1285 in target_dir. If they do not exist, they are symlinked into place.
1287 A list containing the symlinks that were created is returned (so they
1294 for filename, entry in self.pkg.dsc_files.iteritems():
1295 if not re_is_orig_source.match(filename):
1296 # File is not an orig; ignore
1299 if os.path.exists(filename):
1300 # File exists, no need to continue
1303 def symlink_if_valid(path):
1304 f = utils.open_file(path)
1305 md5sum = apt_pkg.md5sum(f)
1308 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1309 expected = (int(entry['size']), entry['md5sum'])
1311 if fingerprint != expected:
1314 dest = os.path.join(target_dir, filename)
1316 os.symlink(path, dest)
1317 symlinked.append(dest)
1323 session_ = DBConn().session()
1328 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1329 poolfile_path = os.path.join(
1330 poolfile.location.path, poolfile.filename
1333 if symlink_if_valid(poolfile_path):
1343 # Look in some other queues for the file
1344 queues = ('New', 'Byhand', 'ProposedUpdates',
1345 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1347 for queue in queues:
1348 if not cnf.get('Dir::Queue::%s' % queue):
1351 queuefile_path = os.path.join(
1352 cnf['Dir::Queue::%s' % queue], filename
1355 if not os.path.exists(queuefile_path):
1356 # Does not exist in this queue
1359 if symlink_if_valid(queuefile_path):
1364 ###########################################################################
1366 def check_lintian(self):
1368 Extends self.rejects by checking the output of lintian against tags
1369 specified in Dinstall::LintianTags.
1374 # Don't reject binary uploads
1375 if not self.pkg.changes['architecture'].has_key('source'):
1378 # Only check some distributions
1379 for dist in ('unstable', 'experimental'):
1380 if dist in self.pkg.changes['distribution']:
1385 # If we do not have a tagfile, don't do anything
1386 tagfile = cnf.get("Dinstall::LintianTags")
1390 # Parse the yaml file
1391 sourcefile = file(tagfile, 'r')
1392 sourcecontent = sourcefile.read()
1396 lintiantags = yaml.load(sourcecontent)['lintian']
1397 except yaml.YAMLError, msg:
1398 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1401 # Try to find all orig files mentioned in the .dsc
1402 symlinked = self.ensure_orig()
1404 # Setup the input file for lintian
1405 fd, temp_filename = utils.temp_filename()
1406 temptagfile = os.fdopen(fd, 'w')
1407 for tags in lintiantags.values():
1408 temptagfile.writelines(['%s\n' % x for x in tags])
1412 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1413 (temp_filename, self.pkg.changes_file)
1415 result, output = commands.getstatusoutput(cmd)
1417 # Remove our tempfile and any symlinks we created
1418 os.unlink(temp_filename)
1420 for symlink in symlinked:
1424 utils.warn("lintian failed for %s [return code: %s]." % \
1425 (self.pkg.changes_file, result))
1426 utils.warn(utils.prefix_multi_line_string(output, \
1427 " [possible output:] "))
1432 [self.pkg.changes_file, "check_lintian"] + list(txt)
1436 parsed_tags = parse_lintian_output(output)
1437 self.rejects.extend(
1438 generate_reject_messages(parsed_tags, lintiantags, log=log)
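# Illustrative sketch (assumed file contents): Dinstall::LintianTags points at
# a YAML file shaped roughly like the following, where each category maps to a
# list of lintian tags that cause a reject (the category names are examples):
#
#   lintian:
#     nonfatal:
#       - some-overridable-tag
#     fatal:
#       - some-always-fatal-tag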
1441 ###########################################################################
1442 def check_urgency(self):
1444 if self.pkg.changes["architecture"].has_key("source"):
1445 if not self.pkg.changes.has_key("urgency"):
1446 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1447 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1448 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1449 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1450 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1451 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1453 ###########################################################################
1455 # Sanity check the time stamps of files inside debs.
1456 # [Files in the near future cause ugly warnings and extreme time
1457 # travel can cause errors on extraction]
1459 def check_timestamps(self):
1462 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1463 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1464 tar = TarTime(future_cutoff, past_cutoff)
1466 for filename, entry in self.pkg.files.items():
1467 if entry["type"] == "deb":
1470 deb_file = utils.open_file(filename)
1471 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1474 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1475 except SystemError, e:
1476 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1477 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1480 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1484 future_files = tar.future_files.keys()
1486 num_future_files = len(future_files)
1487 future_file = future_files[0]
1488 future_date = tar.future_files[future_file]
1489 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1490 % (filename, num_future_files, future_file, time.ctime(future_date)))
1492 ancient_files = tar.ancient_files.keys()
1494 num_ancient_files = len(ancient_files)
1495 ancient_file = ancient_files[0]
1496 ancient_date = tar.ancient_files[ancient_file]
1497 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1498 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1500 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1502 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1503 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1505 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1511 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1512 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1513 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1514 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1515 self.pkg.changes["sponsoremail"] = uid_email
1520 ###########################################################################
1521 # check_signed_by_key checks
1522 ###########################################################################
1524 def check_signed_by_key(self):
1525 """Ensure the .changes is signed by an authorized uploader."""
1526 session = DBConn().session()
1528 # First of all we check that the person has proper upload permissions
1529 # and that this upload isn't blocked
1530 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1533 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1536 # TODO: Check that import-keyring adds UIDs properly
1538 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1541 # Check that the fingerprint which uploaded has permission to do so
1542 self.check_upload_permissions(fpr, session)
1544 # Check that this package is not in a transition
1545 self.check_transition(session)
1550 def check_upload_permissions(self, fpr, session):
1551 # Check any one-off upload blocks
1552 self.check_upload_blocks(fpr, session)
1554 # Start with DM as a special case
1555 # DM is a special case unfortunately, so we check it first
1556 # (keys with no source access get more access than DMs in one
1557 # way; DMs can only upload for their packages whether source
1558 # or binary, whereas keys with no access might be able to
1559 # upload some binaries)
1560 if fpr.source_acl.access_level == 'dm':
1561 self.check_dm_upload(fpr, session)
1563 # Check source-based permissions for other types
1564 if self.pkg.changes["architecture"].has_key("source") and \
1565 fpr.source_acl.access_level is None:
1566 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1567 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1568 self.rejects.append(rej)
1570 # If not a DM, we allow full upload rights
1571 uid_email = "%s@debian.org" % (fpr.uid.uid)
1572 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1575 # Check binary upload permissions
1576 # By this point we know that DMs can't have got here unless they
1577 # are allowed to deal with the package concerned so just apply
1579 if fpr.binary_acl.access_level == 'full':
1582 # Otherwise we're in the map case
1583 tmparches = self.pkg.changes["architecture"].copy()
1584 tmparches.pop('source', None)
1586 for bam in fpr.binary_acl_map:
1587 tmparches.pop(bam.architecture.arch_string, None)
1589 if len(tmparches.keys()) > 0:
1590 if fpr.binary_reject:
1591 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1592 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1593 self.rejects.append(rej)
1595 # TODO: This is where we'll implement reject vs throw away binaries later
1596 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1597 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1598 rej += "\nFingerprint: %s", (fpr.fingerprint)
1599 self.rejects.append(rej)
1602 def check_upload_blocks(self, fpr, session):
1603 """Check whether any upload blocks apply to this source, source
1604 version, uid / fpr combination"""
1606 def block_rej_template(fb):
1607 rej = 'Manual upload block in place for package %s' % fb.source
1608 if fb.version is not None:
1609 rej += ', version %s' % fb.version
1612 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1613 # version is None if the block applies to all versions
1614 if fb.version is None or fb.version == self.pkg.changes['version']:
1615 # Check both fpr and uid - either is enough to cause a reject
1616 if fb.fpr is not None:
1617 if fb.fpr.fingerprint == fpr.fingerprint:
1618 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1619 if fb.uid is not None:
1620 if fb.uid == fpr.uid:
1621 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1624 def check_dm_upload(self, fpr, session):
1625 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1626 ## none of the uploaded packages are NEW
1628 for f in self.pkg.files.keys():
1629 if self.pkg.files[f].has_key("byhand"):
1630 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1632 if self.pkg.files[f].has_key("new"):
1633 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1639 ## the most recent version of the package uploaded to unstable or
1640 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1641 ## section of its control file
1642 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1643 q = q.join(SrcAssociation)
1644 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1645 q = q.order_by(desc('source.version')).limit(1)
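# Illustrative sketch (not part of dak; table and column names are assumptions
# based on the ORM mappings): the query built above is roughly equivalent to
#
#   SELECT source.* FROM source
#     JOIN src_associations ON src_associations.source = source.id
#     JOIN suite ON suite.id = src_associations.suite
#    WHERE source.source = :pkg
#      AND suite.suite_name IN ('unstable', 'experimental')
#    ORDER BY source.version DESC LIMIT 1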
1650 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1651 self.rejects.append(rej)
1655 if not r.dm_upload_allowed:
1656 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1657 self.rejects.append(rej)
1660 ## the Maintainer: field of the uploaded .changes file corresponds with
1661 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1663 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1664 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1666 ## the most recent version of the package uploaded to unstable or
1667 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1668 ## non-developer maintainers cannot NMU or hijack packages)
1670 # srcuploaders includes the maintainer
1672 for sup in r.srcuploaders:
1673 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1674 # Eww - I hope we never have two people with the same name in Debian
1675 if email == fpr.uid.uid or name == fpr.uid.name:
1680 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1683 ## none of the packages are being taken over from other source packages
1684 for b in self.pkg.changes["binary"].keys():
1685 for suite in self.pkg.changes["distribution"].keys():
1686 q = session.query(DBSource)
1687 q = q.join(DBBinary).filter_by(package=b)
1688 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1691 if s.source != self.pkg.changes["source"]:
1692 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1696 def check_transition(self, session):
1699 sourcepkg = self.pkg.changes["source"]
1701 # No sourceful upload -> no need to do anything else, direct return
1702 # We also work with unstable uploads, not experimental or those going to some
1703 # proposed-updates queue
1704 if "source" not in self.pkg.changes["architecture"] or \
1705 "unstable" not in self.pkg.changes["distribution"]:
1708 # Also only check if there is a file defined (and existent) with
1710 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1711 if transpath == "" or not os.path.exists(transpath):
1714 # Parse the yaml file
1715 sourcefile = file(transpath, 'r')
1716 sourcecontent = sourcefile.read()
1718 transitions = yaml.load(sourcecontent)
1719 except yaml.YAMLError, msg:
1720 # This shouldn't happen, there is a wrapper to edit the file which
1721 # checks it, but we would rather be safe than end up rejecting
1723 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1726 # Now look through all defined transitions
1727 for trans in transitions:
1728 t = transitions[trans]
1729 source = t["source"]
1732 # Will be None if nothing is in testing.
1733 current = get_source_in_suite(source, "testing", session)
1734 if current is not None:
1735 compare = apt_pkg.VersionCompare(current.version, expected)
1737 if current is None or compare < 0:
1738 # This is still valid, the current version in testing is older than
1739 # the new version we wait for, or there is none in testing yet
1741 # Check if the source we look at is affected by this.
1742 if sourcepkg in t['packages']:
1743 # The source is affected, let's reject it.
1745 rejectmsg = "%s: part of the %s transition.\n\n" % (
1748 if current is not None:
1749 currentlymsg = "at version %s" % (current.version)
1751 currentlymsg = "not present in testing"
1753 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1755 rejectmsg += "\n".join(textwrap.wrap("""Your package
1756 is part of a testing transition designed to get %s migrated (it is
1757 currently %s, we need version %s). This transition is managed by the
1758 Release Team, and %s is the Release-Team member responsible for it.
1759 Please mail debian-release@lists.debian.org or contact %s directly if you
1760 need further assistance. You might want to upload to experimental until this
1761 transition is done."""
1762 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1764 self.rejects.append(rejectmsg)
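# For illustration, a minimal sketch of what one entry in the
# Dinstall::Reject::ReleaseTransitions YAML file is assumed to look like,
# based only on the keys read above ("source", "packages", "reason", "rm");
# the name of the expected-version key is not visible in this excerpt and is
# an assumption here:
#
#   libfoo-transition:
#     source: libfoo
#     new: 1.2-1                 # version we wait for in testing (assumed key)
#     rm: Some Releaseteam Member
#     reason: "library transition to the new ABI"
#     packages:
#       - libfoo
#       - bar
#       - baz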
1767 ###########################################################################
1768 # End check_signed_by_key checks
1769 ###########################################################################
1771 def build_summaries(self):
1772 """ Build a summary of changes the upload introduces. """
1774 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1776 short_summary = summary
1778 # This is for direport's benefit...
1779 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1782 summary += "Changes: " + f
1784 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1786 summary += self.announce(short_summary, 0)
1788 return (summary, short_summary)
1790 ###########################################################################
1792 def close_bugs(self, summary, action):
1794 Send mail to close bugs as instructed by the closes field in the changes file.
1795 Also add a line to summary if any work was done.
1797 @type summary: string
1798 @param summary: summary text, as given by L{build_summaries}
1801 @param action: if set to false, no real action will be taken.
1804 @return: summary. If action was taken, extended by the list of closed bugs.
1808 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1810 bugs = self.pkg.changes["closes"].keys()
1816 summary += "Closing bugs: "
1818 summary += "%s " % (bug)
1821 self.Subst["__BUG_NUMBER__"] = bug
1822 if self.pkg.changes["distribution"].has_key("stable"):
1823 self.Subst["__STABLE_WARNING__"] = """
1824 Note that this package is not part of the released stable Debian
1825 distribution. It may have dependencies on other unreleased software,
1826 or other instabilities. Please take care if you wish to install it.
1827 The update will eventually make its way into the next released Debian
1830 self.Subst["__STABLE_WARNING__"] = ""
1831 mail_message = utils.TemplateSubst(self.Subst, template)
1832 utils.send_mail(mail_message)
1834 # Clear up after ourselves
1835 del self.Subst["__BUG_NUMBER__"]
1836 del self.Subst["__STABLE_WARNING__"]
1838 if action and self.logger:
1839 self.logger.log(["closing bugs"] + bugs)
1845 ###########################################################################
1847 def announce(self, short_summary, action):
1849 Send an announce mail about a new upload.
1851 @type short_summary: string
1852 @param short_summary: Short summary text to include in the mail
1855 @param action: if set to false, no real action will be taken.
1858 @return: text string describing the action taken.
1863 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1865 # Only do announcements for source uploads with a recent dpkg-dev installed
1866 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1867 self.pkg.changes["architecture"].has_key("source"):
1873 self.Subst["__SHORT_SUMMARY__"] = short_summary
1875 for dist in self.pkg.changes["distribution"].keys():
1876 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1877 if announce_list == "" or lists_done.has_key(announce_list):
1880 lists_done[announce_list] = 1
1881 summary += "Announcing to %s\n" % (announce_list)
1885 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1886 if cnf.get("Dinstall::TrackingServer") and \
1887 self.pkg.changes["architecture"].has_key("source"):
1888 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1889 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1891 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1892 utils.send_mail(mail_message)
1894 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1896 if cnf.FindB("Dinstall::CloseBugs"):
1897 summary = self.close_bugs(summary, action)
1899 del self.Subst["__SHORT_SUMMARY__"]
1903 ###########################################################################
1905 def accept (self, summary, short_summary, session=None):
1909 This moves all files referenced from the .changes into the pool,
1910 sends the accepted mail, announces to lists, closes bugs and
1911 also checks for override disparities. If enabled it will write out the
1912 version history for the BTS Version Tracking and finally add the files to any per-suite copy queues (e.g. the buildd queues).
1915 @type summary: string
1916 @param summary: Summary text
1918 @type short_summary: string
1919 @param short_summary: Short summary
1923 stats = SummaryStats()
1926 self.logger.log(["installing changes", self.pkg.changes_file])
1930 # Add the .dsc file to the DB first
1931 for newfile, entry in self.pkg.files.items():
1932 if entry["type"] == "dsc":
1933 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1937 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1938 for newfile, entry in self.pkg.files.items():
1939 if entry["type"] == "deb":
1940 poolfiles.append(add_deb_to_db(self, newfile, session))
1942 # If this is a sourceful diff only upload that is moving
1943 # cross-component we need to copy the .orig files into the new
1944 # component too for the same reasons as above.
1945 # XXX: mhy: I think this should be in add_dsc_to_db
1946 if self.pkg.changes["architecture"].has_key("source"):
1947 for orig_file in self.pkg.orig_files.keys():
1948 if not self.pkg.orig_files[orig_file].has_key("id"):
1949 continue # Skip if it's not in the pool
1950 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1951 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1952 continue # Skip if the location didn't change
1955 oldf = get_poolfile_by_id(orig_file_id, session)
1956 old_filename = os.path.join(oldf.location.path, oldf.filename)
1957 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1958 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1960 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1962 # TODO: Care about size/md5sum collisions etc
1963 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1965 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1967 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1968 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1972 # Don't reference the old file from this changes
1974 if p.file_id == oldf.file_id:
1977 poolfiles.append(newf)
1979 # Fix up the DSC references
1982 for df in source.srcfiles:
1983 if df.poolfile.file_id == oldf.file_id:
1984 # Add a new DSC entry and mark the old one for deletion
1985 # Don't do it in the loop so we don't change the thing we're iterating over
1987 newdscf.source_id = source.source_id
1988 newdscf.poolfile_id = newf.file_id
1989 session.add(newdscf)
1999 # Make sure that our source object is up-to-date
2000 session.expire(source)
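# For orientation: utils.poolify() is assumed here to map (source, component)
# to the pool subdirectory for that source, following the usual Debian pool
# layout, e.g. roughly
#
#   ("foo", "main")    -> "main/f/foo/"
#   ("libbar", "main") -> "main/libb/libbar/"
#
# (paths relative to Dir::Pool), so new_filename above ends up as something
# like "main/f/foo/foo_1.0.orig.tar.gz". The exact return value is an
# assumption; only the call itself appears in this code.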
2002 # Add changelog information to the database
2003 self.store_changelog()
2005 # Install the files into the pool
2006 for newfile, entry in self.pkg.files.items():
2007 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2008 utils.move(newfile, destination)
2009 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2010 stats.accept_bytes += float(entry["size"])
2012 # Copy the .changes file across for suites which need it.
2014 for suite_name in self.pkg.changes["distribution"].keys():
2015 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
2016 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
2018 for dest in copy_changes.keys():
2019 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2021 # We're done - commit the database changes
2023 # Our SQL session will automatically start a new transaction after this commit
2026 # Move the .changes into the 'done' directory
2027 utils.move(self.pkg.changes_file,
2028 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2030 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2031 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2034 self.Subst["__SUMMARY__"] = summary
2035 mail_message = utils.TemplateSubst(self.Subst,
2036 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2037 utils.send_mail(mail_message)
2038 self.announce(short_summary, 1)
2040 ## Helper stuff for DebBugs Version Tracking
2041 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2042 if self.pkg.changes["architecture"].has_key("source"):
2043 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2044 version_history = os.fdopen(fd, 'w')
2045 version_history.write(self.pkg.dsc["bts changelog"])
2046 version_history.close()
2047 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2048 self.pkg.changes_file[:-8]+".versions")
2049 os.rename(temp_filename, filename)
2050 os.chmod(filename, 0644)
2052 # Write out the binary -> source mapping.
2053 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2054 debinfo = os.fdopen(fd, 'w')
2055 for name, entry in sorted(self.pkg.files.items()):
2056 if entry["type"] == "deb":
2057 line = " ".join([entry["package"], entry["version"],
2058 entry["architecture"], entry["source package"],
2059 entry["source version"]])
2060 debinfo.write(line+"\n")
2062 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2063 self.pkg.changes_file[:-8]+".debinfo")
2064 os.rename(temp_filename, filename)
2065 os.chmod(filename, 0644)
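# For reference, the binary -> source mapping written above has one
# whitespace-separated line per .deb. An illustrative (made up) line:
#
#   foo-utils 1.2-3 amd64 foo 1.2-3
#
# i.e. binary package, binary version, architecture, source package and
# source version, matching the join() a few lines up.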
2069 # Set up our copy queues (e.g. buildd queues)
2070 for suite_name in self.pkg.changes["distribution"].keys():
2071 suite = get_suite(suite_name, session)
2072 for q in suite.copy_queues:
2074 q.add_file_from_pool(f)
2079 stats.accept_count += 1
2081 def check_override(self):
2083 Checks override entries for validity. Mails "Override disparity" warnings,
2084 if that feature is enabled.
2086 Abandons the check if
2087 - override disparity checks are disabled
2088 - mail sending is disabled
2093 # Abandon the check if override disparity checks have been disabled
2094 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2097 summary = self.pkg.check_override()
2102 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2105 self.Subst["__SUMMARY__"] = summary
2106 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2107 utils.send_mail(mail_message)
2108 del self.Subst["__SUMMARY__"]
2110 ###########################################################################
2112 def remove(self, from_dir=None):
2114 Used (for instance) in p-u to remove the package from unchecked.
2116 Also removes the package from the holding area.
2118 if from_dir is None:
2119 from_dir = self.pkg.directory
2122 for f in self.pkg.files.keys():
2123 os.unlink(os.path.join(from_dir, f))
2124 if os.path.exists(os.path.join(h.holding_dir, f)):
2125 os.unlink(os.path.join(h.holding_dir, f))
2127 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2128 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2129 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2131 ###########################################################################
2133 def move_to_queue (self, queue):
2135 Move files to a destination queue using the permissions in the table
2138 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2139 queue.path, perms=int(queue.change_perms, 8))
2140 for f in self.pkg.files.keys():
2141 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2143 ###########################################################################
2145 def force_reject(self, reject_files):
2147 Forcefully move files from the current directory to the
2148 reject directory. If any file already exists in the reject
2149 directory it will be moved to the morgue to make way for the new file.
2152 @type reject_files: list
2153 @param reject_files: names of the files to be rejected
2159 for file_entry in reject_files:
2160 # Skip any files which don't exist or which we don't have permission to copy.
2161 if os.access(file_entry, os.R_OK) == 0:
2164 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2167 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2169 # File exists? Let's find a new name by adding a number
2170 if e.errno == errno.EEXIST:
2172 dest_file = utils.find_next_free(dest_file, 255)
2173 except NoFreeFilenameError:
2174 # Something's either gone badly Pete Tong, or
2175 # someone is trying to exploit us.
2176 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2179 # Make sure we really got it
2181 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2184 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2188 # If we got here, we own the destination file, so we can
2189 # safely overwrite it.
2190 utils.move(file_entry, dest_file, 1, perms=0660)
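# The os.open() calls above rely on O_CREAT|O_EXCL to claim a destination
# name atomically. A minimal self-contained sketch of the same pattern
# (names here are illustrative, not part of dak):
#
#   import os, errno
#
#   def claim(path):
#       try:
#           fd = os.open(path, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
#       except OSError, e:
#           if e.errno == errno.EEXIST:
#               return None    # somebody else already owns that name
#           raise
#       return fd              # we own the file now and may overwrite it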
2193 ###########################################################################
2194 def do_reject (self, manual=0, reject_message="", notes=""):
2196 Reject an upload. If called without a reject message or C{manual} is
2197 true, spawn an editor so the user can write one.
2200 @param manual: manual or automated rejection
2202 @type reject_message: string
2203 @param reject_message: A reject message
2208 # If we weren't given a manual rejection message, spawn an
2209 # editor so the user can add one in...
2210 if manual and not reject_message:
2211 (fd, temp_filename) = utils.temp_filename()
2212 temp_file = os.fdopen(fd, 'w')
2215 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2216 % (note.author, note.version, note.notedate, note.comment))
2218 editor = os.environ.get("EDITOR","vi")
2220 while answer == 'E':
2221 os.system("%s %s" % (editor, temp_filename))
2222 temp_fh = utils.open_file(temp_filename)
2223 reject_message = "".join(temp_fh.readlines())
2225 print "Reject message:"
2226 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2227 prompt = "[R]eject, Edit, Abandon, Quit ?"
2229 while prompt.find(answer) == -1:
2230 answer = utils.our_raw_input(prompt)
2231 m = re_default_answer.search(prompt)
2234 answer = answer[:1].upper()
2235 os.unlink(temp_filename)
2241 print "Rejecting.\n"
2245 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2246 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2248 # Move all the files into the reject directory
2249 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2250 self.force_reject(reject_files)
2252 # If we fail here someone is probably trying to exploit the race
2253 # so let's just raise an exception ...
2254 if os.path.exists(reason_filename):
2255 os.unlink(reason_filename)
2256 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2258 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2262 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2263 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2264 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2265 os.write(reason_fd, reject_message)
2266 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2268 # Build up the rejection email
2269 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2270 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2271 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2272 self.Subst["__REJECT_MESSAGE__"] = ""
2273 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2274 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2275 # Write the rejection email out as the <foo>.reason file
2276 os.write(reason_fd, reject_mail_message)
2278 del self.Subst["__REJECTOR_ADDRESS__"]
2279 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2280 del self.Subst["__CC__"]
2284 # Send the rejection mail
2285 utils.send_mail(reject_mail_message)
2288 self.logger.log(["rejected", self.pkg.changes_file])
2292 ################################################################################
2293 def in_override_p(self, package, component, suite, binary_type, filename, session):
2295 Check if a package already has override entries in the DB
2297 @type package: string
2298 @param package: package name
2300 @type component: string
2301 @param component: component name
2304 @param suite: suite name
2306 @type binary_type: string
2307 @param binary_type: type of the package
2309 @type filename: string
2310 @param filename: filename we check
2312 @return: the database result. But no one cares anyway.
2318 if binary_type == "": # must be source
2321 file_type = binary_type
2323 # Override suite name; used for example with proposed-updates
2324 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2325 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2327 result = get_override(package, suite, component, file_type, session)
2329 # If checking for a source package fall back on the binary override type
2330 if file_type == "dsc" and len(result) < 1:
2331 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2333 # Remember the section and priority so we can check them later if appropriate
2336 self.pkg.files[filename]["override section"] = result.section.section
2337 self.pkg.files[filename]["override priority"] = result.priority.priority
2342 ################################################################################
2343 def get_anyversion(self, sv_list, suite):
2346 @param sv_list: list of (suite, version) tuples to check
2349 @param suite: suite name
2355 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2356 for (s, v) in sv_list:
2357 if s in [ x.lower() for x in anysuite ]:
2358 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2363 ################################################################################
2365 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2368 @param sv_list: list of (suite, version) tuples to check
2370 @type filename: string
2371 @param filename: name of the file being checked; only used in reject messages
2373 @type new_version: string
2374 @param new_version: version of the package being uploaded
2376 Ensure versions are newer than existing packages in target
2377 suites and that cross-suite version checking rules as
2378 set out in the conf file are satisfied.
2383 # Check versions for each target suite
2384 for target_suite in self.pkg.changes["distribution"].keys():
2385 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2386 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2388 # Enforce "must be newer than target suite" even if conffile omits it
2389 if target_suite not in must_be_newer_than:
2390 must_be_newer_than.append(target_suite)
2392 for (suite, existent_version) in sv_list:
2393 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2395 if suite in must_be_newer_than and sourceful and vercmp < 1:
2396 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2398 if suite in must_be_older_than and vercmp > -1:
2401 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2402 # we really use the other suite, ignoring the conflicting one ...
2403 addsuite = self.pkg.changes["distribution-version"][suite]
2405 add_version = self.get_anyversion(sv_list, addsuite)
2406 target_version = self.get_anyversion(sv_list, target_suite)
2409 # not add_version can only happen if we map to a suite
2410 # that doesn't enhance the suite we're propup'ing from.
2411 # so "propup-ver x a b c; map a d" is a problem only if
2412 # d doesn't enhance a.
2414 # I think we could always propagate in this case, rather
2415 # than complaining. Either way, this isn't a REJECT issue
2417 # And - we really should complain to the dorks who configured dak
2418 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2419 self.pkg.changes.setdefault("propdistribution", {})
2420 self.pkg.changes["propdistribution"][addsuite] = 1
2422 elif not target_version:
2423 # not target_version is true when the package is NEW
2424 # we could just stick with the "...old version..." REJECT
2425 # for this, I think.
2426 self.rejects.append("Won't propogate NEW packages.")
2427 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2428 # propagation would be redundant. No need to reject though.
2429 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2431 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2432 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2434 self.warnings.append("Propogating upload to %s" % (addsuite))
2435 self.pkg.changes.setdefault("propdistribution", {})
2436 self.pkg.changes["propdistribution"][addsuite] = 1
2440 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2442 ################################################################################
2443 def check_binary_against_db(self, filename, session):
2444 # Ensure version is sane
2445 q = session.query(BinAssociation)
2446 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2447 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2449 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2450 filename, self.pkg.files[filename]["version"], sourceful=False)
2452 # Check for any existing copies of the file
2453 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2454 q = q.filter_by(version=self.pkg.files[filename]["version"])
2455 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2458 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2460 ################################################################################
2462 def check_source_against_db(self, filename, session):
2463 source = self.pkg.dsc.get("source")
2464 version = self.pkg.dsc.get("version")
2466 # Ensure version is sane
2467 q = session.query(SrcAssociation)
2468 q = q.join(DBSource).filter(DBSource.source==source)
2470 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2471 filename, version, sourceful=True)
2473 ################################################################################
2474 def check_dsc_against_db(self, filename, session):
2477 @warning: NB: this function can remove entries from the 'files' index [if
2478 the orig tarball is a duplicate of the one in the archive]; if
2479 you're iterating over 'files' and call this function as part of
2480 the loop, be sure to add a check to the top of the loop to
2481 ensure you haven't just tried to dereference the deleted entry.
2486 self.pkg.orig_files = {} # XXX: do we need to clear it?
2487 orig_files = self.pkg.orig_files
2489 # Try and find all files mentioned in the .dsc. This has
2490 # to work harder to cope with the multiple possible
2491 # locations of an .orig.tar.gz.
2492 # The ordering on the select is needed to pick the newest orig
2493 # when it exists in multiple places.
2494 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2496 if self.pkg.files.has_key(dsc_name):
2497 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2498 actual_size = int(self.pkg.files[dsc_name]["size"])
2499 found = "%s in incoming" % (dsc_name)
2501 # Check the file does not already exist in the archive
2502 ql = get_poolfile_like_name(dsc_name, session)
2504 # Strip out anything that isn't '%s' or '/%s$'
2506 if not i.filename.endswith(dsc_name):
2509 # "[dak] has not broken them. [dak] has fixed a
2510 # brokenness. Your crappy hack exploited a bug in
2513 # "(Come on! I thought it was always obvious that
2514 # one just doesn't release different files with
2515 # the same name and version.)"
2516 # -- ajk@ on d-devel@l.d.o
2519 # Ignore exact matches for .orig.tar.gz
2521 if re_is_orig_source.match(dsc_name):
2523 if self.pkg.files.has_key(dsc_name) and \
2524 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2525 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2526 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2527 # TODO: Don't delete the entry, just mark it as not needed
2528 # This would fix the stupidity of changing something we often iterate over
2529 # whilst we're doing it
2530 del self.pkg.files[dsc_name]
2531 dsc_entry["files id"] = i.file_id
2532 if not orig_files.has_key(dsc_name):
2533 orig_files[dsc_name] = {}
2534 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2537 # Don't bitch that we couldn't find this file later
2539 self.later_check_files.remove(dsc_name)
2545 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2547 elif re_is_orig_source.match(dsc_name):
2549 ql = get_poolfile_like_name(dsc_name, session)
2551 # Strip out anything that isn't '%s' or '/%s$'
2552 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2554 if not i.filename.endswith(dsc_name):
2558 # Unfortunately, we may get more than one match here if,
2559 # for example, the package was in potato but had an -sa
2560 # upload in woody. So we need to choose the right one.
2562 # default to something sane in case we don't match any or have only one
2567 old_file = os.path.join(i.location.path, i.filename)
2568 old_file_fh = utils.open_file(old_file)
2569 actual_md5 = apt_pkg.md5sum(old_file_fh)
2571 actual_size = os.stat(old_file)[stat.ST_SIZE]
2572 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2575 old_file = os.path.join(i.location.path, i.filename)
2576 old_file_fh = utils.open_file(old_file)
2577 actual_md5 = apt_pkg.md5sum(old_file_fh)
2579 actual_size = os.stat(old_file)[stat.ST_SIZE]
2581 suite_type = x.location.archive_type
2582 # need this for updating dsc_files in install()
2583 dsc_entry["files id"] = x.file_id
2584 # See install() in process-accepted...
2585 if not orig_files.has_key(dsc_name):
2586 orig_files[dsc_name] = {}
2587 orig_files[dsc_name]["id"] = x.file_id
2588 orig_files[dsc_name]["path"] = old_file
2589 orig_files[dsc_name]["location"] = x.location.location_id
2591 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2592 # Not there? Check the queue directories...
2593 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2594 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2596 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2597 if os.path.exists(in_otherdir):
2598 in_otherdir_fh = utils.open_file(in_otherdir)
2599 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2600 in_otherdir_fh.close()
2601 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2603 if not orig_files.has_key(dsc_name):
2604 orig_files[dsc_name] = {}
2605 orig_files[dsc_name]["path"] = in_otherdir
2608 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2611 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2613 if actual_md5 != dsc_entry["md5sum"]:
2614 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2615 if actual_size != int(dsc_entry["size"]):
2616 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2618 ################################################################################
2619 # This is used by process-new and process-holding to recheck a changes file
2620 # at the time we're running. It mainly wraps various other internal functions
2621 # and is similar to accepted_checks - these should probably be tidied up
2623 def recheck(self, session):
2625 for f in self.pkg.files.keys():
2626 # The .orig.tar.gz can disappear out from under us if it's a
2627 # duplicate of one in the archive.
2628 if not self.pkg.files.has_key(f):
2631 entry = self.pkg.files[f]
2633 # Check that the source still exists
2634 if entry["type"] == "deb":
2635 source_version = entry["source version"]
2636 source_package = entry["source package"]
2637 if not self.pkg.changes["architecture"].has_key("source") \
2638 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2639 source_epochless_version = re_no_epoch.sub('', source_version)
2640 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2642 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2643 if cnf.has_key("Dir::Queue::%s" % (q)):
2644 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2647 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2649 # Version and file overwrite checks
2650 if entry["type"] == "deb":
2651 self.check_binary_against_db(f, session)
2652 elif entry["type"] == "dsc":
2653 self.check_source_against_db(f, session)
2654 self.check_dsc_against_db(f, session)
2656 ################################################################################
2657 def accepted_checks(self, overwrite_checks, session):
2658 # Recheck anything that relies on the database, since that's not
2659 # frozen between accept and our run time when called from p-a.
2661 # overwrite_checks is set to False when installing to stable/oldstable
2666 # Find the .dsc (again)
2668 for f in self.pkg.files.keys():
2669 if self.pkg.files[f]["type"] == "dsc":
2672 for checkfile in self.pkg.files.keys():
2673 # The .orig.tar.gz can disappear out from under us if it's a
2674 # duplicate of one in the archive.
2675 if not self.pkg.files.has_key(checkfile):
2678 entry = self.pkg.files[checkfile]
2680 # Check that the source still exists
2681 if entry["type"] == "deb":
2682 source_version = entry["source version"]
2683 source_package = entry["source package"]
2684 if not self.pkg.changes["architecture"].has_key("source") \
2685 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2686 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2688 # Version and file overwrite checks
2689 if overwrite_checks:
2690 if entry["type"] == "deb":
2691 self.check_binary_against_db(checkfile, session)
2692 elif entry["type"] == "dsc":
2693 self.check_source_against_db(checkfile, session)
2694 self.check_dsc_against_db(dsc_filename, session)
2696 # Propagate in the case it is in the override tables:
2697 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2698 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2699 propogate[suite] = 1
2701 nopropogate[suite] = 1
2703 for suite in propogate.keys():
2704 if suite in nopropogate:
2706 self.pkg.changes["distribution"][suite] = 1
2708 for checkfile in self.pkg.files.keys():
2709 # Check the package is still in the override tables
2710 for suite in self.pkg.changes["distribution"].keys():
2711 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2712 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2714 ################################################################################
2715 # If any file of an upload has a recent mtime then chances are good
2716 # the file is still being uploaded.
2718 def upload_too_new(self):
2721 # Move back to the original directory to get accurate time stamps
2723 os.chdir(self.pkg.directory)
2724 file_list = self.pkg.files.keys()
2725 file_list.extend(self.pkg.dsc_files.keys())
2726 file_list.append(self.pkg.changes_file)
2729 last_modified = time.time()-os.path.getmtime(f)
2730 if last_modified < int(cnf["Dinstall::SkipTime"]):
2739 def store_changelog(self):
2741 # Skip binary-only upload if it is not a bin-NMU
2742 if not self.pkg.changes['architecture'].has_key('source'):
2743 from daklib.regexes import re_bin_only_nmu
2744 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2747 session = DBConn().session()
2749 # Check if upload already has a changelog entry
2750 query = """SELECT changelog_id FROM changes WHERE source = :source
2751 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2752 if session.execute(query, {'source': self.pkg.changes['source'], \
2753 'version': self.pkg.changes['version'], \
2754 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2758 # Add current changelog text into changelogs_text table, return created ID
2759 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2760 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2762 # Link the ID to the upload's entry in the changes table
2763 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2764 AND version = :version AND architecture = :architecture"""
2765 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2766 'version': self.pkg.changes['version'], \
2767 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
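# A hedged sketch of the tables store_changelog() assumes, reconstructed only
# from the SQL above; column types and constraints are guesses for
# illustration:
#
#   CREATE TABLE changelogs_text (
#       id        SERIAL PRIMARY KEY,
#       changelog TEXT
#   );
#
#   -- 'changes' is assumed to already have source, version and architecture
#   -- columns plus a changelog_id column referencing changelogs_text(id).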