5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 # suppress some deprecation warnings in squeeze related to apt_pkg
62 warnings.filterwarnings('ignore', \
63 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
65 warnings.filterwarnings('ignore', \
66 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: the validated override type for the file

    Calls utils.fubar() (fatal) on an unrecognised or unknown type.
    """
    # Determine the type: an explicit "dbtype" entry wins; otherwise a
    # source-style extension maps to "dsc"; anything else is fatal because
    # NEW processing cannot classify it.
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type against the database
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return type_id
101 ################################################################################
103 # Determine what parts in a .changes are NEW
def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = {}):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @type dsc: Upload.Pkg.dsc dict
    @param dsc: (optional); Dsc dictionary

    @param new: new packages as returned by a previous call to this function, but override information may have changed

    @rtype: dict
    @return: dictionary of NEW components.
    """
    # NOTE(review): mutable default argument `new = {}` is shared across
    # calls; callers should pass a fresh dict to avoid state leaking
    # between uploads — confirm call sites before changing.

    # TODO: This should all use the database instead of parsing the changes
    dbchg = get_dbchange(filename, session)
    # NOTE(review): guard around this warning (dbchg is None) elided in this view
    print "Warning: cannot find changes file in database; won't check byhand"

    # Try to get the Package-Set field from an included .dsc file (if possible).
    for package, entry in build_package_set(dsc, session).items():
        if not new.has_key(package):
            # seed `new` from the source package set (body elided in this view)

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            # byhand handling elided in this view

        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        # Source uploads are keyed differently from binaries
        if file_type == "dsc":
            # pkg assignment elided in this view

        if not new.has_key(pkg):
            # first time we see this package: record its override candidates
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            if old_type == "dsc":
                new[pkg]["priority"] = priority
                new[pkg]["section"] = section
                new[pkg]["type"] = file_type
                new[pkg]["component"] = component
        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        # NOTE(review): `oldsuite is None` guard for this warning elided
        print "WARNING: Invalid suite %s found" % suite

        if oldsuite.overridesuite:
            # Map the upload onto the suite whose overrides we should use
            newsuite = get_suite(oldsuite.overridesuite, session)
            print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                oldsuite.overridesuite, suite)
            del changes["suite"][suite]
            changes["suite"][oldsuite.overridesuite] = 1
            # NOTE(review): else-branch (overridesuite missing from DB) elided
            print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
                oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            for f in dbchg.files:
                # If it's processed, we can ignore it
            print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # NOTE(review): `if warn:` guard for this reporting block elided
    for s in ['stable', 'oldstable']:
        if changes["suite"].has_key(s):
            print "WARNING: overrides will be added for %s!" % s
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
239 ################################################################################
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
                Mutated in place: "section id" / "priority id" are set to the
                database ids, or -1 when unknown or failing a sanity check.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name against the DB; -1 flags "unknown"
        # for the NEW-processing UI.
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Same for the priority.
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # BUG FIX: previously this compared `priority` — the Priority DB
        # object returned by get_priority() (or None) — against the string
        # "source", which can never be equal, so the first half of the
        # check never fired and the second half fired for every dsc.
        # Compare the priority *name* instead.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
283 ###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    # Visitor for an apt_inst tar walk: records members whose mtime falls
    # outside the window [past_cutoff, future_cutoff].
    def __init__(self, future_cutoff, past_cutoff):
        # NOTE(review): one line between the def and these assignments is
        # elided in this view (possibly a reset helper call) — confirm.
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        # name -> mtime maps of offending tar members
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        # apt_inst tar-visitor signature; only Name and MTime are used here.
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
302 ###############################################################################
def prod_maintainer(notes, upload):
    # Interactively compose and send a "prod" mail to the maintainer of
    # an upload, seeded from the existing NEW-queue notes.

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    # NOTE(review): the loop over `notes` binding `note` is elided in this view
    temp_file.write(note.comment)
    editor = os.environ.get("EDITOR","vi")
    # NOTE(review): the edit/confirm loop wrapper is elided in this view
    os.system("%s %s" % (editor, temp_filename))
    temp_fh = utils.open_file(temp_filename)
    prod_message = "".join(temp_fh.readlines())
    print "Prod message:"
    print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
    prompt = "[P]rod, Edit, Abandon, Quit ?"
    # Keep prompting until the (single-letter) answer appears in the prompt
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.search(prompt)
        answer = answer[:1].upper()
    os.unlink(temp_filename)
    # NOTE(review): Edit/Abandon/Quit answer handling elided in this view

    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
354 ################################################################################
def edit_note(note, upload, session, trainee=False):
    # Interactively edit a NEW-queue note in $EDITOR and store it as a
    # NewComment row attached to this upload.

    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    # NOTE(review): the edit/confirm loop wrapper is elided in this view
    os.system("%s %s" % (editor, temp_filename))
    temp_file = utils.open_file(temp_filename)
    newnote = temp_file.read().rstrip()

    print utils.prefix_multi_line_string(newnote," ")
    prompt = "[D]one, Edit, Abandon, Quit ?"
    # Keep prompting until the (single-letter) answer appears in the prompt
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.search(prompt)
        answer = answer[:1].upper()
    os.unlink(temp_filename)
    # NOTE(review): Edit/Abandon/Quit answer handling elided in this view

    # Persist the note; `trainee` marks comments made by trainee ftp-masters
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
    # NOTE(review): session.add/commit elided in this view
392 ###############################################################################
# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    # Query all versions of `source` in any DM-uploadable suite, newest first.
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    # NOTE(review): the final return (presumably `q.first()`) is elided
    # in this view — confirm against upstream.
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    # One row per (suite, version) the source package is published in.
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    # NOTE(review): the final return (presumably `q.all()`) is elided
    # in this view — confirm against upstream.
def get_source_by_package_and_suite(package, suite_name, session):
    """
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name

    @type package: string
    @param package: binary package name

    @type suite_name: string
    @param suite_name: suite the binary must be published in

    @rtype: SQLAlchemy Query
    @return: unevaluated query for the source package(s) that built
             `package` in `suite_name`
    """
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)
def get_suite_version_by_package(package, arch_string, session):
    """
    returns a list of tuples (suite_name, version) for binary package and
    architecture; matches are made against the given architecture and
    'all' (arch-independent packages).

    @type package: string
    @param package: binary package name

    @type arch_string: string
    @param arch_string: architecture to match (besides 'all')

    @rtype: list of tuples
    @return: (suite_name, version) pairs
    """
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
432 class Upload(object):
434 Everything that has to do with an upload processed.
442 ###########################################################################
445 """ Reset a number of internal variables."""
447 # Initialize the substitution template map
450 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
451 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
452 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
453 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
459 self.later_check_files = []
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """
        # NOTE(review): the assignment wrapping these pairs into `msgs`
        # is elided in this view.
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),

        # Append each non-empty message list as a titled section
        for title, messages in msgs:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        # NOTE(review): `msg` initialisation and the final return are
        # elided in this view.
482 ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        # NOTE(review): `else:` introducing the following three assignments
        # is elided in this view.
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable. Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package-tracking server, if one is configured
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
540 ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandatory
        fields.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        # NOTE(review): the `try:` statements for these except clauses are
        # elided in this view; parse failures reject and bail out early.
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))

        # Parse the Files field from the .changes into another dictionary
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (value string -> dict with each token as a key mapped to 1)
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
                del self.pkg.changes[i]
            self.pkg.changes[i] = {}
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        # NOTE(review): `try:` elided in this view.
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # Blank the derived fields so later code sees consistent state
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))

        # Changes was syntactically valid even if we'll reject
653 ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"

        # Handle suite mappings: each SuiteMappings entry is a whitespace
        # separated directive ("map", "silent-map", "map-unreleased",
        # "ignore", "reject", "propup-version") plus its arguments.
        # NOTE(review): the split of `m` into `args`/`mtype` is elided
        # in this view.
        for m in Cnf.ValueList("SuiteMappings"):
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        # Only remap when an upload architecture isn't built
                        # for the source suite
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
            elif mtype == "ignore":
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
710 ###########################################################################
    def binary_file_checks(self, f, session):
        # Validate a single .deb/.udeb listed in the .changes: control
        # fields, filename consistency, architecture, Built-Using, and the
        # existence of its source package. Problems append to self.rejects
        # or self.warnings.
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        # NOTE(review): `try:` for the except clause below is elided in this view
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            # Can't continue, none of the checks on control would work.

        # Check for mandatory "Description:"
        # NOTE(review): try/except wrapper elided; the `+ '\n'` forces a
        # KeyError when Description is missing.
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
            self.rejects.append("%s: Missing Description in binary package" % (f))

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                self.rejects.append("%s: No %s field in control." % (f, field))

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        # NOTE(review): emptiness guard for this rejection elided in this view
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        # NOTE(review): presence guard elided in this view
            provide = re_spacestrip.sub('', provides)
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
        built_using = control.Find("Built-Using")
        # NOTE(review): presence guard and `try:` elided in this view
            entry["built-using"] = []
            for dep in apt_pkg.parse_depends(built_using):
                bu_s, bu_v, bu_e = dep[0]
                # Check that it's an exact match dependency and we have
                # some form of version
                if bu_e != "=" or len(bu_v) < 1:
                    self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))

                # Find the source id for this version
                bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
                # NOTE(review): emptiness guard/else elided in this view
                    self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
                    entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )

        except ValueError, e:
            self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        # NOTE(review): `else:` for this rejection elided in this view
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]

        # "Source: pkg (version)" syntax carries an explicit source version
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        # package name part of the filename must match the control file
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        # version part of the filename (epochs never appear in filenames)
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                (f, file_version, entry["dbtype"], epochless_version))

        # architecture part of the filename
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                    (source_version, f, self.pkg.changes["version"]))

        # Check in the SQL database
        # NOTE(review): `else:` for the non-source-upload path elided in this view
        if not source_exists(source_package, source_version, suites = \
            self.pkg.changes["distribution"].keys(), session = session):
            # Check in one of the other directories
            source_epochless_version = re_no_epoch.sub('', source_version)
            dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
            # NOTE(review): bodies of the byhand/new branches elided in this view
            if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
            elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
            dsc_file_exists = False
            for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                if cnf.has_key("Dir::Queue::%s" % (myq)):
                    if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                        dsc_file_exists = True

            if not dsc_file_exists:
                self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)
    def source_file_checks(self, f, session):
        # Validate a single source file (.dsc / tarball / diff) listed in
        # the .changes; problems append to self.rejects.
        entry = self.pkg.files[f]

        # Parse package name / version / type out of the filename
        m = re_issource.match(f)
        # NOTE(review): match-failure guard elided in this view

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file.
        # .orig.* files keep the upstream version (no Debian revision).
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        # NOTE(review): `else:` for the next line elided in this view
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            # (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            # NOTE(review): loop over `rejects` binding `j` elided in this view
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        # Per-target-suite checks for one file: component mapping/validity,
        # NEW detection, priority sanity, pool location and existing-copy
        # consistency. Problems append to self.rejects.
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            # NOTE(review): early return elided in this view

        # Check we have fields we need to do these checks
        # NOTE(review): an ok-flag pattern around this loop is elided
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        # NOTE(review): "Suite:%s::Components" looks like it's missing a
        # colon (cf. ValueList("Suite::%s::Components") on the next line),
        # which would make this has_key always False — confirm upstream.
        if cnf.has_key("Suite:%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            # NOTE(review): entry["new"] marking elided in this view

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        # NOTE(review): `l is None` guard / else elided in this view
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        # NOTE(review): `if found is None:` branch head elided in this view
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        # NOTE(review): `else:` for the next line elided in this view
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component
# Top-level per-upload file validation: copies files into the holding
# area, rejects .changes files dak already knows about, then dispatches
# each file to binary/source/byhand-specific checks and the per-suite
# checks.  Results accumulate in self.rejects / self.warnings.
# NOTE(review): embedded numbering jumps (1021-1025, 1027, 1029, etc.)
# indicate statements missing from this excerpt — e.g. the loop header
# that binds `f` before copy_to_holding() is not visible here.
1019 def check_files(self, action=True):
1020 file_keys = self.pkg.files.keys()
1026 os.chdir(self.pkg.directory)
1028 ret = holding.copy_to_holding(f)
1030 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1034 # check we already know the changes file
1035 # [NB: this check must be done post-suite mapping]
1036 base_filename = os.path.basename(self.pkg.changes_file)
1038 session = DBConn().session()
1041 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1042 # if in the pool or in a queue other than unchecked, reject
# NOTE(review): the `is not None` arm below is redundant (it is the
# only way the `or` is evaluated) but harmless; kept as-is.
1043 if (dbc.in_queue is None) \
1044 or (dbc.in_queue is not None
1045 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1046 self.rejects.append("%s file already known to dak" % base_filename)
# No match in the DB means the upload is new to dak — not an error.
1047 except NoResultFound, e:
1051 has_binaries = False
1054 for f, entry in self.pkg.files.items():
1055 # Ensure the file does not already exist in one of the accepted directories
1056 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1057 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1058 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1059 self.rejects.append("%s file already exists in the %s directory." % (f, d))
# Filenames must match the taint-free regex before they are used in paths.
1061 if not re_taint_free.match(f):
1062 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1064 # Check the file is readable
1065 if os.access(f, os.R_OK) == 0:
1066 # When running in -n, copy_to_holding() won't have
1067 # generated the reject_message, so we need to.
1069 if os.path.exists(f):
1070 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1072 # Don't directly reject, mark to check later to deal with orig's
1073 # we can find in the pool
1074 self.later_check_files.append(f)
1075 entry["type"] = "unreadable"
1078 # If it's byhand skip remaining checks
1079 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1081 entry["type"] = "byhand"
1083 # Checks for a binary package...
1084 elif re_isadeb.match(f):
1086 entry["type"] = "deb"
1088 # This routine appends to self.rejects/warnings as appropriate
1089 self.binary_file_checks(f, session)
1091 # Checks for a source package...
1092 elif re_issource.match(f):
1095 # This routine appends to self.rejects/warnings as appropriate
1096 self.source_file_checks(f, session)
1098 # Not a binary or source package? Assume byhand...
1101 entry["type"] = "byhand"
1103 # Per-suite file checks
1104 entry["oldfiles"] = {}
1105 for suite in self.pkg.changes["distribution"].keys():
1106 self.per_suite_file_checks(f, suite, session)
1110 # If the .changes file says it has source, it must have source.
1111 if self.pkg.changes["architecture"].has_key("source"):
1113 self.rejects.append("no source found and Architecture line in changes mention source.")
1115 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1116 self.rejects.append("source only uploads are not supported.")
1118 ###########################################################################
# Locate the single .dsc file within self.pkg.files.
# NOTE(review): lines 1132 and 1134-1136 of the original are missing
# from this excerpt — presumably the assignment of dsc_filename and the
# duplicate-detection branch; confirm against upstream dak.
1120 def __dsc_filename(self):
1122 Returns: (Status, Dsc_Filename)
1124 Status: Boolean; True when there was no error, False otherwise
1125 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1130 for name, entry in self.pkg.files.items():
1131 if entry.has_key("type") and entry["type"] == "dsc":
# A second "dsc"-typed entry makes the upload ambiguous — reject.
1133 return False, "cannot process a .changes file with multiple .dsc's."
1137 if not dsc_filename:
1138 return False, "source uploads must contain a dsc file"
1140 return True, dsc_filename
# Parse the upload's .dsc file into self.pkg.dsc, mapping every parse
# failure to a (False, reason) return instead of letting it propagate.
# NOTE(review): the guard that tests `status` before line 1156 and the
# final success `return` (lines 1169-1171) are missing from this excerpt.
1142 def load_dsc(self, action=True, signing_rules=1):
1144 Find and load the dsc from self.pkg.files into self.dsc
1146 Returns: (Status, Reason)
1148 Status: Boolean; True when there was no error, False otherwise
1149 Reason: String; When Status is False this describes the error
1153 (status, dsc_filename) = self.__dsc_filename()
1155 # If status is false, dsc_filename has the reason
1156 return False, dsc_filename
# parse_changes() with dsc_file=1 enforces .dsc field conventions.
1159 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1160 except CantOpenError:
1162 return False, "%s: can't read file." % (dsc_filename)
1163 except ParseChangesError, line:
1164 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1165 except InvalidDscError, line:
1166 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1167 except ChangesUnicodeError:
1168 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1172 ###########################################################################
# Validate the .dsc: mandatory fields, name/version syntax, allowed
# source formats per target suite, Maintainer and Build-Depends
# parseability, version agreement with the .changes, the Files list,
# and finally DB cross-checks.  Appends problems to self.rejects.
# NOTE(review): numbering jumps (1178-1179, 1181-1183, 1185, 1188-1191,
# 1195, 1198, 1201-1202, ...) mean early-return and `return False`
# lines are missing from this excerpt; confirm against upstream dak.
1174 def check_dsc(self, action=True, session=None):
1175 """Returns bool indicating whether or not the source changes are valid"""
1176 # Ensure there is source to check
1177 if not self.pkg.changes["architecture"].has_key("source"):
1180 (status, reason) = self.load_dsc(action=action)
1182 self.rejects.append(reason)
1184 (status, dsc_filename) = self.__dsc_filename()
1186 # If status is false, dsc_filename has the reason
1187 self.rejects.append(dsc_filename)
1190 # Build up the file list of files mentioned by the .dsc
1192 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1193 except NoFilesFieldError:
1194 self.rejects.append("%s: no Files: field." % (dsc_filename))
1196 except UnknownFormatError, format:
1197 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1199 except ParseChangesError, line:
1200 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1203 # Enforce mandatory fields
1204 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1205 if not self.pkg.dsc.has_key(i):
1206 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1209 # Validate the source and version fields
1210 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1211 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1212 if not re_valid_version.match(self.pkg.dsc["version"]):
1213 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1215 # Only a limited list of source formats are allowed in each suite
1216 for dist in self.pkg.changes["distribution"].keys():
1217 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1218 if self.pkg.dsc["format"] not in allowed:
1219 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1221 # Validate the Maintainer field
1223 # We ignore the return value
1224 fix_maintainer(self.pkg.dsc["maintainer"])
1225 except ParseMaintError, msg:
1226 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1227 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1229 # Validate the build-depends field(s)
1230 for field_name in [ "build-depends", "build-depends-indep" ]:
1231 field = self.pkg.dsc.get(field_name)
1233 # Have apt try to parse them...
1235 apt_pkg.ParseSrcDepends(field)
1237 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1239 # Ensure the version number in the .dsc matches the version number in the .changes
1240 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1241 changes_version = self.pkg.files[dsc_filename]["version"]
1243 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1244 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1246 # Ensure the Files field contain only what's expected
1247 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1249 # Ensure source is newer than existing source in target suites
1250 session = DBConn().session()
1251 self.check_source_against_db(dsc_filename, session)
1252 self.check_dsc_against_db(dsc_filename, session)
1254 dbchg = get_dbchange(self.pkg.changes_file, session)
1256 # Finally, check if we're missing any files
1257 for f in self.later_check_files:
1259 # Check if we've already processed this file if we have a dbchg object
1262 for pf in dbchg.files:
1263 if pf.filename == f and pf.processed:
1264 self.notes.append('%s was already processed so we can go ahead' % f)
# Already-processed files are dropped from the upload's file list.
1266 del self.pkg.files[f]
1268 self.rejects.append("Could not find file %s references in changes" % f)
1274 ###########################################################################
# Unpack the source package (after symlinking its files into the
# current temp dir) and copy the relevant debian/changelog entries into
# self.pkg.dsc["bts changelog"] for BTS version tracking.
# NOTE(review): numbering jumps (1279-1283, 1286-1287, 1290-1291, ...)
# mean several statements — including the dsc_filename assignment and
# early returns — are not visible in this excerpt.
1276 def get_changelog_versions(self, source_dir):
1277 """Extracts a the source package and (optionally) grabs the
1278 version history out of debian/changelog for the BTS."""
1282 # Find the .dsc (again)
1284 for f in self.pkg.files.keys():
1285 if self.pkg.files[f]["type"] == "dsc":
1288 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1289 if not dsc_filename:
1292 # Create a symlink mirror of the source files in our temporary directory
1293 for f in self.pkg.files.keys():
1294 m = re_issource.match(f)
1296 src = os.path.join(source_dir, f)
1297 # If a file is missing for whatever reason, give up.
1298 if not os.path.exists(src):
# Orig tarballs already located in the pool are handled separately below.
1301 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1302 self.pkg.orig_files[f].has_key("path"):
1304 dest = os.path.join(os.getcwd(), f)
1305 os.symlink(src, dest)
1307 # If the orig files are not a part of the upload, create symlinks to the
1309 for orig_file in self.pkg.orig_files.keys():
1310 if not self.pkg.orig_files[orig_file].has_key("path"):
1312 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1313 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1315 # Extract the source
1317 unpacked = UnpackedSource(dsc_filename)
1319 self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
# BTS version tracking is optional; skip when the config key is unset.
1322 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1325 # Get the upstream version
1326 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1327 if re_strip_revision.search(upstr_version):
1328 upstr_version = re_strip_revision.sub('', upstr_version)
1330 # Ensure the changelog file exists
1331 changelog_file = unpacked.get_changelog_file()
1332 if changelog_file is None:
1333 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1336 # Parse the changelog
1337 self.pkg.dsc["bts changelog"] = ""
1338 for line in changelog_file.readlines():
1339 m = re_changelog_versions.match(line)
1341 self.pkg.dsc["bts changelog"] += line
1342 changelog_file.close()
1345 # Check we found at least one revision in the changelog
1346 if not self.pkg.dsc["bts changelog"]:
1347 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
# Unpack the source in a temporary directory (via
# get_changelog_versions) and then remove that directory, retrying the
# removal after a chmod -R u+rwx when permissions block the first rmtree.
# NOTE(review): numbering jumps (1350, 1353-1354, 1356, 1358-1360,
# 1363-1367, 1369, 1371, 1373, 1376, 1379) hide the early returns,
# os.chdir calls and try/except scaffolding in this excerpt.
1349 def check_source(self):
1351 # a) there's no source
1352 if not self.pkg.changes["architecture"].has_key("source"):
1355 tmpdir = utils.temp_dirname()
1357 # Move into the temporary directory
1361 # Get the changelog version history
1362 self.get_changelog_versions(cwd)
1364 # Move back and cleanup the temporary tree
1368 shutil.rmtree(tmpdir)
# EACCES is the only errno handled gracefully; anything else is fatal.
1370 if e.errno != errno.EACCES:
1372 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1374 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1375 # We probably have u-r or u-w directories so chmod everything
1377 cmd = "chmod -R u+rwx %s" % (tmpdir)
1378 result = os.system(cmd)
1380 utils.fubar("'%s' failed with result %s." % (cmd, result))
1381 shutil.rmtree(tmpdir)
1382 except Exception, e:
1383 print "foobar2 (%s)" % e
1384 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1386 ###########################################################################
# Validate/compute the checksum fields of the .changes and .dsc for
# every hash dak knows about, taking the changes-format version into
# account (older formats may predate a given hash field).
# NOTE(review): lines 1392, 1394, 1398, 1404, 1408, 1411 are missing
# from this excerpt — at least one is the `else:` between the two
# format-parsing branches below.
1387 def ensure_hashes(self):
1388 # Make sure we recognise the format of the Files: field in the .changes
1389 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1390 if len(format) == 2:
1391 format = int(format[0]), int(format[1])
1393 format = int(float(format[0])), 0
1395 # We need to deal with the original changes blob, as the fields we need
1396 # might not be in the changes dict serialised into the .dak anymore.
1397 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1399 # Copy the checksums over to the current changes dict. This will keep
1400 # the existing modifications to it intact.
1401 for field in orig_changes:
1402 if field.startswith('checksums-'):
1403 self.pkg.changes[field] = orig_changes[field]
1405 # Check for unsupported hashes
1406 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1407 self.rejects.append(j)
1409 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1410 self.rejects.append(j)
1412 # We have to calculate the hash if we have an earlier changes version than
1413 # the hash appears in rather than require it exist in the changes file
1414 for hashname, hashfunc, version in utils.known_hashes:
1415 # TODO: Move _ensure_changes_hash into this class
1416 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1417 self.rejects.append(j)
# The .dsc hashes only matter for sourceful uploads.
1418 if "source" in self.pkg.changes["architecture"]:
1419 # TODO: Move _ensure_dsc_hash into this class
1420 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1421 self.rejects.append(j)
def check_hashes(self):
    """Verify md5sums and sizes for the .changes and .dsc file lists.

    Every problem reported by the utils helpers is recorded in
    self.rejects; per-hash-field handling is then delegated to
    ensure_hashes().
    """
    self.rejects.extend(utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum))
    self.rejects.extend(utils.check_size(".changes", self.pkg.files))
    self.rejects.extend(utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum))
    self.rejects.extend(utils.check_size(".dsc", self.pkg.dsc_files))
    self.ensure_hashes()
1438 ###########################################################################
# Make every orig tarball referenced by the .dsc available in
# target_dir, symlinking candidates from the pool or other queue
# directories after verifying their size and md5sum.
# NOTE(review): numbering jumps (1446-1451, 1455-1456, 1459-1460,
# 1464-1465, 1468, 1470-1471, 1473, 1476-1485, ...) hide the docstring
# tail, the symlink-validation returns and the loop-continue logic.
1440 def ensure_orig(self, target_dir='.', session=None):
1442 Ensures that all orig files mentioned in the changes file are present
1443 in target_dir. If they do not exist, they are symlinked into place.
1445 An list containing the symlinks that were created are returned (so they
1452 for filename, entry in self.pkg.dsc_files.iteritems():
1453 if not re_is_orig_source.match(filename):
1454 # File is not an orig; ignore
1457 if os.path.exists(filename):
1458 # File exists, no need to continue
# Helper: symlink `path` into target_dir only if its size and md5sum
# match what the .dsc declares for this file.
1461 def symlink_if_valid(path):
1462 f = utils.open_file(path)
1463 md5sum = apt_pkg.md5sum(f)
1466 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1467 expected = (int(entry['size']), entry['md5sum'])
1469 if fingerprint != expected:
1472 dest = os.path.join(target_dir, filename)
1474 os.symlink(path, dest)
1475 symlinked.append(dest)
1481 session_ = DBConn().session()
# First candidate source: pool files with a matching name.
1486 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1487 poolfile_path = os.path.join(
1488 poolfile.location.path, poolfile.filename
1491 if symlink_if_valid(poolfile_path):
1501 # Look in some other queues for the file
1502 queues = ('New', 'Byhand', 'ProposedUpdates',
1503 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1505 for queue in queues:
1506 if not cnf.get('Dir::Queue::%s' % queue):
1509 queuefile_path = os.path.join(
1510 cnf['Dir::Queue::%s' % queue], filename
1513 if not os.path.exists(queuefile_path):
1514 # Does not exist in this queue
1517 if symlink_if_valid(queuefile_path):
1522 ###########################################################################
# Run lintian against the upload with the tag set from
# Dinstall::LintianTags and turn matching tags into rejects.
# Only applies to sourceful uploads targeting unstable/experimental.
# NOTE(review): numbering jumps (1534-1535, 1539-1542, 1545-1547,
# 1551-1553, 1557-1558, 1567-1569, 1577, 1579-1581, 1586-1589, ...)
# hide the early returns, the break out of the distribution loop and
# the symlink cleanup body in this excerpt.
1524 def check_lintian(self):
1526 Extends self.rejects by checking the output of lintian against tags
1527 specified in Dinstall::LintianTags.
1532 # Don't reject binary uploads
1533 if not self.pkg.changes['architecture'].has_key('source'):
1536 # Only check some distributions
1537 for dist in ('unstable', 'experimental'):
1538 if dist in self.pkg.changes['distribution']:
1543 # If we do not have a tagfile, don't do anything
1544 tagfile = cnf.get("Dinstall::LintianTags")
1548 # Parse the yaml file
1549 sourcefile = file(tagfile, 'r')
1550 sourcecontent = sourcefile.read()
# NOTE(review): yaml.load on an operator-controlled file; safe_load
# would be preferable if the YAML library in use supports it.
1554 lintiantags = yaml.load(sourcecontent)['lintian']
1555 except yaml.YAMLError, msg:
1556 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1559 # Try and find all orig mentioned in the .dsc
1560 symlinked = self.ensure_orig()
1562 # Setup the input file for lintian
1563 fd, temp_filename = utils.temp_filename()
1564 temptagfile = os.fdopen(fd, 'w')
1565 for tags in lintiantags.values():
1566 temptagfile.writelines(['%s\n' % x for x in tags])
1570 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1571 (temp_filename, self.pkg.changes_file)
1573 result, output = commands.getstatusoutput(cmd)
1575 # Remove our tempfile and any symlinks we created
1576 os.unlink(temp_filename)
1578 for symlink in symlinked:
1582 utils.warn("lintian failed for %s [return code: %s]." % \
1583 (self.pkg.changes_file, result))
1584 utils.warn(utils.prefix_multi_line_string(output, \
1585 " [possible output:] "))
1590 [self.pkg.changes_file, "check_lintian"] + list(txt)
1594 parsed_tags = parse_lintian_output(output)
1595 self.rejects.extend(
1596 generate_reject_messages(parsed_tags, lintiantags, log=log)
1599 ###########################################################################
# Normalise the Urgency field of sourceful uploads: default it when
# absent, lower-case it, and fall back to Urgency::Default (with a
# warning) when the value is not in Urgency::Valid.
# NOTE(review): original line 1601 is missing from this excerpt —
# possibly a `cnf = Config()` binding; confirm against upstream dak.
1600 def check_urgency(self):
1602 if self.pkg.changes["architecture"].has_key("source"):
1603 if not self.pkg.changes.has_key("urgency"):
1604 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1605 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1606 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1607 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1608 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1609 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1611 ###########################################################################
1613 # Sanity check the time stamps of files inside debs.
1614 # [Files in the near future cause ugly warnings and extreme time
1615 # travel can cause errors on extraction]
# Reject .debs whose control/data tar members carry timestamps in the
# (near) future or before Dinstall::PastCutoffYear — both break
# extraction or produce ugly warnings.
# NOTE(review): numbering jumps (1618-1619, 1623, 1626-1627, 1630-1631,
# 1636-1641, 1643, 1649, 1651, 1657) hide the `Cnf`/try scaffolding and
# the emptiness checks on future_files/ancient_files in this excerpt.
# Note the capital `Cnf` here vs lower-case `cnf` elsewhere in the file.
1617 def check_timestamps(self):
1620 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1621 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1622 tar = TarTime(future_cutoff, past_cutoff)
1624 for filename, entry in self.pkg.files.items():
1625 if entry["type"] == "deb":
1628 deb_file = utils.open_file(filename)
1629 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1632 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1633 except SystemError, e:
1634 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1635 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1638 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1642 future_files = tar.future_files.keys()
1644 num_future_files = len(future_files)
1645 future_file = future_files[0]
1646 future_date = tar.future_files[future_file]
1647 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1648 % (filename, num_future_files, future_file, time.ctime(future_date)))
1650 ancient_files = tar.ancient_files.keys()
1652 num_ancient_files = len(ancient_files)
1653 ancient_file = ancient_files[0]
1654 ancient_date = tar.ancient_files[ancient_file]
1655 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1656 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1658 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
# Decide whether this upload is sponsored: the signer's uid email/name
# is compared against the Maintainer and Changed-By fields; when the
# signer matches neither, sponsoremail is recorded for sourceful
# uploads signed by a known email alias.
# NOTE(review): the return statements (lines 1662, 1664-1668, 1674-...)
# are missing from this excerpt, so the boolean contract seen by
# callers (e.g. check_dm_upload) cannot be confirmed here.
1660 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1661 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1663 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1669 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1670 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1671 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1672 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1673 self.pkg.changes["sponsoremail"] = uid_email
1678 ###########################################################################
1679 # check_signed_by_key checks
1680 ###########################################################################
# Entry point of the signer checks: resolve the signing fingerprint,
# verify it has a uid, then run the permission and transition checks.
# NOTE(review): lines 1685, 1689-1690, 1692-1693, 1695, 1697-1698,
# 1701, 1704-1707 are missing — the guards between the fingerprint and
# uid checks (and any session cleanup) are not visible here.
1682 def check_signed_by_key(self):
1683 """Ensure the .changes is signed by an authorized uploader."""
1684 session = DBConn().session()
1686 # First of all we check that the person has proper upload permissions
1687 # and that this upload isn't blocked
1688 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1691 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1694 # TODO: Check that import-keyring adds UIDs properly
1696 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1699 # Check that the fingerprint which uploaded has permission to do so
1700 self.check_upload_permissions(fpr, session)
1702 # Check that this package is not in a transition
1703 self.check_transition(session)
1708 def check_upload_permissions(self, fpr, session):
1709 # Check any one-off upload blocks
1710 self.check_upload_blocks(fpr, session)
1712 # Start with DM as a special case
1713 # DM is a special case unfortunately, so we check it first
1714 # (keys with no source access get more access than DMs in one
1715 # way; DMs can only upload for their packages whether source
1716 # or binary, whereas keys with no access might be able to
1717 # upload some binaries)
1718 if fpr.source_acl.access_level == 'dm':
1719 self.check_dm_upload(fpr, session)
1721 # Check source-based permissions for other types
1722 if self.pkg.changes["architecture"].has_key("source") and \
1723 fpr.source_acl.access_level is None:
1724 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1725 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1726 self.rejects.append(rej)
1728 # If not a DM, we allow full upload rights
1729 uid_email = "%s@debian.org" % (fpr.uid.uid)
1730 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1733 # Check binary upload permissions
1734 # By this point we know that DMs can't have got here unless they
1735 # are allowed to deal with the package concerned so just apply
1737 if fpr.binary_acl.access_level == 'full':
1740 # Otherwise we're in the map case
1741 tmparches = self.pkg.changes["architecture"].copy()
1742 tmparches.pop('source', None)
1744 for bam in fpr.binary_acl_map:
1745 tmparches.pop(bam.architecture.arch_string, None)
1747 if len(tmparches.keys()) > 0:
1748 if fpr.binary_reject:
1749 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1750 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1751 self.rejects.append(rej)
1753 # TODO: This is where we'll implement reject vs throw away binaries later
1754 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1755 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1756 rej += "\nFingerprint: %s", (fpr.fingerprint)
1757 self.rejects.append(rej)
# Apply manual UploadBlock table entries: a block matches on source
# (and optionally version), and triggers on either the fingerprint or
# the uid of the signer.
# NOTE(review): lines 1763, 1768-1769, 1771 are missing — presumably
# the `return rej` of the inner template helper; confirm upstream.
1760 def check_upload_blocks(self, fpr, session):
1761 """Check whether any upload blocks apply to this source, source
1762 version, uid / fpr combination"""
# Helper: build the common prefix of a block-reject message.
1764 def block_rej_template(fb):
1765 rej = 'Manual upload block in place for package %s' % fb.source
1766 if fb.version is not None:
1767 rej += ', version %s' % fb.version
1770 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1771 # version is None if the block applies to all versions
1772 if fb.version is None or fb.version == self.pkg.changes['version']:
1773 # Check both fpr and uid - either is enough to cause a reject
1774 if fb.fpr is not None:
1775 if fb.fpr.fingerprint == fpr.fingerprint:
1776 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1777 if fb.uid is not None:
1778 if fb.uid == fpr.uid:
1779 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
# Enforce the Debian Maintainer upload rules from GR 2007-003: no NEW
# or BYHAND files, the source must already exist with
# DM-Upload-Allowed: yes, the signer may not be sponsoring, must be
# listed in Maintainer/Uploaders, and may not hijack binaries from
# other source packages.
# NOTE(review): numbering jumps (1785, 1789, 1792-1796, 1798-1799,
# 1802-1803, 1807-1808, 1811, 1814, 1818, 1820, 1825-1828, 1830-1831)
# hide returns and the loop-else structure around line 1829.
1782 def check_dm_upload(self, fpr, session):
1783 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1784 ## none of the uploaded packages are NEW
1786 for f in self.pkg.files.keys():
1787 if self.pkg.files[f].has_key("byhand"):
1788 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1790 if self.pkg.files[f].has_key("new"):
1791 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1797 r = get_newest_source(self.pkg.changes["source"], session)
1800 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1801 self.rejects.append(rej)
1804 if not r.dm_upload_allowed:
1805 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1806 self.rejects.append(rej)
1809 ## the Maintainer: field of the uploaded .changes file corresponds with
1810 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1812 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1813 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1815 ## the most recent version of the package uploaded to unstable or
1816 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1817 ## non-developer maintainers cannot NMU or hijack packages)
1819 # srcuploaders includes the maintainer
1821 for sup in r.srcuploaders:
1822 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1823 # Eww - I hope we never have two people with the same name in Debian
1824 if email == fpr.uid.uid or name == fpr.uid.name:
1829 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1832 ## none of the packages are being taken over from other source packages
1833 for b in self.pkg.changes["binary"].keys():
1834 for suite in self.pkg.changes["distribution"].keys():
1835 for s in get_source_by_package_and_suite(b, suite, session):
1836 if s.source != self.pkg.changes["source"]:
1837 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
# Reject sourceful unstable uploads of packages listed in an ongoing
# release transition (Dinstall::Reject::ReleaseTransitions YAML) until
# the transition's target source reaches the expected version in
# testing.
# NOTE(review): numbering jumps (1842-1843, 1845, 1851-1852, 1854,
# 1857-1858, 1862, 1867, 1869-1870, 1875-1876, 1881, 1885, 1889,
# 1891-1892, ...) hide returns, the `expected = t["new"]` binding used
# below, and parts of the reject-message construction.
1841 def check_transition(self, session):
1844 sourcepkg = self.pkg.changes["source"]
1846 # No sourceful upload -> no need to do anything else, direct return
1847 # We also work with unstable uploads, not experimental or those going to some
1848 # proposed-updates queue
1849 if "source" not in self.pkg.changes["architecture"] or \
1850 "unstable" not in self.pkg.changes["distribution"]:
1853 # Also only check if there is a file defined (and existant) with
1855 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1856 if transpath == "" or not os.path.exists(transpath):
1859 # Parse the yaml file
1860 sourcefile = file(transpath, 'r')
1861 sourcecontent = sourcefile.read()
1863 transitions = yaml.load(sourcecontent)
1864 except yaml.YAMLError, msg:
1865 # This shouldn't happen, there is a wrapper to edit the file which
1866 # checks it, but we prefer to be safe than ending up rejecting
1868 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1871 # Now look through all defined transitions
1872 for trans in transitions:
1873 t = transitions[trans]
1874 source = t["source"]
1877 # Will be None if nothing is in testing.
1878 current = get_source_in_suite(source, "testing", session)
1879 if current is not None:
# NOTE(review): `expected` is bound on a line missing from this
# excerpt (presumably t["new"]); it is the version testing must reach.
1880 compare = apt_pkg.VersionCompare(current.version, expected)
1882 if current is None or compare < 0:
1883 # This is still valid, the current version in testing is older than
1884 # the new version we wait for, or there is none in testing yet
1886 # Check if the source we look at is affected by this.
1887 if sourcepkg in t['packages']:
1888 # The source is affected, lets reject it.
1890 rejectmsg = "%s: part of the %s transition.\n\n" % (
1893 if current is not None:
1894 currentlymsg = "at version %s" % (current.version)
1896 currentlymsg = "not present in testing"
1898 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1900 rejectmsg += "\n".join(textwrap.wrap("""Your package
1901 is part of a testing transition designed to get %s migrated (it is
1902 currently %s, we need version %s). This transition is managed by the
1903 Release Team, and %s is the Release-Team member responsible for it.
1904 Please mail debian-release@lists.debian.org or contact %s directly if you
1905 need further assistance. You might want to upload to experimental until this
1906 transition is done."""
1907 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1909 self.rejects.append(rejectmsg)
1912 ###########################################################################
1913 # End check_signed_by_key checks
1914 ###########################################################################
# Assemble the human-readable summary (and short summary) of the
# upload from the per-file summary, the Changes field (reformatted for
# direport) and the override entries, then append announce output.
# NOTE(review): lines 1918, 1920, 1922, 1925-1926, 1928, 1930, 1932
# are missing — likely blank lines plus the guards that decide when
# the Changes/override sections are appended.
1916 def build_summaries(self):
1917 """ Build a summary of changes the upload introduces. """
1919 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1921 short_summary = summary
1923 # This is for direport's benefit...
1924 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1927 summary += "Changes: " + f
1929 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1931 summary += self.announce(short_summary, 0)
1933 return (summary, short_summary)
1935 ###########################################################################
# Send bug-close mails for every bug listed in the .changes Closes
# field and extend the returned summary accordingly.
# NOTE(review): lines 1941, 1944-1945, 1947-1948, 1950-1952, 1954,
# 1956-1960, 1962, 1964-1965, 1973-1974, 1978, 1982, 1985-1989 are
# missing — including the `if action:` gating and the final
# `return summary`; confirm against upstream dak.
1937 def close_bugs(self, summary, action):
1939 Send mail to close bugs as instructed by the closes field in the changes file.
1940 Also add a line to summary if any work was done.
1942 @type summary: string
1943 @param summary: summary text, as given by L{build_summaries}
1946 @param action: Set to false no real action will be done.
1949 @return: summary. If action was taken, extended by the list of closed bugs.
1953 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1955 bugs = self.pkg.changes["closes"].keys()
1961 summary += "Closing bugs: "
1963 summary += "%s " % (bug)
1966 self.Subst["__BUG_NUMBER__"] = bug
# Uploads touching stable get an extra warning paragraph in the mail.
1967 if self.pkg.changes["distribution"].has_key("stable"):
1968 self.Subst["__STABLE_WARNING__"] = """
1969 Note that this package is not part of the released stable Debian
1970 distribution. It may have dependencies on other unreleased software,
1971 or other instabilities. Please take care if you wish to install it.
1972 The update will eventually make its way into the next released Debian
1975 self.Subst["__STABLE_WARNING__"] = ""
1976 mail_message = utils.TemplateSubst(self.Subst, template)
1977 utils.send_mail(mail_message)
1979 # Clear up after ourselves
1980 del self.Subst["__BUG_NUMBER__"]
1981 del self.Subst["__STABLE_WARNING__"]
1983 if action and self.logger:
1984 self.logger.log(["closing bugs"] + bugs)
1990 ###########################################################################
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.
        """
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        # NOTE(review): the early-return body of this guard is not visible here.
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        # Announce once per distinct announce list across all target suites.
        # NOTE(review): initialisation of lists_done/summary is not visible here.
        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            if suite is None: continue
            announce_list = suite.announce
            # Skip suites with no list or lists already announced to.
            if announce_list == "" or lists_done.has_key(announce_list):

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            # Bcc the package-tracking address for source uploads, if configured.
            if cnf.get("Dinstall::TrackingServer") and \
               self.pkg.changes["architecture"].has_key("source"):
                trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

            mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
            utils.send_mail(mail_message)

            del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Optionally also close bugs (appends to the returned summary text).
        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]
2050 ###########################################################################
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        queue_build.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """
        stats = SummaryStats()

        self.logger.log(["installing changes", self.pkg.changes_file])

        # NOTE(review): initialisation of poolfiles (and cnf) is not visible
        # in this view.

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Look up the old pool file and snapshot its checksums/size.
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                # Don't reference the old file from this changes
                # NOTE(review): the enclosing "for p in pfs:" loop and the
                # removal statement are not visible in this view.
                if p.file_id == oldf.file_id:

                poolfiles.append(newf)

                # Fix up the DSC references
                for df in source.srcfiles:
                    if df.poolfile.file_id == oldf.file_id:
                        # Add a new DSC entry and mark the old one for deletion
                        # Don't do it in the loop so we don't change the thing we're iterating over
                        # NOTE(review): construction of newdscf is not visible here.
                        newdscf.source_id = source.source_id
                        newdscf.poolfile_id = newf.file_id
                        session.add(newdscf)

        # Make sure that our source object is up-to-date
        session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        copy_changes = dict([(x.copychanges, '')
                             for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
                             if x.copychanges is not None])

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        # Our SQL session will automatically start a new transaction after
        # the last commit — the commit call itself is not visible in this view.

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        # Record urgency for source uploads if an urgency log dir is configured.
        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        # Send the "accepted" mail and announce the upload (action=1).
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                # Write the .versions file (changelog version history) via a
                # temp file then rename, so readers never see a partial file.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                # NOTE(review): the "for f in poolfiles:" loop header is not
                # visible in this view.
                q.add_file_from_pool(f)

        stats.accept_count += 1
    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """
        # Abandon the check if override disparity checks have been disabled
        # NOTE(review): the return statement for this guard is not visible here.
        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):

        # Delegate the actual comparison to the changes object; summary is
        # empty when everything matches.
        summary = self.pkg.check_override()

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
2256 ###########################################################################
    def remove(self, from_dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked

        Also removes the package from holding area.

        @type from_dir: string
        @param from_dir: directory to remove the upload's files from;
                         defaults to the upload's own directory.
        """
        if from_dir is None:
            from_dir = self.pkg.directory
        # NOTE(review): 'h' is presumably a Holding() instance created on a
        # line not visible in this view — confirm against the full source.

        # Remove every file of the upload from both the source directory and
        # the holding area (if present there).
        for f in self.pkg.files.keys():
            os.unlink(os.path.join(from_dir, f))
            if os.path.exists(os.path.join(h.holding_dir, f)):
                os.unlink(os.path.join(h.holding_dir, f))

        # Finally remove the .changes file itself from both places.
        os.unlink(os.path.join(from_dir, self.pkg.changes_file))
        if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
            os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2277 ###########################################################################
    def move_to_queue (self, queue):
        """
        Move files to a destination queue using the permissions in the table

        @param queue: destination policy queue object providing path,
                      change_perms and perms (octal permission strings).
        """
        # NOTE(review): 'h' is presumably a Holding() instance created on a
        # line not visible in this view — confirm against the full source.
        # Permissions come from the queue table as octal strings, hence int(x, 8).
        utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
                   queue.path, perms=int(queue.change_perms, 8))
        for f in self.pkg.files.keys():
            utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2289 ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary
        """
        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            # NOTE(review): the continue statement for this guard is not
            # visible in this view.
            if os.access(file_entry, os.R_OK) == 0:

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            # O_EXCL claims the destination atomically, so two processes can
            # never both think they own it.
            # NOTE(review): the try/except OSError wrapper around this open
            # is not visible in this view.
            dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)

            # File exists? Let's find a new name by adding a number
            if e.errno == errno.EEXIST:
                dest_file = utils.find_next_free(dest_file, 255)
            except NoFreeFilenameError:
                # Something's either gone badly Pete Tong, or
                # someone is trying to exploit us.
                utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))

            # Make sure we really got it
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
2339 ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @param notes: existing processing notes to prefill the editor with.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            # Prefill the editor buffer with any existing notes.
            # NOTE(review): the "for note in notes:" loop header is not
            # visible in this view.
            temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                            % (note.author, note.version, note.notedate, note.comment))
            editor = os.environ.get("EDITOR","vi")
            # NOTE(review): initialisation of 'answer' to 'E' is not visible
            # in this view.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                # Keep prompting until the answer is one of the listed letters.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                answer = answer[:1].upper()
            os.unlink(temp_filename)
            # NOTE(review): the handling of the A/Q answers is not visible
            # in this view.

        print "Rejecting.\n"

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        # NOTE(review): the automatic-vs-manual branching around the
        # following Subst assignments is not fully visible in this view.
        # Automatic rejection: canned rejector address, no manual message.
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)

        # Build up the rejection email
        # Manual rejection: the operator is the rejector and their message
        # is included verbatim.
        user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
        self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
        self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        self.Subst["__REJECT_MESSAGE__"] = ""
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        self.logger.log(["rejected", self.pkg.changes_file])
2438 ################################################################################
    def in_override_p(self, package, component, suite, binary_type, filename, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: database id of the component

        @type suite: int
        @param suite: database id of the suite

        @type binary_type: string
        @param binary_type: type of the package

        @type filename: string
        @param filename: filename we check

        @return: the database result. But noone cares anyway.
        """
        # NOTE(review): the file_type assignment for the source case is not
        # visible in this view (presumably file_type = "dsc").
        if binary_type == "": # must be source
        file_type = binary_type

        # Override suite name; used for example with proposed-updates
        oldsuite = get_suite(suite, session)
        if (not oldsuite is None) and oldsuite.overridesuite:
            suite = oldsuite.overridesuite

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        # NOTE(review): the guard/unpacking that turns the query result list
        # into a single row is not visible in this view.
        self.pkg.files[filename]["override section"] = result.section.section
        self.pkg.files[filename]["override priority"] = result.priority.priority
2489 ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        Return the highest version of a package seen in the given suite or
        in any suite that "Enhances" it (per the VersionChecks config).

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name
        """
        # NOTE(review): initialisation of anyversion (presumably to None)
        # and the final assignment/return are not visible in this view.
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                # Keep the maximum version found (dpkg version comparison).
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2510 ################################################################################
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: file the version belongs to (used in messages)

        @type new_version: string
        @param new_version: version of the incoming upload

        @type sourceful: bool
        @param sourceful: whether this is a sourceful upload; MustBeNewerThan
                          is only enforced for sourceful uploads.
        """
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                # dpkg-style comparison: <0 older, 0 equal, >0 newer.
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # vercmp < 1 means new_version <= existent_version.
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                # vercmp > -1 means new_version >= existent_version.
                if suite in must_be_older_than and vercmp > -1:
                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        # NOTE(review): the "if not add_version:" branch header
                        # preceding this comment block is not visible in this view.
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                    # No distribution-version mapping: plain reject.
                    self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2589 ################################################################################
    def check_binary_against_db(self, filename, session):
        """Reject the binary if its version violates cross-suite rules or an
        identical package/version/arch already exists in the archive."""
        # Ensure version is sane
        self.cross_suite_version_check( \
            get_suite_version_by_package(self.pkg.files[filename]["package"], \
                self.pkg.files[filename]["architecture"], session),
            filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        # NOTE(review): the guard testing whether the query matched (e.g.
        # q.count() > 0) is not visible in this view.
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2605 ################################################################################
    def check_source_against_db(self, filename, session):
        """Run the cross-suite version check for the upload's source package."""
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        # NOTE(review): the tail of this call (presumably sourceful=True) is
        # not visible in this view.
        self.cross_suite_version_check( \
            get_suite_version_by_source(source, session), filename, version,
2616 ################################################################################
    def check_dsc_against_db(self, filename, session):
        """
        Locate every file the .dsc references, in incoming, the pool or the
        queue directories, and record where each orig tarball was found in
        self.pkg.orig_files; reject on missing files or checksum mismatches.

        @warning: NB: this function can remove entries from the 'files' index [if
        the orig tarball is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.
        """
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            # Case 1: the file is part of this upload (in incoming).
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # NOTE(review): the loop header over ql and the removal
                # statement are not visible in this view.
                    if not i.filename.endswith(dsc_name):

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # the old dak.
                #
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                # Ignore exact matches for .orig.tar.gz
                if re_is_orig_source.match(dsc_name):
                    if self.pkg.files.has_key(dsc_name) and \
                       int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                       self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                        self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))

                        # TODO: Don't delete the entry, just mark it as not needed
                        # This would fix the stupidity of changing something we often iterate over
                        # whilst we're doing it
                        del self.pkg.files[dsc_name]
                        dsc_entry["files id"] = i.file_id
                        if not orig_files.has_key(dsc_name):
                            orig_files[dsc_name] = {}
                        orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)

                        # Don't bitch that we couldn't find this file later
                        self.later_check_files.remove(dsc_name)

                # NOTE(review): the non-orig / non-matching branch leading to
                # this reject is not visible in this view.
                self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            # Case 2: an orig tarball not in this upload — look in the pool.
            elif re_is_orig_source.match(dsc_name):
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                    if not i.filename.endswith(dsc_name):

                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody. So we need to choose the right one.

                # default to something sane in case we don't match any or have only one
                # NOTE(review): the multi-match selection loop surrounding the
                # next stanza is not visible in this view.
                        old_file = os.path.join(i.location.path, i.filename)
                        old_file_fh = utils.open_file(old_file)
                        actual_md5 = apt_pkg.md5sum(old_file_fh)
                        actual_size = os.stat(old_file)[stat.ST_SIZE]
                        if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):

                    old_file = os.path.join(i.location.path, i.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id

                # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                # Not there? Check the queue directories...
                for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                    # NOTE(review): the continue for unconfigured queues is
                    # not visible in this view.
                    if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                    in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                    if os.path.exists(in_otherdir):
                        in_otherdir_fh = utils.open_file(in_otherdir)
                        actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                        in_otherdir_fh.close()
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                        if not orig_files.has_key(dsc_name):
                            orig_files[dsc_name] = {}
                        orig_files[dsc_name]["path"] = in_otherdir

                # Not found anywhere: reject.
                self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))

            # Case 3: a non-orig file the .dsc references but the upload lacks.
            self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))

            # Finally verify the checksum/size the .dsc declares against what
            # we actually found.
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))
2761 ################################################################################
2762 # This is used by process-new and process-holding to recheck a changes file
2763 # at the time we're running. It mainly wraps various other internal functions
2764 # and is similar to accepted_checks - these should probably be tidied up
    def recheck(self, session):
        """Re-run database-dependent checks on every file of the upload;
        used by process-new / process-holding (see comment above)."""
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            # NOTE(review): the continue statement for this guard is not
            # visible in this view.
            if not self.pkg.files.has_key(f):

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, \
                       suites = self.pkg.changes["distribution"].keys(), session = session):
                    # The DB doesn't have it: the .dsc may still be sitting
                    # in one of the policy queues.
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            # NOTE(review): the found-flag assignment for this
                            # branch is not visible in this view.
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)
2800 ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        """Re-run database-dependent checks at accept/install time and handle
        propagation ("propup") to other suites via the override tables."""
        # Recheck anything that relies on the database; since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        # NOTE(review): initialisation of propogate/nopropogate dicts is not
        # visible in this view.

        # Find the .dsc (again)
        # NOTE(review): the dsc_filename assignment inside this loop is not
        # visible in this view.
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            # NOTE(review): the continue for this guard is not visible here.
            if not self.pkg.files.has_key(checkfile):

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, \
                       suites = self.pkg.changes["distribution"].keys(), \
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propogate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propogate[suite] = 1
                    # NOTE(review): the else: branch header for this
                    # assignment is not visible in this view.
                    nopropogate[suite] = 1

        # A suite is only added when every file of the upload had an override.
        for suite in propogate.keys():
            if suite in nopropogate:
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2860 ################################################################################
2861 # If any file of an upload has a recent mtime then chances are good
2862 # the file is still being uploaded.
    def upload_too_new(self):
        """Return whether any file of the upload was modified more recently
        than Dinstall::SkipTime seconds ago (i.e. may still be uploading)."""
        # NOTE(review): the cwd save/restore (try/finally) around the chdir
        # and the result accumulation/return are not visible in this view.

        # Move back to the original directory to get accurate time stamps
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)

        # NOTE(review): the "for f in file_list:" loop header is not visible
        # in this view.
        last_modified = time.time()-os.path.getmtime(f)
        if last_modified < int(cnf["Dinstall::SkipTime"]):
2885 def store_changelog(self):
2887 # Skip binary-only upload if it is not a bin-NMU
2888 if not self.pkg.changes['architecture'].has_key('source'):
2889 from daklib.regexes import re_bin_only_nmu
2890 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2893 session = DBConn().session()
2895 # Check if upload already has a changelog entry
2896 query = """SELECT changelog_id FROM changes WHERE source = :source
2897 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2898 if session.execute(query, {'source': self.pkg.changes['source'], \
2899 'version': self.pkg.changes['version'], \
2900 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2904 # Add current changelog text into changelogs_text table, return created ID
2905 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2906 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2908 # Link ID to the upload available in changes table
2909 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2910 AND version = :version AND architecture = :architecture"""
2911 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2912 'version': self.pkg.changes['version'], \
2913 'architecture': " ".join(self.pkg.changes['architecture'].keys())})