5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 # suppress some deprecation warnings in squeeze related to apt_pkg
62 warnings.filterwarnings('ignore', \
63 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
65 warnings.filterwarnings('ignore', \
66 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
71 def get_type(f, session):
73 Get the file type of C{f}
76 @param f: file entry from Changes object
78 @type session: SQLA Session
79 @param session: SQL Alchemy session object
86 if f.has_key("dbtype"):
87 file_type = f["dbtype"]
88 elif re_source_ext.match(f["type"]):
92 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
94 # Validate the override type
95 type_id = get_override_type(file_type, session)
97 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
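# Illustrative sketch (not part of the original code): a typical call, assuming
# a file entry taken from self.pkg.files and an open SQLAlchemy session; the
# entry values shown are hypothetical.
#
#     entry = {"type": "dsc", "size": "1234"}
#     file_type = get_type(entry, session)   # -> "dsc"
#
# Binary entries normally carry a "dbtype" key ("deb" or "udeb"), set by
# binary_file_checks() further down, which get_type() prefers over "type".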
101 ################################################################################
103 # Determine what parts in a .changes are NEW
105 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
107 Determine what parts in a C{changes} file are NEW.
110 @param filename: changes filename
112 @type changes: Upload.Pkg.changes dict
113 @param changes: Changes dictionary
115 @type files: Upload.Pkg.files dict
116 @param files: Files dictionary
119 @param warn: Warn if overrides are added for (old)stable
121 @type dsc: Upload.Pkg.dsc dict
122 @param dsc: (optional); Dsc dictionary
125 @param new: new packages as returned by a previous call to this function, but override information may have changed
128 @return: dictionary of NEW components.
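# Illustrative sketch (not part of the original code): the returned dictionary
# maps package names to override data, mirroring how entries are filled in
# below; the values shown are hypothetical:
#
#     { "foo": { "priority": "optional", "section": "utils", "type": "deb",
#                "component": "main", "files": ["foo_1.0-1_amd64.deb"] } }
#
# An entry may additionally carry "othercomponents" when the package already
# exists in another component.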
131 # TODO: This should all use the database instead of parsing the changes
137 dbchg = get_dbchange(filename, session)
139 print "Warning: cannot find changes file in database; won't check byhand"
141 # Try to get the Package-Set field from an included .dsc file (if possible).
143 for package, entry in build_package_set(dsc, session).items():
144 if not new.has_key(package):
147 # Build up a list of potentially new things
148 for name, f in files.items():
149 # Keep a record of byhand elements
150 if f["section"] == "byhand":
155 priority = f["priority"]
156 section = f["section"]
157 file_type = get_type(f, session)
158 component = f["component"]
160 if file_type == "dsc":
163 if not new.has_key(pkg):
165 new[pkg]["priority"] = priority
166 new[pkg]["section"] = section
167 new[pkg]["type"] = file_type
168 new[pkg]["component"] = component
169 new[pkg]["files"] = []
171 old_type = new[pkg]["type"]
172 if old_type != file_type:
173 # source gets trumped by deb or udeb
174 if old_type == "dsc":
175 new[pkg]["priority"] = priority
176 new[pkg]["section"] = section
177 new[pkg]["type"] = file_type
178 new[pkg]["component"] = component
180 new[pkg]["files"].append(name)
182 if f.has_key("othercomponents"):
183 new[pkg]["othercomponents"] = f["othercomponents"]
185 # Fix up the list of target suites
187 for suite in changes["suite"].keys():
188 oldsuite = get_suite(suite, session)
190 print "WARNING: Invalid suite %s found" % suite
193 if oldsuite.overridesuite:
194 newsuite = get_suite(oldsuite.overridesuite, session)
197 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
198 oldsuite.overridesuite, suite)
199 del changes["suite"][suite]
200 changes["suite"][oldsuite.overridesuite] = 1
202 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
203 oldsuite.overridesuite, suite)
205 # Check for unprocessed byhand files
206 if dbchg is not None:
207 for b in byhand.keys():
208 # Find the file entry in the database
210 for f in dbchg.files:
213 # If it's processed, we can ignore it
219 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)
221 # Check for new stuff
222 for suite in changes["suite"].keys():
223 for pkg in new.keys():
224 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
226 for file_entry in new[pkg]["files"]:
227 if files[file_entry].has_key("new"):
228 del files[file_entry]["new"]
232 for s in ['stable', 'oldstable']:
233 if changes["suite"].has_key(s):
234 print "WARNING: overrides will be added for %s!" % s
235 for pkg in new.keys():
236 if new[pkg].has_key("othercomponents"):
237 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
241 ################################################################################
243 def check_valid(new, session = None):
245 Check if section and priority for NEW packages exist in database.
246 Additionally does sanity checks:
247 - debian-installer packages have to be udeb (or source)
248 - non-debian-installer packages cannot be udeb
249 - source priority can only be assigned to dsc file types
252 @param new: Dict of new packages with their section, priority and type.
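# Illustrative sketch (not part of the original code): a hypothetical entry
# before and after check_valid().  Unknown or inconsistent values are flagged
# by setting the corresponding id to -1:
#
#     new["foo"] = {"section": "utils", "priority": "optional", "type": "deb"}
#     check_valid(new, session)
#     # -> new["foo"]["section id"] / new["foo"]["priority id"] now hold
#     #    database ids, or -1 if the section/priority is unknown or fails
#     #    the sanity checks below.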
255 for pkg in new.keys():
256 section_name = new[pkg]["section"]
257 priority_name = new[pkg]["priority"]
258 file_type = new[pkg]["type"]
260 section = get_section(section_name, session)
262 new[pkg]["section id"] = -1
264 new[pkg]["section id"] = section.section_id
266 priority = get_priority(priority_name, session)
268 new[pkg]["priority id"] = -1
270 new[pkg]["priority id"] = priority.priority_id
273 di = section_name.find("debian-installer") != -1
275 # If d-i, we must be udeb and vice-versa
276 if (di and file_type not in ("udeb", "dsc")) or \
277 (not di and file_type == "udeb"):
278 new[pkg]["section id"] = -1
280 # If dsc, priority must be source and vice-versa
281 if (priority_name == "source" and file_type != "dsc") or \
282 (priority_name != "source" and file_type == "dsc"):
283 new[pkg]["priority id"] = -1
285 ###############################################################################
287 # Used by Upload.check_timestamps
288 class TarTime(object):
289 def __init__(self, future_cutoff, past_cutoff):
291 self.future_cutoff = future_cutoff
292 self.past_cutoff = past_cutoff
295 self.future_files = {}
296 self.ancient_files = {}
298 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
299 if MTime > self.future_cutoff:
300 self.future_files[Name] = MTime
301 if MTime < self.past_cutoff:
302 self.ancient_files[Name] = MTime
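# Minimal sketch (not part of the original code): how TarTime is driven.
# Upload.check_timestamps() below does the same with cutoffs derived from
# Dinstall::FutureTimeTravelGrace and Dinstall::PastCutoffYear; this helper
# and its name are illustrative assumptions and rely on the module-level
# apt_inst and utils imports.
def example_check_deb_timestamps(deb_filename, future_cutoff, past_cutoff):
    tar = TarTime(future_cutoff, past_cutoff)
    deb_file = utils.open_file(deb_filename)
    # Feed both tar members through the callback so out-of-range mtimes are
    # collected in tar.future_files / tar.ancient_files.
    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
    deb_file.seek(0)
    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
    deb_file.close()
    return tar.future_files, tar.ancient_files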
304 ###############################################################################
306 def prod_maintainer(notes, upload):
309 # Here we prepare an editor and get them ready to prod...
310 (fd, temp_filename) = utils.temp_filename()
311 temp_file = os.fdopen(fd, 'w')
313 temp_file.write(note.comment)
315 editor = os.environ.get("EDITOR","vi")
318 os.system("%s %s" % (editor, temp_filename))
319 temp_fh = utils.open_file(temp_filename)
320 prod_message = "".join(temp_fh.readlines())
322 print "Prod message:"
323 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
324 prompt = "[P]rod, Edit, Abandon, Quit ?"
326 while prompt.find(answer) == -1:
327 answer = utils.our_raw_input(prompt)
328 m = re_default_answer.search(prompt)
331 answer = answer[:1].upper()
332 os.unlink(temp_filename)
338 # Otherwise, do the prodding...
339 user_email_address = utils.whoami() + " <%s>" % (
340 cnf["Dinstall::MyAdminAddress"])
344 Subst["__FROM_ADDRESS__"] = user_email_address
345 Subst["__PROD_MESSAGE__"] = prod_message
346 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
348 prod_mail_message = utils.TemplateSubst(
349 Subst,cnf["Dir::Templates"]+"/process-new.prod")
352 utils.send_mail(prod_mail_message)
354 print "Sent prodding message"
356 ################################################################################
358 def edit_note(note, upload, session, trainee=False):
359 # Write the current data to a temporary file
360 (fd, temp_filename) = utils.temp_filename()
361 editor = os.environ.get("EDITOR","vi")
364 os.system("%s %s" % (editor, temp_filename))
365 temp_file = utils.open_file(temp_filename)
366 newnote = temp_file.read().rstrip()
369 print utils.prefix_multi_line_string(newnote," ")
370 prompt = "[D]one, Edit, Abandon, Quit ?"
372 while prompt.find(answer) == -1:
373 answer = utils.our_raw_input(prompt)
374 m = re_default_answer.search(prompt)
377 answer = answer[:1].upper()
378 os.unlink(temp_filename)
385 comment = NewComment()
386 comment.package = upload.pkg.changes["source"]
387 comment.version = upload.pkg.changes["version"]
388 comment.comment = newnote
389 comment.author = utils.whoami()
390 comment.trainee = trainee
394 ###############################################################################
396 # suite names DMs can upload to
397 dm_suites = ['unstable', 'experimental']
399 def get_newest_source(source, session):
400 'returns the newest DBSource object in dm_suites'
401 ## the most recent version of the package uploaded to unstable or
402 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
403 ## section of its control file
404 q = session.query(DBSource).filter_by(source = source). \
405 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
406 order_by(desc('source.version'))
409 def get_suite_version_by_source(source, session):
410 'returns a list of tuples (suite_name, version) for source package'
411 q = session.query(Suite.suite_name, DBSource.version). \
412 join(Suite.sources).filter_by(source = source)
415 def get_source_by_package_and_suite(package, suite_name, session):
417 returns a DBSource query filtered by DBBinary.package and this package's
420 return session.query(DBSource). \
421 join(DBSource.binaries).filter_by(package = package). \
422 join(DBBinary.suites).filter_by(suite_name = suite_name)
424 def get_suite_version_by_package(package, arch_string, session):
426 returns a list of tuples (suite_name, version) for binary package and
429 return session.query(Suite.suite_name, DBBinary.version). \
430 join(Suite.binaries).filter_by(package = package). \
431 join(DBBinary.architecture). \
432 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
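# Minimal sketch (not part of the original code): how the helpers above can be
# combined to answer "may a DM upload this source package right now?",
# mirroring the logic in Upload.check_dm_upload() further down.  The function
# name is an illustrative assumption.
def example_dm_upload_allowed(source, session):
    newest = get_newest_source(source, session)
    if newest is None:
        # No version in unstable/experimental yet, so a DM may not upload it.
        return False
    # dm_upload_allowed reflects the DM-Upload-Allowed field of the most
    # recent upload to one of the dm_suites.
    return bool(newest.dm_upload_allowed)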
434 class Upload(object):
436 Everything that has to do with processing an upload.
444 ###########################################################################
447 """ Reset a number of internal variables."""
449 # Initialize the substitution template map
452 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
453 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
454 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
455 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
461 self.later_check_files = []
465 def package_info(self):
467 Format various messages from this Upload to send to the maintainer.
471 ('Reject Reasons', self.rejects),
472 ('Warnings', self.warnings),
473 ('Notes', self.notes),
477 for title, messages in msgs:
479 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
484 ###########################################################################
485 def update_subst(self):
486 """ Set up the per-package template substitution mappings """
490 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
491 if not self.pkg.changes.has_key("architecture") or not \
492 isinstance(self.pkg.changes["architecture"], dict):
493 self.pkg.changes["architecture"] = { "Unknown" : "" }
495 # and maintainer2047 may not exist.
496 if not self.pkg.changes.has_key("maintainer2047"):
497 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
499 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
500 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
501 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
503 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
504 if self.pkg.changes["architecture"].has_key("source") and \
505 self.pkg.changes["changedby822"] != "" and \
506 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
508 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
509 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
510 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
512 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
513 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
514 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
516 # Process policy doesn't set the fingerprint field and I don't want to make it
517 # do it for now as I don't want to have to deal with the case where we accepted
518 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
519 # the meantime so the package will be remarked as rejectable. Urgh.
520 # TODO: Fix this properly
521 if self.pkg.changes.has_key('fingerprint'):
522 session = DBConn().session()
523 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
524 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
525 if self.pkg.changes.has_key("sponsoremail"):
526 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
529 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
530 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
532 # Apply any global override of the Maintainer field
533 if cnf.get("Dinstall::OverrideMaintainer"):
534 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
535 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
537 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
538 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
539 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
540 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
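# Illustrative sketch (not part of the original code): for a hypothetical
# sourceful upload the resulting mapping would contain entries such as
#
#     self.Subst["__SOURCE__"]        = "foo"
#     self.Subst["__VERSION__"]       = "1.0-1"
#     self.Subst["__ARCHITECTURE__"]  = "source amd64"
#     self.Subst["__MAINTAINER_TO__"] = "Jane Doe <jane@example.org>"
#     self.Subst["__SUITE__"]         = "unstable"
#
# which utils.TemplateSubst() then splices into the mail templates under
# Dir::Templates (as in prod_maintainer() above).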
542 ###########################################################################
543 def load_changes(self, filename):
545 Load a changes file and set up a dictionary around it. Also checks for mandatory
548 @type filename: string
549 @param filename: Changes filename, full path.
552 @return: whether the changes file was valid or not. We may want to
553 reject even if this is True (see what gets put in self.rejects).
554 This is simply to prevent us even trying things later which will
555 fail because we couldn't properly parse the file.
558 self.pkg.changes_file = filename
560 # Parse the .changes field into a dictionary
562 self.pkg.changes.update(parse_changes(filename))
563 except CantOpenError:
564 self.rejects.append("%s: can't read file." % (filename))
566 except ParseChangesError, line:
567 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
569 except ChangesUnicodeError:
570 self.rejects.append("%s: changes file not proper utf-8" % (filename))
573 # Parse the Files field from the .changes into another dictionary
575 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
576 except ParseChangesError, line:
577 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
579 except UnknownFormatError, format:
580 self.rejects.append("%s: unknown format '%s'." % (filename, format))
583 # Check for mandatory fields
584 for i in ("distribution", "source", "binary", "architecture",
585 "version", "maintainer", "files", "changes", "description"):
586 if not self.pkg.changes.has_key(i):
587 # Avoid undefined errors later
588 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
591 # Strip a source version in brackets from the source field
592 if re_strip_srcver.search(self.pkg.changes["source"]):
593 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
595 # Ensure the source field is a valid package name.
596 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
597 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
599 # Split multi-value fields into a lower-level dictionary
600 for i in ("architecture", "distribution", "binary", "closes"):
601 o = self.pkg.changes.get(i, "")
603 del self.pkg.changes[i]
605 self.pkg.changes[i] = {}
608 self.pkg.changes[i][j] = 1
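# Illustrative sketch (not part of the original code): after this split, a
# hypothetical "Architecture: source amd64" line ends up as
#     self.pkg.changes["architecture"] == {"source": 1, "amd64": 1}
# and "Closes: 123456" as {"123456": 1}, which is why later checks use
# has_key()/keys() on these fields.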
610 # Fix the Maintainer: field to be RFC822/2047 compatible
612 (self.pkg.changes["maintainer822"],
613 self.pkg.changes["maintainer2047"],
614 self.pkg.changes["maintainername"],
615 self.pkg.changes["maintaineremail"]) = \
616 fix_maintainer (self.pkg.changes["maintainer"])
617 except ParseMaintError, msg:
618 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
619 % (filename, self.pkg.changes["maintainer"], msg))
621 # ...likewise for the Changed-By: field if it exists.
623 (self.pkg.changes["changedby822"],
624 self.pkg.changes["changedby2047"],
625 self.pkg.changes["changedbyname"],
626 self.pkg.changes["changedbyemail"]) = \
627 fix_maintainer (self.pkg.changes.get("changed-by", ""))
628 except ParseMaintError, msg:
629 self.pkg.changes["changedby822"] = ""
630 self.pkg.changes["changedby2047"] = ""
631 self.pkg.changes["changedbyname"] = ""
632 self.pkg.changes["changedbyemail"] = ""
634 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
635 % (filename, self.pkg.changes["changed-by"], msg))
637 # Ensure all the values in Closes: are numbers
638 if self.pkg.changes.has_key("closes"):
639 for i in self.pkg.changes["closes"].keys():
640 if re_isanum.match (i) == None:
641 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
643 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
644 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
645 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
647 # Check the .changes is non-empty
648 if not self.pkg.files:
649 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
652 # Changes was syntactically valid even if we'll reject
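# Illustrative sketch (not part of the original code): typical driver code,
# with a hypothetical path.  Even when True is returned, self.rejects may
# already contain reasons to refuse the upload:
#
#     u = Upload()
#     if not u.load_changes("/srv/queue/foo_1.0-1_amd64.changes"):
#         ...   # unparseable; self.rejects explains why
#     elif u.rejects:
#         ...   # parsed, but mandatory fields are missing or invalid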
655 ###########################################################################
657 def check_distributions(self):
658 "Check and map the Distribution field"
662 # Handle suite mappings
663 for m in Cnf.ValueList("SuiteMappings"):
666 if mtype == "map" or mtype == "silent-map":
667 (source, dest) = args[1:3]
668 if self.pkg.changes["distribution"].has_key(source):
669 del self.pkg.changes["distribution"][source]
670 self.pkg.changes["distribution"][dest] = 1
671 if mtype != "silent-map":
672 self.notes.append("Mapping %s to %s." % (source, dest))
673 if self.pkg.changes.has_key("distribution-version"):
674 if self.pkg.changes["distribution-version"].has_key(source):
675 self.pkg.changes["distribution-version"][source]=dest
676 elif mtype == "map-unreleased":
677 (source, dest) = args[1:3]
678 if self.pkg.changes["distribution"].has_key(source):
679 for arch in self.pkg.changes["architecture"].keys():
680 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
681 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
682 del self.pkg.changes["distribution"][source]
683 self.pkg.changes["distribution"][dest] = 1
685 elif mtype == "ignore":
687 if self.pkg.changes["distribution"].has_key(suite):
688 del self.pkg.changes["distribution"][suite]
689 self.warnings.append("Ignoring %s as a target suite." % (suite))
690 elif mtype == "reject":
692 if self.pkg.changes["distribution"].has_key(suite):
693 self.rejects.append("Uploads to %s are not accepted." % (suite))
694 elif mtype == "propup-version":
695 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
697 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
698 if self.pkg.changes["distribution"].has_key(args[1]):
699 self.pkg.changes.setdefault("distribution-version", {})
700 for suite in args[2:]:
701 self.pkg.changes["distribution-version"][suite] = suite
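# Illustrative sketch (not part of the original code): hypothetical
# SuiteMappings entries and their effect in the loop above:
#
#     "map stable proposed-updates"      -> retarget stable to proposed-updates
#                                           (a note is added to self.notes)
#     "silent-map oldstable-security oldstable" -> same, but without the note
#     "ignore testing"                   -> drop testing as a target suite
#     "reject unreleased"                -> reject uploads targeting unreleased
#     "propup-version <uploaded-to> <suite>..." -> record distribution-version
#                                           entries for the listed suites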
703 # Ensure there is (still) a target distribution
704 if len(self.pkg.changes["distribution"].keys()) < 1:
705 self.rejects.append("No valid distribution remaining.")
707 # Ensure target distributions exist
708 for suite in self.pkg.changes["distribution"].keys():
709 if not Cnf.has_key("Suite::%s" % (suite)):
710 self.rejects.append("Unknown distribution `%s'." % (suite))
712 ###########################################################################
714 def binary_file_checks(self, f, session):
716 entry = self.pkg.files[f]
718 # Extract package control information
719 deb_file = utils.open_file(f)
721 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
723 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
725 # Can't continue, none of the checks on control would work.
728 # Check for mandatory "Description:"
731 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
733 self.rejects.append("%s: Missing Description in binary package" % (f))
738 # Check for mandatory fields
739 for field in [ "Package", "Architecture", "Version" ]:
740 if control.Find(field) == None:
742 self.rejects.append("%s: No %s field in control." % (f, field))
745 # Ensure the package name matches the one given in the .changes
746 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
747 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
749 # Validate the package field
750 package = control.Find("Package")
751 if not re_valid_pkg_name.match(package):
752 self.rejects.append("%s: invalid package name '%s'." % (f, package))
754 # Validate the version field
755 version = control.Find("Version")
756 if not re_valid_version.match(version):
757 self.rejects.append("%s: invalid version number '%s'." % (f, version))
759 # Ensure the architecture of the .deb is one we know about.
760 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
761 architecture = control.Find("Architecture")
762 upload_suite = self.pkg.changes["distribution"].keys()[0]
764 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
765 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
766 self.rejects.append("Unknown architecture '%s'." % (architecture))
768 # Ensure the architecture of the .deb is one of the ones
769 # listed in the .changes.
770 if not self.pkg.changes["architecture"].has_key(architecture):
771 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
773 # Sanity-check the Depends field
774 depends = control.Find("Depends")
776 self.rejects.append("%s: Depends field is empty." % (f))
778 # Sanity-check the Provides field
779 provides = control.Find("Provides")
781 provide = re_spacestrip.sub('', provides)
783 self.rejects.append("%s: Provides field is empty." % (f))
784 prov_list = provide.split(",")
785 for prov in prov_list:
786 if not re_valid_pkg_name.match(prov):
787 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
789 # If there is a Built-Using field, we need to check we can find the
790 # exact source version
791 built_using = control.Find("Built-Using")
794 entry["built-using"] = []
795 for dep in apt_pkg.parse_depends(built_using):
796 bu_s, bu_v, bu_e = dep[0]
797 # Check that it's an exact match dependency and we have
798 # some form of version
799 if bu_e != "=" or len(bu_v) < 1:
800 self.rejects.append("%s: Built-Using contains non-strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
802 # Find the source id for this version
803 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
805 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
807 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
809 except ValueError, e:
810 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
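# Illustrative sketch (not part of the original code): a hypothetical field
#     Built-Using: gcc-10 (= 10.2.1-6)
# is parsed by apt_pkg.parse_depends() into [[("gcc-10", "10.2.1-6", "=")]],
# so bu_s/bu_v/bu_e above become ("gcc-10", "10.2.1-6", "=").  Anything other
# than a strict "=" relation, or a missing version, is rejected, and the
# referenced source version must already be known to the archive.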
813 # Check the section & priority match those given in the .changes (non-fatal)
814 if control.Find("Section") and entry["section"] != "" \
815 and entry["section"] != control.Find("Section"):
816 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
817 (f, control.Find("Section", ""), entry["section"]))
818 if control.Find("Priority") and entry["priority"] != "" \
819 and entry["priority"] != control.Find("Priority"):
820 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
821 (f, control.Find("Priority", ""), entry["priority"]))
823 entry["package"] = package
824 entry["architecture"] = architecture
825 entry["version"] = version
826 entry["maintainer"] = control.Find("Maintainer", "")
828 if f.endswith(".udeb"):
829 self.pkg.files[f]["dbtype"] = "udeb"
830 elif f.endswith(".deb"):
831 self.pkg.files[f]["dbtype"] = "deb"
833 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
835 entry["source"] = control.Find("Source", entry["package"])
837 # Get the source version
838 source = entry["source"]
841 if source.find("(") != -1:
842 m = re_extract_src_version.match(source)
844 source_version = m.group(2)
846 if not source_version:
847 source_version = self.pkg.files[f]["version"]
849 entry["source package"] = source
850 entry["source version"] = source_version
852 # Ensure the filename matches the contents of the .deb
853 m = re_isadeb.match(f)
856 file_package = m.group(1)
857 if entry["package"] != file_package:
858 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
859 (f, file_package, entry["dbtype"], entry["package"]))
860 epochless_version = re_no_epoch.sub('', control.Find("Version"))
863 file_version = m.group(2)
864 if epochless_version != file_version:
865 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
866 (f, file_version, entry["dbtype"], epochless_version))
869 file_architecture = m.group(3)
870 if entry["architecture"] != file_architecture:
871 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
872 (f, file_architecture, entry["dbtype"], entry["architecture"]))
874 # Check for existent source
875 source_version = entry["source version"]
876 source_package = entry["source package"]
877 if self.pkg.changes["architecture"].has_key("source"):
878 if source_version != self.pkg.changes["version"]:
879 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
880 (source_version, f, self.pkg.changes["version"]))
882 # Check in the SQL database
883 if not source_exists(source_package, source_version, suites = \
884 self.pkg.changes["distribution"].keys(), session = session):
885 # Check in one of the other directories
886 source_epochless_version = re_no_epoch.sub('', source_version)
887 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
888 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
890 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
893 dsc_file_exists = False
894 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
895 if cnf.has_key("Dir::Queue::%s" % (myq)):
896 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
897 dsc_file_exists = True
900 if not dsc_file_exists:
901 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
903 # Check the version and for file overwrites
904 self.check_binary_against_db(f, session)
906 def source_file_checks(self, f, session):
907 entry = self.pkg.files[f]
909 m = re_issource.match(f)
913 entry["package"] = m.group(1)
914 entry["version"] = m.group(2)
915 entry["type"] = m.group(3)
917 # Ensure the source package name matches the Source field in the .changes
918 if self.pkg.changes["source"] != entry["package"]:
919 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
921 # Ensure the source version matches the version in the .changes file
922 if re_is_orig_source.match(f):
923 changes_version = self.pkg.changes["chopversion2"]
925 changes_version = self.pkg.changes["chopversion"]
927 if changes_version != entry["version"]:
928 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
930 # Ensure the .changes lists source in the Architecture field
931 if not self.pkg.changes["architecture"].has_key("source"):
932 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
934 # Check the signature of a .dsc file
935 if entry["type"] == "dsc":
936 # check_signature returns either:
937 # (None, [list, of, rejects]) or (signature, [])
938 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
940 self.rejects.append(j)
942 entry["architecture"] = "source"
944 def per_suite_file_checks(self, f, suite, session):
946 entry = self.pkg.files[f]
949 if entry.has_key("byhand"):
952 # Check we have fields we need to do these checks
954 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
955 if not entry.has_key(m):
956 self.rejects.append("file '%s' does not have field %s set" % (f, m))
962 # Handle component mappings
963 for m in cnf.ValueList("ComponentMappings"):
964 (source, dest) = m.split()
965 if entry["component"] == source:
966 entry["original component"] = source
967 entry["component"] = dest
969 # Ensure the component is valid for the target suite
970 if cnf.has_key("Suite::%s::Components" % (suite)) and \
971 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
972 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
975 # Validate the component
976 if not get_component(entry["component"], session):
977 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
980 # See if the package is NEW
981 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
984 # Validate the priority
985 if entry["priority"].find('/') != -1:
986 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
988 # Determine the location
989 location = cnf["Dir::Pool"]
990 l = get_location(location, entry["component"], session=session)
992 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
993 entry["location id"] = -1
995 entry["location id"] = l.location_id
997 # Check the md5sum & size against existing files (if any)
998 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1000 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1001 entry["size"], entry["md5sum"], entry["location id"])
1004 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1005 elif found is False and poolfile is not None:
1006 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1008 if poolfile is None:
1009 entry["files id"] = None
1011 entry["files id"] = poolfile.file_id
1013 # Check for packages that have moved from one component to another
1014 entry['suite'] = suite
1015 arch_list = [entry["architecture"], 'all']
1016 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1017 [suite], arch_list = arch_list, session = session)
1018 if component is not None:
1019 entry["othercomponents"] = component
1021 def check_files(self, action=True):
1022 file_keys = self.pkg.files.keys()
1028 os.chdir(self.pkg.directory)
1030 ret = holding.copy_to_holding(f)
1032 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1036 # Check whether we already know this changes file
1037 # [NB: this check must be done post-suite mapping]
1038 base_filename = os.path.basename(self.pkg.changes_file)
1040 session = DBConn().session()
1043 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1044 # if in the pool or in a queue other than unchecked, reject
1045 if (dbc.in_queue is None) \
1046 or (dbc.in_queue is not None
1047 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1048 self.rejects.append("%s file already known to dak" % base_filename)
1049 except NoResultFound, e:
1053 has_binaries = False
1056 for f, entry in self.pkg.files.items():
1057 # Ensure the file does not already exist in one of the accepted directories
1058 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1059 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1060 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1061 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1063 if not re_taint_free.match(f):
1064 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1066 # Check the file is readable
1067 if os.access(f, os.R_OK) == 0:
1068 # When running in -n, copy_to_holding() won't have
1069 # generated the reject_message, so we need to.
1071 if os.path.exists(f):
1072 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1074 # Don't directly reject, mark to check later to deal with orig's
1075 # we can find in the pool
1076 self.later_check_files.append(f)
1077 entry["type"] = "unreadable"
1080 # If it's byhand skip remaining checks
1081 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1083 entry["type"] = "byhand"
1085 # Checks for a binary package...
1086 elif re_isadeb.match(f):
1088 entry["type"] = "deb"
1090 # This routine appends to self.rejects/warnings as appropriate
1091 self.binary_file_checks(f, session)
1093 # Checks for a source package...
1094 elif re_issource.match(f):
1097 # This routine appends to self.rejects/warnings as appropriate
1098 self.source_file_checks(f, session)
1100 # Not a binary or source package? Assume byhand...
1103 entry["type"] = "byhand"
1105 # Per-suite file checks
1106 entry["oldfiles"] = {}
1107 for suite in self.pkg.changes["distribution"].keys():
1108 self.per_suite_file_checks(f, suite, session)
1112 # If the .changes file says it has source, it must have source.
1113 if self.pkg.changes["architecture"].has_key("source"):
1115 self.rejects.append("no source found although the Architecture line in the changes mentions source.")
1117 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1118 self.rejects.append("source only uploads are not supported.")
1120 ###########################################################################
1122 def __dsc_filename(self):
1124 Returns: (Status, Dsc_Filename)
1126 Status: Boolean; True when there was no error, False otherwise
1127 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1132 for name, entry in self.pkg.files.items():
1133 if entry.has_key("type") and entry["type"] == "dsc":
1135 return False, "cannot process a .changes file with multiple .dsc's."
1139 if not dsc_filename:
1140 return False, "source uploads must contain a dsc file"
1142 return True, dsc_filename
1144 def load_dsc(self, action=True, signing_rules=1):
1146 Find and load the dsc from self.pkg.files into self.dsc
1148 Returns: (Status, Reason)
1150 Status: Boolean; True when there was no error, False otherwise
1151 Reason: String; When Status is False this describes the error
1155 (status, dsc_filename) = self.__dsc_filename()
1157 # If status is false, dsc_filename has the reason
1158 return False, dsc_filename
1161 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1162 except CantOpenError:
1164 return False, "%s: can't read file." % (dsc_filename)
1165 except ParseChangesError, line:
1166 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1167 except InvalidDscError, line:
1168 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1169 except ChangesUnicodeError:
1170 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1174 ###########################################################################
1176 def check_dsc(self, action=True, session=None):
1177 """Returns bool indicating whether or not the source changes are valid"""
1178 # Ensure there is source to check
1179 if not self.pkg.changes["architecture"].has_key("source"):
1182 (status, reason) = self.load_dsc(action=action)
1184 self.rejects.append(reason)
1186 (status, dsc_filename) = self.__dsc_filename()
1188 # If status is false, dsc_filename has the reason
1189 self.rejects.append(dsc_filename)
1192 # Build up the file list of files mentioned by the .dsc
1194 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1195 except NoFilesFieldError:
1196 self.rejects.append("%s: no Files: field." % (dsc_filename))
1198 except UnknownFormatError, format:
1199 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1201 except ParseChangesError, line:
1202 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1205 # Enforce mandatory fields
1206 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1207 if not self.pkg.dsc.has_key(i):
1208 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1211 # Validate the source and version fields
1212 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1213 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1214 if not re_valid_version.match(self.pkg.dsc["version"]):
1215 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1217 # Only a limited list of source formats are allowed in each suite
1218 for dist in self.pkg.changes["distribution"].keys():
1219 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1220 if self.pkg.dsc["format"] not in allowed:
1221 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1223 # Validate the Maintainer field
1225 # We ignore the return value
1226 fix_maintainer(self.pkg.dsc["maintainer"])
1227 except ParseMaintError, msg:
1228 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1229 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1231 # Validate the build-depends field(s)
1232 for field_name in [ "build-depends", "build-depends-indep" ]:
1233 field = self.pkg.dsc.get(field_name)
1235 # Have apt try to parse them...
1237 apt_pkg.ParseSrcDepends(field)
1239 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1241 # Ensure the version number in the .dsc matches the version number in the .changes
1242 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1243 changes_version = self.pkg.files[dsc_filename]["version"]
1245 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1246 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1248 # Ensure the Files field contains only what's expected
1249 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1251 # Ensure source is newer than existing source in target suites
1252 session = DBConn().session()
1253 self.check_source_against_db(dsc_filename, session)
1254 self.check_dsc_against_db(dsc_filename, session)
1256 dbchg = get_dbchange(self.pkg.changes_file, session)
1258 # Finally, check if we're missing any files
1259 for f in self.later_check_files:
1261 # Check if we've already processed this file if we have a dbchg object
1264 for pf in dbchg.files:
1265 if pf.filename == f and pf.processed:
1266 self.notes.append('%s was already processed so we can go ahead' % f)
1268 del self.pkg.files[f]
1270 self.rejects.append("Could not find file %s referenced in the changes" % f)
1276 ###########################################################################
1278 def get_changelog_versions(self, source_dir):
1279 """Extracts a the source package and (optionally) grabs the
1280 version history out of debian/changelog for the BTS."""
1284 # Find the .dsc (again)
1286 for f in self.pkg.files.keys():
1287 if self.pkg.files[f]["type"] == "dsc":
1290 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1291 if not dsc_filename:
1294 # Create a symlink mirror of the source files in our temporary directory
1295 for f in self.pkg.files.keys():
1296 m = re_issource.match(f)
1298 src = os.path.join(source_dir, f)
1299 # If a file is missing for whatever reason, give up.
1300 if not os.path.exists(src):
1303 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1304 self.pkg.orig_files[f].has_key("path"):
1306 dest = os.path.join(os.getcwd(), f)
1307 os.symlink(src, dest)
1309 # If the orig files are not a part of the upload, create symlinks to the
1311 for orig_file in self.pkg.orig_files.keys():
1312 if not self.pkg.orig_files[orig_file].has_key("path"):
1314 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1315 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1317 # Extract the source
1319 unpacked = UnpackedSource(dsc_filename)
1321 self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1324 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1327 # Get the upstream version
1328 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1329 if re_strip_revision.search(upstr_version):
1330 upstr_version = re_strip_revision.sub('', upstr_version)
1332 # Ensure the changelog file exists
1333 changelog_file = unpacked.get_changelog_file()
1334 if changelog_file is None:
1335 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1338 # Parse the changelog
1339 self.pkg.dsc["bts changelog"] = ""
1340 for line in changelog_file.readlines():
1341 m = re_changelog_versions.match(line)
1343 self.pkg.dsc["bts changelog"] += line
1344 changelog_file.close()
1347 # Check we found at least one revision in the changelog
1348 if not self.pkg.dsc["bts changelog"]:
1349 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1351 def check_source(self):
1353 # a) there's no source
1354 if not self.pkg.changes["architecture"].has_key("source"):
1357 tmpdir = utils.temp_dirname()
1359 # Move into the temporary directory
1363 # Get the changelog version history
1364 self.get_changelog_versions(cwd)
1366 # Move back and cleanup the temporary tree
1370 shutil.rmtree(tmpdir)
1372 if e.errno != errno.EACCES:
1374 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1376 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1377 # We probably have u-r or u-w directories so chmod everything
1379 cmd = "chmod -R u+rwx %s" % (tmpdir)
1380 result = os.system(cmd)
1382 utils.fubar("'%s' failed with result %s." % (cmd, result))
1383 shutil.rmtree(tmpdir)
1384 except Exception, e:
1385 print "foobar2 (%s)" % e
1386 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1388 ###########################################################################
1389 def ensure_hashes(self):
1390 # Make sure we recognise the format of the Files: field in the .changes
1391 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1392 if len(format) == 2:
1393 format = int(format[0]), int(format[1])
1395 format = int(float(format[0])), 0
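# Illustrative sketch (not part of the original code): a hypothetical
# "Format: 1.8" field becomes the tuple (1, 8); a bare "1" becomes (1, 0).
# That tuple is later compared against the changes version in which each
# hash field first appeared (utils.known_hashes).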
1397 # We need to deal with the original changes blob, as the fields we need
1398 # might not be in the changes dict serialised into the .dak anymore.
1399 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1401 # Copy the checksums over to the current changes dict. This will keep
1402 # the existing modifications to it intact.
1403 for field in orig_changes:
1404 if field.startswith('checksums-'):
1405 self.pkg.changes[field] = orig_changes[field]
1407 # Check for unsupported hashes
1408 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1409 self.rejects.append(j)
1411 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1412 self.rejects.append(j)
1414 # We have to calculate the hash if we have an earlier changes version than
1415 # the hash appears in rather than require it exist in the changes file
1416 for hashname, hashfunc, version in utils.known_hashes:
1417 # TODO: Move _ensure_changes_hash into this class
1418 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1419 self.rejects.append(j)
1420 if "source" in self.pkg.changes["architecture"]:
1421 # TODO: Move _ensure_dsc_hash into this class
1422 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1423 self.rejects.append(j)
1425 def check_hashes(self):
1426 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1427 self.rejects.append(m)
1429 for m in utils.check_size(".changes", self.pkg.files):
1430 self.rejects.append(m)
1432 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1433 self.rejects.append(m)
1435 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1436 self.rejects.append(m)
1438 self.ensure_hashes()
1440 ###########################################################################
1442 def ensure_orig(self, target_dir='.', session=None):
1444 Ensures that all orig files mentioned in the changes file are present
1445 in target_dir. If they do not exist, they are symlinked into place.
1447 A list containing the symlinks that were created is returned (so they
1454 for filename, entry in self.pkg.dsc_files.iteritems():
1455 if not re_is_orig_source.match(filename):
1456 # File is not an orig; ignore
1459 if os.path.exists(filename):
1460 # File exists, no need to continue
1463 def symlink_if_valid(path):
1464 f = utils.open_file(path)
1465 md5sum = apt_pkg.md5sum(f)
1468 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1469 expected = (int(entry['size']), entry['md5sum'])
1471 if fingerprint != expected:
1474 dest = os.path.join(target_dir, filename)
1476 os.symlink(path, dest)
1477 symlinked.append(dest)
1483 session_ = DBConn().session()
1488 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1489 poolfile_path = os.path.join(
1490 poolfile.location.path, poolfile.filename
1493 if symlink_if_valid(poolfile_path):
1503 # Look in some other queues for the file
1504 queues = ('New', 'Byhand', 'ProposedUpdates',
1505 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1507 for queue in queues:
1508 if not cnf.get('Dir::Queue::%s' % queue):
1511 queuefile_path = os.path.join(
1512 cnf['Dir::Queue::%s' % queue], filename
1515 if not os.path.exists(queuefile_path):
1516 # Does not exist in this queue
1519 if symlink_if_valid(queuefile_path):
1524 ###########################################################################
1526 def check_lintian(self):
1528 Extends self.rejects by checking the output of lintian against tags
1529 specified in Dinstall::LintianTags.
1534 # Don't reject binary uploads
1535 if not self.pkg.changes['architecture'].has_key('source'):
1538 # Only check some distributions
1539 for dist in ('unstable', 'experimental'):
1540 if dist in self.pkg.changes['distribution']:
1545 # If we do not have a tagfile, don't do anything
1546 tagfile = cnf.get("Dinstall::LintianTags")
1550 # Parse the yaml file
1551 sourcefile = file(tagfile, 'r')
1552 sourcecontent = sourcefile.read()
1556 lintiantags = yaml.load(sourcecontent)['lintian']
1557 except yaml.YAMLError, msg:
1558 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1561 # Try and find all orig mentioned in the .dsc
1562 symlinked = self.ensure_orig()
1564 # Setup the input file for lintian
1565 fd, temp_filename = utils.temp_filename()
1566 temptagfile = os.fdopen(fd, 'w')
1567 for tags in lintiantags.values():
1568 temptagfile.writelines(['%s\n' % x for x in tags])
1572 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1573 (temp_filename, self.pkg.changes_file)
1575 result, output = commands.getstatusoutput(cmd)
1577 # Remove our tempfile and any symlinks we created
1578 os.unlink(temp_filename)
1580 for symlink in symlinked:
1584 utils.warn("lintian failed for %s [return code: %s]." % \
1585 (self.pkg.changes_file, result))
1586 utils.warn(utils.prefix_multi_line_string(output, \
1587 " [possible output:] "))
1592 [self.pkg.changes_file, "check_lintian"] + list(txt)
1596 parsed_tags = parse_lintian_output(output)
1597 self.rejects.extend(
1598 generate_reject_messages(parsed_tags, lintiantags, log=log)
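# Illustrative sketch (not part of the original code): the file named by
# Dinstall::LintianTags is YAML with a top-level "lintian" key mapping
# categories to tag lists; the category and tag names below are hypothetical:
#
#     lintian:
#       fatal:
#         - some-fatal-tag
#       nonfatal:
#         - some-nonfatal-tag
#
# Every listed tag is written to the temporary --tags-from-file input above.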
1601 ###########################################################################
1602 def check_urgency(self):
1604 if self.pkg.changes["architecture"].has_key("source"):
1605 if not self.pkg.changes.has_key("urgency"):
1606 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1607 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1608 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1609 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1610 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1611 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1613 ###########################################################################
1615 # Sanity check the time stamps of files inside debs.
1616 # [Files in the near future cause ugly warnings and extreme time
1617 # travel can cause errors on extraction]
1619 def check_timestamps(self):
1622 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1623 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1624 tar = TarTime(future_cutoff, past_cutoff)
1626 for filename, entry in self.pkg.files.items():
1627 if entry["type"] == "deb":
1630 deb_file = utils.open_file(filename)
1631 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1634 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1635 except SystemError, e:
1636 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1637 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1640 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1644 future_files = tar.future_files.keys()
1646 num_future_files = len(future_files)
1647 future_file = future_files[0]
1648 future_date = tar.future_files[future_file]
1649 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1650 % (filename, num_future_files, future_file, time.ctime(future_date)))
1652 ancient_files = tar.ancient_files.keys()
1654 num_ancient_files = len(ancient_files)
1655 ancient_file = ancient_files[0]
1656 ancient_date = tar.ancient_files[ancient_file]
1657 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1658 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1660 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1662 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1663 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1665 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1671 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1672 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1673 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1674 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1675 self.pkg.changes["sponsoremail"] = uid_email
1680 ###########################################################################
1681 # check_signed_by_key checks
1682 ###########################################################################
1684 def check_signed_by_key(self):
1685 """Ensure the .changes is signed by an authorized uploader."""
1686 session = DBConn().session()
1688 # First of all we check that the person has proper upload permissions
1689 # and that this upload isn't blocked
1690 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1693 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1696 # TODO: Check that import-keyring adds UIDs properly
1698 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1701 # Check that the fingerprint which uploaded has permission to do so
1702 self.check_upload_permissions(fpr, session)
1704 # Check that this package is not in a transition
1705 self.check_transition(session)
1710 def check_upload_permissions(self, fpr, session):
1711 # Check any one-off upload blocks
1712 self.check_upload_blocks(fpr, session)
1714 # Start with DM as a special case
1715 # DM is a special case unfortunately, so we check it first
1716 # (keys with no source access get more access than DMs in one
1717 # way; DMs can only upload for their packages whether source
1718 # or binary, whereas keys with no access might be able to
1719 # upload some binaries)
1720 if fpr.source_acl.access_level == 'dm':
1721 self.check_dm_upload(fpr, session)
1723 # Check source-based permissions for other types
1724 if self.pkg.changes["architecture"].has_key("source") and \
1725 fpr.source_acl.access_level is None:
1726 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1727 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1728 self.rejects.append(rej)
1730 # If not a DM, we allow full upload rights
1731 uid_email = "%s@debian.org" % (fpr.uid.uid)
1732 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1735 # Check binary upload permissions
1736 # By this point we know that DMs can't have got here unless they
1737 # are allowed to deal with the package concerned so just apply
1739 if fpr.binary_acl.access_level == 'full':
1742 # Otherwise we're in the map case
1743 tmparches = self.pkg.changes["architecture"].copy()
1744 tmparches.pop('source', None)
1746 for bam in fpr.binary_acl_map:
1747 tmparches.pop(bam.architecture.arch_string, None)
1749 if len(tmparches.keys()) > 0:
1750 if fpr.binary_reject:
1751 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1752 rej += "\narchitectures involved are: " + ",".join(tmparches.keys())
1753 self.rejects.append(rej)
1755 # TODO: This is where we'll implement reject vs throw away binaries later
1756 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1757 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1758 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1759 self.rejects.append(rej)
1762 def check_upload_blocks(self, fpr, session):
1763 """Check whether any upload blocks apply to this source, source
1764 version, uid / fpr combination"""
1766 def block_rej_template(fb):
1767 rej = 'Manual upload block in place for package %s' % fb.source
1768 if fb.version is not None:
1769 rej += ', version %s' % fb.version
1772 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1773 # version is None if the block applies to all versions
1774 if fb.version is None or fb.version == self.pkg.changes['version']:
1775 # Check both fpr and uid - either is enough to cause a reject
1776 if fb.fpr is not None:
1777 if fb.fpr.fingerprint == fpr.fingerprint:
1778 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1779 if fb.uid is not None:
1780 if fb.uid == fpr.uid:
1781 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1784 def check_dm_upload(self, fpr, session):
1785 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1786 ## none of the uploaded packages are NEW
1788 for f in self.pkg.files.keys():
1789 if self.pkg.files[f].has_key("byhand"):
1790 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1792 if self.pkg.files[f].has_key("new"):
1793 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1799 r = get_newest_source(self.pkg.changes["source"], session)
1802 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1803 self.rejects.append(rej)
1806 if not r.dm_upload_allowed:
1807 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1808 self.rejects.append(rej)
1811 ## the Maintainer: field of the uploaded .changes file corresponds with
1812 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1814 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1815 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1817 ## the most recent version of the package uploaded to unstable or
1818 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1819 ## non-developer maintainers cannot NMU or hijack packages)
1821 # srcuploaders includes the maintainer
1823 for sup in r.srcuploaders:
1824 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1825 # Eww - I hope we never have two people with the same name in Debian
1826 if email == fpr.uid.uid or name == fpr.uid.name:
1831 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1834 ## none of the packages are being taken over from other source packages
1835 for b in self.pkg.changes["binary"].keys():
1836 for suite in self.pkg.changes["distribution"].keys():
1837 for s in get_source_by_package_and_suite(b, suite, session):
1838 if s.source != self.pkg.changes["source"]:
1839 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
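# Illustrative sketch (not part of dak): the "listed in Maintainer or
# Uploaders" test above, reduced to its core.  'uploaders' stands in for the
# (name, email) pairs derived from srcuploaders; the helper is hypothetical.
def _example_dm_is_listed(uploaders, uid_email, uid_name):
    for name, email in uploaders:
        # A match on either the email or the full name is accepted.
        if email == uid_email or name == uid_name:
            return True
    return False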
1843 def check_transition(self, session):
1846 sourcepkg = self.pkg.changes["source"]
1848 # No sourceful upload -> no need to do anything else, direct return
1849 # We also work with unstable uploads, not experimental or those going to some
1850 # proposed-updates queue
1851 if "source" not in self.pkg.changes["architecture"] or \
1852 "unstable" not in self.pkg.changes["distribution"]:
1855 # Also only check if there is a file defined (and existent) with
1857 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1858 if transpath == "" or not os.path.exists(transpath):
1861 # Parse the yaml file
1862 sourcefile = file(transpath, 'r')
1863 sourcecontent = sourcefile.read()
1865 transitions = yaml.load(sourcecontent)
1866 except yaml.YAMLError, msg:
1867 # This shouldn't happen: there is a wrapper to edit the file which
1868 # checks it, but we'd rather be safe than end up rejecting
1870 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1873 # Now look through all defined transitions
1874 for trans in transitions:
1875 t = transitions[trans]
1876 source = t["source"]
1879 # Will be None if nothing is in testing.
1880 current = get_source_in_suite(source, "testing", session)
1881 if current is not None:
1882 compare = apt_pkg.VersionCompare(current.version, expected)
1884 if current is None or compare < 0:
1885 # This is still valid, the current version in testing is older than
1886 # the new version we wait for, or there is none in testing yet
1888 # Check if the source we look at is affected by this.
1889 if sourcepkg in t['packages']:
1890 # The source is affected, let's reject it.
1892 rejectmsg = "%s: part of the %s transition.\n\n" % (
1895 if current is not None:
1896 currentlymsg = "at version %s" % (current.version)
1898 currentlymsg = "not present in testing"
1900 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1902 rejectmsg += "\n".join(textwrap.wrap("""Your package
1903 is part of a testing transition designed to get %s migrated (it is
1904 currently %s, we need version %s). This transition is managed by the
1905 Release Team, and %s is the Release-Team member responsible for it.
1906 Please mail debian-release@lists.debian.org or contact %s directly if you
1907 need further assistance. You might want to upload to experimental until this
1908 transition is done."""
1909 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1911 self.rejects.append(rejectmsg)
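# Illustrative sketch (not part of dak): after yaml.load() the transitions file
# becomes a dict keyed by transition name.  Only the fields read above matter
# ("source", "rm", "reason", "packages", plus the expected version, whose key
# name is not shown in this excerpt and is assumed below); all values are made up.
_example_transitions = {
    'apt': {
        'source': 'apt',
        'new': '0.8.0',                 # assumed key name: version waited for in testing
        'rm': 'Some Release Team member',
        'reason': 'libapt ABI bump',
        'packages': ['apt', 'python-apt'],
    },
}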
1914 ###########################################################################
1915 # End check_signed_by_key checks
1916 ###########################################################################
1918 def build_summaries(self):
1919 """ Build a summary of changes the upload introduces. """
1921 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1923 short_summary = summary
1925 # This is for direport's benefit...
1926 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1929 summary += "Changes: " + f
1931 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1933 summary += self.announce(short_summary, 0)
1935 return (summary, short_summary)
1937 ###########################################################################
1939 def close_bugs(self, summary, action):
1941 Send mail to close bugs as instructed by the closes field in the changes file.
1942 Also add a line to summary if any work was done.
1944 @type summary: string
1945 @param summary: summary text, as given by L{build_summaries}
1948 @param action: If set to false, no real action will be done.
1951 @return: summary. If action was taken, extended by the list of closed bugs.
1955 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1957 bugs = self.pkg.changes["closes"].keys()
1963 summary += "Closing bugs: "
1965 summary += "%s " % (bug)
1968 self.Subst["__BUG_NUMBER__"] = bug
1969 if self.pkg.changes["distribution"].has_key("stable"):
1970 self.Subst["__STABLE_WARNING__"] = """
1971 Note that this package is not part of the released stable Debian
1972 distribution. It may have dependencies on other unreleased software,
1973 or other instabilities. Please take care if you wish to install it.
1974 The update will eventually make its way into the next released Debian
1977 self.Subst["__STABLE_WARNING__"] = ""
1978 mail_message = utils.TemplateSubst(self.Subst, template)
1979 utils.send_mail(mail_message)
1981 # Clear up after ourselves
1982 del self.Subst["__BUG_NUMBER__"]
1983 del self.Subst["__STABLE_WARNING__"]
1985 if action and self.logger:
1986 self.logger.log(["closing bugs"] + bugs)
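# Illustrative sketch (not part of dak): the per-bug loop above relies on a
# simple substitute-send-cleanup cycle.  'render' and 'send' stand in for
# utils.TemplateSubst and utils.send_mail; the helper itself is hypothetical.
def _example_close_bug_mails(bugs, subst, template, render, send):
    for bug in bugs:
        subst["__BUG_NUMBER__"] = bug
        send(render(subst, template))
        del subst["__BUG_NUMBER__"]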
1992 ###########################################################################
1994 def announce(self, short_summary, action):
1996 Send an announce mail about a new upload.
1998 @type short_summary: string
1999 @param short_summary: Short summary text to include in the mail
2002 @param action: If set to false, no real action will be done.
2005 @return: Text string describing the action taken.
2010 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2012 # Only do announcements for source uploads with a recent dpkg-dev installed
2013 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2014 self.pkg.changes["architecture"].has_key("source"):
2020 self.Subst["__SHORT_SUMMARY__"] = short_summary
2022 for dist in self.pkg.changes["distribution"].keys():
2023 suite = get_suite(dist)
2024 if suite is None: continue
2025 announce_list = suite.announce
2026 if announce_list == "" or lists_done.has_key(announce_list):
2029 lists_done[announce_list] = 1
2030 summary += "Announcing to %s\n" % (announce_list)
2034 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2035 if cnf.get("Dinstall::TrackingServer") and \
2036 self.pkg.changes["architecture"].has_key("source"):
2037 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2038 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2040 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2041 utils.send_mail(mail_message)
2043 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2045 if cnf.FindB("Dinstall::CloseBugs"):
2046 summary = self.close_bugs(summary, action)
2048 del self.Subst["__SHORT_SUMMARY__"]
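# Illustrative sketch (not part of dak): the loop above sends at most one
# announcement per distinct announce list, even when several target suites
# share the same list.  Hypothetical helper.
def _example_announce_targets(announce_lists):
    seen = {}
    targets = []
    for announce_list in announce_lists:
        if not announce_list or announce_list in seen:
            continue
        seen[announce_list] = 1
        targets.append(announce_list)
    return targets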
2052 ###########################################################################
2054 def accept (self, summary, short_summary, session=None):
2058 This moves all files referenced from the .changes into the pool,
2059 sends the accepted mail, announces to lists, closes bugs and
2060 also checks for override disparities. If enabled it will write out
2061 the version history for the BTS Version Tracking and will finally call
2064 @type summary: string
2065 @param summary: Summary text
2067 @type short_summary: string
2068 @param short_summary: Short summary
2072 stats = SummaryStats()
2075 self.logger.log(["installing changes", self.pkg.changes_file])
2079 # Add the .dsc file to the DB first
2080 for newfile, entry in self.pkg.files.items():
2081 if entry["type"] == "dsc":
2082 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2086 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2087 for newfile, entry in self.pkg.files.items():
2088 if entry["type"] == "deb":
2089 poolfiles.append(add_deb_to_db(self, newfile, session))
2091 # If this is a sourceful diff only upload that is moving
2092 # cross-component we need to copy the .orig files into the new
2093 # component too for the same reasons as above.
2094 # XXX: mhy: I think this should be in add_dsc_to_db
2095 if self.pkg.changes["architecture"].has_key("source"):
2096 for orig_file in self.pkg.orig_files.keys():
2097 if not self.pkg.orig_files[orig_file].has_key("id"):
2098 continue # Skip if it's not in the pool
2099 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2100 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2101 continue # Skip if the location didn't change
2104 oldf = get_poolfile_by_id(orig_file_id, session)
2105 old_filename = os.path.join(oldf.location.path, oldf.filename)
2106 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2107 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2109 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2111 # TODO: Care about size/md5sum collisions etc
2112 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2114 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2116 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2117 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2121 # Don't reference the old file from this changes
2123 if p.file_id == oldf.file_id:
2126 poolfiles.append(newf)
2128 # Fix up the DSC references
2131 for df in source.srcfiles:
2132 if df.poolfile.file_id == oldf.file_id:
2133 # Add a new DSC entry and mark the old one for deletion
2134 # Don't do it in the loop so we don't change the thing we're iterating over
2136 newdscf.source_id = source.source_id
2137 newdscf.poolfile_id = newf.file_id
2138 session.add(newdscf)
2148 # Make sure that our source object is up-to-date
2149 session.expire(source)
2151 # Add changelog information to the database
2152 self.store_changelog()
2154 # Install the files into the pool
2155 for newfile, entry in self.pkg.files.items():
2156 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2157 utils.move(newfile, destination)
2158 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2159 stats.accept_bytes += float(entry["size"])
2161 # Copy the .changes file across for suites which need it.
2162 copy_changes = dict([(x.copychanges, '')
2163 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2164 if x.copychanges is not None])
2166 for dest in copy_changes.keys():
2167 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2169 # We're done - commit the database changes
2171 # Our SQL session will automatically start a new transaction after
2174 # Move the .changes into the 'done' directory
2175 utils.move(self.pkg.changes_file,
2176 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2178 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2179 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2182 self.Subst["__SUMMARY__"] = summary
2183 mail_message = utils.TemplateSubst(self.Subst,
2184 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2185 utils.send_mail(mail_message)
2186 self.announce(short_summary, 1)
2188 ## Helper stuff for DebBugs Version Tracking
2189 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2190 if self.pkg.changes["architecture"].has_key("source"):
2191 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2192 version_history = os.fdopen(fd, 'w')
2193 version_history.write(self.pkg.dsc["bts changelog"])
2194 version_history.close()
2195 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2196 self.pkg.changes_file[:-8]+".versions")
2197 os.rename(temp_filename, filename)
2198 os.chmod(filename, 0644)
2200 # Write out the binary -> source mapping.
2201 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2202 debinfo = os.fdopen(fd, 'w')
2203 for name, entry in sorted(self.pkg.files.items()):
2204 if entry["type"] == "deb":
2205 line = " ".join([entry["package"], entry["version"],
2206 entry["architecture"], entry["source package"],
2207 entry["source version"]])
2208 debinfo.write(line+"\n")
2210 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2211 self.pkg.changes_file[:-8]+".debinfo")
2212 os.rename(temp_filename, filename)
2213 os.chmod(filename, 0644)
2217 # Set up our copy queues (e.g. buildd queues)
2218 for suite_name in self.pkg.changes["distribution"].keys():
2219 suite = get_suite(suite_name, session)
2220 for q in suite.copy_queues:
2222 q.add_file_from_pool(f)
2227 stats.accept_count += 1
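# Note on the BTS version-tracking files written above (descriptive only): the
# ".versions" file carries the changelog-derived version history from the .dsc,
# while each ".debinfo" line maps one binary to its source, space-separated:
#
#   <package> <version> <architecture> <source package> <source version>
#
# e.g. (made-up values) "foo 1.2-1+b1 amd64 foo 1.2-1" for a binNMU.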
2229 def check_override(self):
2231 Checks override entries for validity. Mails "Override disparity" warnings,
2232 if that feature is enabled.
2234 Abandons the check if
2235 - override disparity checks are disabled
2236 - mail sending is disabled
2241 # Abandon the check if override disparity checks have been disabled
2242 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2245 summary = self.pkg.check_override()
2250 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2253 self.Subst["__SUMMARY__"] = summary
2254 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2255 utils.send_mail(mail_message)
2256 del self.Subst["__SUMMARY__"]
2258 ###########################################################################
2260 def remove(self, from_dir=None):
2262 Used (for instance) in p-u to remove the package from unchecked
2264 Also removes the package from the holding area.
2266 if from_dir is None:
2267 from_dir = self.pkg.directory
2270 for f in self.pkg.files.keys():
2271 os.unlink(os.path.join(from_dir, f))
2272 if os.path.exists(os.path.join(h.holding_dir, f)):
2273 os.unlink(os.path.join(h.holding_dir, f))
2275 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2276 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2277 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2279 ###########################################################################
2281 def move_to_queue (self, queue):
2283 Move files to a destination queue using the permissions in the table
2286 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2287 queue.path, perms=int(queue.change_perms, 8))
2288 for f in self.pkg.files.keys():
2289 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2291 ###########################################################################
2293 def force_reject(self, reject_files):
2295 Forcefully move files from the current directory to the
2296 reject directory. If any file already exists in the reject
2297 directory it will be moved to the morgue to make way for
2300 @type reject_files: dict
2301 @param reject_files: file dictionary
2307 for file_entry in reject_files:
2308 # Skip any files which don't exist or which we don't have permission to copy.
2309 if os.access(file_entry, os.R_OK) == 0:
2312 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2315 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2317 # File exists? Let's find a new name by adding a number
2318 if e.errno == errno.EEXIST:
2320 dest_file = utils.find_next_free(dest_file, 255)
2321 except NoFreeFilenameError:
2322 # Something's either gone badly Pete Tong, or
2323 # someone is trying to exploit us.
2324 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2327 # Make sure we really got it
2329 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2332 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2336 # If we got here, we own the destination file, so we can
2337 # safely overwrite it.
2338 utils.move(file_entry, dest_file, 1, perms=0660)
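# Illustrative sketch (not part of dak): the claim-with-O_EXCL pattern used
# above, which avoids racing another process for a name in the reject
# directory.  'next_free' stands in for utils.find_next_free; the helper
# itself is hypothetical.
def _example_claim_reject_name(path, next_free):
    import os, errno
    flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
    try:
        return os.open(path, flags, 0644), path
    except OSError, e:
        if e.errno != errno.EEXIST:
            raise
        # Name already taken: pick a numbered alternative and claim that instead.
        path = next_free(path)
        return os.open(path, flags, 0644), path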
2341 ###########################################################################
2342 def do_reject (self, manual=0, reject_message="", notes=""):
2344 Reject an upload. If called without a reject message or C{manual} is
2345 true, spawn an editor so the user can write one.
2348 @param manual: manual or automated rejection
2350 @type reject_message: string
2351 @param reject_message: A reject message
2356 # If we weren't given a manual rejection message, spawn an
2357 # editor so the user can add one in...
2358 if manual and not reject_message:
2359 (fd, temp_filename) = utils.temp_filename()
2360 temp_file = os.fdopen(fd, 'w')
2363 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2364 % (note.author, note.version, note.notedate, note.comment))
2366 editor = os.environ.get("EDITOR","vi")
2368 while answer == 'E':
2369 os.system("%s %s" % (editor, temp_filename))
2370 temp_fh = utils.open_file(temp_filename)
2371 reject_message = "".join(temp_fh.readlines())
2373 print "Reject message:"
2374 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2375 prompt = "[R]eject, Edit, Abandon, Quit ?"
2377 while prompt.find(answer) == -1:
2378 answer = utils.our_raw_input(prompt)
2379 m = re_default_answer.search(prompt)
2382 answer = answer[:1].upper()
2383 os.unlink(temp_filename)
2389 print "Rejecting.\n"
2393 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2394 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2396 # Move all the files into the reject directory
2397 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2398 self.force_reject(reject_files)
2400 # If we fail here someone is probably trying to exploit the race
2401 # so let's just raise an exception ...
2402 if os.path.exists(reason_filename):
2403 os.unlink(reason_filename)
2404 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2406 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2410 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2411 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2412 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2413 os.write(reason_fd, reject_message)
2414 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2416 # Build up the rejection email
2417 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2418 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2419 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2420 self.Subst["__REJECT_MESSAGE__"] = ""
2421 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2422 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2423 # Write the rejection email out as the <foo>.reason file
2424 os.write(reason_fd, reject_mail_message)
2426 del self.Subst["__REJECTOR_ADDRESS__"]
2427 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2428 del self.Subst["__CC__"]
2432 # Send the rejection mail
2433 utils.send_mail(reject_mail_message)
2436 self.logger.log(["rejected", self.pkg.changes_file])
2440 ################################################################################
2441 def in_override_p(self, package, component, suite, binary_type, filename, session):
2443 Check if a package already has override entries in the DB
2445 @type package: string
2446 @param package: package name
2448 @type component: string
2449 @param component: database id of the component
2452 @param suite: database id of the suite
2454 @type binary_type: string
2455 @param binary_type: type of the package
2457 @type filename: string
2458 @param filename: filename we check
2460 @return: the database result. But no one cares anyway.
2466 if binary_type == "": # must be source
2469 file_type = binary_type
2471 # Override suite name; used for example with proposed-updates
2472 oldsuite = get_suite(suite, session)
2473 if oldsuite is not None and oldsuite.overridesuite:
2474 suite = oldsuite.overridesuite
2476 result = get_override(package, suite, component, file_type, session)
2478 # If checking for a source package fall back on the binary override type
2479 if file_type == "dsc" and len(result) < 1:
2480 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2482 # Remember the section and priority so we can check them later if appropriate
2485 self.pkg.files[filename]["override section"] = result.section.section
2486 self.pkg.files[filename]["override priority"] = result.priority.priority
2491 ################################################################################
2492 def get_anyversion(self, sv_list, suite):
2495 @param sv_list: list of (suite, version) tuples to check
2498 @param suite: suite name
2504 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2505 for (s, v) in sv_list:
2506 if s in [ x.lower() for x in anysuite ]:
2507 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
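# Illustrative sketch (not part of dak): get_anyversion() keeps the highest
# version seen in the given suite or in any suite named by its
# VersionChecks::Enhances setting.  Hypothetical stand-alone equivalent
# (apt_pkg is already imported elsewhere in this module).
def _example_anyversion(sv_list, suites):
    best = None
    for s, v in sv_list:
        if s in [x.lower() for x in suites]:
            if not best or apt_pkg.VersionCompare(best, v) <= 0:
                best = v
    return best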
2512 ################################################################################
2514 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2517 @param sv_list: list of (suite, version) tuples to check
2519 @type filename: string
2520 @param filename: filename of the package being checked (used in reject messages)
2522 @type new_version: string
2523 @param new_version: version of the package being uploaded
2525 Ensure versions are newer than existing packages in target
2526 suites and that cross-suite version checking rules as
2527 set out in the conf file are satisfied.
2532 # Check versions for each target suite
2533 for target_suite in self.pkg.changes["distribution"].keys():
2534 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2535 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2537 # Enforce "must be newer than target suite" even if conffile omits it
2538 if target_suite not in must_be_newer_than:
2539 must_be_newer_than.append(target_suite)
2541 for (suite, existent_version) in sv_list:
2542 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2544 if suite in must_be_newer_than and sourceful and vercmp < 1:
2545 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2547 if suite in must_be_older_than and vercmp > -1:
2550 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2551 # we really use the other suite, ignoring the conflicting one ...
2552 addsuite = self.pkg.changes["distribution-version"][suite]
2554 add_version = self.get_anyversion(sv_list, addsuite)
2555 target_version = self.get_anyversion(sv_list, target_suite)
2558 # not add_version can only happen if we map to a suite
2559 # that doesn't enhance the suite we're propup'ing from.
2560 # so "propup-ver x a b c; map a d" is a problem only if
2561 # d doesn't enhance a.
2563 # i think we could always propagate in this case, rather
2564 # than complaining. either way, this isn't a REJECT issue
2566 # And - we really should complain to the dorks who configured dak
2567 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2568 self.pkg.changes.setdefault("propdistribution", {})
2569 self.pkg.changes["propdistribution"][addsuite] = 1
2571 elif not target_version:
2572 # not target_version is true when the package is NEW
2573 # we could just stick with the "...old version..." REJECT
2574 # for this, I think.
2575 self.rejects.append("Won't propagate NEW packages.")
2576 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2577 # propagation would be redundant. No need to reject though.
2578 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2580 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2581 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2583 self.warnings.append("Propagating upload to %s" % (addsuite))
2584 self.pkg.changes.setdefault("propdistribution", {})
2585 self.pkg.changes["propdistribution"][addsuite] = 1
2589 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
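# Note (descriptive only): the MustBeNewerThan / MustBeOlderThan lists read
# above come from the dak configuration, per target suite, conceptually along
# these lines (made-up suite names, apt.conf-style syntax assumed):
#
#   Suite::unstable::VersionChecks::MustBeNewerThan { stable; testing; };
#   Suite::testing-proposed-updates::VersionChecks::MustBeOlderThan { unstable; };
#
# A sourceful upload whose version is not strictly newer than what a
# "must be newer than" suite already has is rejected; a version not strictly
# older than a "must be older than" suite either triggers the propagation
# handling above or a reject.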
2591 ################################################################################
2592 def check_binary_against_db(self, filename, session):
2593 # Ensure version is sane
2594 self.cross_suite_version_check( \
2595 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2596 self.pkg.files[filename]["architecture"], session),
2597 filename, self.pkg.files[filename]["version"], sourceful=False)
2599 # Check for any existing copies of the file
2600 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2601 q = q.filter_by(version=self.pkg.files[filename]["version"])
2602 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2605 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2607 ################################################################################
2609 def check_source_against_db(self, filename, session):
2610 source = self.pkg.dsc.get("source")
2611 version = self.pkg.dsc.get("version")
2613 # Ensure version is sane
2614 self.cross_suite_version_check( \
2615 get_suite_version_by_source(source, session), filename, version,
2618 ################################################################################
2619 def check_dsc_against_db(self, filename, session):
2622 @warning: NB: this function can remove entries from the 'files' index [if
2623 the orig tarball is a duplicate of the one in the archive]; if
2624 you're iterating over 'files' and call this function as part of
2625 the loop, be sure to add a check to the top of the loop to
2626 ensure you haven't just tried to dereference the deleted entry.
2631 self.pkg.orig_files = {} # XXX: do we need to clear it?
2632 orig_files = self.pkg.orig_files
2634 # Try and find all files mentioned in the .dsc. This has
2635 # to work harder to cope with the multiple possible
2636 # locations of an .orig.tar.gz.
2637 # The ordering on the select is needed to pick the newest orig
2638 # when it exists in multiple places.
2639 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2641 if self.pkg.files.has_key(dsc_name):
2642 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2643 actual_size = int(self.pkg.files[dsc_name]["size"])
2644 found = "%s in incoming" % (dsc_name)
2646 # Check the file does not already exist in the archive
2647 ql = get_poolfile_like_name(dsc_name, session)
2649 # Strip out anything that isn't '%s' or '/%s$'
2651 if not i.filename.endswith(dsc_name):
2654 # "[dak] has not broken them. [dak] has fixed a
2655 # brokenness. Your crappy hack exploited a bug in
2658 # "(Come on! I thought it was always obvious that
2659 # one just doesn't release different files with
2660 # the same name and version.)"
2661 # -- ajk@ on d-devel@l.d.o
2664 # Ignore exact matches for .orig.tar.gz
2666 if re_is_orig_source.match(dsc_name):
2668 if self.pkg.files.has_key(dsc_name) and \
2669 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2670 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2671 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2672 # TODO: Don't delete the entry, just mark it as not needed
2673 # This would fix the stupidity of changing something we often iterate over
2674 # whilst we're doing it
2675 del self.pkg.files[dsc_name]
2676 dsc_entry["files id"] = i.file_id
2677 if not orig_files.has_key(dsc_name):
2678 orig_files[dsc_name] = {}
2679 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2682 # Don't bitch that we couldn't find this file later
2684 self.later_check_files.remove(dsc_name)
2690 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2692 elif re_is_orig_source.match(dsc_name):
2694 ql = get_poolfile_like_name(dsc_name, session)
2696 # Strip out anything that isn't '%s' or '/%s$'
2697 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2699 if not i.filename.endswith(dsc_name):
2703 # Unfortunately, we may get more than one match here if,
2704 # for example, the package was in potato but had an -sa
2705 # upload in woody. So we need to choose the right one.
2707 # default to something sane in case we don't match any or have only one
2712 old_file = os.path.join(i.location.path, i.filename)
2713 old_file_fh = utils.open_file(old_file)
2714 actual_md5 = apt_pkg.md5sum(old_file_fh)
2716 actual_size = os.stat(old_file)[stat.ST_SIZE]
2717 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2720 old_file = os.path.join(i.location.path, i.filename)
2721 old_file_fh = utils.open_file(old_file)
2722 actual_md5 = apt_pkg.md5sum(old_file_fh)
2724 actual_size = os.stat(old_file)[stat.ST_SIZE]
2726 suite_type = x.location.archive_type
2727 # need this for updating dsc_files in install()
2728 dsc_entry["files id"] = x.file_id
2729 # See install() in process-accepted...
2730 if not orig_files.has_key(dsc_name):
2731 orig_files[dsc_name] = {}
2732 orig_files[dsc_name]["id"] = x.file_id
2733 orig_files[dsc_name]["path"] = old_file
2734 orig_files[dsc_name]["location"] = x.location.location_id
2736 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2737 # Not there? Check the queue directories...
2738 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2739 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2741 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2742 if os.path.exists(in_otherdir):
2743 in_otherdir_fh = utils.open_file(in_otherdir)
2744 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2745 in_otherdir_fh.close()
2746 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2748 if not orig_files.has_key(dsc_name):
2749 orig_files[dsc_name] = {}
2750 orig_files[dsc_name]["path"] = in_otherdir
2753 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2756 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2758 if actual_md5 != dsc_entry["md5sum"]:
2759 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2760 if actual_size != int(dsc_entry["size"]):
2761 self.rejects.append("size for %s doesn't match %s." % (found, filename))
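# Note (descriptive only): after the lookups above, self.pkg.orig_files maps
# each orig tarball named in the .dsc to where it was found, roughly:
#
#   { "foo_1.2.orig.tar.gz": { "path": ".../pool/main/f/foo/foo_1.2.orig.tar.gz",
#                              "id": <pool file id>,        # only when already in the pool
#                              "location": <location id> } }
#
# Files found only in a queue directory carry just "path"; the example
# filename above is made up.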
2763 ################################################################################
2764 # This is used by process-new and process-holding to recheck a changes file
2765 # at the time we're running. It mainly wraps various other internal functions
2766 # and is similar to accepted_checks - these should probably be tidied up
2768 def recheck(self, session):
2770 for f in self.pkg.files.keys():
2771 # The .orig.tar.gz can disappear out from under us if it's a
2772 # duplicate of one in the archive.
2773 if not self.pkg.files.has_key(f):
2776 entry = self.pkg.files[f]
2778 # Check that the source still exists
2779 if entry["type"] == "deb":
2780 source_version = entry["source version"]
2781 source_package = entry["source package"]
2782 if not self.pkg.changes["architecture"].has_key("source") \
2783 and not source_exists(source_package, source_version, \
2784 suites = self.pkg.changes["distribution"].keys(), session = session):
2785 source_epochless_version = re_no_epoch.sub('', source_version)
2786 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2788 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2789 if cnf.has_key("Dir::Queue::%s" % (q)):
2790 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2793 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2795 # Version and file overwrite checks
2796 if entry["type"] == "deb":
2797 self.check_binary_against_db(f, session)
2798 elif entry["type"] == "dsc":
2799 self.check_source_against_db(f, session)
2800 self.check_dsc_against_db(f, session)
2802 ################################################################################
2803 def accepted_checks(self, overwrite_checks, session):
2804 # Recheck anything that relies on the database, since that's not
2805 # frozen between accept and our run time when called from p-a.
2807 # overwrite_checks is set to False when installing to stable/oldstable
2812 # Find the .dsc (again)
2814 for f in self.pkg.files.keys():
2815 if self.pkg.files[f]["type"] == "dsc":
2818 for checkfile in self.pkg.files.keys():
2819 # The .orig.tar.gz can disappear out from under us if it's a
2820 # duplicate of one in the archive.
2821 if not self.pkg.files.has_key(checkfile):
2824 entry = self.pkg.files[checkfile]
2826 # Check that the source still exists
2827 if entry["type"] == "deb":
2828 source_version = entry["source version"]
2829 source_package = entry["source package"]
2830 if not self.pkg.changes["architecture"].has_key("source") \
2831 and not source_exists(source_package, source_version, \
2832 suites = self.pkg.changes["distribution"].keys(), \
2834 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2836 # Version and file overwrite checks
2837 if overwrite_checks:
2838 if entry["type"] == "deb":
2839 self.check_binary_against_db(checkfile, session)
2840 elif entry["type"] == "dsc":
2841 self.check_source_against_db(checkfile, session)
2842 self.check_dsc_against_db(dsc_filename, session)
2844 # propagate in case it is in the override tables:
2845 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2846 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2847 propogate[suite] = 1
2849 nopropogate[suite] = 1
2851 for suite in propogate.keys():
2852 if suite in nopropogate:
2854 self.pkg.changes["distribution"][suite] = 1
2856 for checkfile in self.pkg.files.keys():
2857 # Check the package is still in the override tables
entry = self.pkg.files[checkfile]
2858 for suite in self.pkg.changes["distribution"].keys():
2859 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2860 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2862 ################################################################################
2863 # If any file of an upload has a recent mtime then chances are good
2864 # the file is still being uploaded.
2866 def upload_too_new(self):
2869 # Move back to the original directory to get accurate time stamps
2871 os.chdir(self.pkg.directory)
2872 file_list = self.pkg.files.keys()
2873 file_list.extend(self.pkg.dsc_files.keys())
2874 file_list.append(self.pkg.changes_file)
2877 last_modified = time.time()-os.path.getmtime(f)
2878 if last_modified < int(cnf["Dinstall::SkipTime"]):
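# Illustrative sketch (not part of dak): the freshness test above - any file
# modified less than Dinstall::SkipTime seconds ago marks the upload as still
# in progress.  Hypothetical stand-alone equivalent (os and time are already
# imported elsewhere in this module).
def _example_upload_too_new(paths, skip_time):
    now = time.time()
    for p in paths:
        if now - os.path.getmtime(p) < skip_time:
            return True
    return False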
2887 def store_changelog(self):
2889 # Skip binary-only upload if it is not a bin-NMU
2890 if not self.pkg.changes['architecture'].has_key('source'):
2891 from daklib.regexes import re_bin_only_nmu
2892 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2895 session = DBConn().session()
2897 # Check if upload already has a changelog entry
2898 query = """SELECT changelog_id FROM changes WHERE source = :source
2899 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2900 if session.execute(query, {'source': self.pkg.changes['source'], \
2901 'version': self.pkg.changes['version'], \
2902 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2906 # Add current changelog text into changelogs_text table, return created ID
2907 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2908 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2910 # Link ID to the upload available in changes table
2911 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2912 AND version = :version AND architecture = :architecture"""
2913 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2914 'version': self.pkg.changes['version'], \
2915 'architecture': " ".join(self.pkg.changes['architecture'].keys())})