5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 # suppress some deprecation warnings in squeeze related to apt_pkg
62 warnings.filterwarnings('ignore', \
63 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
65 warnings.filterwarnings('ignore', \
66 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
71 def get_type(f, session):
73 Get the file type of C{f}
76 @param f: file entry from Changes object
78 @type session: SQLA Session
79 @param session: SQL Alchemy session object
86 if f.has_key("dbtype"):
87 file_type = f["dbtype"]
88 elif re_source_ext.match(f["type"]):
92 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
94 # Validate the override type
95 type_id = get_override_type(file_type, session)
97 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
101 ################################################################################
103 # Determine what parts in a .changes are NEW
105 def determine_new(filename, changes, files, warn=1, session = None, dsc = None):
107 Determine what parts in a C{changes} file are NEW.
110 @param filename: changes filename
112 @type changes: Upload.Pkg.changes dict
113 @param changes: Changes dictionary
115 @type files: Upload.Pkg.files dict
116 @param files: Files dictionary
119 @param warn: Warn if overrides are added for (old)stable
121 @type dsc: Upload.Pkg.dsc dict
122 @param dsc: (optional) Dsc dictionary
125 @return: dictionary of NEW components.
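An illustrative (made-up) entry in the returned dictionary, matching the keys
filled in below:
new['foo'] = { 'priority': 'optional', 'section': 'utils', 'type': 'deb',
'component': 'main', 'files': ['foo_1.0-1_amd64.deb'] }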
128 # TODO: This should all use the database instead of parsing the changes
133 dbchg = get_dbchange(filename, session)
135 print "Warning: cannot find changes file in database; won't check byhand"
137 # Try to get the Package-Set field from an included .dsc file (if possible).
139 new = build_package_set(dsc, session)
141 # Build up a list of potentially new things
142 for name, f in files.items():
143 # Keep a record of byhand elements
144 if f["section"] == "byhand":
149 priority = f["priority"]
150 section = f["section"]
151 file_type = get_type(f, session)
152 component = f["component"]
154 if file_type == "dsc":
157 if not new.has_key(pkg):
159 new[pkg]["priority"] = priority
160 new[pkg]["section"] = section
161 new[pkg]["type"] = file_type
162 new[pkg]["component"] = component
163 new[pkg]["files"] = []
165 old_type = new[pkg]["type"]
166 if old_type != file_type:
167 # source gets trumped by deb or udeb
168 if old_type == "dsc":
169 new[pkg]["priority"] = priority
170 new[pkg]["section"] = section
171 new[pkg]["type"] = file_type
172 new[pkg]["component"] = component
174 new[pkg]["files"].append(name)
176 if f.has_key("othercomponents"):
177 new[pkg]["othercomponents"] = f["othercomponents"]
179 # Fix up the list of target suites
181 for suite in changes["suite"].keys():
182 oldsuite = get_suite(suite, session)
184 print "WARNING: Invalid suite %s found" % suite
187 if oldsuite.overridesuite:
188 newsuite = get_suite(oldsuite.overridesuite, session)
191 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
192 oldsuite.overridesuite, suite)
193 del changes["suite"][suite]
194 changes["suite"][oldsuite.overridesuite] = 1
196 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
197 oldsuite.overridesuite, suite)
199 # Check for unprocessed byhand files
200 if dbchg is not None:
201 for b in byhand.keys():
202 # Find the file entry in the database
204 for f in dbchg.files:
207 # If it's processed, we can ignore it
213 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
215 # Check for new stuff
216 for suite in changes["suite"].keys():
217 for pkg in new.keys():
218 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
220 for file_entry in new[pkg]["files"]:
221 if files[file_entry].has_key("new"):
222 del files[file_entry]["new"]
226 for s in ['stable', 'oldstable']:
227 if changes["suite"].has_key(s):
228 print "WARNING: overrides will be added for %s!" % s
229 for pkg in new.keys():
230 if new[pkg].has_key("othercomponents"):
231 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
235 ################################################################################
237 def check_valid(new, session = None):
239 Check if section and priority for NEW packages exist in database.
240 Additionally does sanity checks:
241 - debian-installer packages have to be udeb (or source)
242 - non-debian-installer packages cannot be udeb
243 - source priority can only be assigned to dsc file types
246 @param new: Dict of new packages with their section, priority and type.
249 for pkg in new.keys():
250 section_name = new[pkg]["section"]
251 priority_name = new[pkg]["priority"]
252 file_type = new[pkg]["type"]
254 section = get_section(section_name, session)
256 new[pkg]["section id"] = -1
258 new[pkg]["section id"] = section.section_id
260 priority = get_priority(priority_name, session)
262 new[pkg]["priority id"] = -1
264 new[pkg]["priority id"] = priority.priority_id
267 di = section_name.find("debian-installer") != -1
269 # If d-i, we must be udeb and vice-versa
270 if (di and file_type not in ("udeb", "dsc")) or \
271 (not di and file_type == "udeb"):
272 new[pkg]["section id"] = -1
274 # If dsc we need to be source and vice-versa
275 if (priority == "source" and file_type != "dsc") or \
276 (priority != "source" and file_type == "dsc"):
277 new[pkg]["priority id"] = -1
279 ###############################################################################
281 # Used by Upload.check_timestamps
282 class TarTime(object):
283 def __init__(self, future_cutoff, past_cutoff):
285 self.future_cutoff = future_cutoff
286 self.past_cutoff = past_cutoff
289 self.future_files = {}
290 self.ancient_files = {}
292 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
293 if MTime > self.future_cutoff:
294 self.future_files[Name] = MTime
295 if MTime < self.past_cutoff:
296 self.ancient_files[Name] = MTime
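# A minimal usage sketch (illustrative only; the real use is in
# Upload.check_timestamps below, driven by the Dinstall cutoff settings):
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1975", "%Y")))
#   apt_inst.debExtract(utils.open_file("foo.deb"), tar.callback, "data.tar.gz")
#   # tar.future_files / tar.ancient_files now map offending names to mtimes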
298 ###############################################################################
300 def prod_maintainer(notes, upload):
303 # Here we prepare an editor and get them ready to prod...
304 (fd, temp_filename) = utils.temp_filename()
305 temp_file = os.fdopen(fd, 'w')
307 temp_file.write(note.comment)
309 editor = os.environ.get("EDITOR","vi")
312 os.system("%s %s" % (editor, temp_filename))
313 temp_fh = utils.open_file(temp_filename)
314 prod_message = "".join(temp_fh.readlines())
316 print "Prod message:"
317 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
318 prompt = "[P]rod, Edit, Abandon, Quit ?"
320 while prompt.find(answer) == -1:
321 answer = utils.our_raw_input(prompt)
322 m = re_default_answer.search(prompt)
325 answer = answer[:1].upper()
326 os.unlink(temp_filename)
332 # Otherwise, do the prodding...
333 user_email_address = utils.whoami() + " <%s>" % (
334 cnf["Dinstall::MyAdminAddress"])
338 Subst["__FROM_ADDRESS__"] = user_email_address
339 Subst["__PROD_MESSAGE__"] = prod_message
340 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
342 prod_mail_message = utils.TemplateSubst(
343 Subst,cnf["Dir::Templates"]+"/process-new.prod")
346 utils.send_mail(prod_mail_message)
348 print "Sent prodding message"
350 ################################################################################
352 def edit_note(note, upload, session, trainee=False):
353 # Write the current data to a temporary file
354 (fd, temp_filename) = utils.temp_filename()
355 editor = os.environ.get("EDITOR","vi")
358 os.system("%s %s" % (editor, temp_filename))
359 temp_file = utils.open_file(temp_filename)
360 newnote = temp_file.read().rstrip()
363 print utils.prefix_multi_line_string(newnote," ")
364 prompt = "[D]one, Edit, Abandon, Quit ?"
366 while prompt.find(answer) == -1:
367 answer = utils.our_raw_input(prompt)
368 m = re_default_answer.search(prompt)
371 answer = answer[:1].upper()
372 os.unlink(temp_filename)
379 comment = NewComment()
380 comment.package = upload.pkg.changes["source"]
381 comment.version = upload.pkg.changes["version"]
382 comment.comment = newnote
383 comment.author = utils.whoami()
384 comment.trainee = trainee
388 ###############################################################################
390 # suite names DMs can upload to
391 dm_suites = ['unstable', 'experimental']
393 def get_newest_source(source, session):
394 'returns the newest DBSource object in dm_suites'
395 ## the most recent version of the package uploaded to unstable or
396 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
397 ## section of its control file
398 q = session.query(DBSource).filter_by(source = source). \
399 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
400 order_by(desc('source.version'))
403 def get_suite_version_by_source(source, session):
404 'returns a list of tuples (suite_name, version) for source package'
405 q = session.query(Suite.suite_name, DBSource.version). \
406 join(Suite.sources).filter_by(source = source)
409 def get_source_by_package_and_suite(package, suite_name, session):
411 returns a DBSource query filtered by DBBinary.package and this package's
414 return session.query(DBSource). \
415 join(DBSource.binaries).filter_by(package = package). \
416 join(DBBinary.suites).filter_by(suite_name = suite_name)
418 def get_suite_version_by_package(package, arch_string, session):
420 returns a list of tuples (suite_name, version) for binary package and
423 return session.query(Suite.suite_name, DBBinary.version). \
424 join(Suite.binaries).filter_by(package = package). \
425 join(DBBinary.architecture). \
426 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
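# Illustrative use of the query helpers above ('hello' and 'amd64' are made-up
# example values; callers pass their own session):
#   session = DBConn().session()
#   get_newest_source('hello', session)               # DBSource or None
#   get_suite_version_by_source('hello', session)     # e.g. [('unstable', '2.10-1')]
#   get_suite_version_by_package('hello', 'amd64', session)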
428 class Upload(object):
430 Everything that has to do with processing an upload.
438 ###########################################################################
441 """ Reset a number of internal variables."""
443 # Initialize the substitution template map
446 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
447 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
448 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
449 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
455 self.later_check_files = []
459 def package_info(self):
461 Format various messages from this Upload to send to the maintainer.
465 ('Reject Reasons', self.rejects),
466 ('Warnings', self.warnings),
467 ('Notes', self.notes),
471 for title, messages in msgs:
473 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
478 ###########################################################################
479 def update_subst(self):
480 """ Set up the per-package template substitution mappings """
484 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
485 if not self.pkg.changes.has_key("architecture") or not \
486 isinstance(self.pkg.changes["architecture"], dict):
487 self.pkg.changes["architecture"] = { "Unknown" : "" }
489 # and maintainer2047 may not exist.
490 if not self.pkg.changes.has_key("maintainer2047"):
491 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
493 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
494 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
495 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
497 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
498 if self.pkg.changes["architecture"].has_key("source") and \
499 self.pkg.changes["changedby822"] != "" and \
500 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
502 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
503 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
504 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
506 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
507 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
508 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
510 # Process policy doesn't set the fingerprint field and I don't want to make it
511 # do it for now as I don't want to have to deal with the case where we accepted
512 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
513 # the meantime so the package will be remarked as rejectable. Urgh.
514 # TODO: Fix this properly
515 if self.pkg.changes.has_key('fingerprint'):
516 session = DBConn().session()
517 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
518 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
519 if self.pkg.changes.has_key("sponsoremail"):
520 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
523 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
524 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
526 # Apply any global override of the Maintainer field
527 if cnf.get("Dinstall::OverrideMaintainer"):
528 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
529 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
531 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
532 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
533 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
534 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
536 ###########################################################################
537 def load_changes(self, filename):
539 Load a changes file and set up a dictionary around it. Also checks for mandatory
542 @type filename: string
543 @param filename: Changes filename, full path.
546 @return: whether the changes file was valid or not. We may want to
547 reject even if this is True (see what gets put in self.rejects).
548 This is simply to prevent us even trying things later which will
549 fail because we couldn't properly parse the file.
552 self.pkg.changes_file = filename
554 # Parse the .changes field into a dictionary
556 self.pkg.changes.update(parse_changes(filename))
557 except CantOpenError:
558 self.rejects.append("%s: can't read file." % (filename))
560 except ParseChangesError, line:
561 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
563 except ChangesUnicodeError:
564 self.rejects.append("%s: changes file not proper utf-8" % (filename))
567 # Parse the Files field from the .changes into another dictionary
569 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
570 except ParseChangesError, line:
571 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
573 except UnknownFormatError, format:
574 self.rejects.append("%s: unknown format '%s'." % (filename, format))
577 # Check for mandatory fields
578 for i in ("distribution", "source", "binary", "architecture",
579 "version", "maintainer", "files", "changes", "description"):
580 if not self.pkg.changes.has_key(i):
581 # Avoid undefined errors later
582 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
585 # Strip a source version in brackets from the source field
586 if re_strip_srcver.search(self.pkg.changes["source"]):
587 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
589 # Ensure the source field is a valid package name.
590 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
591 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
593 # Split multi-value fields into a lower-level dictionary
594 for i in ("architecture", "distribution", "binary", "closes"):
595 o = self.pkg.changes.get(i, "")
597 del self.pkg.changes[i]
599 self.pkg.changes[i] = {}
602 self.pkg.changes[i][j] = 1
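# e.g. (illustrative) "Architecture: source amd64" ends up as
# self.pkg.changes["architecture"] == { "source": 1, "amd64": 1 }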
604 # Fix the Maintainer: field to be RFC822/2047 compatible
606 (self.pkg.changes["maintainer822"],
607 self.pkg.changes["maintainer2047"],
608 self.pkg.changes["maintainername"],
609 self.pkg.changes["maintaineremail"]) = \
610 fix_maintainer (self.pkg.changes["maintainer"])
611 except ParseMaintError, msg:
612 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
613 % (filename, self.pkg.changes["maintainer"], msg))
615 # ...likewise for the Changed-By: field if it exists.
617 (self.pkg.changes["changedby822"],
618 self.pkg.changes["changedby2047"],
619 self.pkg.changes["changedbyname"],
620 self.pkg.changes["changedbyemail"]) = \
621 fix_maintainer (self.pkg.changes.get("changed-by", ""))
622 except ParseMaintError, msg:
623 self.pkg.changes["changedby822"] = ""
624 self.pkg.changes["changedby2047"] = ""
625 self.pkg.changes["changedbyname"] = ""
626 self.pkg.changes["changedbyemail"] = ""
628 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
629 % (filename, self.pkg.changes["changed-by"], msg))
631 # Ensure all the values in Closes: are numbers
632 if self.pkg.changes.has_key("closes"):
633 for i in self.pkg.changes["closes"].keys():
634 if re_isanum.match (i) == None:
635 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
637 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
638 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
639 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
641 # Check the .changes is non-empty
642 if not self.pkg.files:
643 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
646 # Changes was syntactically valid even if we'll reject
649 ###########################################################################
651 def check_distributions(self):
652 "Check and map the Distribution field"
656 # Handle suite mappings
657 for m in Cnf.ValueList("SuiteMappings"):
660 if mtype == "map" or mtype == "silent-map":
661 (source, dest) = args[1:3]
662 if self.pkg.changes["distribution"].has_key(source):
663 del self.pkg.changes["distribution"][source]
664 self.pkg.changes["distribution"][dest] = 1
665 if mtype != "silent-map":
666 self.notes.append("Mapping %s to %s." % (source, dest))
667 if self.pkg.changes.has_key("distribution-version"):
668 if self.pkg.changes["distribution-version"].has_key(source):
669 self.pkg.changes["distribution-version"][source]=dest
670 elif mtype == "map-unreleased":
671 (source, dest) = args[1:3]
672 if self.pkg.changes["distribution"].has_key(source):
673 for arch in self.pkg.changes["architecture"].keys():
674 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
675 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
676 del self.pkg.changes["distribution"][source]
677 self.pkg.changes["distribution"][dest] = 1
679 elif mtype == "ignore":
681 if self.pkg.changes["distribution"].has_key(suite):
682 del self.pkg.changes["distribution"][suite]
683 self.warnings.append("Ignoring %s as a target suite." % (suite))
684 elif mtype == "reject":
686 if self.pkg.changes["distribution"].has_key(suite):
687 self.rejects.append("Uploads to %s are not accepted." % (suite))
688 elif mtype == "propup-version":
689 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
691 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
692 if self.pkg.changes["distribution"].has_key(args[1]):
693 self.pkg.changes.setdefault("distribution-version", {})
694 for suite in args[2:]:
695 self.pkg.changes["distribution-version"][suite] = suite
697 # Ensure there is (still) a target distribution
698 if len(self.pkg.changes["distribution"].keys()) < 1:
699 self.rejects.append("No valid distribution remaining.")
701 # Ensure target distributions exist
702 for suite in self.pkg.changes["distribution"].keys():
703 if not Cnf.has_key("Suite::%s" % (suite)):
704 self.rejects.append("Unknown distribution `%s'." % (suite))
706 ###########################################################################
708 def binary_file_checks(self, f, session):
710 entry = self.pkg.files[f]
712 # Extract package control information
713 deb_file = utils.open_file(f)
715 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
717 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
719 # Can't continue, none of the checks on control would work.
722 # Check for mandatory "Description:"
725 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
727 self.rejects.append("%s: Missing Description in binary package" % (f))
732 # Check for mandatory fields
733 for field in [ "Package", "Architecture", "Version" ]:
734 if control.Find(field) == None:
736 self.rejects.append("%s: No %s field in control." % (f, field))
739 # Ensure the package name matches the one given in the .changes
740 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
741 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
743 # Validate the package field
744 package = control.Find("Package")
745 if not re_valid_pkg_name.match(package):
746 self.rejects.append("%s: invalid package name '%s'." % (f, package))
748 # Validate the version field
749 version = control.Find("Version")
750 if not re_valid_version.match(version):
751 self.rejects.append("%s: invalid version number '%s'." % (f, version))
753 # Ensure the architecture of the .deb is one we know about.
754 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
755 architecture = control.Find("Architecture")
756 upload_suite = self.pkg.changes["distribution"].keys()[0]
758 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
759 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
760 self.rejects.append("Unknown architecture '%s'." % (architecture))
762 # Ensure the architecture of the .deb is one of the ones
763 # listed in the .changes.
764 if not self.pkg.changes["architecture"].has_key(architecture):
765 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
767 # Sanity-check the Depends field
768 depends = control.Find("Depends")
770 self.rejects.append("%s: Depends field is empty." % (f))
772 # Sanity-check the Provides field
773 provides = control.Find("Provides")
775 provide = re_spacestrip.sub('', provides)
777 self.rejects.append("%s: Provides field is empty." % (f))
778 prov_list = provide.split(",")
779 for prov in prov_list:
780 if not re_valid_pkg_name.match(prov):
781 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
783 # If there is a Built-Using field, we need to check we can find the
784 # exact source version
785 built_using = control.Find("Built-Using")
788 entry["built-using"] = []
789 for dep in apt_pkg.parse_depends(built_using):
790 bu_s, bu_v, bu_e = dep[0]
791 # Check that it's an exact match dependency and we have
792 # some form of version
793 if bu_e != "=" or len(bu_v) < 1:
794 self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
796 # Find the source id for this version
797 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
799 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
801 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
803 except ValueError, e:
804 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
807 # Check the section & priority match those given in the .changes (non-fatal)
808 if control.Find("Section") and entry["section"] != "" \
809 and entry["section"] != control.Find("Section"):
810 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
811 (f, control.Find("Section", ""), entry["section"]))
812 if control.Find("Priority") and entry["priority"] != "" \
813 and entry["priority"] != control.Find("Priority"):
814 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
815 (f, control.Find("Priority", ""), entry["priority"]))
817 entry["package"] = package
818 entry["architecture"] = architecture
819 entry["version"] = version
820 entry["maintainer"] = control.Find("Maintainer", "")
822 if f.endswith(".udeb"):
823 self.pkg.files[f]["dbtype"] = "udeb"
824 elif f.endswith(".deb"):
825 self.pkg.files[f]["dbtype"] = "deb"
827 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
829 entry["source"] = control.Find("Source", entry["package"])
831 # Get the source version
832 source = entry["source"]
835 if source.find("(") != -1:
836 m = re_extract_src_version.match(source)
838 source_version = m.group(2)
840 if not source_version:
841 source_version = self.pkg.files[f]["version"]
843 entry["source package"] = source
844 entry["source version"] = source_version
846 # Ensure the filename matches the contents of the .deb
847 m = re_isadeb.match(f)
850 file_package = m.group(1)
851 if entry["package"] != file_package:
852 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
853 (f, file_package, entry["dbtype"], entry["package"]))
854 epochless_version = re_no_epoch.sub('', control.Find("Version"))
857 file_version = m.group(2)
858 if epochless_version != file_version:
859 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
860 (f, file_version, entry["dbtype"], epochless_version))
863 file_architecture = m.group(3)
864 if entry["architecture"] != file_architecture:
865 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
866 (f, file_architecture, entry["dbtype"], entry["architecture"]))
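# e.g. (illustrative) hello_2.10-1_amd64.deb must carry Package: hello,
# an epochless Version of 2.10-1 and Architecture: amd64 in its control file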
868 # Check for existent source
869 source_version = entry["source version"]
870 source_package = entry["source package"]
871 if self.pkg.changes["architecture"].has_key("source"):
872 if source_version != self.pkg.changes["version"]:
873 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
874 (source_version, f, self.pkg.changes["version"]))
876 # Check in the SQL database
877 if not source_exists(source_package, source_version, suites = \
878 self.pkg.changes["distribution"].keys(), session = session):
879 # Check in one of the other directories
880 source_epochless_version = re_no_epoch.sub('', source_version)
881 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
882 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
884 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
887 dsc_file_exists = False
888 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
889 if cnf.has_key("Dir::Queue::%s" % (myq)):
890 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
891 dsc_file_exists = True
894 if not dsc_file_exists:
895 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
897 # Check the version and for file overwrites
898 self.check_binary_against_db(f, session)
900 def source_file_checks(self, f, session):
901 entry = self.pkg.files[f]
903 m = re_issource.match(f)
907 entry["package"] = m.group(1)
908 entry["version"] = m.group(2)
909 entry["type"] = m.group(3)
911 # Ensure the source package name matches the Source field in the .changes
912 if self.pkg.changes["source"] != entry["package"]:
913 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
915 # Ensure the source version matches the version in the .changes file
916 if re_is_orig_source.match(f):
917 changes_version = self.pkg.changes["chopversion2"]
919 changes_version = self.pkg.changes["chopversion"]
921 if changes_version != entry["version"]:
922 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
924 # Ensure the .changes lists source in the Architecture field
925 if not self.pkg.changes["architecture"].has_key("source"):
926 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
928 # Check the signature of a .dsc file
929 if entry["type"] == "dsc":
930 # check_signature returns either:
931 # (None, [list, of, rejects]) or (signature, [])
932 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
934 self.rejects.append(j)
936 entry["architecture"] = "source"
938 def per_suite_file_checks(self, f, suite, session):
940 entry = self.pkg.files[f]
943 if entry.has_key("byhand"):
946 # Check we have fields we need to do these checks
948 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
949 if not entry.has_key(m):
950 self.rejects.append("file '%s' does not have field %s set" % (f, m))
956 # Handle component mappings
957 for m in cnf.ValueList("ComponentMappings"):
958 (source, dest) = m.split()
959 if entry["component"] == source:
960 entry["original component"] = source
961 entry["component"] = dest
963 # Ensure the component is valid for the target suite
964 if cnf.has_key("Suite:%s::Components" % (suite)) and \
965 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
966 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
969 # Validate the component
970 if not get_component(entry["component"], session):
971 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
974 # See if the package is NEW
975 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
978 # Validate the priority
979 if entry["priority"].find('/') != -1:
980 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
982 # Determine the location
983 location = cnf["Dir::Pool"]
984 l = get_location(location, entry["component"], session=session)
986 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
987 entry["location id"] = -1
989 entry["location id"] = l.location_id
991 # Check the md5sum & size against existing files (if any)
992 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
994 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
995 entry["size"], entry["md5sum"], entry["location id"])
998 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
999 elif found is False and poolfile is not None:
1000 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1002 if poolfile is None:
1003 entry["files id"] = None
1005 entry["files id"] = poolfile.file_id
1007 # Check for packages that have moved from one component to another
1008 entry['suite'] = suite
1009 arch_list = [entry["architecture"], 'all']
1010 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1011 [suite], arch_list = arch_list, session = session)
1012 if component is not None:
1013 entry["othercomponents"] = component
1015 def check_files(self, action=True):
1016 file_keys = self.pkg.files.keys()
1022 os.chdir(self.pkg.directory)
1024 ret = holding.copy_to_holding(f)
1026 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1030 # check we already know the changes file
1031 # [NB: this check must be done post-suite mapping]
1032 base_filename = os.path.basename(self.pkg.changes_file)
1034 session = DBConn().session()
1037 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1038 # if in the pool or in a queue other than unchecked, reject
1039 if (dbc.in_queue is None) \
1040 or (dbc.in_queue is not None
1041 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1042 self.rejects.append("%s file already known to dak" % base_filename)
1043 except NoResultFound, e:
1047 has_binaries = False
1050 for f, entry in self.pkg.files.items():
1051 # Ensure the file does not already exist in one of the accepted directories
1052 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1053 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1054 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1055 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1057 if not re_taint_free.match(f):
1058 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1060 # Check the file is readable
1061 if os.access(f, os.R_OK) == 0:
1062 # When running in -n, copy_to_holding() won't have
1063 # generated the reject_message, so we need to.
1065 if os.path.exists(f):
1066 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1068 # Don't directly reject, mark to check later to deal with orig's
1069 # we can find in the pool
1070 self.later_check_files.append(f)
1071 entry["type"] = "unreadable"
1074 # If it's byhand skip remaining checks
1075 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1077 entry["type"] = "byhand"
1079 # Checks for a binary package...
1080 elif re_isadeb.match(f):
1082 entry["type"] = "deb"
1084 # This routine appends to self.rejects/warnings as appropriate
1085 self.binary_file_checks(f, session)
1087 # Checks for a source package...
1088 elif re_issource.match(f):
1091 # This routine appends to self.rejects/warnings as appropriate
1092 self.source_file_checks(f, session)
1094 # Not a binary or source package? Assume byhand...
1097 entry["type"] = "byhand"
1099 # Per-suite file checks
1100 entry["oldfiles"] = {}
1101 for suite in self.pkg.changes["distribution"].keys():
1102 self.per_suite_file_checks(f, suite, session)
1106 # If the .changes file says it has source, it must have source.
1107 if self.pkg.changes["architecture"].has_key("source"):
1109 self.rejects.append("no source found and Architecture line in changes mention source.")
1111 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1112 self.rejects.append("source only uploads are not supported.")
1114 ###########################################################################
1116 def __dsc_filename(self):
1118 Returns: (Status, Dsc_Filename)
1120 Status: Boolean; True when there was no error, False otherwise
1121 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1126 for name, entry in self.pkg.files.items():
1127 if entry.has_key("type") and entry["type"] == "dsc":
1129 return False, "cannot process a .changes file with multiple .dsc's."
1133 if not dsc_filename:
1134 return False, "source uploads must contain a dsc file"
1136 return True, dsc_filename
1138 def load_dsc(self, action=True, signing_rules=1):
1140 Find and load the dsc from self.pkg.files into self.dsc
1142 Returns: (Status, Reason)
1144 Status: Boolean; True when there was no error, False otherwise
1145 Reason: String; When Status is False this describes the error
1149 (status, dsc_filename) = self.__dsc_filename()
1151 # If status is false, dsc_filename has the reason
1152 return False, dsc_filename
1155 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1156 except CantOpenError:
1158 return False, "%s: can't read file." % (dsc_filename)
1159 except ParseChangesError, line:
1160 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1161 except InvalidDscError, line:
1162 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1163 except ChangesUnicodeError:
1164 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1168 ###########################################################################
1170 def check_dsc(self, action=True, session=None):
1171 """Returns bool indicating whether or not the source changes are valid"""
1172 # Ensure there is source to check
1173 if not self.pkg.changes["architecture"].has_key("source"):
1176 (status, reason) = self.load_dsc(action=action)
1178 self.rejects.append(reason)
1180 (status, dsc_filename) = self.__dsc_filename()
1182 # If status is false, dsc_filename has the reason
1183 self.rejects.append(dsc_filename)
1186 # Build up the file list of files mentioned by the .dsc
1188 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1189 except NoFilesFieldError:
1190 self.rejects.append("%s: no Files: field." % (dsc_filename))
1192 except UnknownFormatError, format:
1193 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1195 except ParseChangesError, line:
1196 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1199 # Enforce mandatory fields
1200 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1201 if not self.pkg.dsc.has_key(i):
1202 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1205 # Validate the source and version fields
1206 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1207 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1208 if not re_valid_version.match(self.pkg.dsc["version"]):
1209 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1211 # Only a limited list of source formats are allowed in each suite
1212 for dist in self.pkg.changes["distribution"].keys():
1213 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1214 if self.pkg.dsc["format"] not in allowed:
1215 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1217 # Validate the Maintainer field
1219 # We ignore the return value
1220 fix_maintainer(self.pkg.dsc["maintainer"])
1221 except ParseMaintError, msg:
1222 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1223 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1225 # Validate the build-depends field(s)
1226 for field_name in [ "build-depends", "build-depends-indep" ]:
1227 field = self.pkg.dsc.get(field_name)
1229 # Have apt try to parse them...
1231 apt_pkg.ParseSrcDepends(field)
1233 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1235 # Ensure the version number in the .dsc matches the version number in the .changes
1236 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1237 changes_version = self.pkg.files[dsc_filename]["version"]
1239 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1240 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1242 # Ensure the Files field contain only what's expected
1243 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1245 # Ensure source is newer than existing source in target suites
1246 session = DBConn().session()
1247 self.check_source_against_db(dsc_filename, session)
1248 self.check_dsc_against_db(dsc_filename, session)
1250 dbchg = get_dbchange(self.pkg.changes_file, session)
1252 # Finally, check if we're missing any files
1253 for f in self.later_check_files:
1255 # Check if we've already processed this file if we have a dbchg object
1258 for pf in dbchg.files:
1259 if pf.filename == f and pf.processed:
1260 self.notes.append('%s was already processed so we can go ahead' % f)
1262 del self.pkg.files[f]
1264 self.rejects.append("Could not find file %s references in changes" % f)
1270 ###########################################################################
1272 def get_changelog_versions(self, source_dir):
1273 """Extracts a the source package and (optionally) grabs the
1274 version history out of debian/changelog for the BTS."""
1278 # Find the .dsc (again)
1280 for f in self.pkg.files.keys():
1281 if self.pkg.files[f]["type"] == "dsc":
1284 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1285 if not dsc_filename:
1288 # Create a symlink mirror of the source files in our temporary directory
1289 for f in self.pkg.files.keys():
1290 m = re_issource.match(f)
1292 src = os.path.join(source_dir, f)
1293 # If a file is missing for whatever reason, give up.
1294 if not os.path.exists(src):
1297 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1298 self.pkg.orig_files[f].has_key("path"):
1300 dest = os.path.join(os.getcwd(), f)
1301 os.symlink(src, dest)
1303 # If the orig files are not a part of the upload, create symlinks to the
1305 for orig_file in self.pkg.orig_files.keys():
1306 if not self.pkg.orig_files[orig_file].has_key("path"):
1308 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1309 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1311 # Extract the source
1313 unpacked = UnpackedSource(dsc_filename)
1315 self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1318 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1321 # Get the upstream version
1322 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1323 if re_strip_revision.search(upstr_version):
1324 upstr_version = re_strip_revision.sub('', upstr_version)
1326 # Ensure the changelog file exists
1327 changelog_file = unpacked.get_changelog_file()
1328 if changelog_file is None:
1329 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1332 # Parse the changelog
1333 self.pkg.dsc["bts changelog"] = ""
1334 for line in changelog_file.readlines():
1335 m = re_changelog_versions.match(line)
1337 self.pkg.dsc["bts changelog"] += line
1338 changelog_file.close()
1341 # Check we found at least one revision in the changelog
1342 if not self.pkg.dsc["bts changelog"]:
1343 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1345 def check_source(self):
1347 # a) there's no source
1348 if not self.pkg.changes["architecture"].has_key("source"):
1351 tmpdir = utils.temp_dirname()
1353 # Move into the temporary directory
1357 # Get the changelog version history
1358 self.get_changelog_versions(cwd)
1360 # Move back and cleanup the temporary tree
1364 shutil.rmtree(tmpdir)
1366 if e.errno != errno.EACCES:
1368 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1370 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1371 # We probably have u-r or u-w directories so chmod everything
1373 cmd = "chmod -R u+rwx %s" % (tmpdir)
1374 result = os.system(cmd)
1376 utils.fubar("'%s' failed with result %s." % (cmd, result))
1377 shutil.rmtree(tmpdir)
1378 except Exception, e:
1379 print "foobar2 (%s)" % e
1380 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1382 ###########################################################################
1383 def ensure_hashes(self):
1384 # Make sure we recognise the format of the Files: field in the .changes
1385 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1386 if len(format) == 2:
1387 format = int(format[0]), int(format[1])
1389 format = int(float(format[0])), 0
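# e.g. (illustrative) "Format: 1.8" yields format == (1, 8), while a bare
# "Format: 1" falls back to (1, 0)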
1391 # We need to deal with the original changes blob, as the fields we need
1392 # might not be in the changes dict serialised into the .dak anymore.
1393 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1395 # Copy the checksums over to the current changes dict. This will keep
1396 # the existing modifications to it intact.
1397 for field in orig_changes:
1398 if field.startswith('checksums-'):
1399 self.pkg.changes[field] = orig_changes[field]
1401 # Check for unsupported hashes
1402 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1403 self.rejects.append(j)
1405 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1406 self.rejects.append(j)
1408 # If the .changes format predates the version in which a given hash was
1409 # introduced, calculate it ourselves rather than requiring it in the file
1410 for hashname, hashfunc, version in utils.known_hashes:
1411 # TODO: Move _ensure_changes_hash into this class
1412 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1413 self.rejects.append(j)
1414 if "source" in self.pkg.changes["architecture"]:
1415 # TODO: Move _ensure_dsc_hash into this class
1416 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1417 self.rejects.append(j)
1419 def check_hashes(self):
1420 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1421 self.rejects.append(m)
1423 for m in utils.check_size(".changes", self.pkg.files):
1424 self.rejects.append(m)
1426 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1427 self.rejects.append(m)
1429 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1430 self.rejects.append(m)
1432 self.ensure_hashes()
1434 ###########################################################################
1436 def ensure_orig(self, target_dir='.', session=None):
1438 Ensures that all orig files mentioned in the changes file are present
1439 in target_dir. If they do not exist, they are symlinked into place.
1441 A list containing the symlinks that were created is returned (so they
1448 for filename, entry in self.pkg.dsc_files.iteritems():
1449 if not re_is_orig_source.match(filename):
1450 # File is not an orig; ignore
1453 if os.path.exists(filename):
1454 # File exists, no need to continue
1457 def symlink_if_valid(path):
1458 f = utils.open_file(path)
1459 md5sum = apt_pkg.md5sum(f)
1462 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1463 expected = (int(entry['size']), entry['md5sum'])
1465 if fingerprint != expected:
1468 dest = os.path.join(target_dir, filename)
1470 os.symlink(path, dest)
1471 symlinked.append(dest)
1477 session_ = DBConn().session()
1482 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1483 poolfile_path = os.path.join(
1484 poolfile.location.path, poolfile.filename
1487 if symlink_if_valid(poolfile_path):
1497 # Look in some other queues for the file
1498 queues = ('New', 'Byhand', 'ProposedUpdates',
1499 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1501 for queue in queues:
1502 if not cnf.get('Dir::Queue::%s' % queue):
1505 queuefile_path = os.path.join(
1506 cnf['Dir::Queue::%s' % queue], filename
1509 if not os.path.exists(queuefile_path):
1510 # Does not exist in this queue
1513 if symlink_if_valid(queuefile_path):
1518 ###########################################################################
1520 def check_lintian(self):
1522 Extends self.rejects by checking the output of lintian against tags
1523 specified in Dinstall::LintianTags.
1528 # Don't reject binary uploads
1529 if not self.pkg.changes['architecture'].has_key('source'):
1532 # Only check some distributions
1533 for dist in ('unstable', 'experimental'):
1534 if dist in self.pkg.changes['distribution']:
1539 # If we do not have a tagfile, don't do anything
1540 tagfile = cnf.get("Dinstall::LintianTags")
1544 # Parse the yaml file
1545 sourcefile = file(tagfile, 'r')
1546 sourcecontent = sourcefile.read()
1550 lintiantags = yaml.load(sourcecontent)['lintian']
1551 except yaml.YAMLError, msg:
1552 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1555 # Try and find all orig mentioned in the .dsc
1556 symlinked = self.ensure_orig()
1558 # Setup the input file for lintian
1559 fd, temp_filename = utils.temp_filename()
1560 temptagfile = os.fdopen(fd, 'w')
1561 for tags in lintiantags.values():
1562 temptagfile.writelines(['%s\n' % x for x in tags])
1566 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1567 (temp_filename, self.pkg.changes_file)
1569 result, output = commands.getstatusoutput(cmd)
1571 # Remove our tempfile and any symlinks we created
1572 os.unlink(temp_filename)
1574 for symlink in symlinked:
1578 utils.warn("lintian failed for %s [return code: %s]." % \
1579 (self.pkg.changes_file, result))
1580 utils.warn(utils.prefix_multi_line_string(output, \
1581 " [possible output:] "))
1586 [self.pkg.changes_file, "check_lintian"] + list(txt)
1590 parsed_tags = parse_lintian_output(output)
1591 self.rejects.extend(
1592 generate_reject_messages(parsed_tags, lintiantags, log=log)
1595 ###########################################################################
1596 def check_urgency(self):
1598 if self.pkg.changes["architecture"].has_key("source"):
1599 if not self.pkg.changes.has_key("urgency"):
1600 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1601 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1602 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1603 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1604 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1605 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1607 ###########################################################################
1609 # Sanity check the time stamps of files inside debs.
1610 # [Files in the near future cause ugly warnings and extreme time
1611 # travel can cause errors on extraction]
1613 def check_timestamps(self):
1616 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1617 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1618 tar = TarTime(future_cutoff, past_cutoff)
1620 for filename, entry in self.pkg.files.items():
1621 if entry["type"] == "deb":
1624 deb_file = utils.open_file(filename)
1625 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1628 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1629 except SystemError, e:
1630 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1631 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1634 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1638 future_files = tar.future_files.keys()
1640 num_future_files = len(future_files)
1641 future_file = future_files[0]
1642 future_date = tar.future_files[future_file]
1643 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1644 % (filename, num_future_files, future_file, time.ctime(future_date)))
1646 ancient_files = tar.ancient_files.keys()
1648 num_ancient_files = len(ancient_files)
1649 ancient_file = ancient_files[0]
1650 ancient_date = tar.ancient_files[ancient_file]
1651 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1652 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1654 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1656 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1657 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1659 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1665 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1666 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1667 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1668 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1669 self.pkg.changes["sponsoremail"] = uid_email
1674 ###########################################################################
1675 # check_signed_by_key checks
1676 ###########################################################################
1678 def check_signed_by_key(self):
1679 """Ensure the .changes is signed by an authorized uploader."""
1680 session = DBConn().session()
1682 # First of all we check that the person has proper upload permissions
1683 # and that this upload isn't blocked
1684 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1687 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1690 # TODO: Check that import-keyring adds UIDs properly
1692 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1695 # Check that the fingerprint which uploaded has permission to do so
1696 self.check_upload_permissions(fpr, session)
1698 # Check that this package is not in a transition
1699 self.check_transition(session)
1704 def check_upload_permissions(self, fpr, session):
1705 # Check any one-off upload blocks
1706 self.check_upload_blocks(fpr, session)
1708 # Start with DM as a special case
1709 # DM is a special case unfortunately, so we check it first
1710 # (keys with no source access get more access than DMs in one
1711 # way; DMs can only upload for their packages whether source
1712 # or binary, whereas keys with no access might be able to
1713 # upload some binaries)
1714 if fpr.source_acl.access_level == 'dm':
1715 self.check_dm_upload(fpr, session)
1717 # Check source-based permissions for other types
1718 if self.pkg.changes["architecture"].has_key("source") and \
1719 fpr.source_acl.access_level is None:
1720 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1721 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1722 self.rejects.append(rej)
1724 # If not a DM, we allow full upload rights
1725 uid_email = "%s@debian.org" % (fpr.uid.uid)
1726 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1729 # Check binary upload permissions
1730 # By this point we know that DMs can't have got here unless they
1731 # are allowed to deal with the package concerned so just apply
1733 if fpr.binary_acl.access_level == 'full':
1736 # Otherwise we're in the map case
1737 tmparches = self.pkg.changes["architecture"].copy()
1738 tmparches.pop('source', None)
1740 for bam in fpr.binary_acl_map:
1741 tmparches.pop(bam.architecture.arch_string, None)
1743 if len(tmparches.keys()) > 0:
1744 if fpr.binary_reject:
1745 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1746 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1747 self.rejects.append(rej)
1749 # TODO: This is where we'll implement reject vs throw away binaries later
1750 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1751 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1752 rej += "\nFingerprint: %s", (fpr.fingerprint)
1753 self.rejects.append(rej)
1756 def check_upload_blocks(self, fpr, session):
1757 """Check whether any upload blocks apply to this source, source
1758 version, uid / fpr combination"""
1760 def block_rej_template(fb):
1761 rej = 'Manual upload block in place for package %s' % fb.source
1762 if fb.version is not None:
1763 rej += ', version %s' % fb.version
1764 return rej
1766 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1767 # version is None if the block applies to all versions
1768 if fb.version is None or fb.version == self.pkg.changes['version']:
1769 # Check both fpr and uid - either is enough to cause a reject
1770 if fb.fpr is not None:
1771 if fb.fpr.fingerprint == fpr.fingerprint:
1772 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1773 if fb.uid is not None:
1774 if fb.uid == fpr.uid:
1775 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
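# Note that a block row with neither a fingerprint nor a uid attached never
# matches anything here; a block only takes effect when tied to one of the two.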
1778 def check_dm_upload(self, fpr, session):
1779 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1780 ## none of the uploaded packages are NEW
1782 for f in self.pkg.files.keys():
1783 if self.pkg.files[f].has_key("byhand"):
1784 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1786 if self.pkg.files[f].has_key("new"):
1787 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1793 r = get_newest_source(self.pkg.changes["source"], session)
1796 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1797 self.rejects.append(rej)
1800 if not r.dm_upload_allowed:
1801 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1802 self.rejects.append(rej)
1803 return
1805 ## the Maintainer: field of the uploaded .changes file corresponds with
1806 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1807 ## uploads)
1808 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1809 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1811 ## the most recent version of the package uploaded to unstable or
1812 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1813 ## non-developer maintainers cannot NMU or hijack packages)
1815 # srcuploaders includes the maintainer
1816 accept = False
1817 for sup in r.srcuploaders:
1818 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1819 # Eww - I hope we never have two people with the same name in Debian
1820 if email == fpr.uid.uid or name == fpr.uid.name:
1821 accept = True
1822 break
1824 if not accept:
1825 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1826 return
1828 ## none of the packages are being taken over from other source packages
1829 for b in self.pkg.changes["binary"].keys():
1830 for suite in self.pkg.changes["distribution"].keys():
1831 for s in get_source_by_package_and_suite(b, suite, session):
1832 if s.source != self.pkg.changes["source"]:
1833 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1837 def check_transition(self, session):
1840 sourcepkg = self.pkg.changes["source"]
1842 # No sourceful upload -> no need to do anything else, direct return
1843 # We also work with unstable uploads, not experimental or those going to some
1844 # proposed-updates queue
1845 if "source" not in self.pkg.changes["architecture"] or \
1846 "unstable" not in self.pkg.changes["distribution"]:
1849 # Also only check if there is a file defined (and existent) with
1850 # checks.
1851 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1852 if transpath == "" or not os.path.exists(transpath):
1855 # Parse the yaml file
1856 sourcefile = file(transpath, 'r')
1857 sourcecontent = sourcefile.read()
1858 try:
1859 transitions = yaml.load(sourcecontent)
1860 except yaml.YAMLError, msg:
1861 # This shouldn't happen, there is a wrapper to edit the file which
1862 # checks it, but we would rather be safe than end up rejecting
1863 # everything.
1864 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1865 return
1867 # Now look through all defined transitions
1868 for trans in transitions:
1869 t = transitions[trans]
1870 source = t["source"]
1873 # Will be None if nothing is in testing.
1874 current = get_source_in_suite(source, "testing", session)
1875 if current is not None:
1876 compare = apt_pkg.VersionCompare(current.version, expected)
1878 if current is None or compare < 0:
1879 # This is still valid, the current version in testing is older than
1880 # the new version we wait for, or there is none in testing yet
1882 # Check if the source we look at is affected by this.
1883 if sourcepkg in t['packages']:
1884 # The source is affected, let's reject it.
1886 rejectmsg = "%s: part of the %s transition.\n\n" % (
1889 if current is not None:
1890 currentlymsg = "at version %s" % (current.version)
1892 currentlymsg = "not present in testing"
1894 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1896 rejectmsg += "\n".join(textwrap.wrap("""Your package
1897 is part of a testing transition designed to get %s migrated (it is
1898 currently %s, we need version %s). This transition is managed by the
1899 Release Team, and %s is the Release-Team member responsible for it.
1900 Please mail debian-release@lists.debian.org or contact %s directly if you
1901 need further assistance. You might want to upload to experimental until this
1902 transition is done."""
1903 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1905 self.rejects.append(rejectmsg)
1908 ###########################################################################
1909 # End check_signed_by_key checks
1910 ###########################################################################
1912 def build_summaries(self):
1913 """ Build a summary of changes the upload introduces. """
1915 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1917 short_summary = summary
1919 # This is for direport's benefit...
1920 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1923 summary += "Changes: " + f
1925 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1927 summary += self.announce(short_summary, 0)
1929 return (summary, short_summary)
1931 ###########################################################################
1933 def close_bugs(self, summary, action):
1935 Send mail to close bugs as instructed by the closes field in the changes file.
1936 Also add a line to summary if any work was done.
1938 @type summary: string
1939 @param summary: summary text, as given by L{build_summaries}
1942 @param action: If set to false, no real action will be done.
1945 @return: summary. If action was taken, extended by the list of closed bugs.
1949 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1951 bugs = self.pkg.changes["closes"].keys()
1957 summary += "Closing bugs: "
1959 summary += "%s " % (bug)
1962 self.Subst["__BUG_NUMBER__"] = bug
1963 if self.pkg.changes["distribution"].has_key("stable"):
1964 self.Subst["__STABLE_WARNING__"] = """
1965 Note that this package is not part of the released stable Debian
1966 distribution. It may have dependencies on other unreleased software,
1967 or other instabilities. Please take care if you wish to install it.
1968 The update will eventually make its way into the next released Debian
1969 distribution."""
1970 else:
1971 self.Subst["__STABLE_WARNING__"] = ""
1972 mail_message = utils.TemplateSubst(self.Subst, template)
1973 utils.send_mail(mail_message)
1975 # Clear up after ourselves
1976 del self.Subst["__BUG_NUMBER__"]
1977 del self.Subst["__STABLE_WARNING__"]
1979 if action and self.logger:
1980 self.logger.log(["closing bugs"] + bugs)
1986 ###########################################################################
1988 def announce(self, short_summary, action):
1990 Send an announce mail about a new upload.
1992 @type short_summary: string
1993 @param short_summary: Short summary text to include in the mail
1996 @param action: If set to false, no real action will be done.
1999 @return: Textstring about action taken.
2004 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2006 # Only do announcements for source uploads with a recent dpkg-dev installed
2007 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2008 self.pkg.changes["architecture"].has_key("source"):
2014 self.Subst["__SHORT_SUMMARY__"] = short_summary
2016 for dist in self.pkg.changes["distribution"].keys():
2017 suite = get_suite(dist)
2018 if suite is None: continue
2019 announce_list = suite.announce
2020 if announce_list == "" or lists_done.has_key(announce_list):
2023 lists_done[announce_list] = 1
2024 summary += "Announcing to %s\n" % (announce_list)
2028 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2029 if cnf.get("Dinstall::TrackingServer") and \
2030 self.pkg.changes["architecture"].has_key("source"):
2031 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2032 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2034 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2035 utils.send_mail(mail_message)
2037 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2039 if cnf.FindB("Dinstall::CloseBugs"):
2040 summary = self.close_bugs(summary, action)
2042 del self.Subst["__SHORT_SUMMARY__"]
2046 ###########################################################################
2048 def accept (self, summary, short_summary, session=None):
2052 This moves all files referenced from the .changes into the pool,
2053 sends the accepted mail, announces to lists, closes bugs and
2054 also checks for override disparities. If enabled it will write out
2055 the version history for the BTS Version Tracking and will finally call
2058 @type summary: string
2059 @param summary: Summary text
2061 @type short_summary: string
2062 @param short_summary: Short summary
2066 stats = SummaryStats()
2069 self.logger.log(["installing changes", self.pkg.changes_file])
2073 # Add the .dsc file to the DB first
2074 for newfile, entry in self.pkg.files.items():
2075 if entry["type"] == "dsc":
2076 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2077 for j in pfs:
2078 poolfiles.append(j)
2080 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2081 for newfile, entry in self.pkg.files.items():
2082 if entry["type"] == "deb":
2083 poolfiles.append(add_deb_to_db(self, newfile, session))
2085 # If this is a sourceful diff only upload that is moving
2086 # cross-component we need to copy the .orig files into the new
2087 # component too for the same reasons as above.
2088 # XXX: mhy: I think this should be in add_dsc_to_db
2089 if self.pkg.changes["architecture"].has_key("source"):
2090 for orig_file in self.pkg.orig_files.keys():
2091 if not self.pkg.orig_files[orig_file].has_key("id"):
2092 continue # Skip if it's not in the pool
2093 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2094 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2095 continue # Skip if the location didn't change
2098 oldf = get_poolfile_by_id(orig_file_id, session)
2099 old_filename = os.path.join(oldf.location.path, oldf.filename)
2100 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2101 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2103 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
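# utils.poolify() maps (source, component) onto the pool directory layout;
# illustratively, a source "foo" in component main ends up under something
# like pool/main/f/foo/ (the exact path is whatever poolify returns).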
2105 # TODO: Care about size/md5sum collisions etc
2106 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2108 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2109 if newf is None:
2110 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2111 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2115 # Don't reference the old file from this changes
2116 for p in poolfiles[:]:
2117 if p.file_id == oldf.file_id:
2118 poolfiles.remove(p)
2120 poolfiles.append(newf)
2122 # Fix up the DSC references
2125 for df in source.srcfiles:
2126 if df.poolfile.file_id == oldf.file_id:
2127 # Add a new DSC entry and mark the old one for deletion
2128 # Don't do it in the loop so we don't change the thing we're iterating over
2129 newdscf = DSCFile()
2130 newdscf.source_id = source.source_id
2131 newdscf.poolfile_id = newf.file_id
2132 session.add(newdscf)
2142 # Make sure that our source object is up-to-date
2143 session.expire(source)
2145 # Add changelog information to the database
2146 self.store_changelog()
2148 # Install the files into the pool
2149 for newfile, entry in self.pkg.files.items():
2150 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2151 utils.move(newfile, destination)
2152 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2153 stats.accept_bytes += float(entry["size"])
2155 # Copy the .changes file across for suites which need it.
2156 copy_changes = dict([(x.copychanges, '')
2157 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2158 if x.copychanges is not None])
2160 for dest in copy_changes.keys():
2161 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2163 # We're done - commit the database changes
2164 session.commit()
2165 # Our SQL session will automatically start a new transaction after
2166 # the previous commit.
2168 # Move the .changes into the 'done' directory
2169 utils.move(self.pkg.changes_file,
2170 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2172 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2173 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2176 self.Subst["__SUMMARY__"] = summary
2177 mail_message = utils.TemplateSubst(self.Subst,
2178 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2179 utils.send_mail(mail_message)
2180 self.announce(short_summary, 1)
2182 ## Helper stuff for DebBugs Version Tracking
2183 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2184 if self.pkg.changes["architecture"].has_key("source"):
2185 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2186 version_history = os.fdopen(fd, 'w')
2187 version_history.write(self.pkg.dsc["bts changelog"])
2188 version_history.close()
2189 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2190 self.pkg.changes_file[:-8]+".versions")
2191 os.rename(temp_filename, filename)
2192 os.chmod(filename, 0644)
2194 # Write out the binary -> source mapping.
2195 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2196 debinfo = os.fdopen(fd, 'w')
2197 for name, entry in sorted(self.pkg.files.items()):
2198 if entry["type"] == "deb":
2199 line = " ".join([entry["package"], entry["version"],
2200 entry["architecture"], entry["source package"],
2201 entry["source version"]])
2202 debinfo.write(line+"\n")
2204 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2205 self.pkg.changes_file[:-8]+".debinfo")
2206 os.rename(temp_filename, filename)
2207 os.chmod(filename, 0644)
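# To summarise the two artifacts written above: <changes>.versions carries
# the version history taken from the dsc ("bts changelog"), and
# <changes>.debinfo carries one "package version architecture source-package
# source-version" line per binary; both feed the BTS version tracking.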
2211 # Set up our copy queues (e.g. buildd queues)
2212 for suite_name in self.pkg.changes["distribution"].keys():
2213 suite = get_suite(suite_name, session)
2214 for q in suite.copy_queues:
2215 for f in poolfiles:
2216 q.add_file_from_pool(f)
2221 stats.accept_count += 1
2223 def check_override(self):
2225 Checks override entries for validity. Mails "Override disparity" warnings,
2226 if that feature is enabled.
2228 Abandons the check if
2229 - override disparity checks are disabled
2230 - mail sending is disabled
2235 # Abandon the check if override disparity checks have been disabled
2236 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2239 summary = self.pkg.check_override()
2244 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2247 self.Subst["__SUMMARY__"] = summary
2248 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2249 utils.send_mail(mail_message)
2250 del self.Subst["__SUMMARY__"]
2252 ###########################################################################
2254 def remove(self, from_dir=None):
2256 Used (for instance) in p-u to remove the package from unchecked
2258 Also removes the package from holding area.
2260 if from_dir is None:
2261 from_dir = self.pkg.directory
2263 h = Holding()
2264 for f in self.pkg.files.keys():
2265 os.unlink(os.path.join(from_dir, f))
2266 if os.path.exists(os.path.join(h.holding_dir, f)):
2267 os.unlink(os.path.join(h.holding_dir, f))
2269 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2270 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2271 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2273 ###########################################################################
2275 def move_to_queue (self, queue):
2277 Move files to a destination queue using the permissions in the table
2279 h = Holding()
2280 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2281 queue.path, perms=int(queue.change_perms, 8))
2282 for f in self.pkg.files.keys():
2283 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
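# queue.change_perms / queue.perms are stored as octal permission strings,
# hence the int(x, 8) conversions above; e.g. a change_perms value of "0644"
# (illustrative) becomes mode 0644.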
2285 ###########################################################################
2287 def force_reject(self, reject_files):
2289 Forcefully move files from the current directory to the
2290 reject directory. If any file already exists in the reject
2291 directory it will be moved to the morgue to make way for
2292 the new file.
2294 @type reject_files: dict
2295 @param reject_files: file dictionary
2301 for file_entry in reject_files:
2302 # Skip any files which don't exist or which we don't have permission to copy.
2303 if os.access(file_entry, os.R_OK) == 0:
2304 continue
2306 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2308 try:
2309 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2310 except OSError, e:
2311 # File exists? Let's find a new name by adding a number
2312 if e.errno == errno.EEXIST:
2313 try:
2314 dest_file = utils.find_next_free(dest_file, 255)
2315 except NoFreeFilenameError:
2316 # Something's either gone badly Pete Tong, or
2317 # someone is trying to exploit us.
2318 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2321 # Make sure we really got it
2322 try:
2323 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2324 except OSError:
2326 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2330 # If we got here, we own the destination file, so we can
2331 # safely overwrite it.
2332 utils.move(file_entry, dest_file, 1, perms=0660)
2335 ###########################################################################
2336 def do_reject (self, manual=0, reject_message="", notes=""):
2338 Reject an upload. If called without a reject message or C{manual} is
2339 true, spawn an editor so the user can write one.
2342 @param manual: manual or automated rejection
2344 @type reject_message: string
2345 @param reject_message: A reject message
2350 # If we weren't given a manual rejection message, spawn an
2351 # editor so the user can add one in...
2352 if manual and not reject_message:
2353 (fd, temp_filename) = utils.temp_filename()
2354 temp_file = os.fdopen(fd, 'w')
2357 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2358 % (note.author, note.version, note.notedate, note.comment))
2360 editor = os.environ.get("EDITOR", "vi")
2361 answer = 'E'
2362 while answer == 'E':
2363 os.system("%s %s" % (editor, temp_filename))
2364 temp_fh = utils.open_file(temp_filename)
2365 reject_message = "".join(temp_fh.readlines())
2367 print "Reject message:"
2368 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2369 prompt = "[R]eject, Edit, Abandon, Quit ?"
2371 while prompt.find(answer) == -1:
2372 answer = utils.our_raw_input(prompt)
2373 m = re_default_answer.search(prompt)
2374 if answer == "":
2375 answer = m.group(1)
2376 answer = answer[:1].upper()
2377 os.unlink(temp_filename)
2383 print "Rejecting.\n"
2387 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2388 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2390 # Move all the files into the reject directory
2391 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2392 self.force_reject(reject_files)
2394 # If we fail here someone is probably trying to exploit the race
2395 # so let's just raise an exception ...
2396 if os.path.exists(reason_filename):
2397 os.unlink(reason_filename)
2398 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2400 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2404 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2405 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2406 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2407 os.write(reason_fd, reject_message)
2408 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2410 # Build up the rejection email
2411 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2412 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2413 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2414 self.Subst["__REJECT_MESSAGE__"] = ""
2415 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2416 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2417 # Write the rejection email out as the <foo>.reason file
2418 os.write(reason_fd, reject_mail_message)
2420 del self.Subst["__REJECTOR_ADDRESS__"]
2421 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2422 del self.Subst["__CC__"]
2424 os.close(reason_fd)
2426 # Send the rejection mail
2427 utils.send_mail(reject_mail_message)
2430 self.logger.log(["rejected", self.pkg.changes_file])
2434 ################################################################################
2435 def in_override_p(self, package, component, suite, binary_type, filename, session):
2437 Check if a package already has override entries in the DB
2439 @type package: string
2440 @param package: package name
2442 @type component: string
2443 @param component: database id of the component
2446 @param suite: database id of the suite
2448 @type binary_type: string
2449 @param binary_type: type of the package
2451 @type filename: string
2452 @param filename: filename we check
2454 @return: the database result. But no one cares anyway.
2460 if binary_type == "": # must be source
2463 file_type = binary_type
2465 # Override suite name; used for example with proposed-updates
2466 oldsuite = get_suite(suite, session)
2467 if oldsuite is not None and oldsuite.overridesuite:
2468 suite = oldsuite.overridesuite
2470 result = get_override(package, suite, component, file_type, session)
2472 # If checking for a source package fall back on the binary override type
2473 if file_type == "dsc" and len(result) < 1:
2474 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2476 # Remember the section and priority so we can check them later if appropriate
2479 self.pkg.files[filename]["override section"] = result.section.section
2480 self.pkg.files[filename]["override priority"] = result.priority.priority
2485 ################################################################################
2486 def get_anyversion(self, sv_list, suite):
2489 @param sv_list: list of (suite, version) tuples to check
2492 @param suite: suite name
2497 anyversion = None
2498 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2499 for (s, v) in sv_list:
2500 if s in [ x.lower() for x in anysuite ]:
2501 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2502 anyversion = v
2504 return anyversion
2506 ################################################################################
2508 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2511 @param sv_list: list of (suite, version) tuples to check
2513 @type filename: string
2514 @param filename: name of the file being checked; only used in reject and warning messages
2516 @type new_version: string
2517 @param new_version: version of the uploaded package
2519 Ensure versions are newer than existing packages in target
2520 suites and that cross-suite version checking rules as
2521 set out in the conf file are satisfied.
2526 # Check versions for each target suite
2527 for target_suite in self.pkg.changes["distribution"].keys():
2528 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2529 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2531 # Enforce "must be newer than target suite" even if conffile omits it
2532 if target_suite not in must_be_newer_than:
2533 must_be_newer_than.append(target_suite)
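# The per-suite rules are read from apt-style configuration lists under
# Suite::<name>::VersionChecks::MustBeNewerThan and ::MustBeOlderThan
# (plus ::Enhances, used by get_anyversion above). Illustratively, unstable
# would typically be required to be newer than stable and testing and older
# than experimental.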
2535 for (suite, existent_version) in sv_list:
2536 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2538 if suite in must_be_newer_than and sourceful and vercmp < 1:
2539 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2541 if suite in must_be_older_than and vercmp > -1:
2544 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2545 # we really use the other suite, ignoring the conflicting one ...
2546 addsuite = self.pkg.changes["distribution-version"][suite]
2548 add_version = self.get_anyversion(sv_list, addsuite)
2549 target_version = self.get_anyversion(sv_list, target_suite)
2552 # not add_version can only happen if we map to a suite
2553 # that doesn't enhance the suite we're propup'ing from.
2554 # so "propup-ver x a b c; map a d" is a problem only if
2555 # d doesn't enhance a.
2557 # i think we could always propagate in this case, rather
2558 # than complaining. either way, this isn't a REJECT issue
2560 # And - we really should complain to the dorks who configured dak
2561 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2562 self.pkg.changes.setdefault("propdistribution", {})
2563 self.pkg.changes["propdistribution"][addsuite] = 1
2565 elif not target_version:
2566 # not target_version is true when the package is NEW
2567 # we could just stick with the "...old version..." REJECT
2568 # for this, I think.
2569 self.rejects.append("Won't propogate NEW packages.")
2570 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2571 # propagation would be redundant. no need to reject though.
2572 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2574 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2575 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2577 self.warnings.append("Propogating upload to %s" % (addsuite))
2578 self.pkg.changes.setdefault("propdistribution", {})
2579 self.pkg.changes["propdistribution"][addsuite] = 1
2583 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2585 ################################################################################
2586 def check_binary_against_db(self, filename, session):
2587 # Ensure version is sane
2588 self.cross_suite_version_check( \
2589 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2590 self.pkg.files[filename]["architecture"], session),
2591 filename, self.pkg.files[filename]["version"], sourceful=False)
2593 # Check for any existing copies of the file
2594 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2595 q = q.filter_by(version=self.pkg.files[filename]["version"])
2596 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2599 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2601 ################################################################################
2603 def check_source_against_db(self, filename, session):
2604 source = self.pkg.dsc.get("source")
2605 version = self.pkg.dsc.get("version")
2607 # Ensure version is sane
2608 self.cross_suite_version_check( \
2609 get_suite_version_by_source(source, session), filename, version,
2610 sourceful=True)
2612 ################################################################################
2613 def check_dsc_against_db(self, filename, session):
2616 @warning: NB: this function can remove entries from the 'files' index [if
2617 the orig tarball is a duplicate of the one in the archive]; if
2618 you're iterating over 'files' and call this function as part of
2619 the loop, be sure to add a check to the top of the loop to
2620 ensure you haven't just tried to dereference the deleted entry.
2625 self.pkg.orig_files = {} # XXX: do we need to clear it?
2626 orig_files = self.pkg.orig_files
2628 # Try and find all files mentioned in the .dsc. This has
2629 # to work harder to cope with the multiple possible
2630 # locations of an .orig.tar.gz.
2631 # The ordering on the select is needed to pick the newest orig
2632 # when it exists in multiple places.
2633 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2634 found = None
2635 if self.pkg.files.has_key(dsc_name):
2636 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2637 actual_size = int(self.pkg.files[dsc_name]["size"])
2638 found = "%s in incoming" % (dsc_name)
2640 # Check the file does not already exist in the archive
2641 ql = get_poolfile_like_name(dsc_name, session)
2643 # Strip out anything that isn't '%s' or '/%s$'
2644 for i in ql[:]:
2645 if not i.filename.endswith(dsc_name):
2646 ql.remove(i)
2648 # "[dak] has not broken them. [dak] has fixed a
2649 # brokenness. Your crappy hack exploited a bug in
2652 # "(Come on! I thought it was always obvious that
2653 # one just doesn't release different files with
2654 # the same name and version.)"
2655 # -- ajk@ on d-devel@l.d.o
2658 # Ignore exact matches for .orig.tar.gz
2659 match = 0
2660 if re_is_orig_source.match(dsc_name):
2661 for i in ql:
2662 if self.pkg.files.has_key(dsc_name) and \
2663 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2664 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2665 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2666 # TODO: Don't delete the entry, just mark it as not needed
2667 # This would fix the stupidity of changing something we often iterate over
2668 # whilst we're doing it
2669 del self.pkg.files[dsc_name]
2670 dsc_entry["files id"] = i.file_id
2671 if not orig_files.has_key(dsc_name):
2672 orig_files[dsc_name] = {}
2673 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2676 # Don't bitch that we couldn't find this file later
2677 try:
2678 self.later_check_files.remove(dsc_name)
2679 except ValueError:
2680 pass
2684 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2686 elif re_is_orig_source.match(dsc_name):
2688 ql = get_poolfile_like_name(dsc_name, session)
2690 # Strip out anything that isn't '%s' or '/%s$'
2691 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2692 for i in ql[:]:
2693 if not i.filename.endswith(dsc_name):
2694 ql.remove(i)
2697 # Unfortunately, we may get more than one match here if,
2698 # for example, the package was in potato but had an -sa
2699 # upload in woody. So we need to choose the right one.
2701 # default to something sane in case we don't match any or have only one
2702 x = ql[0]
2704 if len(ql) > 1:
2705 for i in ql:
2706 old_file = os.path.join(i.location.path, i.filename)
2707 old_file_fh = utils.open_file(old_file)
2708 actual_md5 = apt_pkg.md5sum(old_file_fh)
2710 actual_size = os.stat(old_file)[stat.ST_SIZE]
2711 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2712 x = i
2714 old_file = os.path.join(i.location.path, i.filename)
2715 old_file_fh = utils.open_file(old_file)
2716 actual_md5 = apt_pkg.md5sum(old_file_fh)
2718 actual_size = os.stat(old_file)[stat.ST_SIZE]
2720 suite_type = x.location.archive_type
2721 # need this for updating dsc_files in install()
2722 dsc_entry["files id"] = x.file_id
2723 # See install() in process-accepted...
2724 if not orig_files.has_key(dsc_name):
2725 orig_files[dsc_name] = {}
2726 orig_files[dsc_name]["id"] = x.file_id
2727 orig_files[dsc_name]["path"] = old_file
2728 orig_files[dsc_name]["location"] = x.location.location_id
2730 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2731 # Not there? Check the queue directories...
2732 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2733 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2734 continue
2735 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2736 if os.path.exists(in_otherdir):
2737 in_otherdir_fh = utils.open_file(in_otherdir)
2738 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2739 in_otherdir_fh.close()
2740 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2741 found = in_otherdir
2742 if not orig_files.has_key(dsc_name):
2743 orig_files[dsc_name] = {}
2744 orig_files[dsc_name]["path"] = in_otherdir
2747 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2750 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2752 if actual_md5 != dsc_entry["md5sum"]:
2753 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2754 if actual_size != int(dsc_entry["size"]):
2755 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2757 ################################################################################
2758 # This is used by process-new and process-holding to recheck a changes file
2759 # at the time we're running. It mainly wraps various other internal functions
2760 # and is similar to accepted_checks - these should probably be tidied up
2762 def recheck(self, session):
2764 for f in self.pkg.files.keys():
2765 # The .orig.tar.gz can disappear out from under us if it's a
2766 # duplicate of one in the archive.
2767 if not self.pkg.files.has_key(f):
2768 continue
2770 entry = self.pkg.files[f]
2772 # Check that the source still exists
2773 if entry["type"] == "deb":
2774 source_version = entry["source version"]
2775 source_package = entry["source package"]
2776 if not self.pkg.changes["architecture"].has_key("source") \
2777 and not source_exists(source_package, source_version, \
2778 suites = self.pkg.changes["distribution"].keys(), session = session):
2779 source_epochless_version = re_no_epoch.sub('', source_version)
2780 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2782 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2783 if cnf.has_key("Dir::Queue::%s" % (q)):
2784 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2787 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2789 # Version and file overwrite checks
2790 if entry["type"] == "deb":
2791 self.check_binary_against_db(f, session)
2792 elif entry["type"] == "dsc":
2793 self.check_source_against_db(f, session)
2794 self.check_dsc_against_db(f, session)
2796 ################################################################################
2797 def accepted_checks(self, overwrite_checks, session):
2798 # Recheck anything that relies on the database; since that's not
2799 # frozen between accept and our run time when called from p-a.
2801 # overwrite_checks is set to False when installing to stable/oldstable
2803 propogate={}
2804 nopropogate={}
2806 # Find the .dsc (again)
2808 for f in self.pkg.files.keys():
2809 if self.pkg.files[f]["type"] == "dsc":
2812 for checkfile in self.pkg.files.keys():
2813 # The .orig.tar.gz can disappear out from under us if it's a
2814 # duplicate of one in the archive.
2815 if not self.pkg.files.has_key(checkfile):
2816 continue
2818 entry = self.pkg.files[checkfile]
2820 # Check that the source still exists
2821 if entry["type"] == "deb":
2822 source_version = entry["source version"]
2823 source_package = entry["source package"]
2824 if not self.pkg.changes["architecture"].has_key("source") \
2825 and not source_exists(source_package, source_version, \
2826 suites = self.pkg.changes["distribution"].keys(), \
2828 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2830 # Version and file overwrite checks
2831 if overwrite_checks:
2832 if entry["type"] == "deb":
2833 self.check_binary_against_db(checkfile, session)
2834 elif entry["type"] == "dsc":
2835 self.check_source_against_db(checkfile, session)
2836 self.check_dsc_against_db(dsc_filename, session)
2838 # propagate in the case it is in the override tables:
2839 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2840 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2841 propogate[suite] = 1
2842 else:
2843 nopropogate[suite] = 1
2845 for suite in propogate.keys():
2846 if suite in nopropogate:
2847 continue
2848 self.pkg.changes["distribution"][suite] = 1
2850 for checkfile in self.pkg.files.keys():
2851 # Check the package is still in the override tables
2852 for suite in self.pkg.changes["distribution"].keys():
2853 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype",""), checkfile, session):
2854 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2856 ################################################################################
2857 # If any file of an upload has a recent mtime then chances are good
2858 # the file is still being uploaded.
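# The threshold is Dinstall::SkipTime (in seconds); e.g. a (hypothetical)
# value of 300 treats anything modified within the last five minutes as an
# upload still in progress.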
2860 def upload_too_new(self):
2861 cnf = Config()
2862 too_new = False
2863 # Move back to the original directory to get accurate time stamps
2864 cwd = os.getcwd()
2865 os.chdir(self.pkg.directory)
2866 file_list = self.pkg.files.keys()
2867 file_list.extend(self.pkg.dsc_files.keys())
2868 file_list.append(self.pkg.changes_file)
2870 for f in file_list:
2871 last_modified = time.time() - os.path.getmtime(f)
2872 if last_modified < int(cnf["Dinstall::SkipTime"]):
2873 too_new = True
2874 break
2876 os.chdir(cwd)
2877 return too_new
2881 def store_changelog(self):
2883 # Skip binary-only upload if it is not a bin-NMU
2884 if not self.pkg.changes['architecture'].has_key('source'):
2885 from daklib.regexes import re_bin_only_nmu
2886 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2887 return
2889 session = DBConn().session()
2891 # Check if upload already has a changelog entry
2892 query = """SELECT changelog_id FROM changes WHERE source = :source
2893 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2894 if session.execute(query, {'source': self.pkg.changes['source'], \
2895 'version': self.pkg.changes['version'], \
2896 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2900 # Add current changelog text into changelogs_text table, return created ID
2901 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2902 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2904 # Link ID to the upload available in changes table
2905 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2906 AND version = :version AND architecture = :architecture"""
2907 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2908 'version': self.pkg.changes['version'], \
2909 'architecture': " ".join(self.pkg.changes['architecture'].keys())})