5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
# suppress some deprecation warnings in squeeze related to apt_pkg
# NOTE(review): the trailing category argument of each filterwarnings()
# call (presumably warnings.DeprecationWarning) is missing from this
# excerpt, so both statements end in a dangling line continuation.
warnings.filterwarnings('ignore', \
    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
warnings.filterwarnings('ignore', \
    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    # NOTE(review): this excerpt is missing interleaved lines (the dsc /
    # fallback assignment branches, the None-check on type_id and the final
    # return); indentation below is best-effort.

    # Prefer the db-level type already recorded on the entry over the raw
    # "type" field from the changes file.
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        # NOTE(review): 'file_type = "dsc"' is missing here; the fubar call
        # below presumably belongs to the missing else: branch.
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    # NOTE(review): presumably guarded by 'if type_id is None:' (missing).
    utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
101 ################################################################################
103 # Determine what parts in a .changes are NEW
def determine_new(filename, changes, files, warn=1, session = None):
    """
    Determine what parts in a C{changes} file are NEW.

    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.
    """
    # NOTE(review): this excerpt is missing interleaved lines (the 'new'
    # and 'byhand' dict initialisation, several guards/else branches and
    # the final return); indentation below is best-effort.

    # TODO: This should all use the database instead of parsing the changes
    dbchg = get_dbchange(filename, session)
    # NOTE(review): presumably guarded by 'if dbchg is None:' — the byhand
    # check further down is skipped when the changes file is unknown.
    print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            # NOTE(review): byhand bookkeeping (presumably
            # 'byhand[name] = 1' and 'continue') missing from excerpt.

        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            # NOTE(review): dsc branch body (package name / "source"
            # priority handling, 'pkg' assignment) missing from excerpt.

        # First time we've seen this package: record its metadata
        if not new.has_key(pkg):
            # NOTE(review): 'new[pkg] = {}' presumably precedes these.
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
            # NOTE(review): the following belongs to a missing else: branch.
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        # NOTE(review): presumably guarded by 'if not oldsuite:'.
        print "WARNING: Invalid suite %s found" % suite

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)
            # NOTE(review): presumably guarded by 'if newsuite:'.
            print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                oldsuite.overridesuite, suite)
            del changes["suite"][suite]
            changes["suite"][oldsuite.overridesuite] = 1
            # NOTE(review): this warning presumably belongs to a missing
            # else: branch (overridesuite does not exist).
            print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
                oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            for f in dbchg.files:
                # If it's processed, we can ignore it
                # NOTE(review): matching/processed checks missing here.

            # BUG(review): the format string below contains a %s
            # placeholder but no argument is supplied — it prints a
            # literal '%s' instead of the byhand item name (presumably
            # should end with '% (b)').
            print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"

    # Check for new stuff: an existing override means the package is not NEW
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            # NOTE(review): presumably guarded by a check that ql is
            # non-empty; 'del new[pkg]' presumably follows this loop.
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # NOTE(review): presumably guarded by 'if warn:'.
    for s in ['stable', 'oldstable']:
        if changes["suite"].has_key(s):
            print "WARNING: overrides will be added for %s!" % s
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
228 ################################################################################
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @param new: Dict of new packages with their section, priority and type.
    """
    # NOTE(review): excerpt is missing the None-check scaffolding around
    # the section/priority lookups; indentation below is best-effort.
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        # NOTE(review): the two assignments below are presumably the
        # 'if section is None:' / 'else:' branches (missing from excerpt);
        # -1 marks an unknown section.
        new[pkg]["section id"] = -1
        new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        # NOTE(review): same missing None-check pattern as above.
        new[pkg]["priority id"] = -1
        new[pkg]["priority id"] = priority.priority_id

        # di: does the section name mention debian-installer?
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # NOTE(review): 'priority' here is the object returned by
        # get_priority(), not the priority_name string — comparing it to
        # "source" looks suspicious; verify this should not be
        # priority_name.
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
272 ###############################################################################
274 # Used by Upload.check_timestamps
class TarTime(object):
    """Per-tarball mtime accumulator used by Upload.check_timestamps."""
    def __init__(self, future_cutoff, past_cutoff):
        # NOTE(review): excerpt is missing a couple of lines here
        # (presumably a reset() helper definition/call).
        # Members with mtimes outside [past_cutoff, future_cutoff] are
        # collected by callback() below.
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.future_files = {}
        self.ancient_files = {}
285 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
286 if MTime > self.future_cutoff:
287 self.future_files[Name] = MTime
288 if MTime < self.past_cutoff:
289 self.ancient_files[Name] = MTime
291 ###############################################################################
def prod_maintainer(notes, upload):
    # Interactive helper: lets the operator compose a "prod" mail from the
    # existing notes and send it to the maintainer.
    # NOTE(review): excerpt is missing interleaved lines ('cnf'/'Subst'
    # setup, the 'for note in notes:' loop, file close() calls, the
    # answer-loop initialisation and the Abandon/Quit handling);
    # indentation below is best-effort.

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    # NOTE(review): 'note' is presumably the loop variable of a missing
    # 'for note in notes:' — verify against the full source.
    temp_file.write(note.comment)
    editor = os.environ.get("EDITOR","vi")
    os.system("%s %s" % (editor, temp_filename))
    temp_fh = utils.open_file(temp_filename)
    prod_message = "".join(temp_fh.readlines())

    print "Prod message:"
    print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
    prompt = "[P]rod, Edit, Abandon, Quit ?"
    # Keep asking until the operator gives one of the prompt's letters.
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.search(prompt)
        # NOTE(review): the empty-answer default (using m) is missing here.
        answer = answer[:1].upper()
    os.unlink(temp_filename)

    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
343 ################################################################################
def edit_note(note, upload, session, trainee=False):
    # Interactive helper: edits (or creates) a NEW-queue comment for the
    # upload in the operator's $EDITOR and stores it via a NewComment row.
    # NOTE(review): excerpt is missing interleaved lines (writing the
    # current note into the temp file, the answer-loop initialisation,
    # Abandon/Quit handling, session.add()/commit());
    # indentation below is best-effort.

    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    os.system("%s %s" % (editor, temp_filename))
    temp_file = utils.open_file(temp_filename)
    newnote = temp_file.read().rstrip()

    print utils.prefix_multi_line_string(newnote," ")
    prompt = "[D]one, Edit, Abandon, Quit ?"
    # Keep asking until the operator gives one of the prompt's letters.
    while prompt.find(answer) == -1:
        answer = utils.our_raw_input(prompt)
        m = re_default_answer.search(prompt)
        # NOTE(review): the empty-answer default (using m) is missing here.
        answer = answer[:1].upper()
    os.unlink(temp_filename)

    # Persist the edited note as a NewComment record.
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
    # NOTE(review): session.add(comment)/session.commit() presumably
    # follow — missing from excerpt.
381 ###############################################################################
# suite names DMs (Debian Maintainers) are allowed to upload to
dm_suites = ['unstable', 'experimental']
def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    # NOTE(review): the return statement (presumably 'return q.first()')
    # is missing from this excerpt.
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    # NOTE(review): the return statement (presumably 'return q.all()')
    # is missing from this excerpt.
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite (i.e. the source that produced binary C{package} in C{suite_name})
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string (binaries for architecture 'all' are matched as well)
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
421 class Upload(object):
423 Everything that has to do with an upload processed.
431 ###########################################################################
434 """ Reset a number of internal variables."""
436 # Initialize the substitution template map
439 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
440 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
441 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
442 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
448 self.later_check_files = []
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """
        # NOTE(review): excerpt is missing the scaffolding around the
        # tuples below (presumably 'msgs = (' ... ')'), the 'msg'
        # initialisation, a per-section guard and the final return.
        ('Reject Reasons', self.rejects),
        ('Warnings', self.warnings),
        ('Notes', self.notes),

        # Append each non-empty section as "Title:\n<joined messages>".
        for title, messages in msgs:
            msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
471 ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """
        # NOTE(review): excerpt is missing a few interleaved lines
        # (presumably 'cnf = Config()', the 'else:' introducing the
        # maintainer-only branch, and a session.close());
        # indentation below is best-effort.

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
            # NOTE(review): the three assignments below presumably belong
            # to a missing 'else:' branch (Maintainer wins).
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable. Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # Sponsored uploads get the sponsor's address CC'd in.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package-tracking server for source uploads, if configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
529 ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandantory
        fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @return: whether the changes file was valid or not.  We may want to
            reject even if this is True (see what gets put in self.rejects).
            This is simply to prevent us even trying things later which will
            fail because we couldn't properly parse the file.
        """
        # NOTE(review): excerpt is missing interleaved lines ('try:'
        # openers for the except clauses below, 'return False'/'return
        # True' statements and some loop scaffolding); indentation below
        # is best-effort.
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        # (presumably wrapped in a missing 'try:')
        self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))

        # Parse the Files field from the .changes into another dictionary
        # (presumably wrapped in a missing 'try:')
        self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            # NOTE(review): presumably guarded by a non-empty check on o.
            del self.pkg.changes[i]
            self.pkg.changes[i] = {}
            # NOTE(review): 'j' is presumably the loop variable of a
            # missing 'for j in o.split():'.
            self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        # (presumably wrapped in a missing 'try:')
        (self.pkg.changes["maintainer822"],
         self.pkg.changes["maintainer2047"],
         self.pkg.changes["maintainername"],
         self.pkg.changes["maintaineremail"]) = \
            fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        # (presumably wrapped in a missing 'try:')
        (self.pkg.changes["changedby822"],
         self.pkg.changes["changedby2047"],
         self.pkg.changes["changedbyname"],
         self.pkg.changes["changedbyemail"]) = \
            fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # Reset all changedby fields so later code sees empty values.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))

        # Changes was syntactically valid even if we'll reject
        # NOTE(review): 'return True' presumably follows — missing here.
642 ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"
        # NOTE(review): excerpt is missing scaffolding lines (presumably
        # 'args = m.split()' / 'mtype = args[0]' and the 'suite = args[1]'
        # assignments for the ignore/reject cases); indentation below is
        # best-effort.

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        # Only map when an arch in the upload is not built
                        # in the source suite.
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
            elif mtype == "ignore":
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
699 ###########################################################################
    def binary_file_checks(self, f, session):
        # Sanity-check a .deb/.udeb file entry: control fields, filename
        # consistency, architecture validity and existence of its source.
        # NOTE(review): excerpt is missing interleaved lines ('cnf'
        # setup, 'try:' openers, guards/else branches, returns and
        # close() calls); indentation below is best-effort.
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        # (presumably wrapped in a missing 'try:')
        # NOTE(review): apt_pkg.ParseSection is deprecated in favour of
        # apt_pkg.TagSection — matches the warnings filter at file top.
        control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        # (except: branch — sys.exc_type is itself deprecated Python 2 API)
        self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
        # Can't continue, none of the checks on control would work.

        # Check for mandantory "Description:"
        # (presumably wrapped in a missing 'try:'; the expression's value
        # is discarded — only a possible KeyError matters)
        apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        # (except KeyError: branch)
        self.rejects.append("%s: Missing Description in binary package" % (f))

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))

        # Ensure the package name matches the one give in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        # NOTE(review): presumably guarded by a missing empty-string check.
        self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        # NOTE(review): presumably guarded by 'if provides:'.
        provide = re_spacestrip.sub('', provides)
        # NOTE(review): presumably guarded by an empty-string check.
        self.rejects.append("%s: Provides field is empty." % (f))
        prov_list = provide.split(",")
        for prov in prov_list:
            if not re_valid_pkg_name.match(prov):
                self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
        built_using = control.Find("Built-Using")
        # (presumably 'if built_using is not None:' then 'try:')
        entry["built-using"] = []
        for dep in apt_pkg.parse_depends(built_using):
            bu_s, bu_v, bu_e = dep[0]
            # Check that it's an exact match dependency and we have
            # some form of version
            if bu_e != "=" or len(bu_v) < 1:
                self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))

            # Find the source id for this version
            bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
            # NOTE(review): presumably guarded by a length check on bu_so,
            # with the append in the else branch.
            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
        except ValueError, e:
            self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        # Record the db-level type from the file extension.
        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
            # NOTE(review): the reject below presumably belongs to a
            # missing 'else:' branch.
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]

        # "Source: pkg (version)" form — split out the version part.
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            # NOTE(review): 'source = m.group(1)' presumably precedes this.
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)
        # NOTE(review): presumably guarded by 'if m:'.

        # package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        # version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                (f, file_version, entry["dbtype"], epochless_version))

        # architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                    (source_version, f, self.pkg.changes["version"]))

        # Check in the SQL database
        # NOTE(review): presumably this whole branch lives under a missing
        # 'else:' (binary-only upload).
        if not source_exists(source_package, source_version, suites = \
            self.pkg.changes["distribution"].keys(), session = session):
            # Check in one of the other directories
            source_epochless_version = re_no_epoch.sub('', source_version)
            dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
            if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                # NOTE(review): branch body (presumably entry["byhand"] = 1)
                # missing from excerpt.
            elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                # NOTE(review): branch body (presumably entry["new"] = 1)
                # missing; the scan below presumably belongs to 'else:'.
            dsc_file_exists = False
            for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                if cnf.has_key("Dir::Queue::%s" % (myq)):
                    if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                        dsc_file_exists = True

            if not dsc_file_exists:
                self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)
    def source_file_checks(self, f, session):
        # Sanity-check a source file entry (.dsc / tarball / diff):
        # name/version consistency with the .changes and .dsc signature.
        # NOTE(review): excerpt is missing a few guard lines; indentation
        # below is best-effort.
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        # NOTE(review): the 'm is None' guard is missing from excerpt.
        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source filed in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
            # NOTE(review): the assignment below presumably belongs to a
            # missing 'else:' branch (non-orig source file).
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            # NOTE(review): 'j' is presumably the loop variable of a
            # missing 'for j in rejects:'.
            self.rejects.append(j)

        # All source files count as architecture "source".
        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        # Per-target-suite checks for one file entry: component mapping
        # and validity, NEW detection, priority sanity, pool location and
        # overwrite checks.
        # NOTE(review): excerpt is missing interleaved lines ('cnf' setup,
        # early returns, if/else scaffolding around the location and
        # poolfile checks); indentation below is best-effort.
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            # NOTE(review): the early return is missing from excerpt.

        # Check we have fields we need to do these checks
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                # NOTE(review): an 'ok'-flag / early return presumably
                # follows — missing from excerpt.

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        # BUG(review): the key below uses a single colon ("Suite:%s") while
        # the ValueList lookup on the next line uses "Suite::%s" — the
        # has_key() likely never matches; verify against dak config syntax.
        if cnf.has_key("Suite:%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            # NOTE(review): branch body (presumably entry["new"] = 1)
            # missing from excerpt.

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        # NOTE(review): the two pairs of statements below are presumably
        # the 'if l is None:' / 'else:' branches — scaffolding missing.
        self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
        entry["location id"] = -1
        entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        # NOTE(review): the first append presumably sits under a missing
        # 'if found is None:'.
        self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
            # NOTE(review): the two assignments below are presumably the
            # remaining poolfile-is-None / else branches.
            entry["files id"] = None
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component
    def check_files(self, action=True):
        """
        Sanity-check every file listed in the .changes: copy the files into
        the holding area, make sure the .changes itself is not already known
        to dak, classify each file (deb / source / byhand) and dispatch the
        type-specific and per-suite checks.  Problems accumulate in
        self.rejects / self.warnings.

        @param action: when False (dry-run), files are not copied to holding
        """
        file_keys = self.pkg.files.keys()
        os.chdir(self.pkg.directory)
        ret = holding.copy_to_holding(f)
        self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()
        dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
        # if in the pool or in a queue other than unchecked, reject
        if (dbc.in_queue is None) \
           or (dbc.in_queue is not None
               and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
            self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
        # .one() raising NoResultFound means the changes file is not yet
        # known, which is the expected case.
        has_binaries = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if os.path.exists(f):
                    self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                # Don't directly reject, mark to check later to deal with orig's
                # we can find in the pool
                self.later_check_files.append(f)
                entry["type"] = "unreadable"

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                entry["type"] = "deb"
                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
1107 ###########################################################################
    def check_dsc(self, action=True, session=None):
        """
        Validate the upload's .dsc file: parse it, enforce mandatory fields,
        check source name/version syntax, allowed source formats per target
        suite, Maintainer and Build-Depends parseability, version agreement
        with the .changes, and consistency with the database.

        Returns bool indicating whether or not the source changes are valid.
        Problems are appended to self.rejects.
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):

        # Locate the .dsc among the upload's files
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                self.rejects.append("can not process a .changes file with multiple .dsc's.")

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")

        # Parse the .dsc file
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
                # Have apt try to parse them...
                    apt_pkg.ParseSrcDepends(field)
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # Check if we've already processed this file if we have a dbchg object
            for pf in dbchg.files:
                if pf.filename == f and pf.processed:
                    self.notes.append('%s was already processed so we can go ahead' % f)
                    del self.pkg.files[f]
            self.rejects.append("Could not find file %s references in changes" % f)
1227 ###########################################################################
    def get_changelog_versions(self, source_dir):
        """Extract the source package and (optionally) grab the
        version history out of debian/changelog for the BTS.

        Builds a symlink mirror of the source files (including pool copies
        of orig tarballs) in the current working directory, unpacks the
        source and stores matching changelog lines in
        self.pkg.dsc["bts changelog"].

        @param source_dir: directory holding the uploaded source files
        """
        # Find the .dsc (again)
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # copies located elsewhere (e.g. the pool).
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
            unpacked = UnpackedSource(dsc_filename)
            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)

        # BTS version tracking is optional; skip when not configured.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))

        # Parse the changelog, keeping only the version-header lines
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        """
        Unpack the source package in a temporary directory to harvest the
        debian/changelog version history (via get_changelog_versions), then
        remove the temporary tree again — retrying the removal after a
        chmod when the unpacked tree contains unreadable/unwritable dirs.
        """
        # Bail out if:
        # a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
            shutil.rmtree(tmpdir)
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # writable and retry the removal.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1339 ###########################################################################
    def ensure_hashes(self):
        """
        Make sure every checksum field required for the .changes format is
        present — restoring Checksums-* fields from the original changes
        blob and computing hashes that predate the format version — and
        reject unsupported hash fields.  Problems go to self.rejects.
        """
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict. This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
1376 def check_hashes(self):
1377 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1378 self.rejects.append(m)
1380 for m in utils.check_size(".changes", self.pkg.files):
1381 self.rejects.append(m)
1383 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1384 self.rejects.append(m)
1386 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1387 self.rejects.append(m)
1389 self.ensure_hashes()
1391 ###########################################################################
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place
        from the pool or from another queue directory, after validating the
        candidate's size and md5sum against the .dsc entry.

        A list containing the symlinks that were created is returned (so
        they can be removed again by the caller).
        """
        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
            if os.path.exists(filename):
                # File exists, no need to continue

            def symlink_if_valid(path):
                # Symlink `path` into target_dir iff its size/md5sum match
                # the .dsc entry; records created links in `symlinked`.
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

            session_ = DBConn().session()

            # First try to find the file in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue

                if symlink_if_valid(queuefile_path):
1475 ###########################################################################
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        Only runs for sourceful uploads targeting unstable/experimental, and
        only when a lintian tag file is configured.
        """
        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()

            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])

        # Run lintian restricted to the configured tags
        cmd = "lintian --show-overrides --tags-from-file %s %s" % \
            (temp_filename, self.pkg.changes_file)

        result, output = commands.getstatusoutput(cmd)

        # Remove our tempfile and any symlinks we created
        os.unlink(temp_filename)

        for symlink in symlinked:

            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

                [self.pkg.changes_file, "check_lintian"] + list(txt)

        # Generate reject messages from the parsed lintian output
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
1552 ###########################################################################
    def check_urgency(self):
        """
        Normalise the Urgency field of a sourceful upload: default it when
        missing, lower-case it, and fall back to Urgency::Default (with a
        warning) when the value is not in Urgency::Valid.
        """
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1564 ###########################################################################
1566 # Sanity check the time stamps of files inside debs.
1567 # [Files in the near future cause ugly warnings and extreme time
1568 # travel can cause errors on extraction]
    def check_timestamps(self):
        """
        Sanity check the timestamps of files inside the upload's .debs:
        reject members dated beyond Dinstall::FutureTimeTravelGrace in the
        future or before Dinstall::PastCutoffYear in the past.
        """
        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                future_files = tar.future_files.keys()
                    num_future_files = len(future_files)
                    future_file = future_files[0]
                    future_date = tar.future_files[future_file]
                    self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                        % (filename, num_future_files, future_file, time.ctime(future_date)))

                ancient_files = tar.ancient_files.keys()
                    num_ancient_files = len(ancient_files)
                    ancient_file = ancient_files[0]
                    ancient_date = tar.ancient_files[ancient_file]
                    self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                        % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        """
        Decide whether this upload is sponsored by comparing the signing
        key's uid email/name against the Maintainer and Changed-By fields.
        For sponsored source uploads signed with a debian.org email alias,
        records the sponsor in self.pkg.changes["sponsoremail"] unless the
        alias covers the maintainer/changed-by address.
        """
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            # Sponsored upload: remember the sponsor's address when the key's
            # email is a known alias that does not match maintainer/changed-by.
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email
1631 ###########################################################################
1632 # check_signed_by_key checks
1633 ###########################################################################
    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader.

        Looks up the signing fingerprint in the database, verifies it has a
        uid attached, then delegates to check_upload_permissions() and
        check_transition().  Failures go to self.rejects.
        """
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])

        # TODO: Check that import-keyring adds UIDs properly
            self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)
1661 def check_upload_permissions(self, fpr, session):
1662 # Check any one-off upload blocks
1663 self.check_upload_blocks(fpr, session)
1665 # Start with DM as a special case
1666 # DM is a special case unfortunately, so we check it first
1667 # (keys with no source access get more access than DMs in one
1668 # way; DMs can only upload for their packages whether source
1669 # or binary, whereas keys with no access might be able to
1670 # upload some binaries)
1671 if fpr.source_acl.access_level == 'dm':
1672 self.check_dm_upload(fpr, session)
1674 # Check source-based permissions for other types
1675 if self.pkg.changes["architecture"].has_key("source") and \
1676 fpr.source_acl.access_level is None:
1677 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1678 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1679 self.rejects.append(rej)
1681 # If not a DM, we allow full upload rights
1682 uid_email = "%s@debian.org" % (fpr.uid.uid)
1683 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1686 # Check binary upload permissions
1687 # By this point we know that DMs can't have got here unless they
1688 # are allowed to deal with the package concerned so just apply
1690 if fpr.binary_acl.access_level == 'full':
1693 # Otherwise we're in the map case
1694 tmparches = self.pkg.changes["architecture"].copy()
1695 tmparches.pop('source', None)
1697 for bam in fpr.binary_acl_map:
1698 tmparches.pop(bam.architecture.arch_string, None)
1700 if len(tmparches.keys()) > 0:
1701 if fpr.binary_reject:
1702 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1703 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1704 self.rejects.append(rej)
1706 # TODO: This is where we'll implement reject vs throw away binaries later
1707 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1708 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1709 rej += "\nFingerprint: %s", (fpr.fingerprint)
1710 self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
        version, uid / fpr combination.  Matching UploadBlock rows cause a
        rejection message to be appended to self.rejects.

        @param fpr: Fingerprint object for the key that signed the upload
        @param session: database session
        """

        def block_rej_template(fb):
            # Build the base rejection message for one UploadBlock row.
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        """
        Enforce the Debian Maintainer upload restrictions from GR 2007-003:
        no NEW/BYHAND files, the source must already exist with
        DM-Upload-Allowed: yes, the uploader must be listed in Maintainer/
        Uploaders, no sponsoring, and no hijacking binaries from other
        source packages.  Violations go to self.rejects.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))

        r = get_newest_source(self.pkg.changes["source"], session)

            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:

            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    def check_transition(self, session):
        """
        Reject sourceful unstable uploads of packages that are frozen by an
        ongoing testing transition, as described in the YAML file named by
        Dinstall::Reject::ReleaseTransitions.  A transition blocks its
        listed packages until the transition's source package reaches the
        expected version in testing.
        """
        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:

        # Also only check if there is a file defined (and existant) with
        # the transition definitions
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            # uploads on a broken file
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s). This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance. You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected,t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
1865 ###########################################################################
1866 # End check_signed_by_key checks
1867 ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces.

        @rtype: tuple of (string, string)
        @return: (summary, short_summary) — the long form includes the
            Changes text, override entries and any announce output.
        """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
1888 ###########################################################################
    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.
        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

            summary += "Closing bugs: "
            summary += "%s " % (bug)

                # Send one templated close mail per bug number
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)
1943 ###########################################################################
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.
        """

        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            if suite is None: continue
            announce_list = suite.announce
            # Announce to each configured list at most once
            if announce_list == "" or lists_done.has_key(announce_list):

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list

                # Bcc the package tracking server for sourceful uploads
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]
2003 ###########################################################################
def accept (self, summary, short_summary, session=None):
    """
    Accept an upload.

    This moves all files referenced from the .changes into the pool,
    sends the accepted mail, announces to lists, closes bugs and
    also checks for override disparities. If enabled it will write out
    the version history for the BTS Version Tracking.

    @type summary: string
    @param summary: Summary text

    @type short_summary: string
    @param short_summary: Short summary
    """
    # NOTE(review): this extract is missing several setup/commit lines
    # ("cnf = Config()", poolfiles/binaries initialisation, the
    # session.commit() calls, and the loop headers flagged inline
    # below) -- confirm against upstream before relying on this block.
    stats = SummaryStats()

    self.logger.log(["installing changes", self.pkg.changes_file])

    # Add the .dsc file to the DB first
    for newfile, entry in self.pkg.files.items():
        if entry["type"] == "dsc":
            source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)

    # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
    for newfile, entry in self.pkg.files.items():
        if entry["type"] == "deb":
            b, pf = add_deb_to_db(self, newfile, session)
            poolfiles.append(pf)

    # If this is a sourceful diff only upload that is moving
    # cross-component we need to copy the .orig files into the new
    # component too for the same reasons as above.
    # XXX: mhy: I think this should be in add_dsc_to_db
    if self.pkg.changes["architecture"].has_key("source"):
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("id"):
                continue # Skip if it's not in the pool
            orig_file_id = self.pkg.orig_files[orig_file]["id"]
            if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                continue # Skip if the location didn't change

            oldf = get_poolfile_by_id(orig_file_id, session)
            old_filename = os.path.join(oldf.location.path, oldf.filename)
            old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
                       'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

            new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

            # TODO: Care about size/md5sum collisions etc
            (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

            # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
            utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
            newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

            # Don't reference the old file from this changes
            # NOTE(review): the "for p in poolfiles:" header and the
            # body of this guard appear missing in this extract.
            if p.file_id == oldf.file_id:

            poolfiles.append(newf)

            # Fix up the DSC references
            for df in source.srcfiles:
                if df.poolfile.file_id == oldf.file_id:
                    # Add a new DSC entry and mark the old one for deletion
                    # Don't do it in the loop so we don't change the thing we're iterating over
                    # NOTE(review): "newdscf = DSCFile()" appears missing here.
                    newdscf.source_id = source.source_id
                    newdscf.poolfile_id = newf.file_id
                    session.add(newdscf)

    # Make sure that our source object is up-to-date
    session.expire(source)

    # Add changelog information to the database
    self.store_changelog()

    # Install the files into the pool
    for newfile, entry in self.pkg.files.items():
        destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
        utils.move(newfile, destination)
        self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
        stats.accept_bytes += float(entry["size"])

    # Copy the .changes file across for suite which need it.
    copy_changes = dict([(x.copychanges, '')
                         for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
                         if x.copychanges is not None])

    for dest in copy_changes.keys():
        utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

    # We're done - commit the database changes
    # Our SQL session will automatically start a new transaction after

    # Now ensure that the metadata has been added
    # This has to be done after we copy the files into the pool
    # For source if we have it:
    if self.pkg.changes["architecture"].has_key("source"):
        import_metadata_into_db(source, session)

    # Now for any of our binaries
    # NOTE(review): the "for b in binaries:" loop header appears missing.
        import_metadata_into_db(b, session)

    # Move the .changes into the 'done' directory
    utils.move(self.pkg.changes_file,
               os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

    if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
        UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

    self.Subst["__SUMMARY__"] = summary
    mail_message = utils.TemplateSubst(self.Subst,
                                       os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
    utils.send_mail(mail_message)
    self.announce(short_summary, 1)

    ## Helper stuff for DebBugs Version Tracking
    if cnf.Find("Dir::Queue::BTSVersionTrack"):
        if self.pkg.changes["architecture"].has_key("source"):
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            version_history = os.fdopen(fd, 'w')
            version_history.write(self.pkg.dsc["bts changelog"])
            version_history.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".versions")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # Write out the binary -> source mapping.
        (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
        debinfo = os.fdopen(fd, 'w')
        for name, entry in sorted(self.pkg.files.items()):
            if entry["type"] == "deb":
                line = " ".join([entry["package"], entry["version"],
                                 entry["architecture"], entry["source package"],
                                 entry["source version"]])
                debinfo.write(line+"\n")
        # NOTE(review): "debinfo.close()" appears missing here.
        filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                              self.pkg.changes_file[:-8]+".debinfo")
        os.rename(temp_filename, filename)
        os.chmod(filename, 0644)

    # Set up our copy queues (e.g. buildd queues)
    for suite_name in self.pkg.changes["distribution"].keys():
        suite = get_suite(suite_name, session)
        for q in suite.copy_queues:
            # NOTE(review): the "for f in poolfiles:" header appears missing.
            q.add_file_from_pool(f)

    stats.accept_count += 1
def check_override(self):
    """
    Checks override entries for validity. Mails "Override disparity" warnings,
    if that feature is enabled.

    Abandons the check if
      - override disparity checks are disabled
      - mail sending is disabled
    """
    # NOTE(review): local setup ("cnf = Config()"), the early-return
    # bodies and the "nothing to report" short-circuit are missing
    # from this extract -- confirm against upstream.

    # Abandon the check if override disparity checks have been disabled
    if not cnf.FindB("Dinstall::OverrideDisparityCheck"):

    summary = self.pkg.check_override()

    overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

    # Mail the disparity summary to the uploader.
    self.Subst["__SUMMARY__"] = summary
    mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
    utils.send_mail(mail_message)
    del self.Subst["__SUMMARY__"]
2224 ###########################################################################
def remove(self, from_dir=None):
    """
    Used (for instance) in p-u to remove the package from unchecked.

    Also removes the package from holding area.

    @type from_dir: string or None
    @param from_dir: directory to remove the upload from; defaults to
                     the directory the .changes was found in.
    """
    if from_dir is None:
        from_dir = self.pkg.directory
    # Bind the Holding singleton -- the visible block referenced 'h'
    # without ever assigning it, which would raise NameError.
    h = Holding()

    # Remove every file of the upload from the source directory and,
    # where present, from the holding area as well.
    for f in self.pkg.files.keys():
        os.unlink(os.path.join(from_dir, f))
        if os.path.exists(os.path.join(h.holding_dir, f)):
            os.unlink(os.path.join(h.holding_dir, f))

    # Finally do the same for the .changes file itself.
    os.unlink(os.path.join(from_dir, self.pkg.changes_file))
    if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
        os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2245 ###########################################################################
def move_to_queue (self, queue):
    """
    Move files to a destination queue using the permissions in the table.

    @param queue: queue object providing C{path}, C{perms} and
                  C{change_perms} (octal permission strings).
    """
    # Bind the Holding singleton -- the visible block referenced 'h'
    # without ever assigning it, which would raise NameError.
    h = Holding()

    # The .changes file may carry different permissions than the rest.
    utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
               queue.path, perms=int(queue.change_perms, 8))
    for f in self.pkg.files.keys():
        utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2257 ###########################################################################
def force_reject(self, reject_files):
    """
    Forcefully move files from the current directory to the
    reject directory. If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the new file.

    @type reject_files: dict
    @param reject_files: file dictionary
    """
    # NOTE(review): the try/except scaffolding around both os.open()
    # calls, the early "continue"/"return" bodies and the final
    # os.close(dest_fd) are missing from this extract -- confirm
    # against upstream before relying on this block.

    for file_entry in reject_files:
        # Skip any files which don't exist or which we don't have permission to copy.
        if os.access(file_entry, os.R_OK) == 0:

        dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

        # Claim the destination atomically (O_EXCL) so a concurrent
        # reject cannot race us.
        dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)

        # File exists? Let's find a new name by adding a number
        if e.errno == errno.EEXIST:
            dest_file = utils.find_next_free(dest_file, 255)
            except NoFreeFilenameError:
                # Something's either gone badly Pete Tong, or
                # someone is trying to exploit us.
                utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))

            # Make sure we really got it
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))

        # If we got here, we own the destination file, so we can
        # safely overwrite it.
        utils.move(file_entry, dest_file, 1, perms=0660)
2307 ###########################################################################
def do_reject (self, manual=0, reject_message="", notes=""):
    """
    Reject an upload. If called without a reject message or C{manual} is
    true, spawn an editor so the user can write one.

    @type manual: bool
    @param manual: manual or automated rejection

    @type reject_message: string
    @param reject_message: A reject message

    @param notes: iterable of note objects appended to the editor buffer
    """
    # NOTE(review): the "for note in notes:" loop header, the
    # answer/prompt initialisation, the "if not manual:"/"else:"
    # branch headers and several early-exit bodies are missing from
    # this extract -- confirm against upstream.

    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        (fd, temp_filename) = utils.temp_filename()
        temp_file = os.fdopen(fd, 'w')
        # 'note' is the loop variable of the missing "for note in notes:" loop.
        temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
            % (note.author, note.version, note.notedate, note.comment))
        editor = os.environ.get("EDITOR","vi")
        while answer == 'E':
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
                answer = answer[:1].upper()
        os.unlink(temp_filename)

    print "Rejecting.\n"

    reason_filename = self.pkg.changes_file[:-8] + ".reason"
    reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

    # Move all the files into the reject directory
    reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
    self.force_reject(reject_files)

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename)
    reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

    rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

    # Automated rejection branch ("if not manual:" header missing here):
    self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
    self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
    os.write(reason_fd, reject_message)
    reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)

    # Manual rejection branch ("else:" header missing here):
    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
    self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
    self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
    self.Subst["__REJECT_MESSAGE__"] = ""
    self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
    reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_fd, reject_mail_message)

    del self.Subst["__REJECTOR_ADDRESS__"]
    del self.Subst["__MANUAL_REJECT_MESSAGE__"]
    del self.Subst["__CC__"]

    # Send the rejection mail
    utils.send_mail(reject_mail_message)

    # Log our disapproval ("if self.logger:" guard missing in extract).
    self.logger.log(["rejected", self.pkg.changes_file])
2406 ################################################################################
def in_override_p(self, package, component, suite, binary_type, filename, session):
    """
    Check if a package already has override entries in the DB

    @type package: string
    @param package: package name

    @type component: string
    @param component: database id of the component

    @param suite: database id of the suite

    @type binary_type: string
    @param binary_type: type of the package

    @type filename: string
    @param filename: filename we check

    @return: the database result. But noone cares anyway.
    """
    # NOTE(review): the 'file_type = "dsc"' branch, its "else:" line,
    # the "if len(result) > 0:" guard and the final "return result"
    # appear missing from this extract -- confirm against upstream.

    if binary_type == "": # must be source

    file_type = binary_type

    # Override suite name; used for example with proposed-updates
    oldsuite = get_suite(suite, session)
    if (not oldsuite is None) and oldsuite.overridesuite:
        suite = oldsuite.overridesuite

    result = get_override(package, suite, component, file_type, session)

    # If checking for a source package fall back on the binary override type
    if file_type == "dsc" and len(result) < 1:
        result = get_override(package, suite, component, ['deb', 'udeb'], session)

    # Remember the section and priority so we can check them later if appropriate
    self.pkg.files[filename]["override section"] = result.section.section
    self.pkg.files[filename]["override priority"] = result.priority.priority
2457 ################################################################################
def get_anyversion(self, sv_list, suite):
    """
    Return the highest version of the package seen in C{suite} or in any
    suite configured as "Enhances"-related to it.

    @type sv_list: list
    @param sv_list: list of (suite, version) tuples to check

    @type suite: string
    @param suite: suite name

    @rtype: string or None
    @return: the highest matching version, or None if none was found
    """
    # The visible block read 'anyversion' before assigning it and never
    # returned a value; initialise and return it explicitly.
    anyversion = None
    anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
    # Hoist the case-normalisation out of the loop; it is loop-invariant.
    anysuite_lower = [ x.lower() for x in anysuite ]
    for (s, v) in sv_list:
        if s in anysuite_lower:
            # Keep the highest version seen so far.
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                anyversion = v
    return anyversion
2478 ################################################################################
def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
    """
    Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.

    @type sv_list: list
    @param sv_list: list of (suite, version) tuples to check

    @type filename: string
    @param filename: file whose version is being checked (for messages)

    @type new_version: string
    @param new_version: version of the incoming package

    @type sourceful: bool
    @param sourceful: whether this is a sourceful upload
    """
    # NOTE(review): a few lines are missing from this extract (the
    # "cansave" bookkeeping, the "if not add_version:" guard and the
    # "if not cansave:" guard before the final reject) -- confirm
    # against upstream before relying on this block.

    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        # Configured cross-suite constraints for this target suite.
        must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
        must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]

        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite)

        for (suite, existent_version) in sv_list:
            vercmp = apt_pkg.VersionCompare(new_version, existent_version)
            # A sourceful upload must strictly supersede these suites.
            if suite in must_be_newer_than and sourceful and vercmp < 1:
                self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

            if suite in must_be_older_than and vercmp > -1:
                # distribution-version mappings may let us "prop up" the
                # conflicting suite instead of rejecting outright.
                if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = self.pkg.changes["distribution-version"][suite]

                    add_version = self.get_anyversion(sv_list, addsuite)
                    target_version = self.get_anyversion(sv_list, target_suite)

                    # NOTE(review): the "if not add_version:" guard appears
                    # missing before this comment block.
                    # not add_version can only happen if we map to a suite
                    # that doesn't enhance the suite we're propup'ing from.
                    # so "propup-ver x a b c; map a d" is a problem only if
                    # d doesn't enhance a.
                    #
                    # i think we could always propagate in this case, rather
                    # than complaining. either way, this isn't a REJECT issue
                    #
                    # And - we really should complain to the dorks who configured dak
                    self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1

                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        # for this, I think.
                        self.rejects.append("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        self.warnings.append("Propogating upload to %s" % (addsuite))
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1

                # NOTE(review): the "if not cansave:" guard appears
                # missing before this final reject.
                self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2557 ################################################################################
def check_binary_against_db(self, filename, session):
    """
    Sanity-check a binary upload against the archive database: run the
    cross-suite version checks and reject attempts to overwrite a
    binary that already exists in the archive.

    @type filename: string
    @param filename: key into self.pkg.files for the binary being checked

    @param session: database session to use
    """
    entry = self.pkg.files[filename]

    # Ensure version is sane
    self.cross_suite_version_check(
        get_suite_version_by_package(entry["package"],
                                     entry["architecture"], session),
        filename, entry["version"], sourceful=False)

    # Check for any existing copies of the file
    q = session.query(DBBinary).filter_by(package=entry["package"])
    q = q.filter_by(version=entry["version"])
    q = q.join(Architecture).filter_by(arch_string=entry["architecture"])

    # Only reject when a matching binary really exists -- the visible
    # block appended the reject unconditionally, never consulting the
    # query it had just built.
    if q.count() > 0:
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2573 ################################################################################
def check_source_against_db(self, filename, session):
    """
    Run the cross-suite version checks for a source upload.

    @type filename: string
    @param filename: name of the .dsc file (used in reject messages)

    @param session: database session to use
    """
    source = self.pkg.dsc.get("source")
    version = self.pkg.dsc.get("version")

    # Ensure version is sane.  The call in the visible block was
    # truncated mid-argument-list; a .dsc is by definition sourceful,
    # so close it with sourceful=True.
    self.cross_suite_version_check(
        get_suite_version_by_source(source, session), filename, version,
        sourceful=True)
2584 ################################################################################
def check_dsc_against_db(self, filename, session):
    """
    Locate every file referenced by the .dsc (incoming dir, pool, or
    queue directories) and verify its md5sum/size against the .dsc.

    @warning: NB: this function can remove entries from the 'files' index [if
    the orig tarball is a duplicate of the one in the archive]; if
    you're iterating over 'files' and call this function as part of
    the loop, be sure to add a check to the top of the loop to
    ensure you haven't just tried to dereference the deleted entry.
    """
    # NOTE(review): this extract is missing substantial scaffolding
    # (the "for i in ql:" loop headers, try/except around the files
    # lookups, the "found"/"x" selection logic and several guard
    # bodies) -- confirm against upstream before relying on it.
    self.pkg.orig_files = {} # XXX: do we need to clear it?
    orig_files = self.pkg.orig_files

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    # The ordering on the select is needed to pick the newest orig
    # when it exists in multiple places.
    for dsc_name, dsc_entry in self.pkg.dsc_files.items():
        if self.pkg.files.has_key(dsc_name):
            actual_md5 = self.pkg.files[dsc_name]["md5sum"]
            actual_size = int(self.pkg.files[dsc_name]["size"])
            found = "%s in incoming" % (dsc_name)

            # Check the file does not already exist in the archive
            ql = get_poolfile_like_name(dsc_name, session)

            # Strip out anything that isn't '%s' or '/%s$'
            # NOTE(review): "for i in ql:" header and the removal body
            # appear missing around this guard.
            if not i.filename.endswith(dsc_name):

            # "[dak] has not broken them. [dak] has fixed a
            # brokenness. Your crappy hack exploited a bug in
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            # -- ajk@ on d-devel@l.d.o

            # Ignore exact matches for .orig.tar.gz
            if re_is_orig_source.match(dsc_name):
                if self.pkg.files.has_key(dsc_name) and \
                   int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                   self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                    self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                    # TODO: Don't delete the entry, just mark it as not needed
                    # This would fix the stupidity of changing something we often iterate over
                    # whilst we're doing it
                    del self.pkg.files[dsc_name]
                    dsc_entry["files id"] = i.file_id
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)

                    # Don't bitch that we couldn't find this file later
                    self.later_check_files.remove(dsc_name)

            # NOTE(review): this reject is presumably inside an "else"
            # / non-exact-match path whose header is missing here.
            self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

        elif re_is_orig_source.match(dsc_name):
            # Not in incoming: search the pool for the orig tarball.
            ql = get_poolfile_like_name(dsc_name, session)

            # Strip out anything that isn't '%s' or '/%s$'
            # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
            if not i.filename.endswith(dsc_name):

            # Unfortunately, we may get more than one match here if,
            # for example, the package was in potato but had an -sa
            # upload in woody.  So we need to choose the right one.

            # default to something sane in case we don't match any or have only one
            old_file = os.path.join(i.location.path, i.filename)
            old_file_fh = utils.open_file(old_file)
            actual_md5 = apt_pkg.md5sum(old_file_fh)
            actual_size = os.stat(old_file)[stat.ST_SIZE]
            # NOTE(review): the multi-match selection ("for x in ql:" and
            # the matching guard body) appears partially missing below.
            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):

            old_file = os.path.join(i.location.path, i.filename)
            old_file_fh = utils.open_file(old_file)
            actual_md5 = apt_pkg.md5sum(old_file_fh)
            actual_size = os.stat(old_file)[stat.ST_SIZE]
            suite_type = x.location.archive_type
            # need this for updating dsc_files in install()
            dsc_entry["files id"] = x.file_id
            # See install() in process-accepted...
            if not orig_files.has_key(dsc_name):
                orig_files[dsc_name] = {}
            orig_files[dsc_name]["id"] = x.file_id
            orig_files[dsc_name]["path"] = old_file
            orig_files[dsc_name]["location"] = x.location.location_id

            # TODO: Record the queues and info in the DB so we don't hardcode all this crap
            # Not there? Check the queue directories...
            for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                # Skip unconfigured queues (guard body missing in extract).
                if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                if os.path.exists(in_otherdir):
                    in_otherdir_fh = utils.open_file(in_otherdir)
                    actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                    in_otherdir_fh.close()
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["path"] = in_otherdir

            # NOTE(review): the "if not found:" guards for these two
            # rejects are missing in this extract.
            self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))

            self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))

        # Finally verify checksum and size against the .dsc.
        if actual_md5 != dsc_entry["md5sum"]:
            self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
        if actual_size != int(dsc_entry["size"]):
            self.rejects.append("size for %s doesn't match %s." % (found, filename))
2729 ################################################################################
2730 # This is used by process-new and process-holding to recheck a changes file
2731 # at the time we're running. It mainly wraps various other internal functions
2732 # and is similar to accepted_checks - these should probably be tidied up
def recheck(self, session):
    """
    Re-run the database-dependent checks (source existence, version and
    overwrite checks) for each file of the upload; used by process-new
    and process-holding since the DB may have changed since unchecked.
    """
    # NOTE(review): "cnf = Config()" setup, the early "continue" body,
    # the queue-hit "break"/fallthrough bodies and the final return are
    # missing from this extract -- confirm against upstream.
    for f in self.pkg.files.keys():
        # The .orig.tar.gz can disappear out from under us is it's a
        # duplicate of one in the archive.
        if not self.pkg.files.has_key(f):

        entry = self.pkg.files[f]

        # Check that the source still exists
        if entry["type"] == "deb":
            source_version = entry["source version"]
            source_package = entry["source package"]
            if not self.pkg.changes["architecture"].has_key("source") \
               and not source_exists(source_package, source_version, \
                suites = self.pkg.changes["distribution"].keys(), session = session):
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                # Accept if the .dsc is waiting in one of these queues
                # (the body of the innermost guard is missing here).
                for q in ["Embargoed", "Unembargoed", "Newstage"]:
                    if cnf.has_key("Dir::Queue::%s" % (q)):
                        if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Version and file overwrite checks
        if entry["type"] == "deb":
            self.check_binary_against_db(f, session)
        elif entry["type"] == "dsc":
            self.check_source_against_db(f, session)
            self.check_dsc_against_db(f, session)
2768 ################################################################################
def accepted_checks(self, overwrite_checks, session):
    """
    Re-run database-dependent checks at accept time and handle
    propagation ("propup") into suites listed in propdistribution.
    """
    # Recheck anything that relies on the database; since that's not
    # frozen between accept and our run time when called from p-a.

    # overwrite_checks is set to False when installing to stable/oldstable

    # NOTE(review): setup lines ("cnf = Config()", propogate/nopropogate
    # initialisation) and several loop/guard bodies are missing from
    # this extract -- confirm against upstream before relying on it.

    # Find the .dsc (again)
    # NOTE(review): the "dsc_filename = f" body of this loop is missing.
    for f in self.pkg.files.keys():
        if self.pkg.files[f]["type"] == "dsc":

    for checkfile in self.pkg.files.keys():
        # The .orig.tar.gz can disappear out from under us is it's a
        # duplicate of one in the archive.
        if not self.pkg.files.has_key(checkfile):

        entry = self.pkg.files[checkfile]

        # Check that the source still exists
        if entry["type"] == "deb":
            source_version = entry["source version"]
            source_package = entry["source package"]
            # NOTE(review): the closing "session = session):" continuation
            # of the next call is missing from this extract.
            if not self.pkg.changes["architecture"].has_key("source") \
               and not source_exists(source_package, source_version, \
                suites = self.pkg.changes["distribution"].keys(), \
                self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

        # Version and file overwrite checks
        if overwrite_checks:
            if entry["type"] == "deb":
                self.check_binary_against_db(checkfile, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(checkfile, session)
                self.check_dsc_against_db(dsc_filename, session)

        # propogate in the case it is in the override tables:
        for suite in self.pkg.changes.get("propdistribution", {}).keys():
            if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                propogate[suite] = 1
            # NOTE(review): the "else:" line is missing before this.
                nopropogate[suite] = 1

    for suite in propogate.keys():
        # Only propagate when no file of the upload vetoed it
        # ("continue" body of this guard missing in extract).
        if suite in nopropogate:
        self.pkg.changes["distribution"][suite] = 1

    for checkfile in self.pkg.files.keys():
        # Check the package is still in the override tables
        for suite in self.pkg.changes["distribution"].keys():
            # NOTE(review): 'entry' here is stale -- it is left over from
            # the earlier loop rather than derived from 'checkfile';
            # this looks like it should be self.pkg.files[checkfile].
            # Confirm before fixing.
            if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2828 ################################################################################
2829 # If any file of an upload has a recent mtime then chances are good
2830 # the file is still being uploaded.
def upload_too_new(self):
    """
    Heuristically decide whether the upload is still in progress by
    checking whether any of its files has a very recent mtime.
    """
    # NOTE(review): missing from this extract: the result/cnf setup,
    # "cwd = os.getcwd()", the try/finally restoring the working
    # directory, the "for f in file_list:" header, the guard body and
    # the final return -- confirm against upstream.

    # Move back to the original directory to get accurate time stamps
    os.chdir(self.pkg.directory)
    file_list = self.pkg.files.keys()
    file_list.extend(self.pkg.dsc_files.keys())
    file_list.append(self.pkg.changes_file)
    # 'f' is the loop variable of the missing "for f in file_list:" loop.
    last_modified = time.time()-os.path.getmtime(f)
    # A file modified more recently than Dinstall::SkipTime is presumed
    # to still be uploading.
    if last_modified < int(cnf["Dinstall::SkipTime"]):
def store_changelog(self):
    """
    Store the changelog text of this upload in the database and link it
    to the corresponding row of the 'changes' table.
    """
    # NOTE(review): the early "return" bodies and the trailing
    # session.commit() are missing from this extract -- confirm
    # against upstream.

    # Skip binary-only upload if it is not a bin-NMU
    if not self.pkg.changes['architecture'].has_key('source'):
        from daklib.regexes import re_bin_only_nmu
        if not re_bin_only_nmu.search(self.pkg.changes['version']):

    session = DBConn().session()

    # Check if upload already has a changelog entry
    query = """SELECT changelog_id FROM changes WHERE source = :source
               AND version = :version AND architecture = :architecture AND changelog_id != 0"""
    if session.execute(query, {'source': self.pkg.changes['source'], \
                               'version': self.pkg.changes['version'], \
                               'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:

    # Add current changelog text into changelogs_text table, return created ID
    query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
    ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

    # Link ID to the upload available in changes table
    query = """UPDATE changes SET changelog_id = :id WHERE source = :source
               AND version = :version AND architecture = :architecture"""
    session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                            'version': self.pkg.changes['version'], \
                            'architecture': " ".join(self.pkg.changes['architecture'].keys())})