5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 # suppress some deprecation warnings in squeeze related to apt_pkg
62 warnings.filterwarnings('ignore', \
63 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
65 warnings.filterwarnings('ignore', \
66 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
71 def get_type(f, session):
73 Get the file type of C{f}
76 @param f: file entry from Changes object
78 @type session: SQLA Session
79 @param session: SQL Alchemy session object
86 if f.has_key("dbtype"):
87 file_type = f["dbtype"]
88 elif re_source_ext.match(f["type"]):
92 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
94 # Validate the override type
95 type_id = get_override_type(file_type, session)
97 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
101 ################################################################################
103 # Determine what parts in a .changes are NEW
105 def determine_new(filename, changes, files, warn=1, session=None):
107 Determine what parts in a C{changes} file are NEW.
110 @param filename: changes filename
112 @type changes: Upload.Pkg.changes dict
113 @param changes: Changes dictionary
115 @type files: Upload.Pkg.files dict
116 @param files: Files dictionary
119 @param warn: Warn if overrides are added for (old)stable
122 @return: dictionary of NEW components.
125 # TODO: This should all use the database instead of parsing the changes
130 dbchg = get_dbchange(filename, session)
132 print "Warning: cannot find changes file in database; won't check byhand"
134 # Build up a list of potentially new things
135 for name, f in files.items():
136 # Keep a record of byhand elements
137 if f["section"] == "byhand":
142 priority = f["priority"]
143 section = f["section"]
144 file_type = get_type(f, session)
145 component = f["component"]
147 if file_type == "dsc":
150 if not new.has_key(pkg):
152 new[pkg]["priority"] = priority
153 new[pkg]["section"] = section
154 new[pkg]["type"] = file_type
155 new[pkg]["component"] = component
156 new[pkg]["files"] = []
158 old_type = new[pkg]["type"]
159 if old_type != file_type:
160 # source gets trumped by deb or udeb
161 if old_type == "dsc":
162 new[pkg]["priority"] = priority
163 new[pkg]["section"] = section
164 new[pkg]["type"] = file_type
165 new[pkg]["component"] = component
167 new[pkg]["files"].append(name)
169 if f.has_key("othercomponents"):
170 new[pkg]["othercomponents"] = f["othercomponents"]
172 # Fix up the list of target suites
174 for suite in changes["suite"].keys():
175 oldsuite = get_suite(suite, session)
177 print "WARNING: Invalid suite %s found" % suite
180 if oldsuite.overridesuite:
181 newsuite = get_suite(oldsuite.overridesuite, session)
184 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
185 oldsuite.overridesuite, suite)
186 del changes["suite"][suite]
187 changes["suite"][oldsuite.overridesuite] = 1
189 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
190 oldsuite.overridesuite, suite)
192 # Check for unprocessed byhand files
193 if dbchg is not None:
194 for b in byhand.keys():
195 # Find the file entry in the database
197 for f in dbchg.files:
200 # If it's processed, we can ignore it
206 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
208 # Check for new stuff
209 for suite in changes["suite"].keys():
210 for pkg in new.keys():
211 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
213 for file_entry in new[pkg]["files"]:
214 if files[file_entry].has_key("new"):
215 del files[file_entry]["new"]
219 for s in ['stable', 'oldstable']:
220 if changes["suite"].has_key(s):
221 print "WARNING: overrides will be added for %s!" % s
222 for pkg in new.keys():
223 if new[pkg].has_key("othercomponents"):
224 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
228 ################################################################################
230 def check_valid(new, session=None):
232 Check if section and priority for NEW packages exist in database.
233 Additionally does sanity checks:
234 - debian-installer packages have to be udeb (or source)
235 - non debian-installer packages cannot be udeb
236 - source priority can only be assigned to dsc file types
239 @param new: Dict of new packages with their section, priority and type.
242 for pkg in new.keys():
243 section_name = new[pkg]["section"]
244 priority_name = new[pkg]["priority"]
245 file_type = new[pkg]["type"]
247 section = get_section(section_name, session)
249 new[pkg]["section id"] = -1
251 new[pkg]["section id"] = section.section_id
253 priority = get_priority(priority_name, session)
255 new[pkg]["priority id"] = -1
257 new[pkg]["priority id"] = priority.priority_id
260 di = section_name.find("debian-installer") != -1
262 # If d-i, we must be udeb and vice-versa
263 if (di and file_type not in ("udeb", "dsc")) or \
264 (not di and file_type == "udeb"):
265 new[pkg]["section id"] = -1
267 # If dsc we need to be source and vice-versa
268 if (priority == "source" and file_type != "dsc") or \
269 (priority != "source" and file_type == "dsc"):
270 new[pkg]["priority id"] = -1
272 ###############################################################################
274 # Used by Upload.check_timestamps
275 class TarTime(object):
276 def __init__(self, future_cutoff, past_cutoff):
278 self.future_cutoff = future_cutoff
279 self.past_cutoff = past_cutoff
282 self.future_files = {}
283 self.ancient_files = {}
285 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
286 if MTime > self.future_cutoff:
287 self.future_files[Name] = MTime
288 if MTime < self.past_cutoff:
289 self.ancient_files[Name] = MTime
291 ###############################################################################
293 def prod_maintainer(notes, upload):
296 # Here we prepare an editor and get them ready to prod...
297 (fd, temp_filename) = utils.temp_filename()
298 temp_file = os.fdopen(fd, 'w')
299 for note in notes:
300 temp_file.write(note.comment)
302 editor = os.environ.get("EDITOR","vi")
305 os.system("%s %s" % (editor, temp_filename))
306 temp_fh = utils.open_file(temp_filename)
307 prod_message = "".join(temp_fh.readlines())
309 print "Prod message:"
310 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
311 prompt = "[P]rod, Edit, Abandon, Quit ?"
313 while prompt.find(answer) == -1:
314 answer = utils.our_raw_input(prompt)
315 m = re_default_answer.search(prompt)
318 answer = answer[:1].upper()
319 os.unlink(temp_filename)
325 # Otherwise, do the prodding...
326 user_email_address = utils.whoami() + " <%s>" % (
327 cnf["Dinstall::MyAdminAddress"])
331 Subst["__FROM_ADDRESS__"] = user_email_address
332 Subst["__PROD_MESSAGE__"] = prod_message
333 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
335 prod_mail_message = utils.TemplateSubst(
336 Subst, cnf["Dir::Templates"] + "/process-new.prod")
339 utils.send_mail(prod_mail_message)
341 print "Sent prodding message"
343 ################################################################################
345 def edit_note(note, upload, session, trainee=False):
346 # Write the current data to a temporary file
347 (fd, temp_filename) = utils.temp_filename()
348 editor = os.environ.get("EDITOR","vi")
351 os.system("%s %s" % (editor, temp_filename))
352 temp_file = utils.open_file(temp_filename)
353 newnote = temp_file.read().rstrip()
356 print utils.prefix_multi_line_string(newnote," ")
357 prompt = "[D]one, Edit, Abandon, Quit ?"
359 while prompt.find(answer) == -1:
360 answer = utils.our_raw_input(prompt)
361 m = re_default_answer.search(prompt)
364 answer = answer[:1].upper()
365 os.unlink(temp_filename)
372 comment = NewComment()
373 comment.package = upload.pkg.changes["source"]
374 comment.version = upload.pkg.changes["version"]
375 comment.comment = newnote
376 comment.author = utils.whoami()
377 comment.trainee = trainee
381 ###############################################################################
383 # suite names DMs can upload to
384 dm_suites = ['unstable', 'experimental']
386 def get_newest_source(source, session):
387 'returns the newest DBSource object in dm_suites'
388 ## the most recent version of the package uploaded to unstable or
389 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
390 ## section of its control file
391 q = session.query(DBSource).filter_by(source = source). \
392 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
393 order_by(desc('source.version'))
396 def get_suite_version_by_source(source, session):
397 'returns a list of tuples (suite_name, version) for source package'
398 q = session.query(Suite.suite_name, DBSource.version). \
399 join(Suite.sources).filter_by(source = source)
402 def get_source_by_package_and_suite(package, suite_name, session):
404 returns a DBSource query filtered by DBBinary.package and this package's
407 return session.query(DBSource). \
408 join(DBSource.binaries).filter_by(package = package). \
409 join(DBBinary.suites).filter_by(suite_name = suite_name)
411 def get_suite_version_by_package(package, arch_string, session):
413 returns a list of tuples (suite_name, version) for binary package and
416 return session.query(Suite.suite_name, DBBinary.version). \
417 join(Suite.binaries).filter_by(package = package). \
418 join(DBBinary.architecture). \
419 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
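# Hedged usage sketch for the query helpers above; "hello" and the printed
# fields are illustrative assumptions, not values taken from this file:
#
#   session = DBConn().session()
#   newest = get_newest_source("hello", session)   # newest DBSource or None
#   if newest is not None and newest.dm_upload_allowed:
#       pass  # a DM may upload this source
#   for suite_name, version in get_suite_version_by_source("hello", session):
#       print "%s: %s" % (suite_name, version)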
421 class Upload(object):
423 Everything that has to do with processing an upload.
431 ###########################################################################
434 """ Reset a number of internal variables."""
436 # Initialize the substitution template map
439 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
440 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
441 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
442 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
448 self.later_check_files = []
452 def package_info(self):
454 Format various messages from this Upload to send to the maintainer.
458 ('Reject Reasons', self.rejects),
459 ('Warnings', self.warnings),
460 ('Notes', self.notes),
464 for title, messages in msgs:
466 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
471 ###########################################################################
472 def update_subst(self):
473 """ Set up the per-package template substitution mappings """
477 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
478 if not self.pkg.changes.has_key("architecture") or not \
479 isinstance(self.pkg.changes["architecture"], dict):
480 self.pkg.changes["architecture"] = { "Unknown" : "" }
482 # and maintainer2047 may not exist.
483 if not self.pkg.changes.has_key("maintainer2047"):
484 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
486 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
487 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
488 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
490 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
491 if self.pkg.changes["architecture"].has_key("source") and \
492 self.pkg.changes["changedby822"] != "" and \
493 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
495 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
496 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
497 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
499 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
500 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
501 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
503 # Process policy doesn't set the fingerprint field and I don't want to make it
504 # do it for now as I don't want to have to deal with the case where we accepted
505 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
506 # the meantime so the package will be remarked as rejectable. Urgh.
507 # TODO: Fix this properly
508 if self.pkg.changes.has_key('fingerprint'):
509 session = DBConn().session()
510 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
511 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
512 if self.pkg.changes.has_key("sponsoremail"):
513 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
516 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
517 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
519 # Apply any global override of the Maintainer field
520 if cnf.get("Dinstall::OverrideMaintainer"):
521 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
522 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
524 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
525 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
526 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
527 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
529 ###########################################################################
530 def load_changes(self, filename):
532 Load a changes file and set up a dictionary around it. Also checks for mandatory
535 @type filename: string
536 @param filename: Changes filename, full path.
539 @return: whether the changes file was valid or not. We may want to
540 reject even if this is True (see what gets put in self.rejects).
541 This is simply to prevent us even trying things later which will
542 fail because we couldn't properly parse the file.
545 self.pkg.changes_file = filename
547 # Parse the .changes file into a dictionary
549 self.pkg.changes.update(parse_changes(filename))
550 except CantOpenError:
551 self.rejects.append("%s: can't read file." % (filename))
553 except ParseChangesError, line:
554 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
556 except ChangesUnicodeError:
557 self.rejects.append("%s: changes file not proper utf-8" % (filename))
560 # Parse the Files field from the .changes into another dictionary
562 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
563 except ParseChangesError, line:
564 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
566 except UnknownFormatError, format:
567 self.rejects.append("%s: unknown format '%s'." % (filename, format))
570 # Check for mandatory fields
571 for i in ("distribution", "source", "binary", "architecture",
572 "version", "maintainer", "files", "changes", "description"):
573 if not self.pkg.changes.has_key(i):
574 # Avoid undefined errors later
575 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
578 # Strip a source version in brackets from the source field
579 if re_strip_srcver.search(self.pkg.changes["source"]):
580 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
582 # Ensure the source field is a valid package name.
583 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
584 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
586 # Split multi-value fields into a lower-level dictionary
587 for i in ("architecture", "distribution", "binary", "closes"):
588 o = self.pkg.changes.get(i, "")
590 del self.pkg.changes[i]
592 self.pkg.changes[i] = {}
595 self.pkg.changes[i][j] = 1
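# For example (sketch): a changes line "Architecture: source amd64" ends up
# as self.pkg.changes["architecture"] == {"source": 1, "amd64": 1}, which is
# why later code tests these fields with has_key() rather than string matching.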
597 # Fix the Maintainer: field to be RFC822/2047 compatible
599 (self.pkg.changes["maintainer822"],
600 self.pkg.changes["maintainer2047"],
601 self.pkg.changes["maintainername"],
602 self.pkg.changes["maintaineremail"]) = \
603 fix_maintainer (self.pkg.changes["maintainer"])
604 except ParseMaintError, msg:
605 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
606 % (filename, self.pkg.changes["maintainer"], msg))
608 # ...likewise for the Changed-By: field if it exists.
610 (self.pkg.changes["changedby822"],
611 self.pkg.changes["changedby2047"],
612 self.pkg.changes["changedbyname"],
613 self.pkg.changes["changedbyemail"]) = \
614 fix_maintainer(self.pkg.changes.get("changed-by", ""))
615 except ParseMaintError, msg:
616 self.pkg.changes["changedby822"] = ""
617 self.pkg.changes["changedby2047"] = ""
618 self.pkg.changes["changedbyname"] = ""
619 self.pkg.changes["changedbyemail"] = ""
621 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
622 % (filename, self.pkg.changes["changed-by"], msg))
624 # Ensure all the values in Closes: are numbers
625 if self.pkg.changes.has_key("closes"):
626 for i in self.pkg.changes["closes"].keys():
627 if re_isanum.match(i) is None:
628 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
630 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
631 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
632 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
634 # Check the .changes is non-empty
635 if not self.pkg.files:
636 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
639 # Changes was syntactically valid even if we'll reject
642 ###########################################################################
644 def check_distributions(self):
645 "Check and map the Distribution field"
649 # Handle suite mappings
650 for m in Cnf.ValueList("SuiteMappings"):
653 if mtype == "map" or mtype == "silent-map":
654 (source, dest) = args[1:3]
655 if self.pkg.changes["distribution"].has_key(source):
656 del self.pkg.changes["distribution"][source]
657 self.pkg.changes["distribution"][dest] = 1
658 if mtype != "silent-map":
659 self.notes.append("Mapping %s to %s." % (source, dest))
660 if self.pkg.changes.has_key("distribution-version"):
661 if self.pkg.changes["distribution-version"].has_key(source):
662 self.pkg.changes["distribution-version"][source]=dest
663 elif mtype == "map-unreleased":
664 (source, dest) = args[1:3]
665 if self.pkg.changes["distribution"].has_key(source):
666 for arch in self.pkg.changes["architecture"].keys():
667 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
668 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
669 del self.pkg.changes["distribution"][source]
670 self.pkg.changes["distribution"][dest] = 1
672 elif mtype == "ignore":
674 if self.pkg.changes["distribution"].has_key(suite):
675 del self.pkg.changes["distribution"][suite]
676 self.warnings.append("Ignoring %s as a target suite." % (suite))
677 elif mtype == "reject":
679 if self.pkg.changes["distribution"].has_key(suite):
680 self.rejects.append("Uploads to %s are not accepted." % (suite))
681 elif mtype == "propup-version":
682 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
684 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
685 if self.pkg.changes["distribution"].has_key(args[1]):
686 self.pkg.changes.setdefault("distribution-version", {})
687 for suite in args[2:]:
688 self.pkg.changes["distribution-version"][suite] = suite
690 # Ensure there is (still) a target distribution
691 if len(self.pkg.changes["distribution"].keys()) < 1:
692 self.rejects.append("No valid distribution remaining.")
694 # Ensure target distributions exist
695 for suite in self.pkg.changes["distribution"].keys():
696 if not Cnf.has_key("Suite::%s" % (suite)):
697 self.rejects.append("Unknown distribution `%s'." % (suite))
699 ###########################################################################
701 def binary_file_checks(self, f, session):
703 entry = self.pkg.files[f]
705 # Extract package control information
706 deb_file = utils.open_file(f)
708 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
710 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
712 # Can't continue, none of the checks on control would work.
715 # Check for mandatory "Description:"
718 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
720 self.rejects.append("%s: Missing Description in binary package" % (f))
725 # Check for mandatory fields
726 for field in [ "Package", "Architecture", "Version" ]:
727 if control.Find(field) == None:
729 self.rejects.append("%s: No %s field in control." % (f, field))
732 # Ensure the package name matches the one given in the .changes
733 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
734 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
736 # Validate the package field
737 package = control.Find("Package")
738 if not re_valid_pkg_name.match(package):
739 self.rejects.append("%s: invalid package name '%s'." % (f, package))
741 # Validate the version field
742 version = control.Find("Version")
743 if not re_valid_version.match(version):
744 self.rejects.append("%s: invalid version number '%s'." % (f, version))
746 # Ensure the architecture of the .deb is one we know about.
747 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
748 architecture = control.Find("Architecture")
749 upload_suite = self.pkg.changes["distribution"].keys()[0]
751 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
752 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
753 self.rejects.append("Unknown architecture '%s'." % (architecture))
755 # Ensure the architecture of the .deb is one of the ones
756 # listed in the .changes.
757 if not self.pkg.changes["architecture"].has_key(architecture):
758 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
760 # Sanity-check the Depends field
761 depends = control.Find("Depends")
763 self.rejects.append("%s: Depends field is empty." % (f))
765 # Sanity-check the Provides field
766 provides = control.Find("Provides")
768 provide = re_spacestrip.sub('', provides)
770 self.rejects.append("%s: Provides field is empty." % (f))
771 prov_list = provide.split(",")
772 for prov in prov_list:
773 if not re_valid_pkg_name.match(prov):
774 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
776 # Check the section & priority match those given in the .changes (non-fatal)
777 if control.Find("Section") and entry["section"] != "" \
778 and entry["section"] != control.Find("Section"):
779 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
780 (f, control.Find("Section", ""), entry["section"]))
781 if control.Find("Priority") and entry["priority"] != "" \
782 and entry["priority"] != control.Find("Priority"):
783 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
784 (f, control.Find("Priority", ""), entry["priority"]))
786 entry["package"] = package
787 entry["architecture"] = architecture
788 entry["version"] = version
789 entry["maintainer"] = control.Find("Maintainer", "")
791 if f.endswith(".udeb"):
792 self.pkg.files[f]["dbtype"] = "udeb"
793 elif f.endswith(".deb"):
794 self.pkg.files[f]["dbtype"] = "deb"
796 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
798 entry["source"] = control.Find("Source", entry["package"])
800 # Get the source version
801 source = entry["source"]
804 if source.find("(") != -1:
805 m = re_extract_src_version.match(source)
807 source_version = m.group(2)
809 if not source_version:
810 source_version = self.pkg.files[f]["version"]
812 entry["source package"] = source
813 entry["source version"] = source_version
815 # Ensure the filename matches the contents of the .deb
816 m = re_isadeb.match(f)
819 file_package = m.group(1)
820 if entry["package"] != file_package:
821 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
822 (f, file_package, entry["dbtype"], entry["package"]))
823 epochless_version = re_no_epoch.sub('', control.Find("Version"))
826 file_version = m.group(2)
827 if epochless_version != file_version:
828 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
829 (f, file_version, entry["dbtype"], epochless_version))
832 file_architecture = m.group(3)
833 if entry["architecture"] != file_architecture:
834 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
835 (f, file_architecture, entry["dbtype"], entry["architecture"]))
837 # Check for existent source
838 source_version = entry["source version"]
839 source_package = entry["source package"]
840 if self.pkg.changes["architecture"].has_key("source"):
841 if source_version != self.pkg.changes["version"]:
842 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
843 (source_version, f, self.pkg.changes["version"]))
845 # Check in the SQL database
846 if not source_exists(source_package, source_version, suites = \
847 self.pkg.changes["distribution"].keys(), session = session):
848 # Check in one of the other directories
849 source_epochless_version = re_no_epoch.sub('', source_version)
850 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
851 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
853 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
856 dsc_file_exists = False
857 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
858 if cnf.has_key("Dir::Queue::%s" % (myq)):
859 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
860 dsc_file_exists = True
863 if not dsc_file_exists:
864 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
866 # Check the version and for file overwrites
867 self.check_binary_against_db(f, session)
869 # Temporarily disable contents generation until we change the table storage layout
872 #if len(b.rejects) > 0:
873 # for j in b.rejects:
874 # self.rejects.append(j)
876 def source_file_checks(self, f, session):
877 entry = self.pkg.files[f]
879 m = re_issource.match(f)
883 entry["package"] = m.group(1)
884 entry["version"] = m.group(2)
885 entry["type"] = m.group(3)
887 # Ensure the source package name matches the Source field in the .changes
888 if self.pkg.changes["source"] != entry["package"]:
889 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
891 # Ensure the source version matches the version in the .changes file
892 if re_is_orig_source.match(f):
893 changes_version = self.pkg.changes["chopversion2"]
895 changes_version = self.pkg.changes["chopversion"]
897 if changes_version != entry["version"]:
898 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
900 # Ensure the .changes lists source in the Architecture field
901 if not self.pkg.changes["architecture"].has_key("source"):
902 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
904 # Check the signature of a .dsc file
905 if entry["type"] == "dsc":
906 # check_signature returns either:
907 # (None, [list, of, rejects]) or (signature, [])
908 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
910 self.rejects.append(j)
912 entry["architecture"] = "source"
914 def per_suite_file_checks(self, f, suite, session):
916 entry = self.pkg.files[f]
919 if entry.has_key("byhand"):
922 # Check we have fields we need to do these checks
924 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
925 if not entry.has_key(m):
926 self.rejects.append("file '%s' does not have field %s set" % (f, m))
932 # Handle component mappings
933 for m in cnf.ValueList("ComponentMappings"):
934 (source, dest) = m.split()
935 if entry["component"] == source:
936 entry["original component"] = source
937 entry["component"] = dest
939 # Ensure the component is valid for the target suite
940 if cnf.has_key("Suite:%s::Components" % (suite)) and \
941 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
942 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
945 # Validate the component
946 if not get_component(entry["component"], session):
947 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
950 # See if the package is NEW
951 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
954 # Validate the priority
955 if entry["priority"].find('/') != -1:
956 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
958 # Determine the location
959 location = cnf["Dir::Pool"]
960 l = get_location(location, entry["component"], session=session)
962 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
963 entry["location id"] = -1
965 entry["location id"] = l.location_id
967 # Check the md5sum & size against existing files (if any)
968 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
970 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
971 entry["size"], entry["md5sum"], entry["location id"])
974 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
975 elif found is False and poolfile is not None:
976 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
979 entry["files id"] = None
981 entry["files id"] = poolfile.file_id
983 # Check for packages that have moved from one component to another
984 entry['suite'] = suite
985 arch_list = [entry["architecture"], 'all']
986 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
987 [suite], arch_list = arch_list, session = session)
988 if component is not None:
989 entry["othercomponents"] = component
991 def check_files(self, action=True):
992 file_keys = self.pkg.files.keys()
998 os.chdir(self.pkg.directory)
1000 ret = holding.copy_to_holding(f)
1002 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1006 # check whether we already know the changes file
1007 # [NB: this check must be done post-suite mapping]
1008 base_filename = os.path.basename(self.pkg.changes_file)
1010 session = DBConn().session()
1013 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1014 # if in the pool or in a queue other than unchecked, reject
1015 if (dbc.in_queue is None) \
1016 or (dbc.in_queue is not None
1017 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1018 self.rejects.append("%s file already known to dak" % base_filename)
1019 except NoResultFound, e:
1023 has_binaries = False
1026 for f, entry in self.pkg.files.items():
1027 # Ensure the file does not already exist in one of the accepted directories
1028 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1029 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1030 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1031 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1033 if not re_taint_free.match(f):
1034 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1036 # Check the file is readable
1037 if not os.access(f, os.R_OK):
1038 # When running in -n, copy_to_holding() won't have
1039 # generated the reject_message, so we need to.
1041 if os.path.exists(f):
1042 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1044 # Don't directly reject, mark to check later to deal with orig's
1045 # we can find in the pool
1046 self.later_check_files.append(f)
1047 entry["type"] = "unreadable"
1050 # If it's byhand skip remaining checks
1051 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1053 entry["type"] = "byhand"
1055 # Checks for a binary package...
1056 elif re_isadeb.match(f):
1058 entry["type"] = "deb"
1060 # This routine appends to self.rejects/warnings as appropriate
1061 self.binary_file_checks(f, session)
1063 # Checks for a source package...
1064 elif re_issource.match(f):
1067 # This routine appends to self.rejects/warnings as appropriate
1068 self.source_file_checks(f, session)
1070 # Not a binary or source package? Assume byhand...
1073 entry["type"] = "byhand"
1075 # Per-suite file checks
1076 entry["oldfiles"] = {}
1077 for suite in self.pkg.changes["distribution"].keys():
1078 self.per_suite_file_checks(f, suite, session)
1082 # If the .changes file says it has source, it must have source.
1083 if self.pkg.changes["architecture"].has_key("source"):
1085 self.rejects.append("no source found and Architecture line in changes mention source.")
1087 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1088 self.rejects.append("source only uploads are not supported.")
1090 ###########################################################################
1091 def check_dsc(self, action=True, session=None):
1092 """Returns bool indicating whether or not the source changes are valid"""
1093 # Ensure there is source to check
1094 if not self.pkg.changes["architecture"].has_key("source"):
1099 for f, entry in self.pkg.files.items():
1100 if entry["type"] == "dsc":
1102 self.rejects.append("can not process a .changes file with multiple .dsc's.")
1107 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1108 if not dsc_filename:
1109 self.rejects.append("source uploads must contain a dsc file")
1112 # Parse the .dsc file
1114 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1115 except CantOpenError:
1116 # if not -n copy_to_holding() will have done this for us...
1118 self.rejects.append("%s: can't read file." % (dsc_filename))
1119 except ParseChangesError, line:
1120 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1121 except InvalidDscError, line:
1122 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1123 except ChangesUnicodeError:
1124 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1126 # Build up the file list of files mentioned by the .dsc
1128 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1129 except NoFilesFieldError:
1130 self.rejects.append("%s: no Files: field." % (dsc_filename))
1132 except UnknownFormatError, format:
1133 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1135 except ParseChangesError, line:
1136 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1139 # Enforce mandatory fields
1140 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1141 if not self.pkg.dsc.has_key(i):
1142 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1145 # Validate the source and version fields
1146 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1147 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1148 if not re_valid_version.match(self.pkg.dsc["version"]):
1149 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1151 # Only a limited list of source formats are allowed in each suite
1152 for dist in self.pkg.changes["distribution"].keys():
1153 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1154 if self.pkg.dsc["format"] not in allowed:
1155 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1157 # Validate the Maintainer field
1159 # We ignore the return value
1160 fix_maintainer(self.pkg.dsc["maintainer"])
1161 except ParseMaintError, msg:
1162 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1163 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1165 # Validate the build-depends field(s)
1166 for field_name in [ "build-depends", "build-depends-indep" ]:
1167 field = self.pkg.dsc.get(field_name)
1169 # Have apt try to parse them...
1171 apt_pkg.ParseSrcDepends(field)
1173 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1175 # Ensure the version number in the .dsc matches the version number in the .changes
1176 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1177 changes_version = self.pkg.files[dsc_filename]["version"]
1179 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1180 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1182 # Ensure the Files field contain only what's expected
1183 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1185 # Ensure source is newer than existing source in target suites
1186 session = DBConn().session()
1187 self.check_source_against_db(dsc_filename, session)
1188 self.check_dsc_against_db(dsc_filename, session)
1190 dbchg = get_dbchange(self.pkg.changes_file, session)
1192 # Finally, check if we're missing any files
1193 for f in self.later_check_files:
1195 # Check if we've already processed this file if we have a dbchg object
1198 for pf in dbchg.files:
1199 if pf.filename == f and pf.processed:
1200 self.notes.append('%s was already processed so we can go ahead' % f)
1202 del self.pkg.files[f]
1204 self.rejects.append("Could not find file %s references in changes" % f)
1210 ###########################################################################
1212 def get_changelog_versions(self, source_dir):
1213 """Extracts a the source package and (optionally) grabs the
1214 version history out of debian/changelog for the BTS."""
1218 # Find the .dsc (again)
1220 for f in self.pkg.files.keys():
1221 if self.pkg.files[f]["type"] == "dsc":
1224 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1225 if not dsc_filename:
1228 # Create a symlink mirror of the source files in our temporary directory
1229 for f in self.pkg.files.keys():
1230 m = re_issource.match(f)
1232 src = os.path.join(source_dir, f)
1233 # If a file is missing for whatever reason, give up.
1234 if not os.path.exists(src):
1237 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1238 self.pkg.orig_files[f].has_key("path"):
1240 dest = os.path.join(os.getcwd(), f)
1241 os.symlink(src, dest)
1243 # If the orig files are not a part of the upload, create symlinks to the
1245 for orig_file in self.pkg.orig_files.keys():
1246 if not self.pkg.orig_files[orig_file].has_key("path"):
1248 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1249 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1251 # Extract the source
1252 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1253 (result, output) = commands.getstatusoutput(cmd)
1255 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1256 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1259 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1262 # Get the upstream version
1263 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1264 if re_strip_revision.search(upstr_version):
1265 upstr_version = re_strip_revision.sub('', upstr_version)
1267 # Ensure the changelog file exists
1268 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1269 if not os.path.exists(changelog_filename):
1270 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1273 # Parse the changelog
1274 self.pkg.dsc["bts changelog"] = ""
1275 changelog_file = utils.open_file(changelog_filename)
1276 for line in changelog_file.readlines():
1277 m = re_changelog_versions.match(line)
1279 self.pkg.dsc["bts changelog"] += line
1280 changelog_file.close()
1282 # Check we found at least one revision in the changelog
1283 if not self.pkg.dsc["bts changelog"]:
1284 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1286 def check_source(self):
1288 # a) there's no source
1289 if not self.pkg.changes["architecture"].has_key("source"):
1292 tmpdir = utils.temp_dirname()
1294 # Move into the temporary directory
1298 # Get the changelog version history
1299 self.get_changelog_versions(cwd)
1301 # Move back and cleanup the temporary tree
1305 shutil.rmtree(tmpdir)
1307 if e.errno != errno.EACCES:
1309 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1311 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1312 # We probably have u-r or u-w directories so chmod everything
1314 cmd = "chmod -R u+rwx %s" % (tmpdir)
1315 result = os.system(cmd)
1317 utils.fubar("'%s' failed with result %s." % (cmd, result))
1318 shutil.rmtree(tmpdir)
1319 except Exception, e:
1320 print "foobar2 (%s)" % e
1321 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1323 ###########################################################################
1324 def ensure_hashes(self):
1325 # Make sure we recognise the format of the Files: field in the .changes
1326 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1327 if len(format) == 2:
1328 format = int(format[0]), int(format[1])
1330 format = int(float(format[0])), 0
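# Sketch: a Format value of "1.8" parses to the tuple (1, 8); a bare "1"
# (no minor part) falls through to the float branch and becomes (1, 0).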
1332 # We need to deal with the original changes blob, as the fields we need
1333 # might not be in the changes dict serialised into the .dak anymore.
1334 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1336 # Copy the checksums over to the current changes dict. This will keep
1337 # the existing modifications to it intact.
1338 for field in orig_changes:
1339 if field.startswith('checksums-'):
1340 self.pkg.changes[field] = orig_changes[field]
1342 # Check for unsupported hashes
1343 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1344 self.rejects.append(j)
1346 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1347 self.rejects.append(j)
1349 # We have to calculate the hash ourselves if the changes format is older than
1350 # the one the hash first appeared in, rather than requiring it in the changes file
1351 for hashname, hashfunc, version in utils.known_hashes:
1352 # TODO: Move _ensure_changes_hash into this class
1353 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1354 self.rejects.append(j)
1355 if "source" in self.pkg.changes["architecture"]:
1356 # TODO: Move _ensure_dsc_hash into this class
1357 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1358 self.rejects.append(j)
1360 def check_hashes(self):
1361 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1362 self.rejects.append(m)
1364 for m in utils.check_size(".changes", self.pkg.files):
1365 self.rejects.append(m)
1367 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1368 self.rejects.append(m)
1370 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1371 self.rejects.append(m)
1373 self.ensure_hashes()
1375 ###########################################################################
1377 def ensure_orig(self, target_dir='.', session=None):
1379 Ensures that all orig files mentioned in the changes file are present
1380 in target_dir. If they do not exist, they are symlinked into place.
1382 A list containing the symlinks that were created is returned (so they
1389 for filename, entry in self.pkg.dsc_files.iteritems():
1390 if not re_is_orig_source.match(filename):
1391 # File is not an orig; ignore
1394 if os.path.exists(filename):
1395 # File exists, no need to continue
1398 def symlink_if_valid(path):
1399 f = utils.open_file(path)
1400 md5sum = apt_pkg.md5sum(f)
1403 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1404 expected = (int(entry['size']), entry['md5sum'])
1406 if fingerprint != expected:
1409 dest = os.path.join(target_dir, filename)
1411 os.symlink(path, dest)
1412 symlinked.append(dest)
1418 session_ = DBConn().session()
1423 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1424 poolfile_path = os.path.join(
1425 poolfile.location.path, poolfile.filename
1428 if symlink_if_valid(poolfile_path):
1438 # Look in some other queues for the file
1439 queues = ('New', 'Byhand', 'ProposedUpdates',
1440 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1442 for queue in queues:
1443 if not cnf.get('Dir::Queue::%s' % queue):
1446 queuefile_path = os.path.join(
1447 cnf['Dir::Queue::%s' % queue], filename
1450 if not os.path.exists(queuefile_path):
1451 # Does not exist in this queue
1454 if symlink_if_valid(queuefile_path):
1459 ###########################################################################
1461 def check_lintian(self):
1463 Extends self.rejects by checking the output of lintian against tags
1464 specified in Dinstall::LintianTags.
1469 # Don't reject binary uploads
1470 if not self.pkg.changes['architecture'].has_key('source'):
1473 # Only check some distributions
1474 for dist in ('unstable', 'experimental'):
1475 if dist in self.pkg.changes['distribution']:
1480 # If we do not have a tagfile, don't do anything
1481 tagfile = cnf.get("Dinstall::LintianTags")
1485 # Parse the yaml file
1486 sourcefile = file(tagfile, 'r')
1487 sourcecontent = sourcefile.read()
1491 lintiantags = yaml.load(sourcecontent)['lintian']
1492 except yaml.YAMLError, msg:
1493 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1496 # Try and find all orig mentioned in the .dsc
1497 symlinked = self.ensure_orig()
1499 # Setup the input file for lintian
1500 fd, temp_filename = utils.temp_filename()
1501 temptagfile = os.fdopen(fd, 'w')
1502 for tags in lintiantags.values():
1503 temptagfile.writelines(['%s\n' % x for x in tags])
1507 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1508 (temp_filename, self.pkg.changes_file)
1510 result, output = commands.getstatusoutput(cmd)
1512 # Remove our tempfile and any symlinks we created
1513 os.unlink(temp_filename)
1515 for symlink in symlinked:
1519 utils.warn("lintian failed for %s [return code: %s]." % \
1520 (self.pkg.changes_file, result))
1521 utils.warn(utils.prefix_multi_line_string(output, \
1522 " [possible output:] "))
1527 [self.pkg.changes_file, "check_lintian"] + list(txt)
1531 parsed_tags = parse_lintian_output(output)
1532 self.rejects.extend(
1533 generate_reject_messages(parsed_tags, lintiantags, log=log)
1536 ###########################################################################
1537 def check_urgency(self):
1539 if self.pkg.changes["architecture"].has_key("source"):
1540 if not self.pkg.changes.has_key("urgency"):
1541 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1542 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1543 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1544 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1545 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1546 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1548 ###########################################################################
1550 # Sanity check the time stamps of files inside debs.
1551 # [Files in the near future cause ugly warnings and extreme time
1552 # travel can cause errors on extraction]
1554 def check_timestamps(self):
1557 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1558 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1559 tar = TarTime(future_cutoff, past_cutoff)
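# Sketch with assumed configuration values: FutureTimeTravelGrace is a grace
# period in seconds (e.g. 86400 for one day of clock skew) and PastCutoffYear
# a year string such as "1975"; timestamps outside [past_cutoff,
# future_cutoff] are collected by tar.callback below.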
1561 for filename, entry in self.pkg.files.items():
1562 if entry["type"] == "deb":
1565 deb_file = utils.open_file(filename)
1566 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1569 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1570 except SystemError, e:
1571 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1572 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1575 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1579 future_files = tar.future_files.keys()
1581 num_future_files = len(future_files)
1582 future_file = future_files[0]
1583 future_date = tar.future_files[future_file]
1584 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1585 % (filename, num_future_files, future_file, time.ctime(future_date)))
1587 ancient_files = tar.ancient_files.keys()
1589 num_ancient_files = len(ancient_files)
1590 ancient_file = ancient_files[0]
1591 ancient_date = tar.ancient_files[ancient_file]
1592 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1593 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1595 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1597 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1598 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1600 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1606 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1607 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1608 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1609 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1610 self.pkg.changes["sponsoremail"] = uid_email
1615 ###########################################################################
1616 # check_signed_by_key checks
1617 ###########################################################################
1619 def check_signed_by_key(self):
1620 """Ensure the .changes is signed by an authorized uploader."""
1621 session = DBConn().session()
1623 # First of all we check that the person has proper upload permissions
1624 # and that this upload isn't blocked
1625 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1628 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1631 # TODO: Check that import-keyring adds UIDs properly
1633 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1636 # Check that the fingerprint which uploaded has permission to do so
1637 self.check_upload_permissions(fpr, session)
1639 # Check that this package is not in a transition
1640 self.check_transition(session)
1645 def check_upload_permissions(self, fpr, session):
1646 # Check any one-off upload blocks
1647 self.check_upload_blocks(fpr, session)
1649 # Start with DM as a special case
1650 # DM is a special case unfortunately, so we check it first
1651 # (keys with no source access get more access than DMs in one
1652 # way; DMs can only upload for their packages whether source
1653 # or binary, whereas keys with no access might be able to
1654 # upload some binaries)
1655 if fpr.source_acl.access_level == 'dm':
1656 self.check_dm_upload(fpr, session)
1658 # Check source-based permissions for other types
1659 if self.pkg.changes["architecture"].has_key("source") and \
1660 fpr.source_acl.access_level is None:
1661 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1662 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1663 self.rejects.append(rej)
1665 # If not a DM, we allow full upload rights
1666 uid_email = "%s@debian.org" % (fpr.uid.uid)
1667 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1670 # Check binary upload permissions
1671 # By this point we know that DMs can't have got here unless they
1672 # are allowed to deal with the package concerned so just apply
1674 if fpr.binary_acl.access_level == 'full':
1677 # Otherwise we're in the map case
1678 tmparches = self.pkg.changes["architecture"].copy()
1679 tmparches.pop('source', None)
1681 for bam in fpr.binary_acl_map:
1682 tmparches.pop(bam.architecture.arch_string, None)
1684 if len(tmparches.keys()) > 0:
1685 if fpr.binary_reject:
1686 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1687 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1688 self.rejects.append(rej)
1690 # TODO: This is where we'll implement reject vs throw away binaries later
1691 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1692 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1693 rej += "\nFingerprint: %s", (fpr.fingerprint)
1694 self.rejects.append(rej)
1697 def check_upload_blocks(self, fpr, session):
1698 """Check whether any upload blocks apply to this source, source
1699 version, uid / fpr combination"""
1701 def block_rej_template(fb):
1702 rej = 'Manual upload block in place for package %s' % fb.source
1703 if fb.version is not None:
1704 rej += ', version %s' % fb.version
1707 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1708 # version is None if the block applies to all versions
1709 if fb.version is None or fb.version == self.pkg.changes['version']:
1710 # Check both fpr and uid - either is enough to cause a reject
1711 if fb.fpr is not None:
1712 if fb.fpr.fingerprint == fpr.fingerprint:
1713 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1714 if fb.uid is not None:
1715 if fb.uid == fpr.uid:
1716 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1719 def check_dm_upload(self, fpr, session):
1720 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1721 ## none of the uploaded packages are NEW
1723 for f in self.pkg.files.keys():
1724 if self.pkg.files[f].has_key("byhand"):
1725 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1727 if self.pkg.files[f].has_key("new"):
1728 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1734 r = get_newest_source(self.pkg.changes["source"], session)
1737 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1738 self.rejects.append(rej)
1741 if not r.dm_upload_allowed:
1742 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1743 self.rejects.append(rej)
1746 ## the Maintainer: field of the uploaded .changes file corresponds with
1747 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1749 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1750 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1752 ## the most recent version of the package uploaded to unstable or
1753 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1754 ## non-developer maintainers cannot NMU or hijack packages)
1756 # srcuploaders includes the maintainer
1758 for sup in r.srcuploaders:
1759 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1760 # Eww - I hope we never have two people with the same name in Debian
1761 if email == fpr.uid.uid or name == fpr.uid.name:
1766 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1769 ## none of the packages are being taken over from other source packages
1770 for b in self.pkg.changes["binary"].keys():
1771 for suite in self.pkg.changes["distribution"].keys():
1772 for s in get_source_by_package_and_suite(b, suite, session):
1773 if s.source != self.pkg.changes["source"]:
1774 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1778 def check_transition(self, session):
1781 sourcepkg = self.pkg.changes["source"]
1783 # No sourceful upload -> no need to do anything else, direct return
1784 # We also work with unstable uploads, not experimental or those going to some
1785 # proposed-updates queue
1786 if "source" not in self.pkg.changes["architecture"] or \
1787 "unstable" not in self.pkg.changes["distribution"]:
1790 # Also only check if there is a file defined (and existent) with
1792 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1793 if transpath == "" or not os.path.exists(transpath):
1796 # Parse the yaml file
1797 sourcefile = file(transpath, 'r')
1798 sourcecontent = sourcefile.read()
1800 transitions = yaml.load(sourcecontent)
1801 except yaml.YAMLError, msg:
1802 # This shouldn't happen, there is a wrapper to edit the file which
1803 # checks it, but we'd rather be safe than end up rejecting
1805 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1808 # Now look through all defined transitions
1809 for trans in transitions:
1810 t = transitions[trans]
1811 source = t["source"]
1814 # Will be None if nothing is in testing.
1815 current = get_source_in_suite(source, "testing", session)
1816 if current is not None:
1817 compare = apt_pkg.VersionCompare(current.version, expected)
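# apt_pkg.VersionCompare(a, b) uses Debian version ordering and returns a
# negative value if a < b, 0 if equal, positive if a > b;
# e.g. VersionCompare("1.0-1", "1.0-1.1") is negative.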
1819 if current is None or compare < 0:
1820 # This is still valid, the current version in testing is older than
1821 # the new version we wait for, or there is none in testing yet
1823 # Check if the source we look at is affected by this.
1824 if sourcepkg in t['packages']:
1825 # The source is affected, let's reject it.
1827 rejectmsg = "%s: part of the %s transition.\n\n" % (
1830 if current is not None:
1831 currentlymsg = "at version %s" % (current.version)
1833 currentlymsg = "not present in testing"
1835 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1837 rejectmsg += "\n".join(textwrap.wrap("""Your package
1838 is part of a testing transition designed to get %s migrated (it is
1839 currently %s, we need version %s). This transition is managed by the
1840 Release Team, and %s is the Release-Team member responsible for it.
1841 Please mail debian-release@lists.debian.org or contact %s directly if you
1842 need further assistance. You might want to upload to experimental until this
1843 transition is done."""
1844 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1846 self.rejects.append(rejectmsg)
1849 ###########################################################################
1850 # End check_signed_by_key checks
1851 ###########################################################################
1853 def build_summaries(self):
1854 """ Build a summary of changes the upload introduces. """
1856 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1858 short_summary = summary
1860 # This is for direport's benefit...
1861 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1864 summary += "Changes: " + f
1866 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1868 summary += self.announce(short_summary, 0)
1870 return (summary, short_summary)
1872 ###########################################################################
1874 def close_bugs(self, summary, action):
1876 Send mail to close bugs as instructed by the closes field in the changes file.
1877 Also add a line to summary if any work was done.
1879 @type summary: string
1880 @param summary: summary text, as given by L{build_summaries}
1883 @param action: if set to false, no real action will be done.
1886 @return: summary. If action was taken, extended by the list of closed bugs.
1890 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1892 bugs = self.pkg.changes["closes"].keys()
1898 summary += "Closing bugs: "
1900 summary += "%s " % (bug)
1903 self.Subst["__BUG_NUMBER__"] = bug
1904 if self.pkg.changes["distribution"].has_key("stable"):
1905 self.Subst["__STABLE_WARNING__"] = """
1906 Note that this package is not part of the released stable Debian
1907 distribution. It may have dependencies on other unreleased software,
1908 or other instabilities. Please take care if you wish to install it.
1909 The update will eventually make its way into the next released Debian
1912 self.Subst["__STABLE_WARNING__"] = ""
1913 mail_message = utils.TemplateSubst(self.Subst, template)
1914 utils.send_mail(mail_message)
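# utils.TemplateSubst is a simple search-and-replace: each __KEY__ token in
# the template file is replaced by self.Subst["__KEY__"], so (hypothetical
# template text) "closes #__BUG_NUMBER__" becomes "closes #123456".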
1916 # Clear up after ourselves
1917 del self.Subst["__BUG_NUMBER__"]
1918 del self.Subst["__STABLE_WARNING__"]
1920 if action and self.logger:
1921 self.logger.log(["closing bugs"] + bugs)
1927 ###########################################################################
1929 def announce(self, short_summary, action):
1931 Send an announce mail about a new upload.
1933 @type short_summary: string
1934 @param short_summary: Short summary text to include in the mail
1937 @param action: if set to false, no real action will be done.
1940 @return: text string about the action taken.
1945 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1947 # Only do announcements for source uploads with a recent dpkg-dev installed
1948 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1949 self.pkg.changes["architecture"].has_key("source"):
1955 self.Subst["__SHORT_SUMMARY__"] = short_summary
1957 for dist in self.pkg.changes["distribution"].keys():
1958 suite = get_suite(dist)
1959 if suite is None: continue
1960 announce_list = suite.announce
1961 if announce_list == "" or lists_done.has_key(announce_list):
1964 lists_done[announce_list] = 1
1965 summary += "Announcing to %s\n" % (announce_list)
1969 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1970 if cnf.get("Dinstall::TrackingServer") and \
1971 self.pkg.changes["architecture"].has_key("source"):
1972 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1973 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1975 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1976 utils.send_mail(mail_message)
1978 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1980 if cnf.FindB("Dinstall::CloseBugs"):
1981 summary = self.close_bugs(summary, action)
1983 del self.Subst["__SHORT_SUMMARY__"]
1987 ###########################################################################
1989 def accept (self, summary, short_summary, session=None):
1993 This moves all files referenced from the .changes into the pool,
1994 sends the accepted mail, announces to lists, closes bugs and
1995 also checks for override disparities. If enabled it will write out
1996 the version history for the BTS Version Tracking and will finally call
1999 @type summary: string
2000 @param summary: Summary text
2002 @type short_summary: string
2003 @param short_summary: Short summary
2007 stats = SummaryStats()
2010 self.logger.log(["installing changes", self.pkg.changes_file])
2014 # Add the .dsc file to the DB first
2015 for newfile, entry in self.pkg.files.items():
2016 if entry["type"] == "dsc":
2017 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2021 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2022 for newfile, entry in self.pkg.files.items():
2023 if entry["type"] == "deb":
2024 poolfiles.append(add_deb_to_db(self, newfile, session))
2026 # If this is a sourceful, diff-only upload that is moving
2027 # cross-component, we need to copy the .orig files into the new
2028 # component too for the same reasons as above.
2029 # XXX: mhy: I think this should be in add_dsc_to_db
2030 if self.pkg.changes["architecture"].has_key("source"):
2031 for orig_file in self.pkg.orig_files.keys():
2032 if not self.pkg.orig_files[orig_file].has_key("id"):
2033 continue # Skip if it's not in the pool
2034 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2035 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2036 continue # Skip if the location didn't change
2039 oldf = get_poolfile_by_id(orig_file_id, session)
2040 old_filename = os.path.join(oldf.location.path, oldf.filename)
2041 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2042 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2044 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2046 # TODO: Care about size/md5sum collisions etc
2047 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2049 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2051 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2052 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2056 # Don't reference the old file from this changes
2058 if p.file_id == oldf.file_id:
2061 poolfiles.append(newf)
2063 # Fix up the DSC references
2066 for df in source.srcfiles:
2067 if df.poolfile.file_id == oldf.file_id:
2068 # Add a new DSC entry and mark the old one for deletion
2069 # Don't do it in the loop so we don't change the thing we're iterating over
2071 newdscf.source_id = source.source_id
2072 newdscf.poolfile_id = newf.file_id
2073 session.add(newdscf)
2083 # Make sure that our source object is up-to-date
2084 session.expire(source)
2086 # Add changelog information to the database
2087 self.store_changelog()
2089 # Install the files into the pool
2090 for newfile, entry in self.pkg.files.items():
2091 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2092 utils.move(newfile, destination)
2093 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2094 stats.accept_bytes += float(entry["size"])
2096 # Copy the .changes file across for suites which need it.
2097 copy_changes = dict([(x.copychanges, '')
2098 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2099 if x.copychanges is not None])
2101 for dest in copy_changes.keys():
2102 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2104 # We're done - commit the database changes
2106 # Our SQL session will automatically start a new transaction after
2109 # Move the .changes into the 'done' directory
2110 utils.move(self.pkg.changes_file,
2111 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2113 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2114 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2117 self.Subst["__SUMMARY__"] = summary
2118 mail_message = utils.TemplateSubst(self.Subst,
2119 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2120 utils.send_mail(mail_message)
2121 self.announce(short_summary, 1)
2123 ## Helper stuff for DebBugs Version Tracking
2124 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2125 if self.pkg.changes["architecture"].has_key("source"):
2126 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2127 version_history = os.fdopen(fd, 'w')
2128 version_history.write(self.pkg.dsc["bts changelog"])
2129 version_history.close()
2130 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2131 self.pkg.changes_file[:-8]+".versions")
2132 os.rename(temp_filename, filename)
2133 os.chmod(filename, 0644)
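# The <changes>.versions file carries the changelog version history for
# debbugs, letting BTS version tracking work out which versions a bug
# closure applies to.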
2135 # Write out the binary -> source mapping.
2136 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2137 debinfo = os.fdopen(fd, 'w')
2138 for name, entry in sorted(self.pkg.files.items()):
2139 if entry["type"] == "deb":
2140 line = " ".join([entry["package"], entry["version"],
2141 entry["architecture"], entry["source package"],
2142 entry["source version"]])
2143 debinfo.write(line+"\n")
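# One line per binary, mapping it back to its source,
# e.g. (hypothetical values):
#   foo-utils 1.2-1 amd64 foo 1.2-1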
2145 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2146 self.pkg.changes_file[:-8]+".debinfo")
2147 os.rename(temp_filename, filename)
2148 os.chmod(filename, 0644)
2152 # Set up our copy queues (e.g. buildd queues)
2153 for suite_name in self.pkg.changes["distribution"].keys():
2154 suite = get_suite(suite_name, session)
2155 for q in suite.copy_queues:
2157 q.add_file_from_pool(f)
2162 stats.accept_count += 1
2164 def check_override(self):
2166 Checks override entries for validity. Mails "Override disparity" warnings
2167 if that feature is enabled.
2169 Abandons the check if
2170 - override disparity checks are disabled
2171 - mail sending is disabled
2176 # Abandon the check if override disparity checks have been disabled
2177 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2180 summary = self.pkg.check_override()
2185 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2188 self.Subst["__SUMMARY__"] = summary
2189 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2190 utils.send_mail(mail_message)
2191 del self.Subst["__SUMMARY__"]
2193 ###########################################################################
2195 def remove(self, from_dir=None):
2197 Used (for instance) in p-u to remove the package from unchecked
2199 Also removes the package from the holding area.
2201 if from_dir is None:
2202 from_dir = self.pkg.directory
2205 for f in self.pkg.files.keys():
2206 os.unlink(os.path.join(from_dir, f))
2207 if os.path.exists(os.path.join(h.holding_dir, f)):
2208 os.unlink(os.path.join(h.holding_dir, f))
2210 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2211 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2212 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2214 ###########################################################################
2216 def move_to_queue (self, queue):
2218 Move files to a destination queue using the permissions in the table
2221 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2222 queue.path, perms=int(queue.change_perms, 8))
2223 for f in self.pkg.files.keys():
2224 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2226 ###########################################################################
2228 def force_reject(self, reject_files):
2230 Forcefully move files from the current directory to the
2231 reject directory. If any file already exists in the reject
2232 directory it will be moved to the morgue to make way for
2235 @type reject_files: dict
2236 @param reject_files: file dictionary
2242 for file_entry in reject_files:
2243 # Skip any files which don't exist or which we don't have permission to copy.
2244 if not os.access(file_entry, os.R_OK):
2247 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2250 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2252 # File exists? Let's find a new name by adding a number
2253 if e.errno == errno.EEXIST:
2255 dest_file = utils.find_next_free(dest_file, 255)
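# find_next_free appends numeric suffixes (hypothetically
# foo.changes -> foo.changes.0, foo.changes.1, ...) and gives up with
# NoFreeFilenameError after too many attempts (here capped at 255).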
2256 except NoFreeFilenameError:
2257 # Something's either gone badly Pete Tong, or
2258 # someone is trying to exploit us.
2259 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2262 # Make sure we really got it
2264 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2267 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2271 # If we got here, we own the destination file, so we can
2272 # safely overwrite it.
2273 utils.move(file_entry, dest_file, 1, perms=0660)
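# The O_CREAT|O_EXCL open above is what makes this race-safe: only one
# process can win creation of dest_file, so whoever holds dest_fd owns
# the name and may overwrite it.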
2276 ###########################################################################
2277 def do_reject (self, manual=0, reject_message="", notes=""):
2279 Reject an upload. If called without a reject message or C{manual} is
2280 true, spawn an editor so the user can write one.
2283 @param manual: manual or automated rejection
2285 @type reject_message: string
2286 @param reject_message: A reject message
2291 # If we weren't given a manual rejection message, spawn an
2292 # editor so the user can add one in...
2293 if manual and not reject_message:
2294 (fd, temp_filename) = utils.temp_filename()
2295 temp_file = os.fdopen(fd, 'w')
2298 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2299 % (note.author, note.version, note.notedate, note.comment))
2301 editor = os.environ.get("EDITOR","vi")
2303 while answer == 'E':
2304 os.system("%s %s" % (editor, temp_filename))
2305 temp_fh = utils.open_file(temp_filename)
2306 reject_message = "".join(temp_fh.readlines())
2308 print "Reject message:"
2309 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2310 prompt = "[R]eject, Edit, Abandon, Quit ?"
2312 while prompt.find(answer) == -1:
2313 answer = utils.our_raw_input(prompt)
2314 m = re_default_answer.search(prompt)
2317 answer = answer[:1].upper()
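# re_default_answer presumably pulls the bracketed letter out of the
# prompt ("[R]eject, ..." -> "R"), so an empty reply selects the default.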
2318 os.unlink(temp_filename)
2324 print "Rejecting.\n"
2328 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2329 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2331 # Move all the files into the reject directory
2332 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2333 self.force_reject(reject_files)
2335 # If we fail here someone is probably trying to exploit the race
2336 # so let's just raise an exception ...
2337 if os.path.exists(reason_filename):
2338 os.unlink(reason_filename)
2339 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2341 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2345 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2346 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2347 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2348 os.write(reason_fd, reject_message)
2349 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2351 # Build up the rejection email
2352 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2353 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2354 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2355 self.Subst["__REJECT_MESSAGE__"] = ""
2356 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2357 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2358 # Write the rejection email out as the <foo>.reason file
2359 os.write(reason_fd, reject_mail_message)
2361 del self.Subst["__REJECTOR_ADDRESS__"]
2362 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2363 del self.Subst["__CC__"]
2367 # Send the rejection mail
2368 utils.send_mail(reject_mail_message)
2371 self.logger.log(["rejected", self.pkg.changes_file])
2375 ################################################################################
2376 def in_override_p(self, package, component, suite, binary_type, filename, session):
2378 Check if a package already has override entries in the DB
2380 @type package: string
2381 @param package: package name
2383 @type component: string
2384 @param component: name of the component
2387 @param suite: name of the suite
2389 @type binary_type: string
2390 @param binary_type: type of the package
2392 @type filename: string
2393 @param filename: filename we check
2395 @return: the database result. But no one cares anyway.
2401 if binary_type == "": # must be source
2404 file_type = binary_type
2406 # Override suite name; used for example with proposed-updates
2407 oldsuite = get_suite(suite, session)
2408 if oldsuite is not None and oldsuite.overridesuite:
2409 suite = oldsuite.overridesuite
2411 result = get_override(package, suite, component, file_type, session)
2413 # If checking for a source package fall back on the binary override type
2414 if file_type == "dsc" and len(result) < 1:
2415 result = get_override(package, suite, component, ['deb', 'udeb'], session)
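# Fallback example (hypothetical): a source package "foo" with no dsc
# override yet, but whose debs already carry an override entry, reuses
# that entry's section/priority in the checks below.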
2417 # Remember the section and priority so we can check them later if appropriate
2420 self.pkg.files[filename]["override section"] = result.section.section
2421 self.pkg.files[filename]["override priority"] = result.priority.priority
2426 ################################################################################
2427 def get_anyversion(self, sv_list, suite):
2430 @param sv_list: list of (suite, version) tuples to check
2433 @param suite: suite name
2439 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2440 for (s, v) in sv_list:
2441 if s in [ x.lower() for x in anysuite ]:
2442 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
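# i.e. keep the highest version seen in this suite or in any suite
# listed under Suite::<suite>::VersionChecks::Enhances.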
2447 ################################################################################
2449 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2452 @param sv_list: list of (suite, version) tuples to check
2454 @type filename: string
2455 @param filename: filename of the upload, used in rejection messages
2457 @type new_version: string
2458 @param new_version: version of the package being checked
2460 Ensure versions are newer than existing packages in target
2461 suites and that cross-suite version checking rules as
2462 set out in the conf file are satisfied.
2467 # Check versions for each target suite
2468 for target_suite in self.pkg.changes["distribution"].keys():
2469 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2470 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2472 # Enforce "must be newer than target suite" even if conffile omits it
2473 if target_suite not in must_be_newer_than:
2474 must_be_newer_than.append(target_suite)
2476 for (suite, existent_version) in sv_list:
2477 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
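# Sign convention: vercmp < 0 means the new version is older, 0 equal,
# > 0 newer; so "vercmp < 1" below rejects new_version <= existent_version.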
2479 if suite in must_be_newer_than and sourceful and vercmp < 1:
2480 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2482 if suite in must_be_older_than and vercmp > -1:
2485 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2486 # we really use the other suite, ignoring the conflicting one ...
2487 addsuite = self.pkg.changes["distribution-version"][suite]
2489 add_version = self.get_anyversion(sv_list, addsuite)
2490 target_version = self.get_anyversion(sv_list, target_suite)
2493 # not add_version can only happen if we map to a suite
2494 # that doesn't enhance the suite we're propup'ing from.
2495 # so "propup-ver x a b c; map a d" is a problem only if
2496 # d doesn't enhance a.
2498 # i think we could always propagate in this case, rather
2499 # than complaining. either way, this isn't a REJECT issue
2501 # And - we really should complain to the dorks who configured dak
2502 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2503 self.pkg.changes.setdefault("propdistribution", {})
2504 self.pkg.changes["propdistribution"][addsuite] = 1
2506 elif not target_version:
2507 # not target_version is true when the package is NEW
2508 # we could just stick with the "...old version..." REJECT
2509 # for this, I think.
2510 self.rejects.append("Won't propagate NEW packages.")
2511 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2512 # propagation would be redundant. no need to reject though.
2513 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2515 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2516 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2518 self.warnings.append("Propagating upload to %s" % (addsuite))
2519 self.pkg.changes.setdefault("propdistribution", {})
2520 self.pkg.changes["propdistribution"][addsuite] = 1
2524 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
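# Hedged config sketch of what feeds these checks (hypothetical names,
# flattened apt.conf-style keys as read by cnf.ValueList):
#   Suite::stable::VersionChecks::MustBeOlderThan { "testing"; "unstable"; };
# together with a propup-version suite mapping (the "propup-ver" case
# discussed above), which fills changes["distribution-version"].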
2526 ################################################################################
2527 def check_binary_against_db(self, filename, session):
2528 # Ensure version is sane
2529 self.cross_suite_version_check( \
2530 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2531 self.pkg.files[filename]["architecture"], session),
2532 filename, self.pkg.files[filename]["version"], sourceful=False)
2534 # Check for any existing copies of the file
2535 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2536 q = q.filter_by(version=self.pkg.files[filename]["version"])
2537 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
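# Any row surviving these filters is a deb with the identical
# (package, version, architecture) tuple already in the archive.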
2540 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2542 ################################################################################
2544 def check_source_against_db(self, filename, session):
2545 source = self.pkg.dsc.get("source")
2546 version = self.pkg.dsc.get("version")
2548 # Ensure version is sane
2549 self.cross_suite_version_check( \
2550 get_suite_version_by_source(source, session), filename, version,
2553 ################################################################################
2554 def check_dsc_against_db(self, filename, session):
2557 @warning: NB: this function can remove entries from the 'files' index [if
2558 the orig tarball is a duplicate of the one in the archive]; if
2559 you're iterating over 'files' and call this function as part of
2560 the loop, be sure to add a check to the top of the loop to
2561 ensure you haven't just tried to dereference the deleted entry.
2566 self.pkg.orig_files = {} # XXX: do we need to clear it?
2567 orig_files = self.pkg.orig_files
2569 # Try and find all files mentioned in the .dsc. This has
2570 # to work harder to cope with the multiple possible
2571 # locations of an .orig.tar.gz.
2572 # The ordering on the select is needed to pick the newest orig
2573 # when it exists in multiple places.
2574 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2576 if self.pkg.files.has_key(dsc_name):
2577 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2578 actual_size = int(self.pkg.files[dsc_name]["size"])
2579 found = "%s in incoming" % (dsc_name)
2581 # Check the file does not already exist in the archive
2582 ql = get_poolfile_like_name(dsc_name, session)
2584 # Strip out anything that isn't '%s' or '/%s$'
2586 if not i.filename.endswith(dsc_name):
2589 # "[dak] has not broken them. [dak] has fixed a
2590 # brokenness. Your crappy hack exploited a bug in
2593 # "(Come on! I thought it was always obvious that
2594 # one just doesn't release different files with
2595 # the same name and version.)"
2596 # -- ajk@ on d-devel@l.d.o
2599 # Ignore exact matches for .orig.tar.gz
2601 if re_is_orig_source.match(dsc_name):
2603 if self.pkg.files.has_key(dsc_name) and \
2604 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2605 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2606 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2607 # TODO: Don't delete the entry, just mark it as not needed
2608 # This would fix the stupidity of changing something we often iterate over
2609 # whilst we're doing it
2610 del self.pkg.files[dsc_name]
2611 dsc_entry["files id"] = i.file_id
2612 if not orig_files.has_key(dsc_name):
2613 orig_files[dsc_name] = {}
2614 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2617 # Don't bitch that we couldn't find this file later
2619 self.later_check_files.remove(dsc_name)
2625 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2627 elif re_is_orig_source.match(dsc_name):
2629 ql = get_poolfile_like_name(dsc_name, session)
2631 # Strip out anything that isn't '%s' or '/%s$'
2632 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2634 if not i.filename.endswith(dsc_name):
2638 # Unfortunately, we may get more than one match here if,
2639 # for example, the package was in potato but had an -sa
2640 # upload in woody. So we need to choose the right one.
2642 # default to something sane in case we don't match any or have only one
2647 old_file = os.path.join(i.location.path, i.filename)
2648 old_file_fh = utils.open_file(old_file)
2649 actual_md5 = apt_pkg.md5sum(old_file_fh)
2651 actual_size = os.stat(old_file)[stat.ST_SIZE]
2652 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2655 old_file = os.path.join(i.location.path, i.filename)
2656 old_file_fh = utils.open_file(old_file)
2657 actual_md5 = apt_pkg.md5sum(old_file_fh)
2659 actual_size = os.stat(old_file)[stat.ST_SIZE]
2661 suite_type = x.location.archive_type
2662 # need this for updating dsc_files in install()
2663 dsc_entry["files id"] = x.file_id
2664 # See install() in process-accepted...
2665 if not orig_files.has_key(dsc_name):
2666 orig_files[dsc_name] = {}
2667 orig_files[dsc_name]["id"] = x.file_id
2668 orig_files[dsc_name]["path"] = old_file
2669 orig_files[dsc_name]["location"] = x.location.location_id
2671 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2672 # Not there? Check the queue directories...
2673 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2674 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2676 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2677 if os.path.exists(in_otherdir):
2678 in_otherdir_fh = utils.open_file(in_otherdir)
2679 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2680 in_otherdir_fh.close()
2681 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2683 if not orig_files.has_key(dsc_name):
2684 orig_files[dsc_name] = {}
2685 orig_files[dsc_name]["path"] = in_otherdir
2688 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2691 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2693 if actual_md5 != dsc_entry["md5sum"]:
2694 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2695 if actual_size != int(dsc_entry["size"]):
2696 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2698 ################################################################################
2699 # This is used by process-new and process-holding to recheck a changes file
2700 # at the time we're running. It mainly wraps various other internal functions
2701 # and is similar to accepted_checks - these should probably be tidied up
2703 def recheck(self, session):
2705 for f in self.pkg.files.keys():
2706 # The .orig.tar.gz can disappear out from under us if it's a
2707 # duplicate of one in the archive.
2708 if not self.pkg.files.has_key(f):
2711 entry = self.pkg.files[f]
2713 # Check that the source still exists
2714 if entry["type"] == "deb":
2715 source_version = entry["source version"]
2716 source_package = entry["source package"]
2717 if not self.pkg.changes["architecture"].has_key("source") \
2718 and not source_exists(source_package, source_version, \
2719 suites = self.pkg.changes["distribution"].keys(), session = session):
2720 source_epochless_version = re_no_epoch.sub('', source_version)
2721 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2723 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2724 if cnf.has_key("Dir::Queue::%s" % (q)):
2725 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2728 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2730 # Version and file overwrite checks
2731 if entry["type"] == "deb":
2732 self.check_binary_against_db(f, session)
2733 elif entry["type"] == "dsc":
2734 self.check_source_against_db(f, session)
2735 self.check_dsc_against_db(f, session)
2737 ################################################################################
2738 def accepted_checks(self, overwrite_checks, session):
2739 # Recheck anything that relies on the database; since that's not
2740 # frozen between accept and our run time when called from p-a.
2742 # overwrite_checks is set to False when installing to stable/oldstable
2747 # Find the .dsc (again)
2749 for f in self.pkg.files.keys():
2750 if self.pkg.files[f]["type"] == "dsc":
2753 for checkfile in self.pkg.files.keys():
2754 # The .orig.tar.gz can disappear out from under us if it's a
2755 # duplicate of one in the archive.
2756 if not self.pkg.files.has_key(checkfile):
2759 entry = self.pkg.files[checkfile]
2761 # Check that the source still exists
2762 if entry["type"] == "deb":
2763 source_version = entry["source version"]
2764 source_package = entry["source package"]
2765 if not self.pkg.changes["architecture"].has_key("source") \
2766 and not source_exists(source_package, source_version, \
2767 suites = self.pkg.changes["distribution"].keys(), \
2769 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2771 # Version and file overwrite checks
2772 if overwrite_checks:
2773 if entry["type"] == "deb":
2774 self.check_binary_against_db(checkfile, session)
2775 elif entry["type"] == "dsc":
2776 self.check_source_against_db(checkfile, session)
2777 self.check_dsc_against_db(dsc_filename, session)
2779 # propagate in the case it is in the override tables:
2780 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2781 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2782 propogate[suite] = 1
2784 nopropogate[suite] = 1
2786 for suite in propogate.keys():
2787 if suite in nopropogate:
2789 self.pkg.changes["distribution"][suite] = 1
2791 for checkfile in self.pkg.files.keys():
2792 # Check the package is still in the override tables
2793 for suite in self.pkg.changes["distribution"].keys():
2794 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2795 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2797 ################################################################################
2798 # If any file of an upload has a recent mtime then chances are good
2799 # the file is still being uploaded.
2801 def upload_too_new(self):
2804 # Move back to the original directory to get accurate time stamps
2806 os.chdir(self.pkg.directory)
2807 file_list = self.pkg.files.keys()
2808 file_list.extend(self.pkg.dsc_files.keys())
2809 file_list.append(self.pkg.changes_file)
2812 last_modified = time.time() - os.path.getmtime(f)
2813 if last_modified < int(cnf["Dinstall::SkipTime"]):
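# e.g. (hypothetical value) with Dinstall::SkipTime 300, a file touched
# within the last five minutes counts as too new and the upload is left
# alone for this run.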
2822 def store_changelog(self):
2824 # Skip binary-only upload if it is not a bin-NMU
2825 if not self.pkg.changes['architecture'].has_key('source'):
2826 from daklib.regexes import re_bin_only_nmu
2827 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2830 session = DBConn().session()
2832 # Check if upload already has a changelog entry
2833 query = """SELECT changelog_id FROM changes WHERE source = :source
2834 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2835 if session.execute(query, {'source': self.pkg.changes['source'], \
2836 'version': self.pkg.changes['version'], \
2837 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2841 # Add current changelog text into changelogs_text table, return created ID
2842 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2843 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
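# PostgreSQL's RETURNING clause hands back the new row's id in the same
# round trip, avoiding a separate SELECT for the fresh primary key.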
2845 # Link ID to the upload available in changes table
2846 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2847 AND version = :version AND architecture = :architecture"""
2848 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2849 'version': self.pkg.changes['version'], \
2850 'architecture': " ".join(self.pkg.changes['architecture'].keys())})