"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    # Get the file type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
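# Example (illustrative): a .changes file entry without an explicit "dbtype"
# gets its type derived from the filename extension:
#
#   f = {"type": "dsc"}            # hypothetical file entry
#   get_type(f, session)           # -> "dsc", validated against override types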
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.
    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
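# Example (illustrative; assumes an Upload object whose changes/files dicts
# are already loaded):
#
#   new = determine_new(u.pkg.changes_file, u.pkg.changes, u.pkg.files,
#                       session=session)
#   for pkg in new.keys():
#       print "%s is NEW (%s/%s)" % (pkg, new[pkg]["component"], new[pkg]["section"])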
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
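# Example (illustrative): a plain .deb in a debian-installer section violates
# the d-i/udeb rule above, so its section id is invalidated:
#
#   new = {"foo": {"section": "debian-installer",
#                  "priority": "optional", "type": "deb"}}   # hypothetical
#   check_valid(new, session)
#   # new["foo"]["section id"] == -1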
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
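# Example (illustrative; mirrors Upload.check_timestamps below):
#
#   tar = TarTime(time.time() + 86400,                        # hypothetical cutoffs
#                 time.mktime(time.strptime("1975", "%Y")))
#   apt_inst.debExtract(utils.open_file("foo.deb"), tar.callback, "control.tar.gz")
#   # tar.future_files / tar.ancient_files now map member names to bad mtimes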
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, "  ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote, "  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source=source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source=source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package=package). \
        join(DBBinary.suites).filter_by(suite_name=suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package=package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
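# Example (illustrative; return shapes follow the docstrings, values made up):
#
#   get_suite_version_by_source("hello", session)
#   # -> [(u'unstable', '2.6-1'), (u'experimental', '2.7-1')]
#   get_suite_version_by_package("hello", "amd64", session)
#   # -> [(u'unstable', '2.6-1')]          # matches arch 'amd64' or 'all'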
class Upload(object):
    """
    Everything that has to do with an upload processed.
    """

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """
        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """
        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it.  Also checks for
        mandatory fields.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                % (filename, self.pkg.changes["changed-by"], msg))
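        # The four values produced by fix_maintainer (illustrative):
        #
        #   fix_maintainer("Jane Doe <jane@example.org>")
        #   # -> ("Jane Doe <jane@example.org>",   # RFC822 form
        #   #     "Jane Doe <jane@example.org>",   # RFC2047 form (encoded if non-ASCII)
        #   #     "Jane Doe",                      # name
        #   #     "jane@example.org")              # email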
        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
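    # Example (illustrative):
    #
    #   u = Upload()
    #   if u.load_changes("/path/to/hello_2.6-1_amd64.changes"):   # hypothetical path
    #       pass    # parsed OK; u.rejects may still list reasons to reject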
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
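    # Illustrative SuiteMappings entries, following the formats parsed above
    # (the suite names here are hypothetical):
    #
    #   "map stable proposed-updates"              -> redirect, with a note
    #   "silent-map stable-security proposed-updates"  -> redirect, silently
    #   "ignore oldsuite"                          -> drop from the target list
    #   "reject frozen"                            -> refuse uploads outright
    #   "propup-version testing-security testing"  -> record propagation targets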
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session=session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session=session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)
        if m:
            # package name
            file_package = m.group(1)
            if entry["package"] != file_package:
                self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                    (f, file_package, entry["dbtype"], entry["package"]))
            epochless_version = re_no_epoch.sub('', control.Find("Version"))

            # version
            file_version = m.group(2)
            if epochless_version != file_version:
                self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                    (f, file_version, entry["dbtype"], epochless_version))

            # architecture
            file_architecture = m.group(3)
            if entry["architecture"] != file_architecture:
                self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                    (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version,
                                 suites=self.pkg.changes["distribution"].keys(),
                                 session=session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
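    # utils.poolify (illustrative): poolify("hello", "main") returns
    # "pool/main/h/hello/", so the pool check above compares against paths
    # like "pool/main/h/hello/hello_2.6-1_amd64.deb".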
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
               or (dbc.in_queue is not None
                   and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d)], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
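        # Illustrative: "Format: 1.8" yields (1, 8); a bare "1" yields (1, 0).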
        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """
        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)
                os.symlink(path, dest)
                symlinked.append(dest)
                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
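    # Example (illustrative; this is how check_lintian uses it below):
    #
    #   symlinked = upload.ensure_orig()    # complete the file set in cwd
    #   ...                                 # run lintian / dpkg-source
    #   for path in symlinked:
    #       os.unlink(path)                 # caller removes the links again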
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
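    # An illustrative Dinstall::LintianTags file (shape inferred from the
    # parsing above: a top-level 'lintian' key mapping groups to tag lists;
    # the tag names here are hypothetical):
    #
    #   lintian:
    #     nonfatal:
    #       - some-tag-we-reject-on
    #     fatal:
    #       - another-tag-we-always-reject-on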
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
        version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source=self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
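    # Illustrative: an UploadBlock row with source='foo' and version=None
    # blocks every version of 'foo'; a row naming a version blocks only that
    # version.  Either a matching fingerprint or a matching uid triggers the
    # reject, carrying the block's stored reason.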
1707 def check_dm_upload(self, fpr, session):
1708 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1709 ## none of the uploaded packages are NEW
1711 for f in self.pkg.files.keys():
1712 if self.pkg.files[f].has_key("byhand"):
1713 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1715 if self.pkg.files[f].has_key("new"):
1716 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1722 r = get_newest_source(self.pkg.changes["source"], session)
1725 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1726 self.rejects.append(rej)
1729 if not r.dm_upload_allowed:
1730 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1731 self.rejects.append(rej)
1734 ## the Maintainer: field of the uploaded .changes file corresponds with
1735 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1737 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1738 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1740 ## the most recent version of the package uploaded to unstable or
1741 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1742 ## non-developer maintainers cannot NMU or hijack packages)
1744 # srcuploaders includes the maintainer
1745 accept = False
1746 for sup in r.srcuploaders:
1747 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1748 # Eww - I hope we never have two people with the same name in Debian
1749 if email == fpr.uid.uid or name == fpr.uid.name:
1750 accept = True
1751 break
1753 if not accept:
1754 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1757 ## none of the packages are being taken over from other source packages
1758 for b in self.pkg.changes["binary"].keys():
1759 for suite in self.pkg.changes["distribution"].keys():
1760 for s in get_source_by_package_and_suite(b, suite, session):
1761 if s.source != self.pkg.changes["source"]:
1762 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
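# --- Illustrative sketch (editor's addition, not dak code): the DM
# Maintainer/Uploaders test above accepts a key if either its uid (an
# email address) or its registered name matches one of the recorded
# uploaders.  Hypothetical inputs; 'uploaders' would come from
# get_split_maintainer() on each srcuploader row.
def _dm_is_listed_uploader(uid_email, uid_name, uploaders):
    for (name, email) in uploaders:
        # Matching on the bare name is fragile, as the comment above admits.
        if email == uid_email or name == uid_name:
            return True
    return False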
1766 def check_transition(self, session):
1767 cnf = Config()
1769 sourcepkg = self.pkg.changes["source"]
1771 # No sourceful upload -> no need to do anything else, direct return
1772 # We also work with unstable uploads, not experimental or those going to some
1773 # proposed-updates queue
1774 if "source" not in self.pkg.changes["architecture"] or \
1775 "unstable" not in self.pkg.changes["distribution"]:
1778 # Also only check if there is a transitions file defined (and existent)
1779 # to check against.
1780 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1781 if transpath == "" or not os.path.exists(transpath):
1782 return
1784 # Parse the yaml file
1785 sourcefile = file(transpath, 'r')
1786 sourcecontent = sourcefile.read()
1787 try:
1788 transitions = yaml.load(sourcecontent)
1789 except yaml.YAMLError, msg:
1790 # This shouldn't happen, there is a wrapper to edit the file which
1791 # checks it, but we prefer to be safe rather than end up rejecting
1792 # everything.
1793 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1794 return
1796 # Now look through all defined transitions
1797 for trans in transitions:
1798 t = transitions[trans]
1799 source = t["source"]
1800 expected = t["new"]
1802 # Will be None if nothing is in testing.
1803 current = get_source_in_suite(source, "testing", session)
1804 if current is not None:
1805 compare = apt_pkg.VersionCompare(current.version, expected)
1807 if current is None or compare < 0:
1808 # This is still valid, the current version in testing is older than
1809 # the new version we wait for, or there is none in testing yet
1811 # Check if the source we look at is affected by this.
1812 if sourcepkg in t['packages']:
1813 # The source is affected, lets reject it.
1815 rejectmsg = "%s: part of the %s transition.\n\n" % (
1816 sourcepkg, source)
1818 if current is not None:
1819 currentlymsg = "at version %s" % (current.version)
1820 else:
1821 currentlymsg = "not present in testing"
1823 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1825 rejectmsg += "\n".join(textwrap.wrap("""Your package
1826 is part of a testing transition designed to get %s migrated (it is
1827 currently %s, we need version %s). This transition is managed by the
1828 Release Team, and %s is the Release-Team member responsible for it.
1829 Please mail debian-release@lists.debian.org or contact %s directly if you
1830 need further assistance. You might want to upload to experimental until this
1831 transition is done."""
1832 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1834 self.rejects.append(rejectmsg)
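# --- Illustrative sketch (editor's addition, not dak code): a transitions
# file with the shape the loop above expects, parsed with yaml.load().
# Only the keys actually read above (source, new, rm, reason, packages)
# are shown; all values are invented for illustration.
_example_transitions = yaml.load("""
libexample_transition:
    source: libexample
    new: 1.2-1
    rm: Some Release-Team Member
    reason: rebuild everything against libexample2
    packages:
        - libexample-dev
        - example-frontend
""")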
1837 ###########################################################################
1838 # End check_signed_by_key checks
1839 ###########################################################################
1841 def build_summaries(self):
1842 """ Build a summary of changes the upload introduces. """
1844 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1846 short_summary = summary
1848 # This is for direport's benefit...
1849 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1851 if byhand or new:
1852 summary += "Changes: " + f
1854 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1856 summary += self.announce(short_summary, 0)
1858 return (summary, short_summary)
1860 ###########################################################################
1862 def close_bugs(self, summary, action):
1863 """
1864 Send mail to close bugs as instructed by the closes field in the changes file.
1865 Also add a line to summary if any work was done.
1867 @type summary: string
1868 @param summary: summary text, as given by L{build_summaries}
1871 @param action: If set to false, no real action will be done.
1874 @return: summary. If action was taken, extended by the list of closed bugs.
1875 """
1878 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1880 bugs = self.pkg.changes["closes"].keys()
1882 if not bugs:
1883 return summary
1885 bugs.sort()
1886 summary += "Closing bugs: "
1887 for bug in bugs:
1888 summary += "%s " % (bug)
1889 if action:
1891 self.Subst["__BUG_NUMBER__"] = bug
1892 if self.pkg.changes["distribution"].has_key("stable"):
1893 self.Subst["__STABLE_WARNING__"] = """
1894 Note that this package is not part of the released stable Debian
1895 distribution. It may have dependencies on other unreleased software,
1896 or other instabilities. Please take care if you wish to install it.
1897 The update will eventually make its way into the next released Debian
1898 distribution."""
1899 else:
1900 self.Subst["__STABLE_WARNING__"] = ""
1901 mail_message = utils.TemplateSubst(self.Subst, template)
1902 utils.send_mail(mail_message)
1904 # Clear up after ourselves
1905 del self.Subst["__BUG_NUMBER__"]
1906 del self.Subst["__STABLE_WARNING__"]
1908 if action and self.logger:
1909 self.logger.log(["closing bugs"] + bugs)
1911 summary += "\n"
1913 return summary
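# --- Illustrative sketch (editor's addition, not dak code):
# utils.TemplateSubst pairs each __KEY__ placeholder in a template with
# the corresponding entry of self.Subst.  In spirit it behaves like this
# hypothetical reduction:
def _template_subst_sketch(subst_map, template_text):
    # e.g. subst_map = {"__BUG_NUMBER__": "123456", "__STABLE_WARNING__": ""}
    for key, value in subst_map.items():
        template_text = template_text.replace(key, str(value))
    return template_text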
1915 ###########################################################################
1917 def announce(self, short_summary, action):
1919 Send an announce mail about a new upload.
1921 @type short_summary: string
1922 @param short_summary: Short summary text to include in the mail
1925 @param action: If set to false, no real action will be done.
1928 @return: Textstring about action taken.
1929 """
1931 cnf = Config()
1933 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1935 # Only do announcements for source uploads with a recent dpkg-dev installed
1936 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1937 self.pkg.changes["architecture"].has_key("source"):
1938 return ""
1940 lists_done = {}
1941 summary = ""
1943 self.Subst["__SHORT_SUMMARY__"] = short_summary
1945 for dist in self.pkg.changes["distribution"].keys():
1946 suite = get_suite(dist)
1947 if suite is None: continue
1948 announce_list = suite.announce
1949 if announce_list == "" or lists_done.has_key(announce_list):
1950 continue
1952 lists_done[announce_list] = 1
1953 summary += "Announcing to %s\n" % (announce_list)
1955 if action:
1957 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1958 if cnf.get("Dinstall::TrackingServer") and \
1959 self.pkg.changes["architecture"].has_key("source"):
1960 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1961 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1963 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1964 utils.send_mail(mail_message)
1966 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1968 if cnf.FindB("Dinstall::CloseBugs"):
1969 summary = self.close_bugs(summary, action)
1971 del self.Subst["__SHORT_SUMMARY__"]
1973 return summary
1975 ###########################################################################
1977 def accept(self, summary, short_summary, session=None):
1978 """
1979 Accept an upload.
1981 This moves all files referenced from the .changes into the pool,
1982 sends the accepted mail, announces to lists, closes bugs and
1983 also checks for override disparities. If enabled it will write out
1984 the version history for the BTS Version Tracking and will finally
1985 set up the copy queues for the target suites (e.g. buildd queues).
1987 @type summary: string
1988 @param summary: Summary text
1990 @type short_summary: string
1991 @param short_summary: Short summary
1992 """
1994 cnf = Config()
1995 stats = SummaryStats()
1998 self.logger.log(["installing changes", self.pkg.changes_file])
2000 poolfiles = []
2002 # Add the .dsc file to the DB first
2003 for newfile, entry in self.pkg.files.items():
2004 if entry["type"] == "dsc":
2005 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2006 for j in pfs:
2007 poolfiles.append(j)
2009 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2010 for newfile, entry in self.pkg.files.items():
2011 if entry["type"] == "deb":
2012 poolfiles.append(add_deb_to_db(self, newfile, session))
2014 # If this is a sourceful diff only upload that is moving
2015 # cross-component we need to copy the .orig files into the new
2016 # component too for the same reasons as above.
2017 # XXX: mhy: I think this should be in add_dsc_to_db
2018 if self.pkg.changes["architecture"].has_key("source"):
2019 for orig_file in self.pkg.orig_files.keys():
2020 if not self.pkg.orig_files[orig_file].has_key("id"):
2021 continue # Skip if it's not in the pool
2022 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2023 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2024 continue # Skip if the location didn't change
2027 oldf = get_poolfile_by_id(orig_file_id, session)
2028 old_filename = os.path.join(oldf.location.path, oldf.filename)
2029 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2030 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2032 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2034 # TODO: Care about size/md5sum collisions etc
2035 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2037 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2038 if newf is None:
2039 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2040 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2044 # Don't reference the old file from this changes
2045 for p in poolfiles:
2046 if p.file_id == oldf.file_id:
2047 poolfiles.remove(p)
2049 poolfiles.append(newf)
2051 # Fix up the DSC references
2052 toremove = []
2054 for df in source.srcfiles:
2055 if df.poolfile.file_id == oldf.file_id:
2056 # Add a new DSC entry and mark the old one for deletion
2057 # Don't do it in the loop so we don't change the thing we're iterating over
2058 newdscf = DSCFile()
2059 newdscf.source_id = source.source_id
2060 newdscf.poolfile_id = newf.file_id
2061 session.add(newdscf)
2063 toremove.append(df)
2065 for df in toremove:
2066 session.delete(df)
2068 # Flush our changes
2069 session.flush()
2071 # Make sure that our source object is up-to-date
2072 session.expire(source)
2074 # Add changelog information to the database
2075 self.store_changelog()
2077 # Install the files into the pool
2078 for newfile, entry in self.pkg.files.items():
2079 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2080 utils.move(newfile, destination)
2081 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2082 stats.accept_bytes += float(entry["size"])
2084 # Copy the .changes file across for suites which need it.
2085 copy_changes = dict([(x.copychanges, '')
2086 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2087 if x.copychanges is not None])
2089 for dest in copy_changes.keys():
2090 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2092 # We're done - commit the database changes
2093 session.commit()
2094 # Our SQL session will automatically start a new transaction after
2095 # the next statement is issued
2097 # Move the .changes into the 'done' directory
2098 utils.move(self.pkg.changes_file,
2099 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2101 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2102 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2105 self.Subst["__SUMMARY__"] = summary
2106 mail_message = utils.TemplateSubst(self.Subst,
2107 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2108 utils.send_mail(mail_message)
2109 self.announce(short_summary, 1)
2111 ## Helper stuff for DebBugs Version Tracking
2112 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2113 if self.pkg.changes["architecture"].has_key("source"):
2114 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2115 version_history = os.fdopen(fd, 'w')
2116 version_history.write(self.pkg.dsc["bts changelog"])
2117 version_history.close()
2118 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2119 self.pkg.changes_file[:-8]+".versions")
2120 os.rename(temp_filename, filename)
2121 os.chmod(filename, 0644)
2123 # Write out the binary -> source mapping.
2124 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2125 debinfo = os.fdopen(fd, 'w')
2126 for name, entry in sorted(self.pkg.files.items()):
2127 if entry["type"] == "deb":
2128 line = " ".join([entry["package"], entry["version"],
2129 entry["architecture"], entry["source package"],
2130 entry["source version"]])
2131 debinfo.write(line+"\n")
2133 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2134 self.pkg.changes_file[:-8]+".debinfo")
2135 os.rename(temp_filename, filename)
2136 os.chmod(filename, 0644)
2140 # Set up our copy queues (e.g. buildd queues)
2141 for suite_name in self.pkg.changes["distribution"].keys():
2142 suite = get_suite(suite_name, session)
2143 for q in suite.copy_queues:
2144 for f in poolfiles:
2145 q.add_file_from_pool(f)
2147 session.commit()
2149 # Finally...
2150 stats.accept_count += 1
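# --- Illustrative sketch (editor's addition, not dak code): the BTS
# version-tracking filenames above are derived by stripping the trailing
# ".changes" (8 characters) from the changes filename.
def _bts_filenames(changes_file, trackdir):
    base = os.path.basename(changes_file)[:-8]  # "foo_1.0_amd64.changes" -> "foo_1.0_amd64"
    return (os.path.join(trackdir, base + ".versions"),
            os.path.join(trackdir, base + ".debinfo"))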
2152 def check_override(self):
2153 """
2154 Checks override entries for validity. Mails "Override disparity" warnings,
2155 if that feature is enabled.
2157 Abandons the check if
2158 - override disparity checks are disabled
2159 - mail sending is disabled
2161 """
2163 cnf = Config()
2164 # Abandon the check if override disparity checks have been disabled
2165 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2166 return
2168 summary = self.pkg.check_override()
2169 if summary == "":
2170 return
2173 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2176 self.Subst["__SUMMARY__"] = summary
2177 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2178 utils.send_mail(mail_message)
2179 del self.Subst["__SUMMARY__"]
2181 ###########################################################################
2183 def remove(self, from_dir=None):
2184 """
2185 Used (for instance) in p-u to remove the package from unchecked
2187 Also removes the package from holding area.
2188 """
2189 if from_dir is None:
2190 from_dir = self.pkg.directory
2192 h = Holding()
2193 for f in self.pkg.files.keys():
2194 os.unlink(os.path.join(from_dir, f))
2195 if os.path.exists(os.path.join(h.holding_dir, f)):
2196 os.unlink(os.path.join(h.holding_dir, f))
2198 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2199 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2200 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2202 ###########################################################################
2204 def move_to_queue(self, queue):
2205 """
2206 Move files to a destination queue using the permissions in the table
2207 """
2208 h = Holding()
2209 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2210 queue.path, perms=int(queue.change_perms, 8))
2211 for f in self.pkg.files.keys():
2212 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
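# --- Illustrative note (editor's addition, not dak code): the queue
# table stores permissions as octal strings, hence the int(..., 8) above.
def _octal_perms(perm_string):
    # "0664" -> 0664 (decimal 436), suitable for utils.move's perms argument.
    return int(perm_string, 8)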
2214 ###########################################################################
2216 def force_reject(self, reject_files):
2218 Forcefully move files from the current directory to the
2219 reject directory. If any file already exists in the reject
2220 directory it will be moved to the morgue to make way for
2221 the new file.
2223 @type reject_files: list
2224 @param reject_files: names of the files to move to the reject directory
2226 """
2228 cnf = Config()
2230 for file_entry in reject_files:
2231 # Skip any files which don't exist or which we don't have permission to copy.
2232 if os.access(file_entry, os.R_OK) == 0:
2233 continue
2235 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2237 try:
2238 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2239 except OSError, e:
2240 # File exists? Let's find a new name by adding a number
2241 if e.errno == errno.EEXIST:
2242 try:
2243 dest_file = utils.find_next_free(dest_file, 255)
2244 except NoFreeFilenameError:
2245 # Something's either gone badly Pete Tong, or
2246 # someone is trying to exploit us.
2247 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2248 return
2250 # Make sure we really got it
2251 try:
2252 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2253 except OSError:
2255 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2256 return
2257 else:
2258 raise
2259 # If we got here, we own the destination file, so we can
2260 # safely overwrite it.
2261 utils.move(file_entry, dest_file, 1, perms=0660)
2262 os.close(dest_fd)
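# --- Illustrative sketch (editor's addition, not dak code): the claim
# pattern used above.  O_CREAT|O_EXCL makes open() fail with EEXIST if
# the name is already taken, so whoever creates the file owns it and may
# overwrite it safely afterwards.
def _claim_reject_name(path):
    try:
        return os.open(path, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
    except OSError, e:
        if e.errno == errno.EEXIST:
            return None  # someone else owns this name; try another
        raise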
2264 ###########################################################################
2265 def do_reject(self, manual=0, reject_message="", notes=""):
2266 """
2267 Reject an upload. If called without a reject message or C{manual} is
2268 true, spawn an editor so the user can write one.
2271 @param manual: manual or automated rejection
2273 @type reject_message: string
2274 @param reject_message: A reject message
2276 """
2279 # If we weren't given a manual rejection message, spawn an
2280 # editor so the user can add one in...
2281 if manual and not reject_message:
2282 (fd, temp_filename) = utils.temp_filename()
2283 temp_file = os.fdopen(fd, 'w')
2284 if len(notes) > 0:
2285 for note in notes:
2286 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2287 % (note.author, note.version, note.notedate, note.comment))
2288 temp_file.close()
2289 editor = os.environ.get("EDITOR","vi")
2290 answer = 'E'
2291 while answer == 'E':
2292 os.system("%s %s" % (editor, temp_filename))
2293 temp_fh = utils.open_file(temp_filename)
2294 reject_message = "".join(temp_fh.readlines())
2295 temp_fh.close()
2296 print "Reject message:"
2297 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2298 prompt = "[R]eject, Edit, Abandon, Quit ?"
2299 answer = "XXX"
2300 while prompt.find(answer) == -1:
2301 answer = utils.our_raw_input(prompt)
2302 m = re_default_answer.search(prompt)
2303 if answer == "":
2304 answer = m.group(1)
2305 answer = answer[:1].upper()
2306 os.unlink(temp_filename)
2308 if answer == 'A':
2309 return 1
2310 elif answer == 'Q':
2311 sys.exit(0)
2312 print "Rejecting.\n"
2314 cnf = Config()
2316 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2317 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2319 # Move all the files into the reject directory
2320 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2321 self.force_reject(reject_files)
2323 # If we fail here someone is probably trying to exploit the race
2324 # so let's just raise an exception ...
2325 if os.path.exists(reason_filename):
2326 os.unlink(reason_filename)
2327 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2329 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2331 if not manual:
2333 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2334 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2335 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2336 os.write(reason_fd, reject_message)
2337 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2338 else:
2339 # Build up the rejection email
2340 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2341 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2342 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2343 self.Subst["__REJECT_MESSAGE__"] = ""
2344 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2345 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2346 # Write the rejection email out as the <foo>.reason file
2347 os.write(reason_fd, reject_mail_message)
2349 del self.Subst["__REJECTOR_ADDRESS__"]
2350 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2351 del self.Subst["__CC__"]
2353 os.close(reason_fd)
2355 # Send the rejection mail
2356 utils.send_mail(reject_mail_message)
2358 if self.logger:
2359 self.logger.log(["rejected", self.pkg.changes_file])
2361 return 0
2363 ################################################################################
2364 def in_override_p(self, package, component, suite, binary_type, filename, session):
2365 """
2366 Check if a package already has override entries in the DB
2368 @type package: string
2369 @param package: package name
2371 @type component: string
2372 @param component: component name
2374 @type suite: string
2375 @param suite: suite name
2377 @type binary_type: string
2378 @param binary_type: type of the package
2380 @type filename: string
2381 @param filename: filename we check
2383 @return: the database result. But no one cares anyway.
2385 """
2389 if binary_type == "": # must be source
2390 file_type = "dsc"
2391 else:
2392 file_type = binary_type
2394 # Override suite name; used for example with proposed-updates
2395 oldsuite = get_suite(suite, session)
2396 if oldsuite is not None and oldsuite.overridesuite:
2397 suite = oldsuite.overridesuite
2399 result = get_override(package, suite, component, file_type, session)
2401 # If checking for a source package fall back on the binary override type
2402 if file_type == "dsc" and len(result) < 1:
2403 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2405 # Remember the section and priority so we can check them later if appropriate
2406 if len(result) > 0:
2407 result = result[0]
2408 self.pkg.files[filename]["override section"] = result.section.section
2409 self.pkg.files[filename]["override priority"] = result.priority.priority
2411 return result
2414 ################################################################################
2415 def get_anyversion(self, sv_list, suite):
2416 """
2417 @type sv_list: list
2418 @param sv_list: list of (suite, version) tuples to check
2420 @type suite: string
2421 @param suite: suite name
2422 """
2424 Cnf = Config()
2425 anyversion = None
2427 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2428 for (s, v) in sv_list:
2429 if s in [ x.lower() for x in anysuite ]:
2430 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2431 anyversion = v
2433 return anyversion
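# --- Illustrative sketch (editor's addition, not dak code):
# apt_pkg.VersionCompare(a, b) returns a negative value, zero or a
# positive value, strcmp-style, which is why get_anyversion above keeps
# the candidate whenever the comparison against it is <= 0.
def _newer_of(a, b):
    # Returns the Debian-version-wise greater of two version strings.
    if apt_pkg.VersionCompare(a, b) >= 0:
        return a
    return b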
2435 ################################################################################
2437 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2438 """
2439 @type sv_list: list
2440 @param sv_list: list of (suite, version) tuples to check
2442 @type filename: string
2443 @param filename: XXX
2445 @type new_version: string
2446 @param new_version: XXX
2448 Ensure versions are newer than existing packages in target
2449 suites and that cross-suite version checking rules as
2450 set out in the conf file are satisfied.
2451 """
2453 cnf = Config()
2455 # Check versions for each target suite
2456 for target_suite in self.pkg.changes["distribution"].keys():
2457 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2458 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2460 # Enforce "must be newer than target suite" even if conffile omits it
2461 if target_suite not in must_be_newer_than:
2462 must_be_newer_than.append(target_suite)
2464 for (suite, existent_version) in sv_list:
2465 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2467 if suite in must_be_newer_than and sourceful and vercmp < 1:
2468 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2470 if suite in must_be_older_than and vercmp > -1:
2471 cansave = 0
2473 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2474 # we really use the other suite, ignoring the conflicting one ...
2475 addsuite = self.pkg.changes["distribution-version"][suite]
2477 add_version = self.get_anyversion(sv_list, addsuite)
2478 target_version = self.get_anyversion(sv_list, target_suite)
2480 if not add_version:
2481 # not add_version can only happen if we map to a suite
2482 # that doesn't enhance the suite we're propup'ing from.
2483 # so "propup-ver x a b c; map a d" is a problem only if
2484 # d doesn't enhance a.
2486 # i think we could always propagate in this case, rather
2487 # than complaining. either way, this isn't a REJECT issue
2489 # And - we really should complain to the dorks who configured dak
2490 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2491 self.pkg.changes.setdefault("propdistribution", {})
2492 self.pkg.changes["propdistribution"][addsuite] = 1
2493 cansave = 1
2494 elif not target_version:
2495 # not target_version is true when the package is NEW
2496 # we could just stick with the "...old version..." REJECT
2497 # for this, I think.
2498 self.rejects.append("Won't propagate NEW packages.")
2499 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2500 # propagation would be redundant. no need to reject though.
2501 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2502 cansave = 1
2503 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2504 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2505 # propagate!!
2506 self.warnings.append("Propagating upload to %s" % (addsuite))
2507 self.pkg.changes.setdefault("propdistribution", {})
2508 self.pkg.changes["propdistribution"][addsuite] = 1
2509 cansave = 1
2511 if not cansave:
2512 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
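# --- Illustrative sketch (editor's addition, not dak code): the
# MustBeNewerThan rule above in miniature -- uploading 1.0-1 when the
# target suite already carries 1.0-2 yields vercmp < 1 and a reject.
def _passes_must_be_newer(new_version, existing_version):
    return apt_pkg.VersionCompare(new_version, existing_version) >= 1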
2514 ################################################################################
2515 def check_binary_against_db(self, filename, session):
2516 # Ensure version is sane
2517 self.cross_suite_version_check( \
2518 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2519 self.pkg.files[filename]["architecture"], session),
2520 filename, self.pkg.files[filename]["version"], sourceful=False)
2522 # Check for any existing copies of the file
2523 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2524 q = q.filter_by(version=self.pkg.files[filename]["version"])
2525 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2527 if q.count() > 0:
2528 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2530 ################################################################################
2532 def check_source_against_db(self, filename, session):
2533 source = self.pkg.dsc.get("source")
2534 version = self.pkg.dsc.get("version")
2536 # Ensure version is sane
2537 self.cross_suite_version_check( \
2538 get_suite_version_by_source(source, session), filename, version,
2539 sourceful=True)
2541 ################################################################################
2542 def check_dsc_against_db(self, filename, session):
2543 """
2545 @warning: NB: this function can remove entries from the 'files' index [if
2546 the orig tarball is a duplicate of the one in the archive]; if
2547 you're iterating over 'files' and call this function as part of
2548 the loop, be sure to add a check to the top of the loop to
2549 ensure you haven't just tried to dereference the deleted entry.
2550 """
2554 self.pkg.orig_files = {} # XXX: do we need to clear it?
2555 orig_files = self.pkg.orig_files
2557 # Try and find all files mentioned in the .dsc. This has
2558 # to work harder to cope with the multiple possible
2559 # locations of an .orig.tar.gz.
2560 # The ordering on the select is needed to pick the newest orig
2561 # when it exists in multiple places.
2562 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2563 found = None
2564 if self.pkg.files.has_key(dsc_name):
2565 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2566 actual_size = int(self.pkg.files[dsc_name]["size"])
2567 found = "%s in incoming" % (dsc_name)
2569 # Check the file does not already exist in the archive
2570 ql = get_poolfile_like_name(dsc_name, session)
2572 # Strip out anything that isn't '%s' or '/%s$'
2573 for i in ql:
2574 if not i.filename.endswith(dsc_name):
2575 ql.remove(i)
2577 # "[dak] has not broken them. [dak] has fixed a
2578 # brokenness. Your crappy hack exploited a bug in
2579 # the old dak.
2581 # "(Come on! I thought it was always obvious that
2582 # one just doesn't release different files with
2583 # the same name and version.)"
2584 # -- ajk@ on d-devel@l.d.o
2586 if len(ql) > 0:
2587 # Ignore exact matches for .orig.tar.gz
2588 match = 0
2589 if re_is_orig_source.match(dsc_name):
2590 for i in ql:
2591 if self.pkg.files.has_key(dsc_name) and \
2592 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2593 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2594 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2595 # TODO: Don't delete the entry, just mark it as not needed
2596 # This would fix the stupidity of changing something we often iterate over
2597 # whilst we're doing it
2598 del self.pkg.files[dsc_name]
2599 dsc_entry["files id"] = i.file_id
2600 if not orig_files.has_key(dsc_name):
2601 orig_files[dsc_name] = {}
2602 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2605 # Don't bitch that we couldn't find this file later
2607 self.later_check_files.remove(dsc_name)
2613 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2615 elif re_is_orig_source.match(dsc_name):
2617 ql = get_poolfile_like_name(dsc_name, session)
2619 # Strip out anything that isn't '%s' or '/%s$'
2620 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2621 for i in ql:
2622 if not i.filename.endswith(dsc_name):
2623 ql.remove(i)
2625 if len(ql) > 0:
2626 # Unfortunately, we may get more than one match here if,
2627 # for example, the package was in potato but had an -sa
2628 # upload in woody. So we need to choose the right one.
2630 # default to something sane in case we don't match any or have only one
2631 x = ql[0]
2633 if len(ql) > 1:
2634 for i in ql:
2635 old_file = os.path.join(i.location.path, i.filename)
2636 old_file_fh = utils.open_file(old_file)
2637 actual_md5 = apt_pkg.md5sum(old_file_fh)
2638 old_file_fh.close()
2639 actual_size = os.stat(old_file)[stat.ST_SIZE]
2640 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2641 x = i
2643 old_file = os.path.join(x.location.path, x.filename)
2644 old_file_fh = utils.open_file(old_file)
2645 actual_md5 = apt_pkg.md5sum(old_file_fh)
2646 old_file_fh.close()
2647 actual_size = os.stat(old_file)[stat.ST_SIZE]
2648 found = old_file
2649 suite_type = x.location.archive_type
2650 # need this for updating dsc_files in install()
2651 dsc_entry["files id"] = x.file_id
2652 # See install() in process-accepted...
2653 if not orig_files.has_key(dsc_name):
2654 orig_files[dsc_name] = {}
2655 orig_files[dsc_name]["id"] = x.file_id
2656 orig_files[dsc_name]["path"] = old_file
2657 orig_files[dsc_name]["location"] = x.location.location_id
2659 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2660 # Not there? Check the queue directories...
2661 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2662 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2663 continue
2664 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2665 if os.path.exists(in_otherdir):
2666 in_otherdir_fh = utils.open_file(in_otherdir)
2667 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2668 in_otherdir_fh.close()
2669 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2670 found = in_otherdir
2671 if not orig_files.has_key(dsc_name):
2672 orig_files[dsc_name] = {}
2673 orig_files[dsc_name]["path"] = in_otherdir
2676 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2679 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2681 if actual_md5 != dsc_entry["md5sum"]:
2682 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2683 if actual_size != int(dsc_entry["size"]):
2684 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2686 ################################################################################
2687 # This is used by process-new and process-holding to recheck a changes file
2688 # at the time we're running. It mainly wraps various other internal functions
2689 # and is similar to accepted_checks - these should probably be tidied up
2691 def recheck(self, session):
2692 cnf = Config()
2693 for f in self.pkg.files.keys():
2694 # The .orig.tar.gz can disappear out from under us if it's a
2695 # duplicate of one in the archive.
2696 if not self.pkg.files.has_key(f):
2697 continue
2699 entry = self.pkg.files[f]
2701 # Check that the source still exists
2702 if entry["type"] == "deb":
2703 source_version = entry["source version"]
2704 source_package = entry["source package"]
2705 if not self.pkg.changes["architecture"].has_key("source") \
2706 and not source_exists(source_package, source_version, \
2707 suites = self.pkg.changes["distribution"].keys(), session = session):
2708 source_epochless_version = re_no_epoch.sub('', source_version)
2709 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2710 found = False
2711 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2712 if cnf.has_key("Dir::Queue::%s" % (q)):
2713 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2714 found = True
2715 if not found:
2716 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2718 # Version and file overwrite checks
2719 if entry["type"] == "deb":
2720 self.check_binary_against_db(f, session)
2721 elif entry["type"] == "dsc":
2722 self.check_source_against_db(f, session)
2723 self.check_dsc_against_db(f, session)
2725 ################################################################################
2726 def accepted_checks(self, overwrite_checks, session):
2727 # Recheck anything that relies on the database; since that's not
2728 # frozen between accept and our run time when called from p-a.
2730 # overwrite_checks is set to False when installing to stable/oldstable
2732 propogate = {}
2733 nopropogate = {}
2735 # Find the .dsc (again)
2736 dsc_filename = None
2737 for f in self.pkg.files.keys():
2738 if self.pkg.files[f]["type"] == "dsc":
2739 dsc_filename = f
2741 for checkfile in self.pkg.files.keys():
2742 # The .orig.tar.gz can disappear out from under us if it's a
2743 # duplicate of one in the archive.
2744 if not self.pkg.files.has_key(checkfile):
2745 continue
2747 entry = self.pkg.files[checkfile]
2749 # Check that the source still exists
2750 if entry["type"] == "deb":
2751 source_version = entry["source version"]
2752 source_package = entry["source package"]
2753 if not self.pkg.changes["architecture"].has_key("source") \
2754 and not source_exists(source_package, source_version, \
2755 suites = self.pkg.changes["distribution"].keys(), \
2756 session = session):
2757 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2759 # Version and file overwrite checks
2760 if overwrite_checks:
2761 if entry["type"] == "deb":
2762 self.check_binary_against_db(checkfile, session)
2763 elif entry["type"] == "dsc":
2764 self.check_source_against_db(checkfile, session)
2765 self.check_dsc_against_db(dsc_filename, session)
2767 # propagate in the case it is in the override tables:
2768 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2769 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2770 propogate[suite] = 1
2771 else:
2772 nopropogate[suite] = 1
2774 for suite in propogate.keys():
2775 if suite in nopropogate:
2776 continue
2777 self.pkg.changes["distribution"][suite] = 1
2779 for checkfile in self.pkg.files.keys():
2780 # Check the package is still in the override tables
2781 for suite in self.pkg.changes["distribution"].keys():
2782 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype",""), checkfile, session):
2783 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2785 ################################################################################
2786 # If any file of an upload has a recent mtime then chances are good
2787 # the file is still being uploaded.
2789 def upload_too_new(self):
2790 cnf = Config()
2791 too_new = False
2792 # Move back to the original directory to get accurate time stamps
2793 cwd = os.getcwd()
2794 os.chdir(self.pkg.directory)
2795 file_list = self.pkg.files.keys()
2796 file_list.extend(self.pkg.dsc_files.keys())
2797 file_list.append(self.pkg.changes_file)
2798 for f in file_list:
2799 try:
2800 last_modified = time.time()-os.path.getmtime(f)
2801 if last_modified < int(cnf["Dinstall::SkipTime"]):
2802 too_new = True
2803 break
2804 except:
2805 pass
2807 os.chdir(cwd)
2808 return too_new
2810 def store_changelog(self):
2812 # Skip binary-only upload if it is not a bin-NMU
2813 if not self.pkg.changes['architecture'].has_key('source'):
2814 from daklib.regexes import re_bin_only_nmu
2815 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2816 return
2818 session = DBConn().session()
2820 # Check if upload already has a changelog entry
2821 query = """SELECT changelog_id FROM changes WHERE source = :source
2822 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2823 if session.execute(query, {'source': self.pkg.changes['source'], \
2824 'version': self.pkg.changes['version'], \
2825 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2826 session.commit()
2827 return
2829 # Add current changelog text into changelogs_text table, return created ID
2830 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2831 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2833 # Link ID to the upload available in changes table
2834 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2835 AND version = :version AND architecture = :architecture"""
2836 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2837 'version': self.pkg.changes['version'], \
2838 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
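# --- Illustrative sketch (editor's addition, not dak code): the
# INSERT ... RETURNING idiom used above fetches the generated key in the
# same round trip, shown here with a hypothetical session object.
def _insert_changelog_text(session, text):
    q = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
    return session.execute(q, {'changelog': text}).fetchone()[0]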