"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
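# Illustrative usage sketch (not part of dak itself); the file entry dict
# below is hypothetical and a configured SQLAlchemy session is assumed:
#
#     f = {"type": "deb", "section": "utils"}   # entry from a Changes object
#     file_type = get_type(f, session)          # -> "deb"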
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    # TODO: This should all use the database instead of parsing the changes file again
    byhand = {}
    new = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
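# Illustrative sketch (hypothetical names, not part of dak): how process-new
# style code might drive determine_new after loading a .changes file:
#
#     changes = parse_changes("/srv/queue/foo_1.0-1_amd64.changes")
#     ...  # build the files dict via utils.build_file_list(changes)
#     new = determine_new("foo_1.0-1_amd64.changes", changes, files, session=session)
#     if new:
#         print "NEW overrides needed for: %s" % ", ".join(new.keys())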
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
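# A minimal sketch of the shape check_valid() operates on (hypothetical data):
#
#     new = {"foo-udeb": {"section": "debian-installer",
#                         "priority": "standard", "type": "udeb"}}
#     check_valid(new, session=session)
#     # new["foo-udeb"]["section id"] / ["priority id"] are now set,
#     # with -1 marking an invalid combination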
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
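# Sketch of how TarTime is driven (see Upload.check_timestamps below); the
# .deb filename is hypothetical.  apt_inst invokes the callback once per tar
# member with its metadata, and any member whose mtime falls outside
# [past_cutoff, future_cutoff] is recorded:
#
#     tar = TarTime(time.time() + 86400,
#                   time.mktime(time.strptime("1975", "%Y")))
#     apt_inst.debExtract(utils.open_file("foo_1.0_amd64.deb"),
#                         tar.callback, "data.tar.gz")
#     print tar.future_files, tar.ancient_files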
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, "  ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    if answer == 'A':
        return
    elif answer == 'Q':
        sys.exit(0)

    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote, "  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    if answer == 'A':
        return
    elif answer == 'Q':
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source=source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source=source)
    return q.all()
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package=package). \
        join(DBBinary.suites).filter_by(suite_name=suite_name)
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package=package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
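# Illustrative queries against these helpers (hypothetical package names,
# assuming a configured session):
#
#     get_newest_source("dpkg", session)            # newest DBSource in dm_suites
#     get_suite_version_by_source("dpkg", session)  # [('unstable', '1.15.8'), ...]
#     get_suite_version_by_package("dpkg", "amd64", session)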
class Upload(object):
    """
    Everything that has to do with processing an upload.
    """

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """
        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg
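    # For example (hypothetical state), an Upload with
    # self.rejects = ["bad version"] and self.warnings = ["odd section"] yields:
    #
    #     '\n\nReject Reasons:\nbad version\n\nWarnings:\nodd section\n\n'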
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """
        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it.  Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
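    # Illustrative SuiteMappings entries (hypothetical; the real list lives in
    # dak.conf) covering the mapping types handled above:
    #
    #     "map stable proposed-updates"
    #     "silent-map testing-security testing-proposed-updates"
    #     "map-unreleased testing unstable"
    #     "ignore testing"
    #     "reject experimental-security"
    #     "propup-version stable-security testing testing-proposed-updates"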
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
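    # Rough driving sketch for check_files() (illustrative; the path is
    # hypothetical): classify each file and dispatch to the matching checker,
    # collecting problems in self.rejects:
    #
    #     upload = Upload()
    #     upload.load_changes("/path/to/foo_1.0-1_amd64.changes")
    #     upload.check_files(action=False)   # dry run; nothing copied to holding
    #     print upload.rejects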
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # Check if we've already processed this file if we have a dbchg object
            found = False
            if dbchg is not None:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        found = True
                        del self.pkg.files[f]
            if not found:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
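    # For reference, the Checksums-* fields copied above look roughly like
    # this in a .changes (illustrative values):
    #
    #     Checksums-Sha1:
    #      0a1b2c... 1234 foo_1.0-1.dsc
    #     Checksums-Sha256:
    #      9d8e7f... 1234 foo_1.0-1.dsc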
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """
        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)
                os.symlink(path, dest)
                symlinked.append(dest)
                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
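    # check_lintian() below consumes this as (roughly):
    #
    #     symlinked = self.ensure_orig()
    #     try:
    #         ...  # run lintian against the complete source
    #     finally:
    #         for symlink in symlinked:
    #             os.unlink(symlink)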
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
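    # The tag file named by Dinstall::LintianTags is YAML of roughly this
    # shape (illustrative tag names; severity keys group the tags):
    #
    #     lintian:
    #       fatal:
    #         - binary-in-etc
    #       nonfatal:
    #         - debian-control-file-is-a-symlink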
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
            else:
                # If not a DM, we allow full upload rights
                uid_email = "%s@debian.org" % (fpr.uid.uid)
                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
        version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1709 def check_dm_upload(self, fpr, session):
1710 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1711 ## none of the uploaded packages are NEW
1713 for f in self.pkg.files.keys():
1714 if self.pkg.files[f].has_key("byhand"):
1715 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1717 if self.pkg.files[f].has_key("new"):
1718 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1724 r = get_newest_source(self.pkg.changes["source"], session)
1727 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1728 self.rejects.append(rej)
1731 if not r.dm_upload_allowed:
1732 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1733 self.rejects.append(rej)
1736 ## the Maintainer: field of the uploaded .changes file corresponds with
1737 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1739 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1740 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1742 ## the most recent version of the package uploaded to unstable or
1743 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1744 ## non-developer maintainers cannot NMU or hijack packages)
1746 # srcuploaders includes the maintainer
1747 accept = False
1748 for sup in r.srcuploaders:
1749 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1750 # Eww - I hope we never have two people with the same name in Debian
1751 if email == fpr.uid.uid or name == fpr.uid.name:
1752 accept = True
1753 break
1755 if not accept:
1756 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1759 ## none of the packages are being taken over from other source packages
1760 for b in self.pkg.changes["binary"].keys():
1761 for suite in self.pkg.changes["distribution"].keys():
1762 for s in get_source_by_package_and_suite(b, suite, session):
1763 if s.source != self.pkg.changes["source"]:
1764 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1768 def check_transition(self, session):
1769 cnf = Config()
1771 sourcepkg = self.pkg.changes["source"]
1773 # No sourceful upload -> no need to do anything else, direct return
1774 # We also work with unstable uploads, not experimental or those going to some
1775 # proposed-updates queue
1776 if "source" not in self.pkg.changes["architecture"] or \
1777 "unstable" not in self.pkg.changes["distribution"]:
1780 # Also only check if there is a file defined (and existant) with
1782 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1783 if transpath == "" or not os.path.exists(transpath):
1784 return
1786 # Parse the yaml file
1787 sourcefile = file(transpath, 'r')
1788 sourcecontent = sourcefile.read()
1789 try:
1790 transitions = yaml.load(sourcecontent)
1791 except yaml.YAMLError, msg:
1792 # This shouldn't happen, there is a wrapper to edit the file which
1793 # checks it, but we would rather be safe than end up rejecting
1794 # everyone.
1795 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1796 return
1798 # Now look through all defined transitions
1799 for trans in transitions:
1800 t = transitions[trans]
1801 source = t["source"]
1802 expected = t["new"]
1804 # Will be None if nothing is in testing.
1805 current = get_source_in_suite(source, "testing", session)
1806 if current is not None:
1807 compare = apt_pkg.VersionCompare(current.version, expected)
1809 if current is None or compare < 0:
1810 # This is still valid, the current version in testing is older than
1811 # the new version we wait for, or there is none in testing yet
1813 # Check if the source we look at is affected by this.
1814 if sourcepkg in t['packages']:
1815 # The source is affected, let's reject it.
1817 rejectmsg = "%s: part of the %s transition.\n\n" % (
1818 sourcepkg, trans)
1820 if current is not None:
1821 currentlymsg = "at version %s" % (current.version)
1822 else:
1823 currentlymsg = "not present in testing"
1825 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1827 rejectmsg += "\n".join(textwrap.wrap("""Your package
1828 is part of a testing transition designed to get %s migrated (it is
1829 currently %s, we need version %s). This transition is managed by the
1830 Release Team, and %s is the Release-Team member responsible for it.
1831 Please mail debian-release@lists.debian.org or contact %s directly if you
1832 need further assistance. You might want to upload to experimental until this
1833 transition is done."""
1834 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1836 self.rejects.append(rejectmsg)
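# For reference, a ReleaseTransitions stanza in the shape this loop
# expects (keys as read above; names and versions purely illustrative):
#
#   apt_update:
#       reason: "apt needs to migrate to testing first"
#       source: apt
#       new: 0.8.0
#       rm: somebody
#       packages:
#           - python-apt
#           - aptitude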
1839 ###########################################################################
1840 # End check_signed_by_key checks
1841 ###########################################################################
1843 def build_summaries(self):
1844 """ Build a summary of changes the upload introduces. """
1846 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1848 short_summary = summary
1850 # This is for direport's benefit...
1851 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1853 if byhand or new:
1854 summary += "Changes: " + f
1856 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1858 summary += self.announce(short_summary, 0)
1860 return (summary, short_summary)
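# Typical use (illustrative): callers run
#   (summary, short_summary) = upload.build_summaries()
# and pass short_summary to announce() while the full summary goes into
# the accepted and override-disparity mails.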
1862 ###########################################################################
1864 def close_bugs(self, summary, action):
1866 Send mail to close bugs as instructed by the closes field in the changes file.
1867 Also add a line to summary if any work was done.
1869 @type summary: string
1870 @param summary: summary text, as given by L{build_summaries}
1872 @type action: bool
1873 @param action: If set to false, no real action will be done.
1875 @rtype: string
1876 @return: summary. If action was taken, extended by the list of closed bugs.
1878 """
1880 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1882 bugs = self.pkg.changes["closes"].keys()
1884 if not bugs:
1885 return summary
1887 bugs.sort()
1888 summary += "Closing bugs: "
1889 for bug in bugs:
1890 summary += "%s " % (bug)
1891 if action:
1892 self.update_subst()
1893 self.Subst["__BUG_NUMBER__"] = bug
1894 if self.pkg.changes["distribution"].has_key("stable"):
1895 self.Subst["__STABLE_WARNING__"] = """
1896 Note that this package is not part of the released stable Debian
1897 distribution. It may have dependencies on other unreleased software,
1898 or other instabilities. Please take care if you wish to install it.
1899 The update will eventually make its way into the next released Debian
1900 distribution."""
1901 else:
1902 self.Subst["__STABLE_WARNING__"] = ""
1903 mail_message = utils.TemplateSubst(self.Subst, template)
1904 utils.send_mail(mail_message)
1906 # Clear up after ourselves
1907 del self.Subst["__BUG_NUMBER__"]
1908 del self.Subst["__STABLE_WARNING__"]
1910 if action and self.logger:
1911 self.logger.log(["closing bugs"] + bugs)
1913 summary += "\n"
1915 return summary
1917 ###########################################################################
1919 def announce(self, short_summary, action):
1921 Send an announce mail about a new upload.
1923 @type short_summary: string
1924 @param short_summary: Short summary text to include in the mail
1926 @type action: bool
1927 @param action: If set to false, no real action will be done.
1929 @rtype: string
1930 @return: Text string about the action taken.
1932 """
1934 cnf = Config()
1935 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1937 # Only do announcements for source uploads with a recent dpkg-dev installed
1938 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1939 self.pkg.changes["architecture"].has_key("source"):
1940 return ""
1942 lists_done = {}
1943 summary = ""
1945 self.Subst["__SHORT_SUMMARY__"] = short_summary
1947 for dist in self.pkg.changes["distribution"].keys():
1948 suite = get_suite(dist)
1949 if suite is None: continue
1950 announce_list = suite.announce
1951 if announce_list == "" or lists_done.has_key(announce_list):
1952 continue
1954 lists_done[announce_list] = 1
1955 summary += "Announcing to %s\n" % (announce_list)
1957 if action:
1958 self.update_subst()
1959 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1960 if cnf.get("Dinstall::TrackingServer") and \
1961 self.pkg.changes["architecture"].has_key("source"):
1962 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1963 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1965 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1966 utils.send_mail(mail_message)
1968 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1970 if cnf.FindB("Dinstall::CloseBugs"):
1971 summary = self.close_bugs(summary, action)
1973 del self.Subst["__SHORT_SUMMARY__"]
1975 return summary
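# Illustrative example: a sourceful upload to unstable whose suite row
# carries announce = "debian-devel-changes@lists.debian.org" is announced
# there once per list, and with Dinstall::TrackingServer set to e.g.
# "packages.qa.debian.org" the mail is also Bcc'ed to
# <source>@packages.qa.debian.org.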
1977 ###########################################################################
1979 def accept (self, summary, short_summary, session=None):
1980 """
1981 Accept an upload.
1983 This moves all files referenced from the .changes into the pool,
1984 sends the accepted mail, announces to lists, closes bugs and
1985 also checks for override disparities. If enabled it will write out
1986 the version history for the BTS Version Tracking and will finally call
1987 L{queue_build}.
1989 @type summary: string
1990 @param summary: Summary text
1992 @type short_summary: string
1993 @param short_summary: Short summary
1994 """
1996 cnf = Config()
1997 stats = SummaryStats()
1999 print "Installing."
2000 self.logger.log(["installing changes", self.pkg.changes_file])
2002 poolfiles = []
2004 # Add the .dsc file to the DB first
2005 for newfile, entry in self.pkg.files.items():
2006 if entry["type"] == "dsc":
2007 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2008 for j in pfs:
2009 poolfiles.append(j)
2011 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2012 for newfile, entry in self.pkg.files.items():
2013 if entry["type"] == "deb":
2014 poolfiles.append(add_deb_to_db(self, newfile, session))
2016 # If this is a sourceful diff only upload that is moving
2017 # cross-component we need to copy the .orig files into the new
2018 # component too for the same reasons as above.
2019 # XXX: mhy: I think this should be in add_dsc_to_db
2020 if self.pkg.changes["architecture"].has_key("source"):
2021 for orig_file in self.pkg.orig_files.keys():
2022 if not self.pkg.orig_files[orig_file].has_key("id"):
2023 continue # Skip if it's not in the pool
2024 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2025 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2026 continue # Skip if the location didn't change
2029 oldf = get_poolfile_by_id(orig_file_id, session)
2030 old_filename = os.path.join(oldf.location.path, oldf.filename)
2031 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2032 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2034 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2036 # TODO: Care about size/md5sum collisions etc
2037 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2039 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2040 if newf is None:
2041 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2042 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2046 # Don't reference the old file from this changes
2047 for p in poolfiles:
2048 if p.file_id == oldf.file_id:
2049 poolfiles.remove(p)
2051 poolfiles.append(newf)
2053 # Fix up the DSC references
2054 toremove = []
2056 for df in source.srcfiles:
2057 if df.poolfile.file_id == oldf.file_id:
2058 # Add a new DSC entry and mark the old one for deletion
2059 # Don't do it in the loop so we don't change the thing we're iterating over
2060 newdscf = DSCFile()
2061 newdscf.source_id = source.source_id
2062 newdscf.poolfile_id = newf.file_id
2063 session.add(newdscf)
2065 toremove.append(df)
2067 for df in toremove:
2068 session.delete(df)
2071 session.flush()
2073 # Make sure that our source object is up-to-date
2074 session.expire(source)
2076 # Add changelog information to the database
2077 self.store_changelog()
2079 # Install the files into the pool
2080 for newfile, entry in self.pkg.files.items():
2081 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2082 utils.move(newfile, destination)
2083 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2084 stats.accept_bytes += float(entry["size"])
2086 # Copy the .changes file across for suites which need it.
2087 copy_changes = dict([(x.copychanges, '')
2088 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2089 if x.copychanges is not None])
2091 for dest in copy_changes.keys():
2092 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2094 # We're done - commit the database changes
2095 session.commit()
2096 # Our SQL session will automatically start a new transaction after
2097 # the last commit
2099 # Move the .changes into the 'done' directory
2100 utils.move(self.pkg.changes_file,
2101 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2103 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2104 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2106 self.update_subst()
2107 self.Subst["__SUMMARY__"] = summary
2108 mail_message = utils.TemplateSubst(self.Subst,
2109 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2110 utils.send_mail(mail_message)
2111 self.announce(short_summary, 1)
2113 ## Helper stuff for DebBugs Version Tracking
2114 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2115 if self.pkg.changes["architecture"].has_key("source"):
2116 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2117 version_history = os.fdopen(fd, 'w')
2118 version_history.write(self.pkg.dsc["bts changelog"])
2119 version_history.close()
2120 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2121 self.pkg.changes_file[:-8]+".versions")
2122 os.rename(temp_filename, filename)
2123 os.chmod(filename, 0644)
2125 # Write out the binary -> source mapping.
2126 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2127 debinfo = os.fdopen(fd, 'w')
2128 for name, entry in sorted(self.pkg.files.items()):
2129 if entry["type"] == "deb":
2130 line = " ".join([entry["package"], entry["version"],
2131 entry["architecture"], entry["source package"],
2132 entry["source version"]])
2133 debinfo.write(line+"\n")
2135 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2136 self.pkg.changes_file[:-8]+".debinfo")
2137 os.rename(temp_filename, filename)
2138 os.chmod(filename, 0644)
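# Illustrative .debinfo content: one line per binary, e.g.
#   hello 2.4-1 amd64 hello 2.4-1
# i.e. package, version, architecture, source package, source version,
# in the order joined above.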
2142 # Set up our copy queues (e.g. buildd queues)
2143 for suite_name in self.pkg.changes["distribution"].keys():
2144 suite = get_suite(suite_name, session)
2145 for q in suite.copy_queues:
2146 for f in poolfiles:
2147 q.add_file_from_pool(f)
2149 session.commit()
2152 stats.accept_count += 1
2154 def check_override(self):
2156 Checks override entries for validity. Mails "Override disparity" warnings,
2157 if that feature is enabled.
2159 Abandons the check if
2160 - override disparity checks are disabled
2161 - mail sending is disabled
2162 """
2164 cnf = Config()
2166 # Abandon the check if override disparity checks have been disabled
2167 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2168 return
2170 summary = self.pkg.check_override()
2172 if summary == "":
2173 return
2175 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2177 self.update_subst()
2178 self.Subst["__SUMMARY__"] = summary
2179 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2180 utils.send_mail(mail_message)
2181 del self.Subst["__SUMMARY__"]
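# Illustrative example: if the upload ships a package with
# "section: games" but the override table says "utils",
# self.pkg.check_override() returns a non-empty summary and the
# disparity mail above goes out to the maintainer.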
2183 ###########################################################################
2185 def remove(self, from_dir=None):
2187 Used (for instance) in p-u to remove the package from unchecked
2189 Also removes the package from holding area.
2190 """
2191 if from_dir is None:
2192 from_dir = self.pkg.directory
2193 h = Holding()
2195 for f in self.pkg.files.keys():
2196 os.unlink(os.path.join(from_dir, f))
2197 if os.path.exists(os.path.join(h.holding_dir, f)):
2198 os.unlink(os.path.join(h.holding_dir, f))
2200 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2201 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2202 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2204 ###########################################################################
2206 def move_to_queue (self, queue):
2208 Move files to a destination queue using the permissions in the table
2209 """
2210 h = Holding()
2211 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2212 queue.path, perms=int(queue.change_perms, 8))
2213 for f in self.pkg.files.keys():
2214 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
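# Example: queue.perms and queue.change_perms are stored as octal
# strings, so a table value of "0644" becomes int("0644", 8) == 420,
# the numeric mode handed to utils.move().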
2216 ###########################################################################
2218 def force_reject(self, reject_files):
2220 Forcefully move files from the current directory to the
2221 reject directory. If any file already exists in the reject
2222 directory it will be moved to the morgue to make way for
2223 the new file.
2225 @type reject_files: dict
2226 @param reject_files: file dictionary
2228 """
2230 cnf = Config()
2232 for file_entry in reject_files:
2233 # Skip any files which don't exist or which we don't have permission to copy.
2234 if os.access(file_entry, os.R_OK) == 0:
2235 continue
2237 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2239 try:
2240 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2241 except OSError, e:
2242 # File exists? Let's find a new name by adding a number
2243 if e.errno == errno.EEXIST:
2244 try:
2245 dest_file = utils.find_next_free(dest_file, 255)
2246 except NoFreeFilenameError:
2247 # Something's either gone badly Pete Tong, or
2248 # someone is trying to exploit us.
2249 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2250 return
2252 # Make sure we really got it
2253 try:
2254 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2255 except OSError, e:
2256 # Likewise
2257 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2258 return
2259 else:
2260 raise
2261 # If we got here, we own the destination file, so we can
2262 # safely overwrite it.
2263 utils.move(file_entry, dest_file, 1, perms=0660)
2264 os.close(dest_fd)
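# Illustrative example: if foo.changes already exists in the reject
# directory, utils.find_next_free() is asked (up to 255 tries) for a
# fresh name such as foo.changes.1, which we then try to claim with
# O_CREAT|O_EXCL so two dak processes can't race for the same file.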
2266 ###########################################################################
2267 def do_reject (self, manual=0, reject_message="", notes=""):
2269 Reject an upload. If called without a reject message or C{manual} is
2270 true, spawn an editor so the user can write one.
2272 @type manual: bool
2273 @param manual: manual or automated rejection
2275 @type reject_message: string
2276 @param reject_message: A reject message
2278 """
2281 # If we weren't given a manual rejection message, spawn an
2282 # editor so the user can add one in...
2283 if manual and not reject_message:
2284 (fd, temp_filename) = utils.temp_filename()
2285 temp_file = os.fdopen(fd, 'w')
2286 if notes:
2287 for note in notes:
2288 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2289 % (note.author, note.version, note.notedate, note.comment))
2290 temp_file.close()
2291 editor = os.environ.get("EDITOR","vi")
2292 answer = 'E'
2293 while answer == 'E':
2294 os.system("%s %s" % (editor, temp_filename))
2295 temp_fh = utils.open_file(temp_filename)
2296 reject_message = "".join(temp_fh.readlines())
2298 print "Reject message:"
2299 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2300 prompt = "[R]eject, Edit, Abandon, Quit ?"
2301 answer = "XXX"
2302 while prompt.find(answer) == -1:
2303 answer = utils.our_raw_input(prompt)
2304 m = re_default_answer.search(prompt)
2305 if answer == "":
2306 answer = m.group(1)
2307 answer = answer[:1].upper()
2308 os.unlink(temp_filename)
2309 if answer == 'A':
2310 return 1
2311 elif answer == 'Q':
2312 sys.exit(0)
2314 print "Rejecting.\n"
2316 cnf = Config()
2318 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2319 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2321 # Move all the files into the reject directory
2322 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2323 self.force_reject(reject_files)
2325 # If we fail here someone is probably trying to exploit the race
2326 # so let's just raise an exception ...
2327 if os.path.exists(reason_filename):
2328 os.unlink(reason_filename)
2329 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2331 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2333 self.update_subst()
2334 if not manual:
2335 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2336 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2337 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2338 os.write(reason_fd, reject_message)
2339 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2340 else:
2341 # Build up the rejection email
2342 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2343 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2344 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2345 self.Subst["__REJECT_MESSAGE__"] = ""
2346 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2347 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2348 # Write the rejection email out as the <foo>.reason file
2349 os.write(reason_fd, reject_mail_message)
2351 del self.Subst["__REJECTOR_ADDRESS__"]
2352 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2353 del self.Subst["__CC__"]
2355 os.close(reason_fd)
2357 # Send the rejection mail
2358 utils.send_mail(reject_mail_message)
2360 if self.logger:
2361 self.logger.log(["rejected", self.pkg.changes_file])
2363 return 0
2365 ################################################################################
2366 def in_override_p(self, package, component, suite, binary_type, filename, session):
2368 Check if a package already has override entries in the DB
2370 @type package: string
2371 @param package: package name
2373 @type component: string
2374 @param component: component name
2376 @type suite: string
2377 @param suite: suite name
2379 @type binary_type: string
2380 @param binary_type: type of the package
2382 @type filename: string
2383 @param filename: filename we check
2385 @return: the database result. But no one cares anyway.
2387 """
2391 if binary_type == "": # must be source
2392 file_type = "dsc"
2393 else:
2394 file_type = binary_type
2396 # Override suite name; used for example with proposed-updates
2397 oldsuite = get_suite(suite, session)
2398 if (not oldsuite is None) and oldsuite.overridesuite:
2399 suite = oldsuite.overridesuite
2401 result = get_override(package, suite, component, file_type, session)
2403 # If checking for a source package fall back on the binary override type
2404 if file_type == "dsc" and len(result) < 1:
2405 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2407 # Remember the section and priority so we can check them later if appropriate
2408 if len(result) > 0:
2409 result = result[0]
2410 self.pkg.files[filename]["override section"] = result.section.section
2411 self.pkg.files[filename]["override priority"] = result.priority.priority
2412 return result
2414 return None
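# Illustrative example: for a binary uploaded to proposed-updates whose
# suite row has overridesuite = "stable", the lookup above is done
# against stable's override table; an empty result means the package
# will be treated as NEW.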
2416 ################################################################################
2417 def get_anyversion(self, sv_list, suite):
2418 """
2419 @type sv_list: list
2420 @param sv_list: list of (suite, version) tuples to check
2422 @type suite: string
2423 @param suite: suite name
2426 """
2427 Cnf = Config()
2428 anyversion = None
2429 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2430 for (s, v) in sv_list:
2431 if s in [ x.lower() for x in anysuite ]:
2432 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2433 anyversion = v
2435 return anyversion
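# Illustrative example: with Suite::testing::VersionChecks::Enhances
# listing "unstable", get_anyversion(sv_list, "testing") returns the
# highest version found in either the testing or unstable entries of
# sv_list.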
2437 ################################################################################
2439 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2440 """
2441 @type sv_list: list
2442 @param sv_list: list of (suite, version) tuples to check
2444 @type filename: string
2445 @param filename: XXX
2447 @type new_version: string
2448 @param new_version: XXX
2450 Ensure versions are newer than existing packages in target
2451 suites and that cross-suite version checking rules as
2452 set out in the conf file are satisfied.
2453 """
2455 cnf = Config()
2457 # Check versions for each target suite
2458 for target_suite in self.pkg.changes["distribution"].keys():
2459 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2460 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2462 # Enforce "must be newer than target suite" even if conffile omits it
2463 if target_suite not in must_be_newer_than:
2464 must_be_newer_than.append(target_suite)
2466 for (suite, existent_version) in sv_list:
2467 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2469 if suite in must_be_newer_than and sourceful and vercmp < 1:
2470 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2472 if suite in must_be_older_than and vercmp > -1:
2473 cansave = 0
2475 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2476 # we really use the other suite, ignoring the conflicting one ...
2477 addsuite = self.pkg.changes["distribution-version"][suite]
2479 add_version = self.get_anyversion(sv_list, addsuite)
2480 target_version = self.get_anyversion(sv_list, target_suite)
2482 if not add_version:
2483 # not add_version can only happen if we map to a suite
2484 # that doesn't enhance the suite we're propup'ing from.
2485 # so "propup-ver x a b c; map a d" is a problem only if
2486 # d doesn't enhance a.
2488 # i think we could always propagate in this case, rather
2489 # than complaining. either way, this isn't a REJECT issue
2491 # And - we really should complain to the dorks who configured dak
2492 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2493 self.pkg.changes.setdefault("propdistribution", {})
2494 self.pkg.changes["propdistribution"][addsuite] = 1
2495 cansave = 1
2496 elif not target_version:
2497 # not target_version is true when the package is NEW
2498 # we could just stick with the "...old version..." REJECT
2499 # for this, I think.
2500 self.rejects.append("Won't propagate NEW packages.")
2501 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2502 # propagation would be redundant. no need to reject though.
2503 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2504 cansave = 1
2505 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2506 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2507 # propagate!
2508 self.warnings.append("Propagating upload to %s" % (addsuite))
2509 self.pkg.changes.setdefault("propdistribution", {})
2510 self.pkg.changes["propdistribution"][addsuite] = 1
2511 cansave = 1
2513 if not cansave:
2514 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
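# Illustrative conf stanza driving these checks:
#
#   Suite::unstable::VersionChecks {
#     MustBeNewerThan { stable; testing; };
#     MustBeOlderThan { experimental; };
#   };
#
# An upload to unstable is then rejected if it is <= the version in
# stable/testing, or >= the one in experimental, unless a
# distribution-version mapping lets it be propagated instead.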
2516 ################################################################################
2517 def check_binary_against_db(self, filename, session):
2518 # Ensure version is sane
2519 self.cross_suite_version_check( \
2520 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2521 self.pkg.files[filename]["architecture"], session),
2522 filename, self.pkg.files[filename]["version"], sourceful=False)
2524 # Check for any existing copies of the file
2525 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2526 q = q.filter_by(version=self.pkg.files[filename]["version"])
2527 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2529 if q.count() > 0:
2530 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2532 ################################################################################
2534 def check_source_against_db(self, filename, session):
2535 source = self.pkg.dsc.get("source")
2536 version = self.pkg.dsc.get("version")
2538 # Ensure version is sane
2539 self.cross_suite_version_check( \
2540 get_suite_version_by_source(source, session), filename, version,
2541 sourceful=True)
2543 ################################################################################
2544 def check_dsc_against_db(self, filename, session):
2545 """
2547 @warning: NB: this function can remove entries from the 'files' index [if
2548 the orig tarball is a duplicate of the one in the archive]; if
2549 you're iterating over 'files' and call this function as part of
2550 the loop, be sure to add a check to the top of the loop to
2551 ensure you haven't just tried to dereference the deleted entry.
2553 """
2554 Cnf = Config()
2556 self.pkg.orig_files = {} # XXX: do we need to clear it?
2557 orig_files = self.pkg.orig_files
2559 # Try and find all files mentioned in the .dsc. This has
2560 # to work harder to cope with the multiple possible
2561 # locations of an .orig.tar.gz.
2562 # The ordering on the select is needed to pick the newest orig
2563 # when it exists in multiple places.
2564 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2565 found = None
2566 if self.pkg.files.has_key(dsc_name):
2567 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2568 actual_size = int(self.pkg.files[dsc_name]["size"])
2569 found = "%s in incoming" % (dsc_name)
2571 # Check the file does not already exist in the archive
2572 ql = get_poolfile_like_name(dsc_name, session)
2574 # Strip out anything that isn't '%s' or '/%s$'
2575 for i in ql:
2576 if not i.filename.endswith(dsc_name):
2577 ql.remove(i)
2579 # "[dak] has not broken them. [dak] has fixed a
2580 # brokenness. Your crappy hack exploited a bug in
2581 # the old dak.
2582 #
2583 # "(Come on! I thought it was always obvious that
2584 # one just doesn't release different files with
2585 # the same name and version.)"
2586 # -- ajk@ on d-devel@l.d.o
2588 if len(ql) > 0:
2589 # Ignore exact matches for .orig.tar.gz
2590 match = 0
2591 if re_is_orig_source.match(dsc_name):
2592 for i in ql:
2593 if self.pkg.files.has_key(dsc_name) and \
2594 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2595 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2596 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2597 # TODO: Don't delete the entry, just mark it as not needed
2598 # This would fix the stupidity of changing something we often iterate over
2599 # whilst we're doing it
2600 del self.pkg.files[dsc_name]
2601 dsc_entry["files id"] = i.file_id
2602 if not orig_files.has_key(dsc_name):
2603 orig_files[dsc_name] = {}
2604 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2605 match = 1
2607 # Don't bitch that we couldn't find this file later
2608 try:
2609 self.later_check_files.remove(dsc_name)
2610 except ValueError:
2611 pass
2614 if not match:
2615 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2617 elif re_is_orig_source.match(dsc_name):
2619 ql = get_poolfile_like_name(dsc_name, session)
2621 # Strip out anything that isn't '%s' or '/%s$'
2622 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2623 for i in ql:
2624 if not i.filename.endswith(dsc_name):
2625 ql.remove(i)
2627 if len(ql) > 0:
2628 # Unfortunately, we may get more than one match here if,
2629 # for example, the package was in potato but had an -sa
2630 # upload in woody. So we need to choose the right one.
2632 # default to something sane in case we don't match any or have only one
2633 x = ql[0]
2635 if len(ql) > 1:
2636 for i in ql:
2637 old_file = os.path.join(i.location.path, i.filename)
2638 old_file_fh = utils.open_file(old_file)
2639 actual_md5 = apt_pkg.md5sum(old_file_fh)
2640 old_file_fh.close()
2641 actual_size = os.stat(old_file)[stat.ST_SIZE]
2642 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2643 x = i
2645 old_file = os.path.join(x.location.path, x.filename)
2646 old_file_fh = utils.open_file(old_file)
2647 actual_md5 = apt_pkg.md5sum(old_file_fh)
2648 old_file_fh.close()
2649 actual_size = os.stat(old_file)[stat.ST_SIZE]
2650 found = old_file
2651 suite_type = x.location.archive_type
2652 # need this for updating dsc_files in install()
2653 dsc_entry["files id"] = x.file_id
2654 # See install() in process-accepted...
2655 if not orig_files.has_key(dsc_name):
2656 orig_files[dsc_name] = {}
2657 orig_files[dsc_name]["id"] = x.file_id
2658 orig_files[dsc_name]["path"] = old_file
2659 orig_files[dsc_name]["location"] = x.location.location_id
2661 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2662 # Not there? Check the queue directories...
2663 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2664 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2665 continue
2666 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2667 if os.path.exists(in_otherdir):
2668 in_otherdir_fh = utils.open_file(in_otherdir)
2669 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2670 in_otherdir_fh.close()
2671 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2672 found = in_otherdir
2673 if not orig_files.has_key(dsc_name):
2674 orig_files[dsc_name] = {}
2675 orig_files[dsc_name]["path"] = in_otherdir
2677 if not found:
2678 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2679 continue
2680 else:
2681 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2682 continue
2683 if actual_md5 != dsc_entry["md5sum"]:
2684 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2685 if actual_size != int(dsc_entry["size"]):
2686 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2688 ################################################################################
2689 # This is used by process-new and process-holding to recheck a changes file
2690 # at the time we're running. It mainly wraps various other internal functions
2691 # and is similar to accepted_checks - these should probably be tidied up
2693 def recheck(self, session):
2694 cnf = Config()
2695 for f in self.pkg.files.keys():
2696 # The .orig.tar.gz can disappear out from under us if it's a
2697 # duplicate of one in the archive.
2698 if not self.pkg.files.has_key(f):
2699 continue
2701 entry = self.pkg.files[f]
2703 # Check that the source still exists
2704 if entry["type"] == "deb":
2705 source_version = entry["source version"]
2706 source_package = entry["source package"]
2707 if not self.pkg.changes["architecture"].has_key("source") \
2708 and not source_exists(source_package, source_version, \
2709 suites = self.pkg.changes["distribution"].keys(), session = session):
2710 source_epochless_version = re_no_epoch.sub('', source_version)
2711 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2712 found = False
2713 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2714 if cnf.has_key("Dir::Queue::%s" % (q)):
2715 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2716 found = True
2717 if not found:
2718 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2720 # Version and file overwrite checks
2721 if entry["type"] == "deb":
2722 self.check_binary_against_db(f, session)
2723 elif entry["type"] == "dsc":
2724 self.check_source_against_db(f, session)
2725 self.check_dsc_against_db(f, session)
2727 ################################################################################
2728 def accepted_checks(self, overwrite_checks, session):
2729 # Recheck anything that relies on the database; since that's not
2730 # frozen between accept and our run time when called from p-a.
2732 # overwrite_checks is set to False when installing to stable/oldstable
2734 propogate={}
2735 nopropogate={}
2737 # Find the .dsc (again)
2738 dsc_filename = None
2739 for f in self.pkg.files.keys():
2740 if self.pkg.files[f]["type"] == "dsc":
2741 dsc_filename = f
2743 for checkfile in self.pkg.files.keys():
2744 # The .orig.tar.gz can disappear out from under us if it's a
2745 # duplicate of one in the archive.
2746 if not self.pkg.files.has_key(checkfile):
2747 continue
2749 entry = self.pkg.files[checkfile]
2751 # Check that the source still exists
2752 if entry["type"] == "deb":
2753 source_version = entry["source version"]
2754 source_package = entry["source package"]
2755 if not self.pkg.changes["architecture"].has_key("source") \
2756 and not source_exists(source_package, source_version, \
2757 suites = self.pkg.changes["distribution"].keys(), \
2758 session = session):
2759 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2761 # Version and file overwrite checks
2762 if overwrite_checks:
2763 if entry["type"] == "deb":
2764 self.check_binary_against_db(checkfile, session)
2765 elif entry["type"] == "dsc":
2766 self.check_source_against_db(checkfile, session)
2767 self.check_dsc_against_db(dsc_filename, session)
2769 # propagate in the case it is in the override tables:
2770 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2771 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2772 propogate[suite] = 1
2773 else:
2774 nopropogate[suite] = 1
2776 for suite in propogate.keys():
2777 if suite in nopropogate:
2778 continue
2779 self.pkg.changes["distribution"][suite] = 1
2781 for checkfile in self.pkg.files.keys():
2782 # Check the package is still in the override tables
entry = self.pkg.files[checkfile]  # rebind entry for this file; the loop above left it stale
2783 for suite in self.pkg.changes["distribution"].keys():
2784 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2785 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2787 ################################################################################
2788 # If any file of an upload has a recent mtime then chances are good
2789 # the file is still being uploaded.
2791 def upload_too_new(self):
2792 cnf = Config()
2793 too_new = False
2794 # Move back to the original directory to get accurate time stamps
2795 cwd = os.getcwd()
2796 os.chdir(self.pkg.directory)
2797 file_list = self.pkg.files.keys()
2798 file_list.extend(self.pkg.dsc_files.keys())
2799 file_list.append(self.pkg.changes_file)
2800 for f in file_list:
2801 try:
2802 last_modified = time.time()-os.path.getmtime(f)
2803 if last_modified < int(cnf["Dinstall::SkipTime"]):
2804 too_new = True
2805 break
2806 except:
2807 pass
2809 os.chdir(cwd)
2810 return too_new
2812 def store_changelog(self):
2814 # Skip binary-only upload if it is not a bin-NMU
2815 if not self.pkg.changes['architecture'].has_key('source'):
2816 from daklib.regexes import re_bin_only_nmu
2817 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2818 return
2820 session = DBConn().session()
2822 # Check if upload already has a changelog entry
2823 query = """SELECT changelog_id FROM changes WHERE source = :source
2824 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2825 if session.execute(query, {'source': self.pkg.changes['source'], \
2826 'version': self.pkg.changes['version'], \
2827 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2828 session.commit()
2829 return
2831 # Add current changelog text into changelogs_text table, return created ID
2832 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2833 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2835 # Link ID to the upload available in changes table
2836 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2837 AND version = :version AND architecture = :architecture"""
2838 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2839 'version': self.pkg.changes['version'], \
2840 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2842 session.commit()
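# Illustrative flow: for hello 2.4-1 uploaded as "source amd64", the
# INSERT stores the changelog text once and hands back its id; the
# UPDATE then points the matching changes row (source='hello',
# version='2.4-1', architecture='source amd64') at that id.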