5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
82 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
84 # Validate the override type
85 type_id = get_override_type(file_type, session)
87 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
91 ################################################################################
93 # Determine what parts in a .changes are NEW
95 def determine_new(filename, changes, files, warn=1, session=None):
97 Determine what parts in a C{changes} file are NEW.
100 @param filename: changes filename
102 @type changes: Upload.Pkg.changes dict
103 @param changes: Changes dictionary
105 @type files: Upload.Pkg.files dict
106 @param files: Files dictionary
109 @param warn: Warn if overrides are added for (old)stable
112 @return: dictionary of NEW components.
115 # TODO: This should all use the database instead of parsing the changes
120 dbchg = get_dbchange(filename, session)
122 print "Warning: cannot find changes file in database; won't check byhand"
124 # Build up a list of potentially new things
125 for name, f in files.items():
126 # Keep a record of byhand elements
127 if f["section"] == "byhand":
132 priority = f["priority"]
133 section = f["section"]
134 file_type = get_type(f, session)
135 component = f["component"]
137 if file_type == "dsc":
140 if not new.has_key(pkg):
142 new[pkg]["priority"] = priority
143 new[pkg]["section"] = section
144 new[pkg]["type"] = file_type
145 new[pkg]["component"] = component
146 new[pkg]["files"] = []
148 old_type = new[pkg]["type"]
149 if old_type != file_type:
150 # source gets trumped by deb or udeb
151 if old_type == "dsc":
152 new[pkg]["priority"] = priority
153 new[pkg]["section"] = section
154 new[pkg]["type"] = file_type
155 new[pkg]["component"] = component
157 new[pkg]["files"].append(name)
159 if f.has_key("othercomponents"):
160 new[pkg]["othercomponents"] = f["othercomponents"]
162 # Fix up the list of target suites
164 for suite in changes["suite"].keys():
165 oldsuite = get_suite(suite, session)
167 print "WARNING: Invalid suite %s found" % suite
170 if oldsuite.overridesuite:
171 newsuite = get_suite(oldsuite.overridesuite, session)
174 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
175 oldsuite.overridesuite, suite)
176 del changes["suite"][suite]
177 changes["suite"][oldsuite.overridesuite] = 1
179 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
180 oldsuite.overridesuite, suite)
182 # Check for unprocessed byhand files
183 if dbchg is not None:
184 for b in byhand.keys():
185 # Find the file entry in the database
187 for f in dbchg.files:
190 # If it's processed, we can ignore it
196 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)
198 # Check for new stuff
199 for suite in changes["suite"].keys():
200 for pkg in new.keys():
201 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
203 for file_entry in new[pkg]["files"]:
204 if files[file_entry].has_key("new"):
205 del files[file_entry]["new"]
209 for s in ['stable', 'oldstable']:
210 if changes["suite"].has_key(s):
211 print "WARNING: overrides will be added for %s!" % s
212 for pkg in new.keys():
213 if new[pkg].has_key("othercomponents"):
214 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
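# For illustration (not dak code), the dictionary built above ends up shaped
# roughly like this; the package name and field values are invented:
#
#   new = {
#       "foopkg": {
#           "priority": "optional",
#           "section": "utils",
#           "type": "deb",
#           "component": "main",
#           "files": ["foopkg_1.0-1_i386.deb"],
#           # only present if the package already exists in another component:
#           "othercomponents": "contrib",
#       }
#   }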
218 ################################################################################
220 def check_valid(new, session=None):
222 Check if section and priority for NEW packages exist in database.
223 Additionally does sanity checks:
224 - debian-installer packages have to be udeb (or source)
225 - non-debian-installer packages cannot be udeb
226 - source priority can only be assigned to dsc file types
229 @param new: Dict of new packages with their section, priority and type.
232 for pkg in new.keys():
233 section_name = new[pkg]["section"]
234 priority_name = new[pkg]["priority"]
235 file_type = new[pkg]["type"]
237 section = get_section(section_name, session)
239 new[pkg]["section id"] = -1
241 new[pkg]["section id"] = section.section_id
243 priority = get_priority(priority_name, session)
245 new[pkg]["priority id"] = -1
247 new[pkg]["priority id"] = priority.priority_id
250 di = section_name.find("debian-installer") != -1
252 # If d-i, we must be udeb and vice-versa
253 if (di and file_type not in ("udeb", "dsc")) or \
254 (not di and file_type == "udeb"):
255 new[pkg]["section id"] = -1
257 # If dsc we need to be source and vice-versa (Priority objects compare equal to their priority name string)
258 if (priority == "source" and file_type != "dsc") or \
259 (priority != "source" and file_type == "dsc"):
260 new[pkg]["priority id"] = -1
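# A minimal calling sketch (assumed driver code, not from dak itself):
# check_valid() annotates each entry with "section id"/"priority id",
# using -1 as the sentinel for anything unknown or failing the sanity checks.
#
#   check_valid(new, session=session)
#   for pkg in new.keys():
#       if new[pkg]["section id"] == -1 or new[pkg]["priority id"] == -1:
#           print "W: %s has an invalid section and/or priority" % (pkg)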
262 ###############################################################################
264 # Used by Upload.check_timestamps
265 class TarTime(object):
266 def __init__(self, future_cutoff, past_cutoff):
268 self.future_cutoff = future_cutoff
269 self.past_cutoff = past_cutoff
272 self.future_files = {}
273 self.ancient_files = {}
275 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
276 if MTime > self.future_cutoff:
277 self.future_files[Name] = MTime
278 if MTime < self.past_cutoff:
279 self.ancient_files[Name] = MTime
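# Illustrative use of TarTime, mirroring check_timestamps() further down;
# the cutoff values and filename here are invented for the example:
#
#   import time
#   future_cutoff = time.time() + 24 * 60 * 60
#   past_cutoff = time.mktime(time.strptime("1975", "%Y"))
#   tar = TarTime(future_cutoff, past_cutoff)
#   deb_file = utils.open_file("foo_1.0-1_i386.deb")
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   if tar.future_files or tar.ancient_files:
#       print "timestamp problems in foo_1.0-1_i386.deb"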
281 ###############################################################################
283 def prod_maintainer(notes, upload):
286 # Here we prepare an editor and get them ready to prod...
287 (fd, temp_filename) = utils.temp_filename()
288 temp_file = os.fdopen(fd, 'w')
290 temp_file.write(note.comment)
292 editor = os.environ.get("EDITOR","vi")
295 os.system("%s %s" % (editor, temp_filename))
296 temp_fh = utils.open_file(temp_filename)
297 prod_message = "".join(temp_fh.readlines())
299 print "Prod message:"
300 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
301 prompt = "[P]rod, Edit, Abandon, Quit ?"
303 while prompt.find(answer) == -1:
304 answer = utils.our_raw_input(prompt)
305 m = re_default_answer.search(prompt)
308 answer = answer[:1].upper()
309 os.unlink(temp_filename)
315 # Otherwise, do the prodding...
316 user_email_address = utils.whoami() + " <%s>" % (
317 cnf["Dinstall::MyAdminAddress"])
321 Subst["__FROM_ADDRESS__"] = user_email_address
322 Subst["__PROD_MESSAGE__"] = prod_message
323 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
325 prod_mail_message = utils.TemplateSubst(
326 Subst, cnf["Dir::Templates"] + "/process-new.prod")
329 utils.send_mail(prod_mail_message)
331 print "Sent prodding message"
333 ################################################################################
335 def edit_note(note, upload, session):
336 # Write the current data to a temporary file
337 (fd, temp_filename) = utils.temp_filename()
338 editor = os.environ.get("EDITOR","vi")
341 os.system("%s %s" % (editor, temp_filename))
342 temp_file = utils.open_file(temp_filename)
343 newnote = temp_file.read().rstrip()
346 print utils.prefix_multi_line_string(newnote," ")
347 prompt = "[D]one, Edit, Abandon, Quit ?"
349 while prompt.find(answer) == -1:
350 answer = utils.our_raw_input(prompt)
351 m = re_default_answer.search(prompt)
354 answer = answer[:1].upper()
355 os.unlink(temp_filename)
362 comment = NewComment()
363 comment.package = upload.pkg.changes["source"]
364 comment.version = upload.pkg.changes["version"]
365 comment.comment = newnote
366 comment.author = utils.whoami()
367 comment.trainee = bool(Options["Trainee"])
371 ###############################################################################
373 class Upload(object):
375 Everything that has to do with processing an upload.
383 ###########################################################################
386 """ Reset a number of internal variables."""
388 # Initialize the substitution template map
391 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
392 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
393 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
394 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
400 self.later_check_files = []
404 def package_info(self):
406 Format various messages from this Upload to send to the maintainer.
410 ('Reject Reasons', self.rejects),
411 ('Warnings', self.warnings),
412 ('Notes', self.notes),
416 for title, messages in msgs:
418 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
423 ###########################################################################
424 def update_subst(self):
425 """ Set up the per-package template substitution mappings """
429 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
430 if not self.pkg.changes.has_key("architecture") or not \
431 isinstance(self.pkg.changes["architecture"], dict):
432 self.pkg.changes["architecture"] = { "Unknown" : "" }
434 # and maintainer2047 may not exist.
435 if not self.pkg.changes.has_key("maintainer2047"):
436 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
438 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
439 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
440 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
442 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
443 if self.pkg.changes["architecture"].has_key("source") and \
444 self.pkg.changes["changedby822"] != "" and \
445 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
447 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
448 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
449 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
451 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
452 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
453 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
455 # Process policy doesn't set the fingerprint field and I don't want to make it
456 # do it for now as I don't want to have to deal with the case where we accepted
457 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
458 # the meantime so the package will be remarked as rejectable. Urgh.
459 # TODO: Fix this properly
460 if self.pkg.changes.has_key('fingerprint'):
461 session = DBConn().session()
462 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
463 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
464 if self.pkg.changes.has_key("sponsoremail"):
465 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
468 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
469 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
471 # Apply any global override of the Maintainer field
472 if cnf.get("Dinstall::OverrideMaintainer"):
473 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
474 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
476 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
477 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
478 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
479 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
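# How the finished Subst map is typically consumed (a sketch; the template
# name below is illustrative, cf. prod_maintainer() above for a real use):
#
#   self.update_subst()
#   mail_message = utils.TemplateSubst(self.Subst,
#       os.path.join(cnf["Dir::Templates"], "some.template"))
#   utils.send_mail(mail_message)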
481 ###########################################################################
482 def load_changes(self, filename):
484 Load a changes file and set up a dictionary around it. Also checks for mandatory
487 @type filename: string
488 @param filename: Changes filename, full path.
491 @return: whether the changes file was valid or not. We may want to
492 reject even if this is True (see what gets put in self.rejects).
493 This is simply to prevent us even trying things later which will
494 fail because we couldn't properly parse the file.
497 self.pkg.changes_file = filename
499 # Parse the .changes field into a dictionary
501 self.pkg.changes.update(parse_changes(filename))
502 except CantOpenError:
503 self.rejects.append("%s: can't read file." % (filename))
505 except ParseChangesError, line:
506 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
508 except ChangesUnicodeError:
509 self.rejects.append("%s: changes file not proper utf-8" % (filename))
512 # Parse the Files field from the .changes into another dictionary
514 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
515 except ParseChangesError, line:
516 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
518 except UnknownFormatError, format:
519 self.rejects.append("%s: unknown format '%s'." % (filename, format))
522 # Check for mandatory fields
523 for i in ("distribution", "source", "binary", "architecture",
524 "version", "maintainer", "files", "changes", "description"):
525 if not self.pkg.changes.has_key(i):
526 # Avoid undefined errors later
527 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
530 # Strip a source version in brackets from the source field
531 if re_strip_srcver.search(self.pkg.changes["source"]):
532 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
534 # Ensure the source field is a valid package name.
535 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
536 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
538 # Split multi-value fields into a lower-level dictionary
539 for i in ("architecture", "distribution", "binary", "closes"):
540 o = self.pkg.changes.get(i, "")
542 del self.pkg.changes[i]
544 self.pkg.changes[i] = {}
547 self.pkg.changes[i][j] = 1
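# For example (invented values): "Architecture: source i386 amd64" in the
# .changes ends up as
#
#   self.pkg.changes["architecture"] == {"source": 1, "i386": 1, "amd64": 1}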
549 # Fix the Maintainer: field to be RFC822/2047 compatible
551 (self.pkg.changes["maintainer822"],
552 self.pkg.changes["maintainer2047"],
553 self.pkg.changes["maintainername"],
554 self.pkg.changes["maintaineremail"]) = \
555 fix_maintainer(self.pkg.changes["maintainer"])
556 except ParseMaintError, msg:
557 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
558 % (filename, self.pkg.changes["maintainer"], msg))
560 # ...likewise for the Changed-By: field if it exists.
562 (self.pkg.changes["changedby822"],
563 self.pkg.changes["changedby2047"],
564 self.pkg.changes["changedbyname"],
565 self.pkg.changes["changedbyemail"]) = \
566 fix_maintainer(self.pkg.changes.get("changed-by", ""))
567 except ParseMaintError, msg:
568 self.pkg.changes["changedby822"] = ""
569 self.pkg.changes["changedby2047"] = ""
570 self.pkg.changes["changedbyname"] = ""
571 self.pkg.changes["changedbyemail"] = ""
573 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
574 % (filename, self.pkg.changes["changed-by"], msg))
576 # Ensure all the values in Closes: are numbers
577 if self.pkg.changes.has_key("closes"):
578 for i in self.pkg.changes["closes"].keys():
579 if re_isanum.match(i) is None:
580 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
582 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
583 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
584 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
586 # Check the .changes is non-empty
587 if not self.pkg.files:
588 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
591 # The changes file was syntactically valid, even if we'll reject it later
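# A minimal calling sketch (assumed driver code, not from dak itself):
#
#   u = Upload()
#   if u.load_changes("/srv/queue/foo_1.0-1_i386.changes"):
#       # syntactically valid, but there may still be entries in u.rejects
#       if u.rejects:
#           print "\n".join(u.rejects)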
594 ###########################################################################
596 def check_distributions(self):
597 "Check and map the Distribution field"
601 # Handle suite mappings
602 for m in Cnf.ValueList("SuiteMappings"):
605 if mtype == "map" or mtype == "silent-map":
606 (source, dest) = args[1:3]
607 if self.pkg.changes["distribution"].has_key(source):
608 del self.pkg.changes["distribution"][source]
609 self.pkg.changes["distribution"][dest] = 1
610 if mtype != "silent-map":
611 self.notes.append("Mapping %s to %s." % (source, dest))
612 if self.pkg.changes.has_key("distribution-version"):
613 if self.pkg.changes["distribution-version"].has_key(source):
614 self.pkg.changes["distribution-version"][source]=dest
615 elif mtype == "map-unreleased":
616 (source, dest) = args[1:3]
617 if self.pkg.changes["distribution"].has_key(source):
618 for arch in self.pkg.changes["architecture"].keys():
619 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
620 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
621 del self.pkg.changes["distribution"][source]
622 self.pkg.changes["distribution"][dest] = 1
624 elif mtype == "ignore":
626 if self.pkg.changes["distribution"].has_key(suite):
627 del self.pkg.changes["distribution"][suite]
628 self.warnings.append("Ignoring %s as a target suite." % (suite))
629 elif mtype == "reject":
631 if self.pkg.changes["distribution"].has_key(suite):
632 self.rejects.append("Uploads to %s are not accepted." % (suite))
633 elif mtype == "propup-version":
634 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
636 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
637 if self.pkg.changes["distribution"].has_key(args[1]):
638 self.pkg.changes.setdefault("distribution-version", {})
639 for suite in args[2:]:
640 self.pkg.changes["distribution-version"][suite] = suite
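# The SuiteMappings entries handled above live in dak's configuration and
# look roughly like this (an illustrative sketch, not a verbatim config):
#
#   SuiteMappings
#   {
#     "map stable proposed-updates";
#     "silent-map stable-security proposed-updates";
#     "map-unreleased stable unstable";
#     "ignore some-old-suite";
#     "reject frozen";
#     "propup-version stable-security testing testing-proposed-updates";
#   };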
642 # Ensure there is (still) a target distribution
643 if len(self.pkg.changes["distribution"].keys()) < 1:
644 self.rejects.append("No valid distribution remaining.")
646 # Ensure target distributions exist
647 for suite in self.pkg.changes["distribution"].keys():
648 if not Cnf.has_key("Suite::%s" % (suite)):
649 self.rejects.append("Unknown distribution `%s'." % (suite))
651 ###########################################################################
653 def binary_file_checks(self, f, session):
655 entry = self.pkg.files[f]
657 # Extract package control information
658 deb_file = utils.open_file(f)
660 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
662 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
664 # Can't continue, none of the checks on control would work.
667 # Check for mandatory "Description:" (the subscript below raises KeyError if it is missing)
670 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
672 self.rejects.append("%s: Missing Description in binary package" % (f))
677 # Check for mandatory fields
678 for field in [ "Package", "Architecture", "Version" ]:
679 if control.Find(field) is None:
681 self.rejects.append("%s: No %s field in control." % (f, field))
684 # Ensure the package name matches the one given in the .changes
685 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
686 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
688 # Validate the package field
689 package = control.Find("Package")
690 if not re_valid_pkg_name.match(package):
691 self.rejects.append("%s: invalid package name '%s'." % (f, package))
693 # Validate the version field
694 version = control.Find("Version")
695 if not re_valid_version.match(version):
696 self.rejects.append("%s: invalid version number '%s'." % (f, version))
698 # Ensure the architecture of the .deb is one we know about.
699 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
700 architecture = control.Find("Architecture")
701 upload_suite = self.pkg.changes["distribution"].keys()[0]
703 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
704 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
705 self.rejects.append("Unknown architecture '%s'." % (architecture))
707 # Ensure the architecture of the .deb is one of the ones
708 # listed in the .changes.
709 if not self.pkg.changes["architecture"].has_key(architecture):
710 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
712 # Sanity-check the Depends field
713 depends = control.Find("Depends")
715 self.rejects.append("%s: Depends field is empty." % (f))
717 # Sanity-check the Provides field
718 provides = control.Find("Provides")
720 provide = re_spacestrip.sub('', provides)
722 self.rejects.append("%s: Provides field is empty." % (f))
723 prov_list = provide.split(",")
724 for prov in prov_list:
725 if not re_valid_pkg_name.match(prov):
726 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
728 # Check the section & priority match those given in the .changes (non-fatal)
729 if control.Find("Section") and entry["section"] != "" \
730 and entry["section"] != control.Find("Section"):
731 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
732 (f, control.Find("Section", ""), entry["section"]))
733 if control.Find("Priority") and entry["priority"] != "" \
734 and entry["priority"] != control.Find("Priority"):
735 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
736 (f, control.Find("Priority", ""), entry["priority"]))
738 entry["package"] = package
739 entry["architecture"] = architecture
740 entry["version"] = version
741 entry["maintainer"] = control.Find("Maintainer", "")
743 if f.endswith(".udeb"):
744 self.pkg.files[f]["dbtype"] = "udeb"
745 elif f.endswith(".deb"):
746 self.pkg.files[f]["dbtype"] = "deb"
748 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
750 entry["source"] = control.Find("Source", entry["package"])
752 # Get the source version
753 source = entry["source"]
756 if source.find("(") != -1:
757 m = re_extract_src_version.match(source)
759 source_version = m.group(2)
761 if not source_version:
762 source_version = self.pkg.files[f]["version"]
764 entry["source package"] = source
765 entry["source version"] = source_version
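# e.g. (invented values): a binary at version 1.2-1+b1 whose control file
# says "Source: foo (1.2-1)" yields
#
#   entry["source package"] == "foo"
#   entry["source version"] == "1.2-1"
#
# while a plain "Source: foo" falls back to the binary's own version.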
767 # Ensure the filename matches the contents of the .deb
768 m = re_isadeb.match(f)
771 file_package = m.group(1)
772 if entry["package"] != file_package:
773 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
774 (f, file_package, entry["dbtype"], entry["package"]))
775 epochless_version = re_no_epoch.sub('', control.Find("Version"))
778 file_version = m.group(2)
779 if epochless_version != file_version:
780 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
781 (f, file_version, entry["dbtype"], epochless_version))
784 file_architecture = m.group(3)
785 if entry["architecture"] != file_architecture:
786 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
787 (f, file_architecture, entry["dbtype"], entry["architecture"]))
789 # Check for existing source
790 source_version = entry["source version"]
791 source_package = entry["source package"]
792 if self.pkg.changes["architecture"].has_key("source"):
793 if source_version != self.pkg.changes["version"]:
794 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
795 (source_version, f, self.pkg.changes["version"]))
797 # Check in the SQL database
798 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
799 # Check in one of the other directories
800 source_epochless_version = re_no_epoch.sub('', source_version)
801 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
802 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
804 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
807 dsc_file_exists = False
808 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
809 if cnf.has_key("Dir::Queue::%s" % (myq)):
810 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
811 dsc_file_exists = True
814 if not dsc_file_exists:
815 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
817 # Check the version and for file overwrites
818 self.check_binary_against_db(f, session)
820 # Temporarily disable contents generation until we change the table storage layout
823 #if len(b.rejects) > 0:
824 # for j in b.rejects:
825 # self.rejects.append(j)
827 def source_file_checks(self, f, session):
828 entry = self.pkg.files[f]
830 m = re_issource.match(f)
834 entry["package"] = m.group(1)
835 entry["version"] = m.group(2)
836 entry["type"] = m.group(3)
838 # Ensure the source package name matches the Source field in the .changes
839 if self.pkg.changes["source"] != entry["package"]:
840 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
842 # Ensure the source version matches the version in the .changes file
843 if re_is_orig_source.match(f):
844 changes_version = self.pkg.changes["chopversion2"]
846 changes_version = self.pkg.changes["chopversion"]
848 if changes_version != entry["version"]:
849 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
851 # Ensure the .changes lists source in the Architecture field
852 if not self.pkg.changes["architecture"].has_key("source"):
853 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
855 # Check the signature of a .dsc file
856 if entry["type"] == "dsc":
857 # check_signature returns either:
858 # (None, [list, of, rejects]) or (signature, [])
859 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
861 self.rejects.append(j)
863 entry["architecture"] = "source"
865 def per_suite_file_checks(self, f, suite, session):
867 entry = self.pkg.files[f]
870 if entry.has_key("byhand"):
873 # Check we have fields we need to do these checks
875 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
876 if not entry.has_key(m):
877 self.rejects.append("file '%s' does not have field %s set" % (f, m))
883 # Handle component mappings
884 for m in cnf.ValueList("ComponentMappings"):
885 (source, dest) = m.split()
886 if entry["component"] == source:
887 entry["original component"] = source
888 entry["component"] = dest
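# ComponentMappings entries are simple "source dest" pairs in dak's
# configuration, e.g. (illustrative values):
#
#   ComponentMappings
#   {
#     "non-US/main main";
#     "non-US/contrib contrib";
#   };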
890 # Ensure the component is valid for the target suite
891 if cnf.has_key("Suite::%s::Components" % (suite)) and \
892 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
893 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
896 # Validate the component
897 if not get_component(entry["component"], session):
898 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
901 # See if the package is NEW
902 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
905 # Validate the priority
906 if entry["priority"].find('/') != -1:
907 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
909 # Determine the location
910 location = cnf["Dir::Pool"]
911 l = get_location(location, entry["component"], session=session)
913 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
914 entry["location id"] = -1
916 entry["location id"] = l.location_id
918 # Check the md5sum & size against existing files (if any)
919 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
921 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
922 entry["size"], entry["md5sum"], entry["location id"])
925 self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
926 elif found is False and poolfile is not None:
927 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
930 entry["files id"] = None
932 entry["files id"] = poolfile.file_id
934 # Check for packages that have moved from one component to another
935 entry['suite'] = suite
936 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
938 entry["othercomponents"] = res.fetchone()[0]
940 def check_files(self, action=True):
941 file_keys = self.pkg.files.keys()
947 os.chdir(self.pkg.directory)
949 ret = holding.copy_to_holding(f)
951 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
955 # check we already know the changes file
956 # [NB: this check must be done post-suite mapping]
957 base_filename = os.path.basename(self.pkg.changes_file)
959 session = DBConn().session()
962 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
963 # if in the pool or in a queue other than unchecked, reject
964 if (dbc.in_queue is None) \
965 or (dbc.in_queue is not None
966 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
967 self.rejects.append("%s file already known to dak" % base_filename)
968 except NoResultFound, e:
975 for f, entry in self.pkg.files.items():
976 # Ensure the file does not already exist in one of the accepted directories
977 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
978 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
979 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
980 self.rejects.append("%s file already exists in the %s directory." % (f, d))
982 if not re_taint_free.match(f):
983 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
985 # Check the file is readable
986 if not os.access(f, os.R_OK):
987 # When running in -n, copy_to_holding() won't have
988 # generated the reject_message, so we need to.
990 if os.path.exists(f):
991 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
993 # Don't directly reject, mark to check later to deal with orig's
994 # we can find in the pool
995 self.later_check_files.append(f)
996 entry["type"] = "unreadable"
999 # If it's byhand skip remaining checks
1000 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1002 entry["type"] = "byhand"
1004 # Checks for a binary package...
1005 elif re_isadeb.match(f):
1007 entry["type"] = "deb"
1009 # This routine appends to self.rejects/warnings as appropriate
1010 self.binary_file_checks(f, session)
1012 # Checks for a source package...
1013 elif re_issource.match(f):
1016 # This routine appends to self.rejects/warnings as appropriate
1017 self.source_file_checks(f, session)
1019 # Not a binary or source package? Assume byhand...
1022 entry["type"] = "byhand"
1024 # Per-suite file checks
1025 entry["oldfiles"] = {}
1026 for suite in self.pkg.changes["distribution"].keys():
1027 self.per_suite_file_checks(f, suite, session)
1031 # If the .changes file says it has source, it must have source.
1032 if self.pkg.changes["architecture"].has_key("source"):
1034 self.rejects.append("no source found, but the Architecture line in the changes file mentions source.")
1036 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1037 self.rejects.append("source only uploads are not supported.")
1039 ###########################################################################
1040 def check_dsc(self, action=True, session=None):
1041 """Returns bool indicating whether or not the source changes are valid"""
1042 # Ensure there is source to check
1043 if not self.pkg.changes["architecture"].has_key("source"):
1048 for f, entry in self.pkg.files.items():
1049 if entry["type"] == "dsc":
1051 self.rejects.append("can not process a .changes file with multiple .dsc's.")
1056 # If there isn't one, reject the upload and bail out.
1057 if not dsc_filename:
1058 self.rejects.append("source uploads must contain a dsc file")
1061 # Parse the .dsc file
1063 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1064 except CantOpenError:
1065 # if not run with -n, copy_to_holding() will have done this for us...
1067 self.rejects.append("%s: can't read file." % (dsc_filename))
1068 except ParseChangesError, line:
1069 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1070 except InvalidDscError, line:
1071 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1072 except ChangesUnicodeError:
1073 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1075 # Build up the file list of files mentioned by the .dsc
1077 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1078 except NoFilesFieldError:
1079 self.rejects.append("%s: no Files: field." % (dsc_filename))
1081 except UnknownFormatError, format:
1082 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1084 except ParseChangesError, line:
1085 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1088 # Enforce mandatory fields
1089 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1090 if not self.pkg.dsc.has_key(i):
1091 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1094 # Validate the source and version fields
1095 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1096 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1097 if not re_valid_version.match(self.pkg.dsc["version"]):
1098 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1100 # Only a limited list of source formats are allowed in each suite
1101 for dist in self.pkg.changes["distribution"].keys():
1102 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1103 if self.pkg.dsc["format"] not in allowed:
1104 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1106 # Validate the Maintainer field
1108 # We ignore the return value
1109 fix_maintainer(self.pkg.dsc["maintainer"])
1110 except ParseMaintError, msg:
1111 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1112 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1114 # Validate the build-depends field(s)
1115 for field_name in [ "build-depends", "build-depends-indep" ]:
1116 field = self.pkg.dsc.get(field_name)
1118 # Have apt try to parse them...
1120 apt_pkg.ParseSrcDepends(field)
1122 self.rejects.append("%s: invalid %s field (cannot be parsed by apt)." % (dsc_filename, field_name.title()))
1124 # Ensure the version number in the .dsc matches the version number in the .changes
1125 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1126 changes_version = self.pkg.files[dsc_filename]["version"]
1128 if epochless_dsc_version != changes_version:
1129 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1131 # Ensure the Files field contains only what's expected
1132 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1134 # Ensure source is newer than existing source in target suites
1135 session = DBConn().session()
1136 self.check_source_against_db(dsc_filename, session)
1137 self.check_dsc_against_db(dsc_filename, session)
1139 dbchg = get_dbchange(self.pkg.changes_file, session)
1141 # Finally, check if we're missing any files
1142 for f in self.later_check_files:
1144 # Check if we've already processed this file if we have a dbchg object
1147 for pf in dbchg.files:
1148 if pf.filename == f and pf.processed:
1149 self.notes.append('%s was already processed so we can go ahead' % f)
1151 del self.pkg.files[f]
1153 self.rejects.append("Could not find file %s referenced in changes" % f)
1159 ###########################################################################
1161 def get_changelog_versions(self, source_dir):
1162 """Extracts the source package and (optionally) grabs the
1163 version history out of debian/changelog for the BTS."""
1167 # Find the .dsc (again)
1169 for f in self.pkg.files.keys():
1170 if self.pkg.files[f]["type"] == "dsc":
1173 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1174 if not dsc_filename:
1177 # Create a symlink mirror of the source files in our temporary directory
1178 for f in self.pkg.files.keys():
1179 m = re_issource.match(f)
1181 src = os.path.join(source_dir, f)
1182 # If a file is missing for whatever reason, give up.
1183 if not os.path.exists(src):
1186 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1187 self.pkg.orig_files[f].has_key("path"):
1189 dest = os.path.join(os.getcwd(), f)
1190 os.symlink(src, dest)
1192 # If the orig files are not a part of the upload, create symlinks to the
1194 for orig_file in self.pkg.orig_files.keys():
1195 if not self.pkg.orig_files[orig_file].has_key("path"):
1197 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1198 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1200 # Extract the source
1201 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1202 (result, output) = commands.getstatusoutput(cmd)
1204 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1205 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1208 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1211 # Get the upstream version
1212 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1213 if re_strip_revision.search(upstr_version):
1214 upstr_version = re_strip_revision.sub('', upstr_version)
1216 # Ensure the changelog file exists
1217 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1218 if not os.path.exists(changelog_filename):
1219 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1222 # Parse the changelog
1223 self.pkg.dsc["bts changelog"] = ""
1224 changelog_file = utils.open_file(changelog_filename)
1225 for line in changelog_file.readlines():
1226 m = re_changelog_versions.match(line)
1228 self.pkg.dsc["bts changelog"] += line
1229 changelog_file.close()
1231 # Check we found at least one revision in the changelog
1232 if not self.pkg.dsc["bts changelog"]:
1233 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1235 def check_source(self):
1237 # a) there's no source
1238 if not self.pkg.changes["architecture"].has_key("source"):
1241 tmpdir = utils.temp_dirname()
1243 # Move into the temporary directory
1247 # Get the changelog version history
1248 self.get_changelog_versions(cwd)
1250 # Move back and cleanup the temporary tree
1254 shutil.rmtree(tmpdir)
1256 if e.errno != errno.EACCES:
1258 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1260 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1261 # We probably have u-r or u-w directories so chmod everything
1263 cmd = "chmod -R u+rwx %s" % (tmpdir)
1264 result = os.system(cmd)
1266 utils.fubar("'%s' failed with result %s." % (cmd, result))
1267 shutil.rmtree(tmpdir)
1268 except Exception, e:
1269 print "W: removal of temporary source tree failed (%s)" % e
1270 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1272 ###########################################################################
1273 def ensure_hashes(self):
1274 # Make sure we recognise the format of the Files: field in the .changes
1275 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1276 if len(format) == 2:
1277 format = int(format[0]), int(format[1])
1279 format = int(float(format[0])), 0
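# e.g. "Format: 1.8" gives format == (1, 8), while a bare "Format: 1"
# takes the else branch above and gives (1, 0)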
1281 # We need to deal with the original changes blob, as the fields we need
1282 # might not be in the changes dict serialised into the .dak anymore.
1283 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1285 # Copy the checksums over to the current changes dict. This will keep
1286 # the existing modifications to it intact.
1287 for field in orig_changes:
1288 if field.startswith('checksums-'):
1289 self.pkg.changes[field] = orig_changes[field]
1291 # Check for unsupported hashes
1292 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1293 self.rejects.append(j)
1295 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1296 self.rejects.append(j)
1298 # If the changes file predates the version in which a given hash appears,
1299 # we calculate that hash ourselves rather than require it in the changes file
1300 for hashname, hashfunc, version in utils.known_hashes:
1301 # TODO: Move _ensure_changes_hash into this class
1302 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1303 self.rejects.append(j)
1304 if "source" in self.pkg.changes["architecture"]:
1305 # TODO: Move _ensure_dsc_hash into this class
1306 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1307 self.rejects.append(j)
1309 def check_hashes(self):
1310 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1311 self.rejects.append(m)
1313 for m in utils.check_size(".changes", self.pkg.files):
1314 self.rejects.append(m)
1316 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1317 self.rejects.append(m)
1319 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1320 self.rejects.append(m)
1322 self.ensure_hashes()
1324 ###########################################################################
1326 def ensure_orig(self, target_dir='.', session=None):
1328 Ensures that all orig files mentioned in the changes file are present
1329 in target_dir. If they do not exist, they are symlinked into place.
1331 A list containing the symlinks that were created is returned (so they
1338 for filename, entry in self.pkg.dsc_files.iteritems():
1339 if not re_is_orig_source.match(filename):
1340 # File is not an orig; ignore
1343 if os.path.exists(filename):
1344 # File exists, no need to continue
1347 def symlink_if_valid(path):
1348 f = utils.open_file(path)
1349 md5sum = apt_pkg.md5sum(f)
1352 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1353 expected = (int(entry['size']), entry['md5sum'])
1355 if fingerprint != expected:
1358 dest = os.path.join(target_dir, filename)
1360 os.symlink(path, dest)
1361 symlinked.append(dest)
1367 session_ = DBConn().session()
1372 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1373 poolfile_path = os.path.join(
1374 poolfile.location.path, poolfile.filename
1377 if symlink_if_valid(poolfile_path):
1387 # Look in some other queues for the file
1388 queues = ('New', 'Byhand', 'ProposedUpdates',
1389 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1391 for queue in queues:
1392 if not cnf.get('Dir::Queue::%s' % queue):
1395 queuefile_path = os.path.join(
1396 cnf['Dir::Queue::%s' % queue], filename
1399 if not os.path.exists(queuefile_path):
1400 # Does not exist in this queue
1403 if symlink_if_valid(queuefile_path):
1408 ###########################################################################
1410 def check_lintian(self):
1412 Extends self.rejects by checking the output of lintian against tags
1413 specified in Dinstall::LintianTags.
1418 # Don't reject binary uploads
1419 if not self.pkg.changes['architecture'].has_key('source'):
1422 # Only check some distributions
1423 for dist in ('unstable', 'experimental'):
1424 if dist in self.pkg.changes['distribution']:
1429 # If we do not have a tagfile, don't do anything
1430 tagfile = cnf.get("Dinstall::LintianTags")
1434 # Parse the yaml file
1435 sourcefile = file(tagfile, 'r')
1436 sourcecontent = sourcefile.read()
1440 lintiantags = yaml.load(sourcecontent)['lintian']
1441 except yaml.YAMLError, msg:
1442 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
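# The tag file is expected to look roughly like this (a sketch; the
# category names and tags here are illustrative):
#
#   lintian:
#     nonfatal:
#       - some-nonfatal-tag
#     fatal:
#       - some-fatal-tag
#
# i.e. a top-level "lintian" key mapping categories to lists of tags,
# which is what the ['lintian'] subscript above extracts.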
1445 # Try to find all origs mentioned in the .dsc
1446 symlinked = self.ensure_orig()
1448 # Setup the input file for lintian
1449 fd, temp_filename = utils.temp_filename()
1450 temptagfile = os.fdopen(fd, 'w')
1451 for tags in lintiantags.values():
1452 temptagfile.writelines(['%s\n' % x for x in tags])
1456 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1457 (temp_filename, self.pkg.changes_file)
1459 result, output = commands.getstatusoutput(cmd)
1461 # Remove our tempfile and any symlinks we created
1462 os.unlink(temp_filename)
1464 for symlink in symlinked:
1468 utils.warn("lintian failed for %s [return code: %s]." % \
1469 (self.pkg.changes_file, result))
1470 utils.warn(utils.prefix_multi_line_string(output, \
1471 " [possible output:] "))
1476 [self.pkg.changes_file, "check_lintian"] + list(txt)
1480 parsed_tags = parse_lintian_output(output)
1481 self.rejects.extend(
1482 generate_reject_messages(parsed_tags, lintiantags, log=log)
1485 ###########################################################################
1486 def check_urgency(self):
1488 if self.pkg.changes["architecture"].has_key("source"):
1489 if not self.pkg.changes.has_key("urgency"):
1490 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1491 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1492 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1493 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1494 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1495 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1497 ###########################################################################
1499 # Sanity check the time stamps of files inside debs.
1500 # [Files in the near future cause ugly warnings and extreme time
1501 # travel can cause errors on extraction]
1503 def check_timestamps(self):
1506 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1507 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1508 tar = TarTime(future_cutoff, past_cutoff)
1510 for filename, entry in self.pkg.files.items():
1511 if entry["type"] == "deb":
1514 deb_file = utils.open_file(filename)
1515 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1518 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1519 except SystemError, e:
1520 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1521 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1524 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1528 future_files = tar.future_files.keys()
1530 num_future_files = len(future_files)
1531 future_file = future_files[0]
1532 future_date = tar.future_files[future_file]
1533 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1534 % (filename, num_future_files, future_file, time.ctime(future_date)))
1536 ancient_files = tar.ancient_files.keys()
1538 num_ancient_files = len(ancient_files)
1539 ancient_file = ancient_files[0]
1540 ancient_date = tar.ancient_files[ancient_file]
1541 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1542 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1544 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1546 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1547 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1549 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1555 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1556 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1557 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1558 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1559 self.pkg.changes["sponsoremail"] = uid_email
1564 ###########################################################################
1565 # check_signed_by_key checks
1566 ###########################################################################
1568 def check_signed_by_key(self):
1569 """Ensure the .changes is signed by an authorized uploader."""
1570 session = DBConn().session()
1572 # First of all we check that the person has proper upload permissions
1573 # and that this upload isn't blocked
1574 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1577 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1580 # TODO: Check that import-keyring adds UIDs properly
1582 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1585 # Check that the fingerprint which uploaded has permission to do so
1586 self.check_upload_permissions(fpr, session)
1588 # Check that this package is not in a transition
1589 self.check_transition(session)
1594 def check_upload_permissions(self, fpr, session):
1595 # Check any one-off upload blocks
1596 self.check_upload_blocks(fpr, session)
1598 # Start with DM as a special case
1599 # DM is a special case unfortunately, so we check it first
1600 # (keys with no source access get more access than DMs in one
1601 # way; DMs can only upload for their packages whether source
1602 # or binary, whereas keys with no access might be able to
1603 # upload some binaries)
1604 if fpr.source_acl.access_level == 'dm':
1605 self.check_dm_upload(fpr, session)
1607 # Check source-based permissions for other types
1608 if self.pkg.changes["architecture"].has_key("source") and \
1609 fpr.source_acl.access_level is None:
1610 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1611 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1612 self.rejects.append(rej)
1614 # If not a DM, we allow full upload rights
1615 uid_email = "%s@debian.org" % (fpr.uid.uid)
1616 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1619 # Check binary upload permissions
1620 # By this point we know that DMs can't have got here unless they
1621 # are allowed to deal with the package concerned so just apply
1623 if fpr.binary_acl.access_level == 'full':
1626 # Otherwise we're in the map case
1627 tmparches = self.pkg.changes["architecture"].copy()
1628 tmparches.pop('source', None)
1630 for bam in fpr.binary_acl_map:
1631 tmparches.pop(bam.architecture.arch_string, None)
1633 if len(tmparches.keys()) > 0:
1634 if fpr.binary_reject:
1635 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1636 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1637 self.rejects.append(rej)
1639 # TODO: This is where we'll implement reject vs throw away binaries later
1640 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1641 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1642 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1643 self.rejects.append(rej)
1646 def check_upload_blocks(self, fpr, session):
1647 """Check whether any upload blocks apply to this source, source
1648 version, uid / fpr combination"""
1650 def block_rej_template(fb):
1651 rej = 'Manual upload block in place for package %s' % fb.source
1652 if fb.version is not None:
1653 rej += ', version %s' % fb.version
1656 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1657 # version is None if the block applies to all versions
1658 if fb.version is None or fb.version == self.pkg.changes['version']:
1659 # Check both fpr and uid - either is enough to cause a reject
1660 if fb.fpr is not None:
1661 if fb.fpr.fingerprint == fpr.fingerprint:
1662 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1663 if fb.uid is not None:
1664 if fb.uid == fpr.uid:
1665 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1668 def check_dm_upload(self, fpr, session):
1669 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1670 ## none of the uploaded packages are NEW
1672 for f in self.pkg.files.keys():
1673 if self.pkg.files[f].has_key("byhand"):
1674 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1676 if self.pkg.files[f].has_key("new"):
1677 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1683 ## the most recent version of the package uploaded to unstable or
1684 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1685 ## section of its control file
1686 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1687 q = q.join(SrcAssociation)
1688 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1689 q = q.order_by(desc('source.version')).limit(1)
1694 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1695 self.rejects.append(rej)
1699 if not r.dm_upload_allowed:
1700 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1701 self.rejects.append(rej)
1704 ## the Maintainer: field of the uploaded .changes file corresponds with
1705 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1707 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1708 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1710 ## the most recent version of the package uploaded to unstable or
1711 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1712 ## non-developer maintainers cannot NMU or hijack packages)
1714 # srcuploaders includes the maintainer
1716 for sup in r.srcuploaders:
1717 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1718 # Eww - I hope we never have two people with the same name in Debian
1719 if email == fpr.uid.uid or name == fpr.uid.name:
1724 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1727 ## none of the packages are being taken over from other source packages
1728 for b in self.pkg.changes["binary"].keys():
1729 for suite in self.pkg.changes["distribution"].keys():
1730 q = session.query(DBSource)
1731 q = q.join(DBBinary).filter_by(package=b)
1732 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1735 if s.source != self.pkg.changes["source"]:
1736 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1740 def check_transition(self, session):
1741 cnf = Config()
1743 sourcepkg = self.pkg.changes["source"]
1745 # No sourceful upload -> no need to do anything else, direct return
1746 # We only act on uploads to unstable, not experimental or those going to some
1747 # proposed-updates queue
1748 if "source" not in self.pkg.changes["architecture"] or \
1749 "unstable" not in self.pkg.changes["distribution"]:
1752 # Also, only check if there is a transitions file defined (and existent)
1753 # in the configuration.
1754 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1755 if transpath == "" or not os.path.exists(transpath):
1756 return
1758 # Parse the yaml file
1759 sourcefile = file(transpath, 'r')
1760 sourcecontent = sourcefile.read()
1761 try:
1762 transitions = yaml.load(sourcecontent)
1763 except yaml.YAMLError, msg:
1764 # This shouldn't happen, there is a wrapper to edit the file which
1765 # checks it, but we prefer to be safe rather than end up rejecting
1766 # everyone.
1767 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1768 return
1770 # Now look through all defined transitions
1771 for trans in transitions:
1772 t = transitions[trans]
1773 source = t["source"]
1776 # Will be None if nothing is in testing.
1777 current = get_source_in_suite(source, "testing", session)
1778 if current is not None:
1779 compare = apt_pkg.VersionCompare(current.version, expected)
1781 if current is None or compare < 0:
1782 # This is still valid, the current version in testing is older than
1783 # the new version we wait for, or there is none in testing yet
1785 # Check if the source we look at is affected by this.
1786 if sourcepkg in t['packages']:
1787 # The source is affected, lets reject it.
1789 rejectmsg = "%s: part of the %s transition.\n\n" % (
1792 if current is not None:
1793 currentlymsg = "at version %s" % (current.version)
1795 currentlymsg = "not present in testing"
1797 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1799 rejectmsg += "\n".join(textwrap.wrap("""Your package
1800 is part of a testing transition designed to get %s migrated (it is
1801 currently %s, we need version %s). This transition is managed by the
1802 Release Team, and %s is the Release-Team member responsible for it.
1803 Please mail debian-release@lists.debian.org or contact %s directly if you
1804 need further assistance. You might want to upload to experimental until this
1805 transition is done."""
1806 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1808 self.rejects.append(rejectmsg)
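# --- editor's example (not part of the original source) ----------------------
# A sketch of the transitions file shape the loop above expects.  The field
# names come from the accesses in check_transition ("source", "new",
# "packages", "reason", "rm"); the values here are invented:
#
#   gnome3:
#       source: gnome-shell
#       new: 3.0.1-1
#       rm: Some Body
#       reason: GNOME 3 transition
#       packages:
#           - gnome-shell
#           - mutter
#
# yaml.load() turns this into a dict of dicts, so transitions[trans] yields
# one such block per transition.
# ------------------------------------------------------------------------------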
1811 ###########################################################################
1812 # End check_signed_by_key checks
1813 ###########################################################################
1815 def build_summaries(self):
1816 """ Build a summary of changes the upload introduces. """
1818 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1820 short_summary = summary
1822 # This is for direport's benefit...
1823 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1826 summary += "Changes: " + f
1828 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1830 summary += self.announce(short_summary, 0)
1832 return (summary, short_summary)
1834 ###########################################################################
1836 def close_bugs(self, summary, action):
1838 Send mail to close bugs as instructed by the closes field in the changes file.
1839 Also add a line to summary if any work was done.
1841 @type summary: string
1842 @param summary: summary text, as given by L{build_summaries}
1845 @param action: If set to false, no real action will be taken.
1848 @return: summary. If action was taken, extended by the list of closed bugs.
1852 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1854 bugs = self.pkg.changes["closes"].keys()
1860 summary += "Closing bugs: "
1862 summary += "%s " % (bug)
1865 self.Subst["__BUG_NUMBER__"] = bug
1866 if self.pkg.changes["distribution"].has_key("stable"):
1867 self.Subst["__STABLE_WARNING__"] = """
1868 Note that this package is not part of the released stable Debian
1869 distribution. It may have dependencies on other unreleased software,
1870 or other instabilities. Please take care if you wish to install it.
1871 The update will eventually make its way into the next released Debian
1872 distribution."""
1873 else:
1874 self.Subst["__STABLE_WARNING__"] = ""
1875 mail_message = utils.TemplateSubst(self.Subst, template)
1876 utils.send_mail(mail_message)
1878 # Clear up after ourselves
1879 del self.Subst["__BUG_NUMBER__"]
1880 del self.Subst["__STABLE_WARNING__"]
1882 if action and self.logger:
1883 self.logger.log(["closing bugs"] + bugs)
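# --- editor's example (not part of the original source) ----------------------
# utils.TemplateSubst fills __PLACEHOLDER__ tokens in a mail template from the
# self.Subst mapping.  A reduced sketch of the idea (not the real
# implementation):
#
#   def template_subst(subst_map, template):
#       for key, value in subst_map.items():
#           template = template.replace(key, str(value))
#       return template
#
#   template_subst({"__BUG_NUMBER__": 123456}, "Closes: #__BUG_NUMBER__")
#   # -> 'Closes: #123456'
# ------------------------------------------------------------------------------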
1889 ###########################################################################
1891 def announce(self, short_summary, action):
1893 Send an announce mail about a new upload.
1895 @type short_summary: string
1896 @param short_summary: Short summary text to include in the mail
1899 @param action: If set to false, no real action will be taken.
1902 @return: Text string describing the action taken.
1904 """
1906 cnf = Config()
1907 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1909 # Only do announcements for source uploads with a recent dpkg-dev installed
1910 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1911 self.pkg.changes["architecture"].has_key("source"):
1912 return ""
1914 lists_done = {}
1915 summary = ""
1917 self.Subst["__SHORT_SUMMARY__"] = short_summary
1919 for dist in self.pkg.changes["distribution"].keys():
1920 suite = get_suite(dist)
1921 if suite is None: continue
1922 announce_list = suite.announce
1923 if announce_list == "" or lists_done.has_key(announce_list):
1926 lists_done[announce_list] = 1
1927 summary += "Announcing to %s\n" % (announce_list)
1931 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1932 if cnf.get("Dinstall::TrackingServer") and \
1933 self.pkg.changes["architecture"].has_key("source"):
1934 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1935 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1937 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1938 utils.send_mail(mail_message)
1940 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1942 if cnf.FindB("Dinstall::CloseBugs"):
1943 summary = self.close_bugs(summary, action)
1945 del self.Subst["__SHORT_SUMMARY__"]
1949 ###########################################################################
1951 def accept (self, summary, short_summary, session=None):
1955 This moves all files referenced from the .changes into the pool,
1956 sends the accepted mail, announces to lists, closes bugs and
1957 also checks for override disparities. If enabled it will write out
1958 the version history for the BTS Version Tracking and will finally call
1961 @type summary: string
1962 @param summary: Summary text
1964 @type short_summary: string
1965 @param short_summary: Short summary
1966 """
1968 cnf = Config()
1969 stats = SummaryStats()
1971 print "Installing."
1972 self.logger.log(["installing changes", self.pkg.changes_file])
1974 poolfiles = []
1976 # Add the .dsc file to the DB first
1977 for newfile, entry in self.pkg.files.items():
1978 if entry["type"] == "dsc":
1979 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1980 for j in pfs:
1981 poolfiles.append(j)
1983 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1984 for newfile, entry in self.pkg.files.items():
1985 if entry["type"] == "deb":
1986 poolfiles.append(add_deb_to_db(self, newfile, session))
1988 # If this is a sourceful diff only upload that is moving
1989 # cross-component we need to copy the .orig files into the new
1990 # component too for the same reasons as above.
1991 # XXX: mhy: I think this should be in add_dsc_to_db
1992 if self.pkg.changes["architecture"].has_key("source"):
1993 for orig_file in self.pkg.orig_files.keys():
1994 if not self.pkg.orig_files[orig_file].has_key("id"):
1995 continue # Skip if it's not in the pool
1996 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1997 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1998 continue # Skip if the location didn't change
2001 oldf = get_poolfile_by_id(orig_file_id, session)
2002 old_filename = os.path.join(oldf.location.path, oldf.filename)
2003 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2004 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2006 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2008 # TODO: Care about size/md5sum collisions etc
2009 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2011 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2012 if newf is None:
2013 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2014 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2018 # Don't reference the old file from this changes
2019 for p in poolfiles:
2020 if p.file_id == oldf.file_id:
2021 poolfiles.remove(p)
2023 poolfiles.append(newf)
2025 # Fix up the DSC references
2026 toremove = []
2028 for df in source.srcfiles:
2029 if df.poolfile.file_id == oldf.file_id:
2030 # Add a new DSC entry and mark the old one for deletion
2031 # Don't do it in the loop so we don't change the thing we're iterating over
2032 newdscf = DSCFile()
2033 newdscf.source_id = source.source_id
2034 newdscf.poolfile_id = newf.file_id
2035 session.add(newdscf)
2037 # Mark the old file for deletion
2038 toremove.append(df)
2040 for df in toremove:
2041 session.delete(df)
2045 # Make sure that our source object is up-to-date
2046 session.expire(source)
2048 # Add changelog information to the database
2049 self.store_changelog()
2051 # Install the files into the pool
2052 for newfile, entry in self.pkg.files.items():
2053 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2054 utils.move(newfile, destination)
2055 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2056 stats.accept_bytes += float(entry["size"])
2058 # Copy the .changes file across for suites which need it.
2059 copy_changes = dict([(x.copychanges, '')
2060 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2061 if x.copychanges is not None])
2063 for dest in copy_changes.keys():
2064 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2066 # We're done - commit the database changes
2067 session.commit()
2068 # Our SQL session will automatically start a new transaction after
2069 # the last commit
2071 # Move the .changes into the 'done' directory
2072 utils.move(self.pkg.changes_file,
2073 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2075 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2076 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2079 self.Subst["__SUMMARY__"] = summary
2080 mail_message = utils.TemplateSubst(self.Subst,
2081 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2082 utils.send_mail(mail_message)
2083 self.announce(short_summary, 1)
2085 ## Helper stuff for DebBugs Version Tracking
2086 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2087 if self.pkg.changes["architecture"].has_key("source"):
2088 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2089 version_history = os.fdopen(fd, 'w')
2090 version_history.write(self.pkg.dsc["bts changelog"])
2091 version_history.close()
2092 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2093 self.pkg.changes_file[:-8]+".versions")
2094 os.rename(temp_filename, filename)
2095 os.chmod(filename, 0644)
2097 # Write out the binary -> source mapping.
2098 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2099 debinfo = os.fdopen(fd, 'w')
2100 for name, entry in sorted(self.pkg.files.items()):
2101 if entry["type"] == "deb":
2102 line = " ".join([entry["package"], entry["version"],
2103 entry["architecture"], entry["source package"],
2104 entry["source version"]])
2105 debinfo.write(line+"\n")
2107 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2108 self.pkg.changes_file[:-8]+".debinfo")
2109 os.rename(temp_filename, filename)
2110 os.chmod(filename, 0644)
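# --- editor's example (not part of the original source) ----------------------
# Each .debinfo line written above is a space-separated binary -> source
# mapping: package, version, architecture, source package, source version.
# With made-up values:
#
#   line = " ".join(["hello", "2.10-1", "amd64", "hello", "2.10-1"])
#   # -> "hello 2.10-1 amd64 hello 2.10-1"
# ------------------------------------------------------------------------------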
2114 # Set up our copy queues (e.g. buildd queues)
2115 for suite_name in self.pkg.changes["distribution"].keys():
2116 suite = get_suite(suite_name, session)
2117 for q in suite.copy_queues:
2118 for f in poolfiles:
2119 q.add_file_from_pool(f)
2121 session.commit()
2123 # Finally...
2124 stats.accept_count += 1
2126 def check_override(self):
2128 Checks override entries for validity. Mails "Override disparity" warnings,
2129 if that feature is enabled.
2131 Abandons the check if
2132 - override disparity checks are disabled
2133 - mail sending is disabled
2134 """
2136 cnf = Config()
2138 # Abandon the check if override disparity checks have been disabled
2139 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2140 return
2142 summary = self.pkg.check_override()
2143 if summary == "":
2144 return
2147 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2150 self.Subst["__SUMMARY__"] = summary
2151 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2152 utils.send_mail(mail_message)
2153 del self.Subst["__SUMMARY__"]
2155 ###########################################################################
2157 def remove(self, from_dir=None):
2159 Used (for instance) in p-u to remove the package from unchecked
2161 Also removes the package from holding area.
2163 if from_dir is None:
2164 from_dir = self.pkg.directory
2166 h = Holding()
2167 for f in self.pkg.files.keys():
2168 os.unlink(os.path.join(from_dir, f))
2169 if os.path.exists(os.path.join(h.holding_dir, f)):
2170 os.unlink(os.path.join(h.holding_dir, f))
2172 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2173 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2174 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2176 ###########################################################################
2178 def move_to_queue (self, queue):
2180 Move files to a destination queue using the permissions in the table
2181 """
2182 h = Holding()
2183 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2184 queue.path, perms=int(queue.change_perms, 8))
2185 for f in self.pkg.files.keys():
2186 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
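# --- editor's example (not part of the original source) ----------------------
# The queue permission columns hold octal strings, hence the int(x, 8)
# conversions above before handing a numeric mode to utils.move().  With a
# hypothetical value:
#
#   mode = int("0664", 8)
#   assert mode == 0664 == 436
# ------------------------------------------------------------------------------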
2188 ###########################################################################
2190 def force_reject(self, reject_files):
2192 Forcefully move files from the current directory to the
2193 reject directory. If any file already exists in the reject
2194 directory it will be moved to the morgue to make way for
2195 the new file.
2197 @type reject_files: list
2198 @param reject_files: list of filenames to move to the reject directory
2199 """
2201 cnf = Config()
2204 for file_entry in reject_files:
2205 # Skip any files which don't exist or which we don't have permission to copy.
2206 if os.access(file_entry, os.R_OK) == 0:
2207 continue
2209 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2211 try:
2212 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2213 except OSError, e:
2214 # File exists? Let's find a new name by adding a number
2215 if e.errno == errno.EEXIST:
2216 try:
2217 dest_file = utils.find_next_free(dest_file, 255)
2218 except NoFreeFilenameError:
2219 # Something's either gone badly Pete Tong, or
2220 # someone is trying to exploit us.
2221 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2224 # Make sure we really got it
2225 try:
2226 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2227 except OSError:
2228 # Likely a second process won the race; give up on this file
2229 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2230 return
2233 # If we got here, we own the destination file, so we can
2234 # safely overwrite it.
2235 utils.move(file_entry, dest_file, 1, perms=0660)
2236 os.close(dest_fd)
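# --- editor's example (not part of the original source) ----------------------
# os.O_CREAT | os.O_EXCL is the atomic "claim this filename" idiom used twice
# above: creation fails with EEXIST instead of silently reusing a file someone
# else may own.  Minimal standalone sketch (path is hypothetical):
#
#   import errno, os
#   try:
#       fd = os.open("/tmp/claim.me", os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
#   except OSError, e:
#       if e.errno == errno.EEXIST:
#           print "someone got there first - pick another name"
# ------------------------------------------------------------------------------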
2238 ###########################################################################
2239 def do_reject (self, manual=0, reject_message="", notes=""):
2241 Reject an upload. If called without a reject message or C{manual} is
2242 true, spawn an editor so the user can write one.
2245 @param manual: manual or automated rejection
2247 @type reject_message: string
2248 @param reject_message: A reject message
2253 # If we weren't given a manual rejection message, spawn an
2254 # editor so the user can add one in...
2255 if manual and not reject_message:
2256 (fd, temp_filename) = utils.temp_filename()
2257 temp_file = os.fdopen(fd, 'w')
2260 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2261 % (note.author, note.version, note.notedate, note.comment))
2263 editor = os.environ.get("EDITOR","vi")
2265 while answer == 'E':
2266 os.system("%s %s" % (editor, temp_filename))
2267 temp_fh = utils.open_file(temp_filename)
2268 reject_message = "".join(temp_fh.readlines())
2270 print "Reject message:"
2271 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2272 prompt = "[R]eject, Edit, Abandon, Quit ?"
2273 answer = "XXX"
2274 while prompt.find(answer) == -1:
2275 answer = utils.our_raw_input(prompt)
2276 m = re_default_answer.search(prompt)
2277 if answer == "":
2278 answer = m.group(1)
2279 answer = answer[:1].upper()
2280 os.unlink(temp_filename)
2286 print "Rejecting.\n"
2290 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2291 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2293 # Move all the files into the reject directory
2294 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2295 self.force_reject(reject_files)
2297 # If we fail here someone is probably trying to exploit the race
2298 # so let's just raise an exception ...
2299 if os.path.exists(reason_filename):
2300 os.unlink(reason_filename)
2301 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2303 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2307 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2308 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2309 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2310 os.write(reason_fd, reject_message)
2311 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2312 else:
2313 # Build up the rejection email
2314 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2315 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2316 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2317 self.Subst["__REJECT_MESSAGE__"] = ""
2318 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2319 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2320 # Write the rejection email out as the <foo>.reason file
2321 os.write(reason_fd, reject_mail_message)
2323 del self.Subst["__REJECTOR_ADDRESS__"]
2324 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2325 del self.Subst["__CC__"]
2329 # Send the rejection mail
2330 utils.send_mail(reject_mail_message)
2333 self.logger.log(["rejected", self.pkg.changes_file])
2337 ################################################################################
2338 def in_override_p(self, package, component, suite, binary_type, filename, session):
2340 Check if a package already has override entries in the DB
2342 @type package: string
2343 @param package: package name
2345 @type component: string
2346 @param component: component name
2348 @type suite: string
2349 @param suite: suite name
2351 @type binary_type: string
2352 @param binary_type: type of the package
2354 @type filename: string
2355 @param filename: filename we check
2357 @return: the database result. But no one cares anyway.
2359 """
2363 if binary_type == "": # must be source
2366 file_type = binary_type
2368 # Override suite name; used for example with proposed-updates
2369 oldsuite = get_suite(suite, session)
2370 if oldsuite is not None and oldsuite.overridesuite:
2371 suite = oldsuite.overridesuite
2373 result = get_override(package, suite, component, file_type, session)
2375 # If checking for a source package fall back on the binary override type
2376 if file_type == "dsc" and len(result) < 1:
2377 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2379 # Remember the section and priority so we can check them later if appropriate
2382 self.pkg.files[filename]["override section"] = result.section.section
2383 self.pkg.files[filename]["override priority"] = result.priority.priority
2388 ################################################################################
2389 def get_anyversion(self, sv_list, suite):
2392 @param sv_list: list of (suite, version) tuples to check
2395 @param suite: suite name
2396 """
2398 Cnf = Config()
2399 anyversion = None
2401 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2402 for (s, v) in sv_list:
2403 if s in [ x.lower() for x in anysuite ]:
2404 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2405 anyversion = v
2407 return anyversion
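# --- editor's example (not part of the original source) ----------------------
# get_anyversion() with made-up data: it returns the highest version found in
# the given suite or in any suite configured as enhancing it.
#
#   sv_list = [("unstable", "1.0-1"), ("unstable", "1.2-1"), ("testing", "1.1-1")]
#   # with anysuite == ["unstable"] only the first two tuples match, and
#   # apt_pkg.VersionCompare keeps the larger one, so the result is "1.2-1"
# ------------------------------------------------------------------------------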
2409 ################################################################################
2411 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2414 @param sv_list: list of (suite, version) tuples to check
2416 @type filename: string
2417 @param filename: filename of the file being checked (used in messages)
2419 @type new_version: string
2420 @param new_version: version of the uploaded package
2422 Ensure versions are newer than existing packages in target
2423 suites and that cross-suite version checking rules as
2424 set out in the conf file are satisfied.
2425 """
2427 cnf = Config()
2429 # Check versions for each target suite
2430 for target_suite in self.pkg.changes["distribution"].keys():
2431 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2432 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2434 # Enforce "must be newer than target suite" even if conffile omits it
2435 if target_suite not in must_be_newer_than:
2436 must_be_newer_than.append(target_suite)
2438 for (suite, existent_version) in sv_list:
2439 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2441 if suite in must_be_newer_than and sourceful and vercmp < 1:
2442 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2444 if suite in must_be_older_than and vercmp > -1:
2446 cansave = 0
2447 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2448 # we really use the other suite, ignoring the conflicting one ...
2449 addsuite = self.pkg.changes["distribution-version"][suite]
2451 add_version = self.get_anyversion(sv_list, addsuite)
2452 target_version = self.get_anyversion(sv_list, target_suite)
2454 if not add_version:
2455 # not add_version can only happen if we map to a suite
2456 # that doesn't enhance the suite we're propup'ing from.
2457 # so "propup-ver x a b c; map a d" is a problem only if
2458 # d doesn't enhance a.
2460 # i think we could always propagate in this case, rather
2461 # than complaining. either way, this isn't a REJECT issue
2463 # And - we really should complain to the dorks who configured dak
2464 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2465 self.pkg.changes.setdefault("propdistribution", {})
2466 self.pkg.changes["propdistribution"][addsuite] = 1
2468 elif not target_version:
2469 # not target_version is true when the package is NEW
2470 # we could just stick with the "...old version..." REJECT
2471 # for this, I think.
2472 self.rejects.append("Won't propogate NEW packages.")
2473 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2474 # propagation would be redundant. no need to reject though.
2475 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2476 cansave = 1
2477 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2478 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2480 self.warnings.append("Propogating upload to %s" % (addsuite))
2481 self.pkg.changes.setdefault("propdistribution", {})
2482 self.pkg.changes["propdistribution"][addsuite] = 1
2486 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
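# --- editor's example (not part of the original source) ----------------------
# The vercmp tests above rely on apt_pkg.VersionCompare returning a negative,
# zero or positive number (strcmp-style), so "vercmp < 1" reads "not newer"
# and "vercmp > -1" reads "not older":
#
#   import apt_pkg
#   apt_pkg.init()
#   assert apt_pkg.VersionCompare("1.0-1", "1.0-2") < 0
#   assert apt_pkg.VersionCompare("1.0-2", "1.0-1") > 0
#   assert apt_pkg.VersionCompare("1.0-1", "1.0-1") == 0
# ------------------------------------------------------------------------------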
2488 ################################################################################
2489 def check_binary_against_db(self, filename, session):
2490 # Ensure version is sane
2491 q = session.query(BinAssociation)
2492 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2493 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2495 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2496 filename, self.pkg.files[filename]["version"], sourceful=False)
2498 # Check for any existing copies of the file
2499 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2500 q = q.filter_by(version=self.pkg.files[filename]["version"])
2501 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2504 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2506 ################################################################################
2508 def check_source_against_db(self, filename, session):
2509 source = self.pkg.dsc.get("source")
2510 version = self.pkg.dsc.get("version")
2512 # Ensure version is sane
2513 q = session.query(SrcAssociation)
2514 q = q.join(DBSource).filter(DBSource.source==source)
2516 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2517 filename, version, sourceful=True)
2519 ################################################################################
2520 def check_dsc_against_db(self, filename, session):
2523 @warning: NB: this function can remove entries from the 'files' index [if
2524 the orig tarball is a duplicate of the one in the archive]; if
2525 you're iterating over 'files' and call this function as part of
2526 the loop, be sure to add a check to the top of the loop to
2527 ensure you haven't just tried to dereference the deleted entry.
2528 """
2530 Cnf = Config()
2532 self.pkg.orig_files = {} # XXX: do we need to clear it?
2533 orig_files = self.pkg.orig_files
2535 # Try and find all files mentioned in the .dsc. This has
2536 # to work harder to cope with the multiple possible
2537 # locations of an .orig.tar.gz.
2538 # The ordering on the select is needed to pick the newest orig
2539 # when it exists in multiple places.
2540 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2541 found = None
2542 if self.pkg.files.has_key(dsc_name):
2543 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2544 actual_size = int(self.pkg.files[dsc_name]["size"])
2545 found = "%s in incoming" % (dsc_name)
2547 # Check the file does not already exist in the archive
2548 ql = get_poolfile_like_name(dsc_name, session)
2550 # Strip out anything that isn't '%s' or '/%s$'
2551 for i in ql:
2552 if not i.filename.endswith(dsc_name):
2553 ql.remove(i)
2555 # "[dak] has not broken them. [dak] has fixed a
2556 # brokenness. Your crappy hack exploited a bug in
2559 # "(Come on! I thought it was always obvious that
2560 # one just doesn't release different files with
2561 # the same name and version.)"
2562 # -- ajk@ on d-devel@l.d.o
2565 # Ignore exact matches for .orig.tar.gz
2566 match = 0
2567 if re_is_orig_source.match(dsc_name):
2568 for i in ql:
2569 if self.pkg.files.has_key(dsc_name) and \
2570 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2571 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2572 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2573 # TODO: Don't delete the entry, just mark it as not needed
2574 # This would fix the stupidity of changing something we often iterate over
2575 # whilst we're doing it
2576 del self.pkg.files[dsc_name]
2577 dsc_entry["files id"] = i.file_id
2578 if not orig_files.has_key(dsc_name):
2579 orig_files[dsc_name] = {}
2580 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2581 match = 1
2583 # Don't bitch that we couldn't find this file later
2584 try:
2585 self.later_check_files.remove(dsc_name)
2586 except ValueError:
2587 pass
2591 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2593 elif re_is_orig_source.match(dsc_name):
2595 ql = get_poolfile_like_name(dsc_name, session)
2597 # Strip out anything that isn't '%s' or '/%s$'
2598 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2599 for i in ql:
2600 if not i.filename.endswith(dsc_name):
2601 ql.remove(i)
2603 if len(ql) > 0:
2604 # Unfortunately, we may get more than one match here if,
2605 # for example, the package was in potato but had an -sa
2606 # upload in woody. So we need to choose the right one.
2608 # default to something sane in case we don't match any or have only one
2609 x = ql[0]
2611 if len(ql) > 1:
2612 for i in ql:
2613 old_file = os.path.join(i.location.path, i.filename)
2614 old_file_fh = utils.open_file(old_file)
2615 actual_md5 = apt_pkg.md5sum(old_file_fh)
2616 old_file_fh.close()
2617 actual_size = os.stat(old_file)[stat.ST_SIZE]
2618 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2621 old_file = os.path.join(i.location.path, i.filename)
2622 old_file_fh = utils.open_file(old_file)
2623 actual_md5 = apt_pkg.md5sum(old_file_fh)
2624 old_file_fh.close()
2625 actual_size = os.stat(old_file)[stat.ST_SIZE]
2626 found = old_file
2627 suite_type = x.location.archive_type
2628 # need this for updating dsc_files in install()
2629 dsc_entry["files id"] = x.file_id
2630 # See install() in process-accepted...
2631 if not orig_files.has_key(dsc_name):
2632 orig_files[dsc_name] = {}
2633 orig_files[dsc_name]["id"] = x.file_id
2634 orig_files[dsc_name]["path"] = old_file
2635 orig_files[dsc_name]["location"] = x.location.location_id
2636 else:
2637 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2638 # Not there? Check the queue directories...
2639 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2640 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2642 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2643 if os.path.exists(in_otherdir):
2644 in_otherdir_fh = utils.open_file(in_otherdir)
2645 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2646 in_otherdir_fh.close()
2647 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2648 found = in_otherdir
2649 if not orig_files.has_key(dsc_name):
2650 orig_files[dsc_name] = {}
2651 orig_files[dsc_name]["path"] = in_otherdir
2654 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2657 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2659 if actual_md5 != dsc_entry["md5sum"]:
2660 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2661 if actual_size != int(dsc_entry["size"]):
2662 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2664 ################################################################################
2665 # This is used by process-new and process-holding to recheck a changes file
2666 # at the time we're running. It mainly wraps various other internal functions
2667 # and is similar to accepted_checks - these should probably be tidied up
2669 def recheck(self, session):
2670 cnf = Config()
2671 for f in self.pkg.files.keys():
2672 # The .orig.tar.gz can disappear out from under us if it's a
2673 # duplicate of one in the archive.
2674 if not self.pkg.files.has_key(f):
2675 continue
2677 entry = self.pkg.files[f]
2679 # Check that the source still exists
2680 if entry["type"] == "deb":
2681 source_version = entry["source version"]
2682 source_package = entry["source package"]
2683 if not self.pkg.changes["architecture"].has_key("source") \
2684 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2685 source_epochless_version = re_no_epoch.sub('', source_version)
2686 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2688 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2689 if cnf.has_key("Dir::Queue::%s" % (q)):
2690 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2693 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2695 # Version and file overwrite checks
2696 if entry["type"] == "deb":
2697 self.check_binary_against_db(f, session)
2698 elif entry["type"] == "dsc":
2699 self.check_source_against_db(f, session)
2700 self.check_dsc_against_db(f, session)
2702 ################################################################################
2703 def accepted_checks(self, overwrite_checks, session):
2704 # Recheck anything that relies on the database; since that's not
2705 # frozen between accept and our run time when called from p-a.
2707 # overwrite_checks is set to False when installing to stable/oldstable
2709 propogate = {}
2710 nopropogate = {}
2712 # Find the .dsc (again)
2713 dsc_filename = None
2714 for f in self.pkg.files.keys():
2715 if self.pkg.files[f]["type"] == "dsc":
2718 for checkfile in self.pkg.files.keys():
2719 # The .orig.tar.gz can disappear out from under us if it's a
2720 # duplicate of one in the archive.
2721 if not self.pkg.files.has_key(checkfile):
2722 continue
2724 entry = self.pkg.files[checkfile]
2726 # Check that the source still exists
2727 if entry["type"] == "deb":
2728 source_version = entry["source version"]
2729 source_package = entry["source package"]
2730 if not self.pkg.changes["architecture"].has_key("source") \
2731 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2732 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2734 # Version and file overwrite checks
2735 if overwrite_checks:
2736 if entry["type"] == "deb":
2737 self.check_binary_against_db(checkfile, session)
2738 elif entry["type"] == "dsc":
2739 self.check_source_against_db(checkfile, session)
2740 self.check_dsc_against_db(dsc_filename, session)
2742 # propagate in the case it is in the override tables:
2743 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2744 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2745 propogate[suite] = 1
2746 else:
2747 nopropogate[suite] = 1
2749 for suite in propogate.keys():
2750 if suite in nopropogate:
2751 continue
2752 self.pkg.changes["distribution"][suite] = 1
2754 for checkfile in self.pkg.files.keys():
2755 # Check the package is still in the override tables
2756 for suite in self.pkg.changes["distribution"].keys():
2757 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2758 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2760 ################################################################################
2761 # If any file of an upload has a recent mtime then chances are good
2762 # the file is still being uploaded.
2764 def upload_too_new(self):
2765 cnf = Config()
2766 tooNew = False
2767 # Move back to the original directory to get accurate time stamps
2768 cwd = os.getcwd()
2769 os.chdir(self.pkg.directory)
2770 file_list = self.pkg.files.keys()
2771 file_list.extend(self.pkg.dsc_files.keys())
2772 file_list.append(self.pkg.changes_file)
2773 for f in file_list:
2774 try:
2775 last_modified = time.time()-os.path.getmtime(f)
2776 if last_modified < int(cnf["Dinstall::SkipTime"]):
2777 tooNew = True
2778 break
2779 except:
2780 pass
2782 os.chdir(cwd)
2783 return tooNew
2785 def store_changelog(self):
2787 # Skip binary-only upload if it is not a bin-NMU
2788 if not self.pkg.changes['architecture'].has_key('source'):
2789 from daklib.regexes import re_bin_only_nmu
2790 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2791 return
2793 session = DBConn().session()
2795 # Check if upload already has a changelog entry
2796 query = """SELECT changelog_id FROM changes WHERE source = :source
2797 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2798 if session.execute(query, {'source': self.pkg.changes['source'], \
2799 'version': self.pkg.changes['version'], \
2800 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2804 # Add current changelog text into changelogs_text table, return created ID
2805 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2806 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2808 # Link ID to the upload available in changes table
2809 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2810 AND version = :version AND architecture = :architecture"""
2811 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2812 'version': self.pkg.changes['version'], \
2813 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
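# --- editor's example (not part of the original source) ----------------------
# store_changelog() uses textual SQL with named bind parameters; values are
# passed as a dict so the database driver handles quoting.  Generic sketch of
# the same pattern (the changelog text is invented):
#
#   query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
#   new_id = session.execute(query, {'changelog': "hello (2.10-1) unstable; urgency=low"}).fetchone()[0]
# ------------------------------------------------------------------------------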