5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
82 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
84 # Validate the override type
85 type_id = get_override_type(file_type, session)
87 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
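# In short: entries already stamped with an explicit "dbtype" by the binary
# checks keep it, anything whose "type" matches re_source_ext is treated as
# source ("dsc"), and whatever results must exist in the override_type table
# or we bail out via fubar().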
91 ################################################################################
93 # Determine what parts in a .changes are NEW
95 def determine_new(filename, changes, files, warn=1, session = None):
97 Determine what parts in a C{changes} file are NEW.
100 @param filename: changes filename
102 @type changes: Upload.Pkg.changes dict
103 @param changes: Changes dictionary
105 @type files: Upload.Pkg.files dict
106 @param files: Files dictionary
109 @param warn: Warn if overrides are added for (old)stable
112 @return: dictionary of NEW components.
115 # TODO: This should all use the database instead of parsing the changes
120 dbchg = get_dbchange(filename, session)
122 print "Warning: cannot find changes file in database; won't check byhand"
124 # Build up a list of potentially new things
125 for name, f in files.items():
126 # Keep a record of byhand elements
127 if f["section"] == "byhand":
132 priority = f["priority"]
133 section = f["section"]
134 file_type = get_type(f, session)
135 component = f["component"]
137 if file_type == "dsc":
140 if not new.has_key(pkg):
142 new[pkg]["priority"] = priority
143 new[pkg]["section"] = section
144 new[pkg]["type"] = file_type
145 new[pkg]["component"] = component
146 new[pkg]["files"] = []
148 old_type = new[pkg]["type"]
149 if old_type != file_type:
150 # source gets trumped by deb or udeb
151 if old_type == "dsc":
152 new[pkg]["priority"] = priority
153 new[pkg]["section"] = section
154 new[pkg]["type"] = file_type
155 new[pkg]["component"] = component
157 new[pkg]["files"].append(name)
159 if f.has_key("othercomponents"):
160 new[pkg]["othercomponents"] = f["othercomponents"]
162 # Fix up the list of target suites
164 for suite in changes["suite"].keys():
165 override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
167 (olderr, newerr) = (get_suite(suite, session) == None,
168 get_suite(override, session) == None)
170 (oinv, ninv) = ("", "")
171 if olderr: oinv = "invalid "
172 if newerr: ninv = "invalid "
173 print "warning: overriding %ssuite %s to %ssuite %s" % (
174 oinv, suite, ninv, override)
175 del changes["suite"][suite]
176 changes["suite"][override] = 1
178 # Check for unprocessed byhand files
179 if dbchg is not None:
180 for b in byhand.keys():
181 # Find the file entry in the database
183 for f in dbchg.files:
186 # If it's processed, we can ignore it
192 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
194 # Check for new stuff
195 for suite in changes["suite"].keys():
196 for pkg in new.keys():
197 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
199 for file_entry in new[pkg]["files"]:
200 if files[file_entry].has_key("new"):
201 del files[file_entry]["new"]
205 for s in ['stable', 'oldstable']:
206 if changes["suite"].has_key(s):
207 print "WARNING: overrides will be added for %s!" % s
208 for pkg in new.keys():
209 if new[pkg].has_key("othercomponents"):
210 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
214 ################################################################################
216 def check_valid(new, session = None):
218 Check if section and priority for NEW packages exist in database.
219 Additionally does sanity checks:
220 - debian-installer packages have to be udeb (or source)
221 - non-debian-installer packages cannot be udeb
222 - source priority can only be assigned to dsc file types
225 @param new: Dict of new packages with their section, priority and type.
228 for pkg in new.keys():
229 section_name = new[pkg]["section"]
230 priority_name = new[pkg]["priority"]
231 file_type = new[pkg]["type"]
233 section = get_section(section_name, session)
235 new[pkg]["section id"] = -1
237 new[pkg]["section id"] = section.section_id
239 priority = get_priority(priority_name, session)
241 new[pkg]["priority id"] = -1
243 new[pkg]["priority id"] = priority.priority_id
246 di = section_name.find("debian-installer") != -1
248 # If d-i, we must be udeb and vice-versa
249 if (di and file_type not in ("udeb", "dsc")) or \
250 (not di and file_type == "udeb"):
251 new[pkg]["section id"] = -1
253 # If dsc we need to be source and vice-versa
254 if (priority == "source" and file_type != "dsc") or \
255 (priority != "source" and file_type == "dsc"):
256 new[pkg]["priority id"] = -1
258 ###############################################################################
260 # Used by Upload.check_timestamps
261 class TarTime(object):
262 def __init__(self, future_cutoff, past_cutoff):
264 self.future_cutoff = future_cutoff
265 self.past_cutoff = past_cutoff
268 self.future_files = {}
269 self.ancient_files = {}
271 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
272 if MTime > self.future_cutoff:
273 self.future_files[Name] = MTime
274 if MTime < self.past_cutoff:
275 self.ancient_files[Name] = MTime
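# Illustrative helper (not called anywhere in dak itself) showing how TarTime
# is meant to be used: hand its callback to apt_inst.debExtract() and inspect
# the collected offenders afterwards, just as check_timestamps() does below.
# The deb_path argument is hypothetical.
def _example_tartime_usage(deb_path):
    one_day = 24 * 60 * 60
    tar = TarTime(time.time() + one_day,
                  time.mktime(time.strptime("1984", "%Y")))
    apt_inst.debExtract(utils.open_file(deb_path), tar.callback, "control.tar.gz")
    return (tar.future_files, tar.ancient_files)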
277 ###############################################################################
279 def prod_maintainer(notes, upload):
282 # Here we prepare an editor and get them ready to prod...
283 (fd, temp_filename) = utils.temp_filename()
284 temp_file = os.fdopen(fd, 'w')
286 temp_file.write(note.comment)
288 editor = os.environ.get("EDITOR","vi")
291 os.system("%s %s" % (editor, temp_filename))
292 temp_fh = utils.open_file(temp_filename)
293 prod_message = "".join(temp_fh.readlines())
295 print "Prod message:"
296 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
297 prompt = "[P]rod, Edit, Abandon, Quit ?"
299 while prompt.find(answer) == -1:
300 answer = utils.our_raw_input(prompt)
301 m = re_default_answer.search(prompt)
304 answer = answer[:1].upper()
305 os.unlink(temp_filename)
311 # Otherwise, do the prodding...
312 user_email_address = utils.whoami() + " <%s>" % (
313 cnf["Dinstall::MyAdminAddress"])
317 Subst["__FROM_ADDRESS__"] = user_email_address
318 Subst["__PROD_MESSAGE__"] = prod_message
319 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
321 prod_mail_message = utils.TemplateSubst(
322 Subst,cnf["Dir::Templates"]+"/process-new.prod")
325 utils.send_mail(prod_mail_message)
327 print "Sent prodding message"
329 ################################################################################
331 def edit_note(note, upload, session):
332 # Write the current data to a temporary file
333 (fd, temp_filename) = utils.temp_filename()
334 editor = os.environ.get("EDITOR","vi")
337 os.system("%s %s" % (editor, temp_filename))
338 temp_file = utils.open_file(temp_filename)
339 newnote = temp_file.read().rstrip()
342 print utils.prefix_multi_line_string(newnote," ")
343 prompt = "[D]one, Edit, Abandon, Quit ?"
345 while prompt.find(answer) == -1:
346 answer = utils.our_raw_input(prompt)
347 m = re_default_answer.search(prompt)
350 answer = answer[:1].upper()
351 os.unlink(temp_filename)
358 comment = NewComment()
359 comment.package = upload.pkg.changes["source"]
360 comment.version = upload.pkg.changes["version"]
361 comment.comment = newnote
362 comment.author = utils.whoami()
363 comment.trainee = bool(Options["Trainee"])
367 ###############################################################################
369 class Upload(object):
371 Everything that has to do with processing an upload.
379 ###########################################################################
382 """ Reset a number of internal variables."""
384 # Initialize the substitution template map
387 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
388 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
389 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
390 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
396 self.later_check_files = []
400 def package_info(self):
402 Format various messages from this Upload to send to the maintainer.
406 ('Reject Reasons', self.rejects),
407 ('Warnings', self.warnings),
408 ('Notes', self.notes),
412 for title, messages in msgs:
414 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
419 ###########################################################################
420 def update_subst(self):
421 """ Set up the per-package template substitution mappings """
425 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
426 if not self.pkg.changes.has_key("architecture") or not \
427 isinstance(self.pkg.changes["architecture"], dict):
428 self.pkg.changes["architecture"] = { "Unknown" : "" }
430 # and maintainer2047 may not exist.
431 if not self.pkg.changes.has_key("maintainer2047"):
432 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
434 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
435 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
436 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
438 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
439 if self.pkg.changes["architecture"].has_key("source") and \
440 self.pkg.changes["changedby822"] != "" and \
441 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
443 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
444 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
445 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
447 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
448 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
449 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
451 # Process policy doesn't set the fingerprint field and I don't want to make it
452 # do it for now as I don't want to have to deal with the case where we accepted
453 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
454 # the meantime so the package will be remarked as rejectable. Urgh.
455 # TODO: Fix this properly
456 if self.pkg.changes.has_key('fingerprint'):
457 session = DBConn().session()
458 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
459 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
460 if self.pkg.changes.has_key("sponsoremail"):
461 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
464 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
465 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
467 # Apply any global override of the Maintainer field
468 if cnf.get("Dinstall::OverrideMaintainer"):
469 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
470 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
472 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
473 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
474 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
475 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
477 ###########################################################################
478 def load_changes(self, filename):
480 Load a changes file and set up a dictionary around it. Also checks for mandatory
483 @type filename: string
484 @param filename: Changes filename, full path.
487 @return: whether the changes file was valid or not. We may want to
488 reject even if this is True (see what gets put in self.rejects).
489 This is simply to prevent us even trying things later which will
490 fail because we couldn't properly parse the file.
493 self.pkg.changes_file = filename
495 # Parse the .changes field into a dictionary
497 self.pkg.changes.update(parse_changes(filename))
498 except CantOpenError:
499 self.rejects.append("%s: can't read file." % (filename))
501 except ParseChangesError, line:
502 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
504 except ChangesUnicodeError:
505 self.rejects.append("%s: changes file not proper utf-8" % (filename))
508 # Parse the Files field from the .changes into another dictionary
510 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
511 except ParseChangesError, line:
512 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
514 except UnknownFormatError, format:
515 self.rejects.append("%s: unknown format '%s'." % (filename, format))
518 # Check for mandatory fields
519 for i in ("distribution", "source", "binary", "architecture",
520 "version", "maintainer", "files", "changes", "description"):
521 if not self.pkg.changes.has_key(i):
522 # Avoid undefined errors later
523 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
526 # Strip a source version in brackets from the source field
527 if re_strip_srcver.search(self.pkg.changes["source"]):
528 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
530 # Ensure the source field is a valid package name.
531 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
532 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
534 # Split multi-value fields into a lower-level dictionary
535 for i in ("architecture", "distribution", "binary", "closes"):
536 o = self.pkg.changes.get(i, "")
538 del self.pkg.changes[i]
540 self.pkg.changes[i] = {}
543 self.pkg.changes[i][j] = 1
545 # Fix the Maintainer: field to be RFC822/2047 compatible
547 (self.pkg.changes["maintainer822"],
548 self.pkg.changes["maintainer2047"],
549 self.pkg.changes["maintainername"],
550 self.pkg.changes["maintaineremail"]) = \
551 fix_maintainer (self.pkg.changes["maintainer"])
552 except ParseMaintError, msg:
553 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
554 % (filename, self.pkg.changes["maintainer"], msg))
556 # ...likewise for the Changed-By: field if it exists.
558 (self.pkg.changes["changedby822"],
559 self.pkg.changes["changedby2047"],
560 self.pkg.changes["changedbyname"],
561 self.pkg.changes["changedbyemail"]) = \
562 fix_maintainer (self.pkg.changes.get("changed-by", ""))
563 except ParseMaintError, msg:
564 self.pkg.changes["changedby822"] = ""
565 self.pkg.changes["changedby2047"] = ""
566 self.pkg.changes["changedbyname"] = ""
567 self.pkg.changes["changedbyemail"] = ""
569 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
570 % (filename, self.pkg.changes["changed-by"], msg))
572 # Ensure all the values in Closes: are numbers
573 if self.pkg.changes.has_key("closes"):
574 for i in self.pkg.changes["closes"].keys():
575 if re_isanum.match (i) == None:
576 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
578 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
579 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
580 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
582 # Check the .changes is non-empty
583 if not self.pkg.files:
584 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
587 # Changes was syntactically valid even if we'll reject
590 ###########################################################################
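# For reference, the multi-value split in load_changes() above turns the
# space-separated fields into dicts keyed by item (values hypothetical),
# which is why the rest of this module tests membership with has_key():
#
#   "Architecture: source amd64" -> changes["architecture"] == {"source": 1, "amd64": 1}
#   "Distribution: unstable"     -> changes["distribution"] == {"unstable": 1}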
592 def check_distributions(self):
593 "Check and map the Distribution field"
597 # Handle suite mappings
598 for m in Cnf.ValueList("SuiteMappings"):
601 if mtype == "map" or mtype == "silent-map":
602 (source, dest) = args[1:3]
603 if self.pkg.changes["distribution"].has_key(source):
604 del self.pkg.changes["distribution"][source]
605 self.pkg.changes["distribution"][dest] = 1
606 if mtype != "silent-map":
607 self.notes.append("Mapping %s to %s." % (source, dest))
608 if self.pkg.changes.has_key("distribution-version"):
609 if self.pkg.changes["distribution-version"].has_key(source):
610 self.pkg.changes["distribution-version"][source]=dest
611 elif mtype == "map-unreleased":
612 (source, dest) = args[1:3]
613 if self.pkg.changes["distribution"].has_key(source):
614 for arch in self.pkg.changes["architecture"].keys():
615 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
616 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
617 del self.pkg.changes["distribution"][source]
618 self.pkg.changes["distribution"][dest] = 1
620 elif mtype == "ignore":
622 if self.pkg.changes["distribution"].has_key(suite):
623 del self.pkg.changes["distribution"][suite]
624 self.warnings.append("Ignoring %s as a target suite." % (suite))
625 elif mtype == "reject":
627 if self.pkg.changes["distribution"].has_key(suite):
628 self.rejects.append("Uploads to %s are not accepted." % (suite))
629 elif mtype == "propup-version":
630 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
632 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
633 if self.pkg.changes["distribution"].has_key(args[1]):
634 self.pkg.changes.setdefault("distribution-version", {})
635 for suite in args[2:]:
636 self.pkg.changes["distribution-version"][suite] = suite
638 # Ensure there is (still) a target distribution
639 if len(self.pkg.changes["distribution"].keys()) < 1:
640 self.rejects.append("No valid distribution remaining.")
642 # Ensure target distributions exist
643 for suite in self.pkg.changes["distribution"].keys():
644 if not Cnf.has_key("Suite::%s" % (suite)):
645 self.rejects.append("Unknown distribution `%s'." % (suite))
647 ###########################################################################
649 def binary_file_checks(self, f, session):
651 entry = self.pkg.files[f]
653 # Extract package control information
654 deb_file = utils.open_file(f)
656 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
658 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
660 # Can't continue, none of the checks on control would work.
663 # Check for mandatory "Description:"
666 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
668 self.rejects.append("%s: Missing Description in binary package" % (f))
673 # Check for mandatory fields
674 for field in [ "Package", "Architecture", "Version" ]:
675 if control.Find(field) == None:
677 self.rejects.append("%s: No %s field in control." % (f, field))
680 # Ensure the package name matches the one given in the .changes
681 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
682 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
684 # Validate the package field
685 package = control.Find("Package")
686 if not re_valid_pkg_name.match(package):
687 self.rejects.append("%s: invalid package name '%s'." % (f, package))
689 # Validate the version field
690 version = control.Find("Version")
691 if not re_valid_version.match(version):
692 self.rejects.append("%s: invalid version number '%s'." % (f, version))
694 # Ensure the architecture of the .deb is one we know about.
695 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
696 architecture = control.Find("Architecture")
697 upload_suite = self.pkg.changes["distribution"].keys()[0]
699 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
700 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
701 self.rejects.append("Unknown architecture '%s'." % (architecture))
703 # Ensure the architecture of the .deb is one of the ones
704 # listed in the .changes.
705 if not self.pkg.changes["architecture"].has_key(architecture):
706 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
708 # Sanity-check the Depends field
709 depends = control.Find("Depends")
711 self.rejects.append("%s: Depends field is empty." % (f))
713 # Sanity-check the Provides field
714 provides = control.Find("Provides")
716 provide = re_spacestrip.sub('', provides)
718 self.rejects.append("%s: Provides field is empty." % (f))
719 prov_list = provide.split(",")
720 for prov in prov_list:
721 if not re_valid_pkg_name.match(prov):
722 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
724 # Check the section & priority match those given in the .changes (non-fatal)
725 if control.Find("Section") and entry["section"] != "" \
726 and entry["section"] != control.Find("Section"):
727 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
728 (f, control.Find("Section", ""), entry["section"]))
729 if control.Find("Priority") and entry["priority"] != "" \
730 and entry["priority"] != control.Find("Priority"):
731 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
732 (f, control.Find("Priority", ""), entry["priority"]))
734 entry["package"] = package
735 entry["architecture"] = architecture
736 entry["version"] = version
737 entry["maintainer"] = control.Find("Maintainer", "")
739 if f.endswith(".udeb"):
740 self.pkg.files[f]["dbtype"] = "udeb"
741 elif f.endswith(".deb"):
742 self.pkg.files[f]["dbtype"] = "deb"
744 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
746 entry["source"] = control.Find("Source", entry["package"])
748 # Get the source version
749 source = entry["source"]
752 if source.find("(") != -1:
753 m = re_extract_src_version.match(source)
755 source_version = m.group(2)
757 if not source_version:
758 source_version = self.pkg.files[f]["version"]
760 entry["source package"] = source
761 entry["source version"] = source_version
763 # Ensure the filename matches the contents of the .deb
764 m = re_isadeb.match(f)
767 file_package = m.group(1)
768 if entry["package"] != file_package:
769 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
770 (f, file_package, entry["dbtype"], entry["package"]))
771 epochless_version = re_no_epoch.sub('', control.Find("Version"))
774 file_version = m.group(2)
775 if epochless_version != file_version:
776 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
777 (f, file_version, entry["dbtype"], epochless_version))
780 file_architecture = m.group(3)
781 if entry["architecture"] != file_architecture:
782 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
783 (f, file_architecture, entry["dbtype"], entry["architecture"]))
785 # Check for existent source
786 source_version = entry["source version"]
787 source_package = entry["source package"]
788 if self.pkg.changes["architecture"].has_key("source"):
789 if source_version != self.pkg.changes["version"]:
790 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
791 (source_version, f, self.pkg.changes["version"]))
793 # Check in the SQL database
794 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
795 # Check in one of the other directories
796 source_epochless_version = re_no_epoch.sub('', source_version)
797 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
798 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
800 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
803 dsc_file_exists = False
804 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
805 if cnf.has_key("Dir::Queue::%s" % (myq)):
806 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
807 dsc_file_exists = True
810 if not dsc_file_exists:
811 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
813 # Check the version and for file overwrites
814 self.check_binary_against_db(f, session)
816 # Temporarily disable contents generation until we change the table storage layout
819 #if len(b.rejects) > 0:
820 # for j in b.rejects:
821 # self.rejects.append(j)
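# Sketch of the filename consistency checks above, relying on re_isadeb
# capturing (package, version, architecture) in that order as the group()
# calls do; the filename is hypothetical:
#
#   m = re_isadeb.match("dak-example_1.0-1_amd64.deb")
#   m.group(1)  # "dak-example" -- must match control's Package
#   m.group(2)  # "1.0-1"       -- must match the epochless control Version
#   m.group(3)  # "amd64"       -- must match control's Architecture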
823 def source_file_checks(self, f, session):
824 entry = self.pkg.files[f]
826 m = re_issource.match(f)
830 entry["package"] = m.group(1)
831 entry["version"] = m.group(2)
832 entry["type"] = m.group(3)
834 # Ensure the source package name matches the Source field in the .changes
835 if self.pkg.changes["source"] != entry["package"]:
836 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
838 # Ensure the source version matches the version in the .changes file
839 if re_is_orig_source.match(f):
840 changes_version = self.pkg.changes["chopversion2"]
842 changes_version = self.pkg.changes["chopversion"]
844 if changes_version != entry["version"]:
845 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
847 # Ensure the .changes lists source in the Architecture field
848 if not self.pkg.changes["architecture"].has_key("source"):
849 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
851 # Check the signature of a .dsc file
852 if entry["type"] == "dsc":
853 # check_signature returns either:
854 # (None, [list, of, rejects]) or (signature, [])
855 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
857 self.rejects.append(j)
859 entry["architecture"] = "source"
861 def per_suite_file_checks(self, f, suite, session):
863 entry = self.pkg.files[f]
866 if entry.has_key("byhand"):
869 # Check we have fields we need to do these checks
871 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
872 if not entry.has_key(m):
873 self.rejects.append("file '%s' does not have field %s set" % (f, m))
879 # Handle component mappings
880 for m in cnf.ValueList("ComponentMappings"):
881 (source, dest) = m.split()
882 if entry["component"] == source:
883 entry["original component"] = source
884 entry["component"] = dest
886 # Ensure the component is valid for the target suite
887 if cnf.has_key("Suite:%s::Components" % (suite)) and \
888 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
889 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
892 # Validate the component
893 if not get_component(entry["component"], session):
894 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
897 # See if the package is NEW
898 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
901 # Validate the priority
902 if entry["priority"].find('/') != -1:
903 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
905 # Determine the location
906 location = cnf["Dir::Pool"]
907 l = get_location(location, entry["component"], session=session)
909 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
910 entry["location id"] = -1
912 entry["location id"] = l.location_id
914 # Check the md5sum & size against existing files (if any)
915 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
917 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
918 entry["size"], entry["md5sum"], entry["location id"])
921 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
922 elif found is False and poolfile is not None:
923 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
926 entry["files id"] = None
928 entry["files id"] = poolfile.file_id
930 # Check for packages that have moved from one component to another
931 entry['suite'] = suite
932 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
934 entry["othercomponents"] = res.fetchone()[0]
936 def check_files(self, action=True):
937 file_keys = self.pkg.files.keys()
943 os.chdir(self.pkg.directory)
945 ret = holding.copy_to_holding(f)
947 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
951 # check we already know the changes file
952 # [NB: this check must be done post-suite mapping]
953 base_filename = os.path.basename(self.pkg.changes_file)
955 session = DBConn().session()
958 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
959 # if in the pool or in a queue other than unchecked, reject
960 if (dbc.in_queue is None) \
961 or (dbc.in_queue is not None
962 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
963 self.rejects.append("%s file already known to dak" % base_filename)
964 except NoResultFound, e:
971 for f, entry in self.pkg.files.items():
972 # Ensure the file does not already exist in one of the accepted directories
973 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
974 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
975 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
976 self.rejects.append("%s file already exists in the %s directory." % (f, d))
978 if not re_taint_free.match(f):
979 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
981 # Check the file is readable
982 if os.access(f, os.R_OK) == 0:
983 # When running in -n, copy_to_holding() won't have
984 # generated the reject_message, so we need to.
986 if os.path.exists(f):
987 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
989 # Don't reject directly; mark it for a later check to deal with origs
990 # we can find in the pool
991 self.later_check_files.append(f)
992 entry["type"] = "unreadable"
995 # If it's byhand skip remaining checks
996 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
998 entry["type"] = "byhand"
1000 # Checks for a binary package...
1001 elif re_isadeb.match(f):
1003 entry["type"] = "deb"
1005 # This routine appends to self.rejects/warnings as appropriate
1006 self.binary_file_checks(f, session)
1008 # Checks for a source package...
1009 elif re_issource.match(f):
1012 # This routine appends to self.rejects/warnings as appropriate
1013 self.source_file_checks(f, session)
1015 # Not a binary or source package? Assume byhand...
1018 entry["type"] = "byhand"
1020 # Per-suite file checks
1021 entry["oldfiles"] = {}
1022 for suite in self.pkg.changes["distribution"].keys():
1023 self.per_suite_file_checks(f, suite, session)
1027 # If the .changes file says it has source, it must have source.
1028 if self.pkg.changes["architecture"].has_key("source"):
1030 self.rejects.append("no source found and Architecture line in changes mention source.")
1032 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1033 self.rejects.append("source only uploads are not supported.")
1035 ###########################################################################
1036 def check_dsc(self, action=True, session=None):
1037 """Returns bool indicating whether or not the source changes are valid"""
1038 # Ensure there is source to check
1039 if not self.pkg.changes["architecture"].has_key("source"):
1044 for f, entry in self.pkg.files.items():
1045 if entry["type"] == "dsc":
1047 self.rejects.append("can not process a .changes file with multiple .dsc's.")
1052 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1053 if not dsc_filename:
1054 self.rejects.append("source uploads must contain a dsc file")
1057 # Parse the .dsc file
1059 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1060 except CantOpenError:
1061 # if not -n copy_to_holding() will have done this for us...
1063 self.rejects.append("%s: can't read file." % (dsc_filename))
1064 except ParseChangesError, line:
1065 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1066 except InvalidDscError, line:
1067 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1068 except ChangesUnicodeError:
1069 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1071 # Build up the file list of files mentioned by the .dsc
1073 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1074 except NoFilesFieldError:
1075 self.rejects.append("%s: no Files: field." % (dsc_filename))
1077 except UnknownFormatError, format:
1078 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1080 except ParseChangesError, line:
1081 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1084 # Enforce mandatory fields
1085 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1086 if not self.pkg.dsc.has_key(i):
1087 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1090 # Validate the source and version fields
1091 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1092 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1093 if not re_valid_version.match(self.pkg.dsc["version"]):
1094 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1096 # Only a limited list of source formats are allowed in each suite
1097 for dist in self.pkg.changes["distribution"].keys():
1098 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1099 if self.pkg.dsc["format"] not in allowed:
1100 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1102 # Validate the Maintainer field
1104 # We ignore the return value
1105 fix_maintainer(self.pkg.dsc["maintainer"])
1106 except ParseMaintError, msg:
1107 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1108 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1110 # Validate the build-depends field(s)
1111 for field_name in [ "build-depends", "build-depends-indep" ]:
1112 field = self.pkg.dsc.get(field_name)
1114 # Have apt try to parse them...
1116 apt_pkg.ParseSrcDepends(field)
1118 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1120 # Ensure the version number in the .dsc matches the version number in the .changes
1121 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1122 changes_version = self.pkg.files[dsc_filename]["version"]
1124 if epochless_dsc_version != changes_version:
1125 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1127 # Ensure the Files field contains only what's expected
1128 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1130 # Ensure source is newer than existing source in target suites
1131 session = DBConn().session()
1132 self.check_source_against_db(dsc_filename, session)
1133 self.check_dsc_against_db(dsc_filename, session)
1135 dbchg = get_dbchange(self.pkg.changes_file, session)
1137 # Finally, check if we're missing any files
1138 for f in self.later_check_files:
1140 # If we have a dbchg object, check whether we've already processed this file
1143 for pf in dbchg.files:
1144 if pf.filename == f and pf.processed:
1145 self.notes.append('%s was already processed so we can go ahead' % f)
1147 del self.pkg.files[f]
1149 self.rejects.append("Could not find file %s references in changes" % f)
1155 ###########################################################################
1157 def get_changelog_versions(self, source_dir):
1158 """Extracts a the source package and (optionally) grabs the
1159 version history out of debian/changelog for the BTS."""
1163 # Find the .dsc (again)
1165 for f in self.pkg.files.keys():
1166 if self.pkg.files[f]["type"] == "dsc":
1169 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1170 if not dsc_filename:
1173 # Create a symlink mirror of the source files in our temporary directory
1174 for f in self.pkg.files.keys():
1175 m = re_issource.match(f)
1177 src = os.path.join(source_dir, f)
1178 # If a file is missing for whatever reason, give up.
1179 if not os.path.exists(src):
1182 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1183 self.pkg.orig_files[f].has_key("path"):
1185 dest = os.path.join(os.getcwd(), f)
1186 os.symlink(src, dest)
1188 # If the orig files are not a part of the upload, create symlinks to the
1190 for orig_file in self.pkg.orig_files.keys():
1191 if not self.pkg.orig_files[orig_file].has_key("path"):
1193 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1194 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1196 # Extract the source
1197 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1198 (result, output) = commands.getstatusoutput(cmd)
1200 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1201 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1204 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1207 # Get the upstream version
1208 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1209 if re_strip_revision.search(upstr_version):
1210 upstr_version = re_strip_revision.sub('', upstr_version)
1212 # Ensure the changelog file exists
1213 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1214 if not os.path.exists(changelog_filename):
1215 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1218 # Parse the changelog
1219 self.pkg.dsc["bts changelog"] = ""
1220 changelog_file = utils.open_file(changelog_filename)
1221 for line in changelog_file.readlines():
1222 m = re_changelog_versions.match(line)
1224 self.pkg.dsc["bts changelog"] += line
1225 changelog_file.close()
1227 # Check we found at least one revision in the changelog
1228 if not self.pkg.dsc["bts changelog"]:
1229 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1231 def check_source(self):
1233 # a) there's no source
1234 if not self.pkg.changes["architecture"].has_key("source"):
1237 tmpdir = utils.temp_dirname()
1239 # Move into the temporary directory
1243 # Get the changelog version history
1244 self.get_changelog_versions(cwd)
1246 # Move back and cleanup the temporary tree
1250 shutil.rmtree(tmpdir)
1252 if e.errno != errno.EACCES:
1254 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1256 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1257 # We probably have u-r or u-w directories so chmod everything
1259 cmd = "chmod -R u+rwx %s" % (tmpdir)
1260 result = os.system(cmd)
1262 utils.fubar("'%s' failed with result %s." % (cmd, result))
1263 shutil.rmtree(tmpdir)
1264 except Exception, e:
1265 print "foobar2 (%s)" % e
1266 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1268 ###########################################################################
1269 def ensure_hashes(self):
1270 # Make sure we recognise the format of the Files: field in the .changes
1271 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1272 if len(format) == 2:
1273 format = int(format[0]), int(format[1])
1275 format = int(float(format[0])), 0
1277 # We need to deal with the original changes blob, as the fields we need
1278 # might not be in the changes dict serialised into the .dak anymore.
1279 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1281 # Copy the checksums over to the current changes dict. This will keep
1282 # the existing modifications to it intact.
1283 for field in orig_changes:
1284 if field.startswith('checksums-'):
1285 self.pkg.changes[field] = orig_changes[field]
1287 # Check for unsupported hashes
1288 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1289 self.rejects.append(j)
1291 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1292 self.rejects.append(j)
1294 # We have to calculate the hash ourselves if the changes format predates the
1295 # version in which the hash first appeared, rather than requiring it in the changes file
1296 for hashname, hashfunc, version in utils.known_hashes:
1297 # TODO: Move _ensure_changes_hash into this class
1298 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1299 self.rejects.append(j)
1300 if "source" in self.pkg.changes["architecture"]:
1301 # TODO: Move _ensure_dsc_hash into this class
1302 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1303 self.rejects.append(j)
1305 def check_hashes(self):
1306 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1307 self.rejects.append(m)
1309 for m in utils.check_size(".changes", self.pkg.files):
1310 self.rejects.append(m)
1312 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1313 self.rejects.append(m)
1315 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1316 self.rejects.append(m)
1318 self.ensure_hashes()
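# A note on the Format parsing in ensure_hashes() above: "1.8" becomes the
# tuple (1, 8) and a bare "1" becomes (1, 0), so it can be compared against
# the version recorded for each entry in utils.known_hashes to decide whether
# a checksum field must already be present or has to be computed here.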
1320 ###########################################################################
1322 def ensure_orig(self, target_dir='.', session=None):
1324 Ensures that all orig files mentioned in the .dsc file are present
1325 in target_dir. If they do not exist, they are symlinked into place.
1327 A list containing the symlinks that were created is returned (so they
1334 for filename, entry in self.pkg.dsc_files.iteritems():
1335 if not re_is_orig_source.match(filename):
1336 # File is not an orig; ignore
1339 if os.path.exists(filename):
1340 # File exists, no need to continue
1343 def symlink_if_valid(path):
1344 f = utils.open_file(path)
1345 md5sum = apt_pkg.md5sum(f)
1348 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1349 expected = (int(entry['size']), entry['md5sum'])
1351 if fingerprint != expected:
1354 dest = os.path.join(target_dir, filename)
1356 os.symlink(path, dest)
1357 symlinked.append(dest)
1363 session_ = DBConn().session()
1368 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1369 poolfile_path = os.path.join(
1370 poolfile.location.path, poolfile.filename
1373 if symlink_if_valid(poolfile_path):
1383 # Look in some other queues for the file
1384 queues = ('New', 'Byhand', 'ProposedUpdates',
1385 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1387 for queue in queues:
1388 if not cnf.get('Dir::Queue::%s' % queue):
1391 queuefile_path = os.path.join(
1392 cnf['Dir::Queue::%s' % queue], filename
1395 if not os.path.exists(queuefile_path):
1396 # Does not exist in this queue
1399 if symlink_if_valid(queuefile_path):
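# Illustrative call pattern, assuming the referenced orig tarball already
# lives in the pool or one of the queues; the caller owns the cleanup of any
# symlinks, exactly as check_lintian() does below:
#
#   symlinked = upload.ensure_orig(target_dir='.')
#   ...                           # run tools that need the orig in place
#   for link in symlinked:
#       os.unlink(link)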
1404 ###########################################################################
1406 def check_lintian(self):
1408 Extends self.rejects by checking the output of lintian against tags
1409 specified in Dinstall::LintianTags.
1414 # Don't reject binary uploads
1415 if not self.pkg.changes['architecture'].has_key('source'):
1418 # Only check some distributions
1419 for dist in ('unstable', 'experimental'):
1420 if dist in self.pkg.changes['distribution']:
1425 # If we do not have a tagfile, don't do anything
1426 tagfile = cnf.get("Dinstall::LintianTags")
1430 # Parse the yaml file
1431 sourcefile = file(tagfile, 'r')
1432 sourcecontent = sourcefile.read()
1436 lintiantags = yaml.load(sourcecontent)['lintian']
1437 except yaml.YAMLError, msg:
1438 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1441 # Try and find all origs mentioned in the .dsc
1442 symlinked = self.ensure_orig()
1444 # Setup the input file for lintian
1445 fd, temp_filename = utils.temp_filename()
1446 temptagfile = os.fdopen(fd, 'w')
1447 for tags in lintiantags.values():
1448 temptagfile.writelines(['%s\n' % x for x in tags])
1452 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1453 (temp_filename, self.pkg.changes_file)
1455 result, output = commands.getstatusoutput(cmd)
1457 # Remove our tempfile and any symlinks we created
1458 os.unlink(temp_filename)
1460 for symlink in symlinked:
1464 utils.warn("lintian failed for %s [return code: %s]." % \
1465 (self.pkg.changes_file, result))
1466 utils.warn(utils.prefix_multi_line_string(output, \
1467 " [possible output:] "))
1472 [self.pkg.changes_file, "check_lintian"] + list(txt)
1476 parsed_tags = parse_lintian_output(output)
1477 self.rejects.extend(
1478 generate_reject_messages(parsed_tags, lintiantags, log=log)
1481 ###########################################################################
1482 def check_urgency(self):
1484 if self.pkg.changes["architecture"].has_key("source"):
1485 if not self.pkg.changes.has_key("urgency"):
1486 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1487 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1488 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1489 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1490 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1491 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1493 ###########################################################################
1495 # Sanity check the time stamps of files inside debs.
1496 # [Files in the near future cause ugly warnings and extreme time
1497 # travel can cause errors on extraction]
1499 def check_timestamps(self):
1502 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1503 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1504 tar = TarTime(future_cutoff, past_cutoff)
1506 for filename, entry in self.pkg.files.items():
1507 if entry["type"] == "deb":
1510 deb_file = utils.open_file(filename)
1511 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1514 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1515 except SystemError, e:
1516 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1517 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1520 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1524 future_files = tar.future_files.keys()
1526 num_future_files = len(future_files)
1527 future_file = future_files[0]
1528 future_date = tar.future_files[future_file]
1529 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1530 % (filename, num_future_files, future_file, time.ctime(future_date)))
1532 ancient_files = tar.ancient_files.keys()
1534 num_ancient_files = len(ancient_files)
1535 ancient_file = ancient_files[0]
1536 ancient_date = tar.ancient_files[ancient_file]
1537 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1538 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1540 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1542 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1543 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1545 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1551 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1552 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1553 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1554 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1555 self.pkg.changes["sponsoremail"] = uid_email
1560 ###########################################################################
1561 # check_signed_by_key checks
1562 ###########################################################################
1564 def check_signed_by_key(self):
1565 """Ensure the .changes is signed by an authorized uploader."""
1566 session = DBConn().session()
1568 # First of all we check that the person has proper upload permissions
1569 # and that this upload isn't blocked
1570 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1573 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1576 # TODO: Check that import-keyring adds UIDs properly
1578 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1581 # Check that the fingerprint which uploaded has permission to do so
1582 self.check_upload_permissions(fpr, session)
1584 # Check that this package is not in a transition
1585 self.check_transition(session)
1590 def check_upload_permissions(self, fpr, session):
1591 # Check any one-off upload blocks
1592 self.check_upload_blocks(fpr, session)
1594 # Start with DM, which is unfortunately a special case and has to be checked first
1596 # (keys with no source access get more access than DMs in one
1597 # way; DMs can only upload for their packages whether source
1598 # or binary, whereas keys with no access might be able to
1599 # upload some binaries)
1600 if fpr.source_acl.access_level == 'dm':
1601 self.check_dm_upload(fpr, session)
1603 # Check source-based permissions for other types
1604 if self.pkg.changes["architecture"].has_key("source") and \
1605 fpr.source_acl.access_level is None:
1606 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1607 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1608 self.rejects.append(rej)
1610 # If not a DM, we allow full upload rights
1611 uid_email = "%s@debian.org" % (fpr.uid.uid)
1612 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1615 # Check binary upload permissions
1616 # By this point we know that DMs can't have got here unless they
1617 # are allowed to deal with the package concerned so just apply
1619 if fpr.binary_acl.access_level == 'full':
1622 # Otherwise we're in the map case
1623 tmparches = self.pkg.changes["architecture"].copy()
1624 tmparches.pop('source', None)
1626 for bam in fpr.binary_acl_map:
1627 tmparches.pop(bam.architecture.arch_string, None)
1629 if len(tmparches.keys()) > 0:
1630 if fpr.binary_reject:
1631 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1632 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1633 self.rejects.append(rej)
1635 # TODO: This is where we'll implement reject vs throw away binaries later
1636 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1637 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1638 rej += "\nFingerprint: %s", (fpr.fingerprint)
1639 self.rejects.append(rej)
1642 def check_upload_blocks(self, fpr, session):
1643 """Check whether any upload blocks apply to this source, source
1644 version, uid / fpr combination"""
1646 def block_rej_template(fb):
1647 rej = 'Manual upload block in place for package %s' % fb.source
1648 if fb.version is not None:
1649 rej += ', version %s' % fb.version
1652 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1653 # version is None if the block applies to all versions
1654 if fb.version is None or fb.version == self.pkg.changes['version']:
1655 # Check both fpr and uid - either is enough to cause a reject
1656 if fb.fpr is not None:
1657 if fb.fpr.fingerprint == fpr.fingerprint:
1658 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1659 if fb.uid is not None:
1660 if fb.uid == fpr.uid:
1661 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1664 def check_dm_upload(self, fpr, session):
1665 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1666 ## none of the uploaded packages are NEW
1668 for f in self.pkg.files.keys():
1669 if self.pkg.files[f].has_key("byhand"):
1670 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1672 if self.pkg.files[f].has_key("new"):
1673 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1679 ## the most recent version of the package uploaded to unstable or
1680 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1681 ## section of its control file
1682 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1683 q = q.join(SrcAssociation)
1684 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1685 q = q.order_by(desc('source.version')).limit(1)
1690 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1691 self.rejects.append(rej)
1695 if not r.dm_upload_allowed:
1696 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1697 self.rejects.append(rej)
1700 ## the Maintainer: field of the uploaded .changes file corresponds with
1701 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1703 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1704 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1706 ## the most recent version of the package uploaded to unstable or
1707 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1708 ## non-developer maintainers cannot NMU or hijack packages)
1710 # srcuploaders includes the maintainer
1712 for sup in r.srcuploaders:
1713 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1714 # Eww - I hope we never have two people with the same name in Debian
1715 if email == fpr.uid.uid or name == fpr.uid.name:
1720 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1723 ## none of the packages are being taken over from other source packages
1724 for b in self.pkg.changes["binary"].keys():
1725 for suite in self.pkg.changes["distribution"].keys():
1726 q = session.query(DBSource)
1727 q = q.join(DBBinary).filter_by(package=b)
1728 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1731 if s.source != self.pkg.changes["source"]:
1732 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1736 def check_transition(self, session):
1739 sourcepkg = self.pkg.changes["source"]
1741 # No sourceful upload -> no need to do anything else, direct return
1742 # We also only check unstable uploads, not experimental or those going to some
1743 # proposed-updates queue
1744 if "source" not in self.pkg.changes["architecture"] or \
1745 "unstable" not in self.pkg.changes["distribution"]:
1748 # Also only check if there is a file defined (and existent) with
1750 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1751 if transpath == "" or not os.path.exists(transpath):
1754 # Parse the yaml file
1755 sourcefile = open(transpath, 'r')
1756 sourcecontent = sourcefile.read()
1758 transitions = yaml.load(sourcecontent)
1759 except yaml.YAMLError, msg:
1760 # This shouldn't happen, there is a wrapper to edit the file which
1761 # checks it, but we would rather be safe than end up rejecting
1763 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1766 # Now look through all defined transitions
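# A transitions file is a YAML mapping of transition name to a record
# naming the driving source package, the version we are waiting for in
# testing, the responsible Release-Team member, a human-readable reason
# and the affected packages. An illustrative (hypothetical) entry; field
# names other than those referenced in the code below are assumptions:
#
#   libfoo2:
#     source: libfoo
#     new: 2.0-1
#     rm: Some Releaser
#     reason: "libfoo 2.0 ABI transition"
#     packages:
#       - bar
#       - baz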
1767 for trans in transitions:
1768 t = transitions[trans]
1769 source = t["source"]
1772 # Will be None if nothing is in testing.
1773 current = get_source_in_suite(source, "testing", session)
1774 if current is not None:
1775 compare = apt_pkg.VersionCompare(current.version, expected)
1777 if current is None or compare < 0:
1778 # This is still valid, the current version in testing is older than
1779 # the new version we wait for, or there is none in testing yet
1781 # Check if the source we look at is affected by this.
1782 if sourcepkg in t['packages']:
1783 # The source is affected, lets reject it.
1785 rejectmsg = "%s: part of the %s transition.\n\n" % (
1788 if current is not None:
1789 currentlymsg = "at version %s" % (current.version)
1791 currentlymsg = "not present in testing"
1793 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1795 rejectmsg += "\n".join(textwrap.wrap("""Your package
1796 is part of a testing transition designed to get %s migrated (it is
1797 currently %s, we need version %s). This transition is managed by the
1798 Release Team, and %s is the Release-Team member responsible for it.
1799 Please mail debian-release@lists.debian.org or contact %s directly if you
1800 need further assistance. You might want to upload to experimental until this
1801 transition is done."""
1802 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1804 self.rejects.append(rejectmsg)
1807 ###########################################################################
1808 # End check_signed_by_key checks
1809 ###########################################################################
1811 def build_summaries(self):
1812 """ Build a summary of changes the upload introduces. """
1814 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1816 short_summary = summary
1818 # This is for direport's benefit...
1819 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1822 summary += "Changes: " + f
1824 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1826 summary += self.announce(short_summary, 0)
1828 return (summary, short_summary)
1830 ###########################################################################
1832 def close_bugs(self, summary, action):
1834 Send mail to close bugs as instructed by the closes field in the changes file.
1835 Also add a line to summary if any work was done.
1837 @type summary: string
1838 @param summary: summary text, as given by L{build_summaries}
1841 @param action: If set to false, no real action will be done.
1844 @return: summary. If action was taken, extended by the list of closed bugs.
1848 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1850 bugs = self.pkg.changes["closes"].keys()
1856 summary += "Closing bugs: "
1858 summary += "%s " % (bug)
1861 self.Subst["__BUG_NUMBER__"] = bug
1862 if self.pkg.changes["distribution"].has_key("stable"):
1863 self.Subst["__STABLE_WARNING__"] = """
1864 Note that this package is not part of the released stable Debian
1865 distribution. It may have dependencies on other unreleased software,
1866 or other instabilities. Please take care if you wish to install it.
1867 The update will eventually make its way into the next released Debian
1870 self.Subst["__STABLE_WARNING__"] = ""
1871 mail_message = utils.TemplateSubst(self.Subst, template)
1872 utils.send_mail(mail_message)
1874 # Clear up after ourselves
1875 del self.Subst["__BUG_NUMBER__"]
1876 del self.Subst["__STABLE_WARNING__"]
1878 if action and self.logger:
1879 self.logger.log(["closing bugs"] + bugs)
1885 ###########################################################################
1887 def announce(self, short_summary, action):
1889 Send an announce mail about a new upload.
1891 @type short_summary: string
1892 @param short_summary: Short summary text to include in the mail
1895 @param action: If set to false, no real action will be done.
1898 @return: Text string describing the action taken.
1903 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1905 # Only do announcements for source uploads with a recent dpkg-dev installed
1906 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1907 self.pkg.changes["architecture"].has_key("source"):
1913 self.Subst["__SHORT_SUMMARY__"] = short_summary
1915 for dist in self.pkg.changes["distribution"].keys():
1916 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1917 if announce_list == "" or lists_done.has_key(announce_list):
1920 lists_done[announce_list] = 1
1921 summary += "Announcing to %s\n" % (announce_list)
1925 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
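# If a tracking server (e.g. the PTS) is configured, Bcc it per source
# package so the upload shows up there as well.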
1926 if cnf.get("Dinstall::TrackingServer") and \
1927 self.pkg.changes["architecture"].has_key("source"):
1928 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1929 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1931 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1932 utils.send_mail(mail_message)
1934 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1936 if cnf.FindB("Dinstall::CloseBugs"):
1937 summary = self.close_bugs(summary, action)
1939 del self.Subst["__SHORT_SUMMARY__"]
1943 ###########################################################################
1945 def accept (self, summary, short_summary, session=None):
1949 This moves all files referenced from the .changes into the pool,
1950 sends the accepted mail, announces to lists, closes bugs and
1951 also checks for override disparities. If enabled it will write out
1952 the version history for the BTS Version Tracking and will finally call
1955 @type summary: string
1956 @param summary: Summary text
1958 @type short_summary: string
1959 @param short_summary: Short summary
1963 stats = SummaryStats()
1966 self.logger.log(["installing changes", self.pkg.changes_file])
1970 # Add the .dsc file to the DB first
1971 for newfile, entry in self.pkg.files.items():
1972 if entry["type"] == "dsc":
1973 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1977 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1978 for newfile, entry in self.pkg.files.items():
1979 if entry["type"] == "deb":
1980 poolfiles.append(add_deb_to_db(self, newfile, session))
1982 # If this is a sourceful diff only upload that is moving
1983 # cross-component we need to copy the .orig files into the new
1984 # component too for the same reasons as above.
1985 # XXX: mhy: I think this should be in add_dsc_to_db
1986 if self.pkg.changes["architecture"].has_key("source"):
1987 for orig_file in self.pkg.orig_files.keys():
1988 if not self.pkg.orig_files[orig_file].has_key("id"):
1989 continue # Skip if it's not in the pool
1990 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1991 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1992 continue # Skip if the location didn't change
1995 oldf = get_poolfile_by_id(orig_file_id, session)
1996 old_filename = os.path.join(oldf.location.path, oldf.filename)
1997 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1998 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
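# Reuse the old file's size and checksums: the copy is byte-identical,
# only its pool location changes.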
2000 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2002 # TODO: Care about size/md5sum collisions etc
2003 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2005 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2007 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2008 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2012 # Don't reference the old file from this changes
2014 if p.file_id == oldf.file_id:
2017 poolfiles.append(newf)
2019 # Fix up the DSC references
2022 for df in source.srcfiles:
2023 if df.poolfile.file_id == oldf.file_id:
2024 # Add a new DSC entry and mark the old one for deletion
2025 # Don't do it in the loop so we don't change the thing we're iterating over
2027 newdscf.source_id = source.source_id
2028 newdscf.poolfile_id = newf.file_id
2029 session.add(newdscf)
2039 # Make sure that our source object is up-to-date
2040 session.expire(source)
2042 # Add changelog information to the database
2043 self.store_changelog()
2045 # Install the files into the pool
2046 for newfile, entry in self.pkg.files.items():
2047 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2048 utils.move(newfile, destination)
2049 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2050 stats.accept_bytes += float(entry["size"])
2052 # Copy the .changes file across for suites which need it.
2054 for suite_name in self.pkg.changes["distribution"].keys():
2055 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
2056 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
2058 for dest in copy_changes.keys():
2059 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2061 # We're done - commit the database changes
2063 # Our SQL session will automatically start a new transaction after
2066 # Move the .changes into the 'done' directory
2067 utils.move(self.pkg.changes_file,
2068 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2070 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2071 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2074 self.Subst["__SUMMARY__"] = summary
2075 mail_message = utils.TemplateSubst(self.Subst,
2076 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2077 utils.send_mail(mail_message)
2078 self.announce(short_summary, 1)
2080 ## Helper stuff for DebBugs Version Tracking
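# Two files are dropped here for debbugs: <changes>.versions with the
# changelog version history, and <changes>.debinfo mapping each binary
# to its source package and version.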
2081 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2082 if self.pkg.changes["architecture"].has_key("source"):
2083 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2084 version_history = os.fdopen(fd, 'w')
2085 version_history.write(self.pkg.dsc["bts changelog"])
2086 version_history.close()
2087 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2088 self.pkg.changes_file[:-8]+".versions")
2089 os.rename(temp_filename, filename)
2090 os.chmod(filename, 0644)
2092 # Write out the binary -> source mapping.
2093 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2094 debinfo = os.fdopen(fd, 'w')
2095 for name, entry in sorted(self.pkg.files.items()):
2096 if entry["type"] == "deb":
2097 line = " ".join([entry["package"], entry["version"],
2098 entry["architecture"], entry["source package"],
2099 entry["source version"]])
2100 debinfo.write(line+"\n")
2102 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2103 self.pkg.changes_file[:-8]+".debinfo")
2104 os.rename(temp_filename, filename)
2105 os.chmod(filename, 0644)
2109 # Set up our copy queues (e.g. buildd queues)
2110 for suite_name in self.pkg.changes["distribution"].keys():
2111 suite = get_suite(suite_name, session)
2112 for q in suite.copy_queues:
2114 q.add_file_from_pool(f)
2119 stats.accept_count += 1
2121 def check_override(self):
2123 Checks override entries for validity. Mails "Override disparity" warnings,
2124 if that feature is enabled.
2126 Abandons the check if
2127 - override disparity checks are disabled
2128 - mail sending is disabled
2133 # Abandon the check if override disparity checks have been disabled
2134 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2137 summary = self.pkg.check_override()
2142 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2145 self.Subst["__SUMMARY__"] = summary
2146 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2147 utils.send_mail(mail_message)
2148 del self.Subst["__SUMMARY__"]
2150 ###########################################################################
2152 def remove(self, from_dir=None):
2154 Used (for instance) in p-u to remove the package from unchecked
2156 Also removes the package from the holding area.
2158 if from_dir is None:
2159 from_dir = self.pkg.directory
2162 for f in self.pkg.files.keys():
2163 os.unlink(os.path.join(from_dir, f))
2164 if os.path.exists(os.path.join(h.holding_dir, f)):
2165 os.unlink(os.path.join(h.holding_dir, f))
2167 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2168 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2169 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2171 ###########################################################################
2173 def move_to_queue (self, queue):
2175 Move files to a destination queue using the permissions in the table
2178 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2179 queue.path, perms=int(queue.change_perms, 8))
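# queue.change_perms and queue.perms come from the queue table as octal
# permission strings, hence the int(x, 8) conversions above and below.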
2180 for f in self.pkg.files.keys():
2181 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2183 ###########################################################################
2185 def force_reject(self, reject_files):
2187 Forcefully move files from the current directory to the
2188 reject directory. If any file already exists in the reject
2189 directory it will be moved to the morgue to make way for
2192 @type reject_files: dict
2193 @param reject_files: file dictionary
2199 for file_entry in reject_files:
2200 # Skip any files which don't exist or which we don't have permission to copy.
2201 if not os.access(file_entry, os.R_OK):
2204 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
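# O_CREAT|O_EXCL makes the open fail if the file already exists, so two
# concurrent processes cannot both claim the same reject filename.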
2207 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2209 # File exists? Let's find a new name by adding a number
2210 if e.errno == errno.EEXIST:
2212 dest_file = utils.find_next_free(dest_file, 255)
2213 except NoFreeFilenameError:
2214 # Something's either gone badly Pete Tong, or
2215 # someone is trying to exploit us.
2216 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2219 # Make sure we really got it
2221 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2224 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2228 # If we got here, we own the destination file, so we can
2229 # safely overwrite it.
2230 utils.move(file_entry, dest_file, 1, perms=0660)
2233 ###########################################################################
2234 def do_reject (self, manual=0, reject_message="", notes=""):
2236 Reject an upload. If called without a reject message or C{manual} is
2237 true, spawn an editor so the user can write one.
2240 @param manual: manual or automated rejection
2242 @type reject_message: string
2243 @param reject_message: A reject message
2248 # If we weren't given a manual rejection message, spawn an
2249 # editor so the user can add one in...
2250 if manual and not reject_message:
2251 (fd, temp_filename) = utils.temp_filename()
2252 temp_file = os.fdopen(fd, 'w')
2255 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2256 % (note.author, note.version, note.notedate, note.comment))
2258 editor = os.environ.get("EDITOR","vi")
2260 while answer == 'E':
2261 os.system("%s %s" % (editor, temp_filename))
2262 temp_fh = utils.open_file(temp_filename)
2263 reject_message = "".join(temp_fh.readlines())
2265 print "Reject message:"
2266 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2267 prompt = "[R]eject, Edit, Abandon, Quit ?"
2269 while prompt.find(answer) == -1:
2270 answer = utils.our_raw_input(prompt)
2271 m = re_default_answer.search(prompt)
2274 answer = answer[:1].upper()
2275 os.unlink(temp_filename)
2281 print "Rejecting.\n"
2285 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2286 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2288 # Move all the files into the reject directory
2289 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2290 self.force_reject(reject_files)
2292 # If we fail here someone is probably trying to exploit the race
2293 # so let's just raise an exception ...
2294 if os.path.exists(reason_filename):
2295 os.unlink(reason_filename)
2296 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2298 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2302 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2303 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2304 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2305 os.write(reason_fd, reject_message)
2306 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2308 # Build up the rejection email
2309 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2310 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2311 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2312 self.Subst["__REJECT_MESSAGE__"] = ""
2313 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2314 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2315 # Write the rejection email out as the <foo>.reason file
2316 os.write(reason_fd, reject_mail_message)
2318 del self.Subst["__REJECTOR_ADDRESS__"]
2319 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2320 del self.Subst["__CC__"]
2324 # Send the rejection mail
2325 utils.send_mail(reject_mail_message)
2328 self.logger.log(["rejected", self.pkg.changes_file])
2332 ################################################################################
2333 def in_override_p(self, package, component, suite, binary_type, filename, session):
2335 Check if a package already has override entries in the DB
2337 @type package: string
2338 @param package: package name
2340 @type component: string
2341 @param component: name of the component
2344 @param suite: name of the suite
2346 @type binary_type: string
2347 @param binary_type: type of the package
2349 @type filename: string
2350 @param filename: filename we check
2352 @return: the database result. But no one cares anyway.
2358 if binary_type == "": # must be source
2361 file_type = binary_type
2363 # Override suite name; used for example with proposed-updates
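# (For instance a proposed-updates suite would typically point its
# OverrideSuite at stable, so p-u uploads are checked against the
# stable overrides. Illustrative only; depends on the local dak.conf.)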
2364 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2365 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2367 result = get_override(package, suite, component, file_type, session)
2369 # If checking for a source package fall back on the binary override type
2370 if file_type == "dsc" and len(result) < 1:
2371 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2373 # Remember the section and priority so we can check them later if appropriate
2376 self.pkg.files[filename]["override section"] = result.section.section
2377 self.pkg.files[filename]["override priority"] = result.priority.priority
2382 ################################################################################
2383 def get_anyversion(self, sv_list, suite):
2386 @param sv_list: list of (suite, version) tuples to check
2389 @param suite: suite name
2395 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
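# anysuite covers the suite itself plus any suites it is configured to
# enhance; the loop below keeps the highest version seen in any of them.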
2396 for (s, v) in sv_list:
2397 if s in [ x.lower() for x in anysuite ]:
2398 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2403 ################################################################################
2405 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2408 @param sv_list: list of (suite, version) tuples to check
2410 @type filename: string
2411 @param filename: name of the file being checked (used in reject messages)
2413 @type new_version: string
2414 @param new_version: version of the uploaded package
2416 Ensure versions are newer than existing packages in target
2417 suites and that cross-suite version checking rules as
2418 set out in the conf file are satisfied.
2423 # Check versions for each target suite
2424 for target_suite in self.pkg.changes["distribution"].keys():
2425 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2426 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
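# Illustrative (hypothetical) configuration:
#   Suite::unstable::VersionChecks::MustBeNewerThan { Stable; Testing; };
# would reject a sourceful upload to unstable whose version is not
# strictly greater than what stable or testing already carries.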
2428 # Enforce "must be newer than target suite" even if conffile omits it
2429 if target_suite not in must_be_newer_than:
2430 must_be_newer_than.append(target_suite)
2432 for (suite, existent_version) in sv_list:
2433 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2435 if suite in must_be_newer_than and sourceful and vercmp < 1:
2436 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2438 if suite in must_be_older_than and vercmp > -1:
2441 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2442 # we really use the other suite, ignoring the conflicting one ...
2443 addsuite = self.pkg.changes["distribution-version"][suite]
2445 add_version = self.get_anyversion(sv_list, addsuite)
2446 target_version = self.get_anyversion(sv_list, target_suite)
2449 # not add_version can only happen if we map to a suite
2450 # that doesn't enhance the suite we're propup'ing from,
2451 # so "propup-ver x a b c; map a d" is a problem only if
2452 # d doesn't enhance a.
2454 # I think we could always propagate in this case, rather
2455 # than complaining. Either way, this isn't a REJECT issue.
2457 # And - we really should complain to the dorks who configured dak
2458 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2459 self.pkg.changes.setdefault("propdistribution", {})
2460 self.pkg.changes["propdistribution"][addsuite] = 1
2462 elif not target_version:
2463 # not target_version is true when the package is NEW
2464 # we could just stick with the "...old version..." REJECT
2465 # for this, I think.
2466 self.rejects.append("Won't propagate NEW packages.")
2467 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2468 # Propagation would be redundant; no need to reject though.
2469 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2471 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2472 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2474 self.warnings.append("Propagating upload to %s" % (addsuite))
2475 self.pkg.changes.setdefault("propdistribution", {})
2476 self.pkg.changes["propdistribution"][addsuite] = 1
2480 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2482 ################################################################################
2483 def check_binary_against_db(self, filename, session):
2484 # Ensure version is sane
2485 q = session.query(BinAssociation)
2486 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2487 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
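# Architecture "all" is included since an arch:all binary occupies the
# package name on every architecture.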
2489 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2490 filename, self.pkg.files[filename]["version"], sourceful=False)
2492 # Check for any existing copies of the file
2493 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2494 q = q.filter_by(version=self.pkg.files[filename]["version"])
2495 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2498 self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2500 ################################################################################
2502 def check_source_against_db(self, filename, session):
2503 source = self.pkg.dsc.get("source")
2504 version = self.pkg.dsc.get("version")
2506 # Ensure version is sane
2507 q = session.query(SrcAssociation)
2508 q = q.join(DBSource).filter(DBSource.source==source)
2510 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2511 filename, version, sourceful=True)
2513 ################################################################################
2514 def check_dsc_against_db(self, filename, session):
2517 @warning: NB: this function can remove entries from the 'files' index [if
2518 the orig tarball is a duplicate of the one in the archive]; if
2519 you're iterating over 'files' and call this function as part of
2520 the loop, be sure to add a check to the top of the loop to
2521 ensure you haven't just tried to dereference the deleted entry.
2526 self.pkg.orig_files = {} # XXX: do we need to clear it?
2527 orig_files = self.pkg.orig_files
2529 # Try and find all files mentioned in the .dsc. This has
2530 # to work harder to cope with the multiple possible
2531 # locations of an .orig.tar.gz.
2532 # The ordering on the select is needed to pick the newest orig
2533 # when it exists in multiple places.
2534 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2536 if self.pkg.files.has_key(dsc_name):
2537 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2538 actual_size = int(self.pkg.files[dsc_name]["size"])
2539 found = "%s in incoming" % (dsc_name)
2541 # Check the file does not already exist in the archive
2542 ql = get_poolfile_like_name(dsc_name, session)
2544 # Strip out anything which doesn't end in the filename we're looking for
2546 if not i.filename.endswith(dsc_name):
2549 # "[dak] has not broken them. [dak] has fixed a
2550 # brokenness. Your crappy hack exploited a bug in
2553 # "(Come on! I thought it was always obvious that
2554 # one just doesn't release different files with
2555 # the same name and version.)"
2556 # -- ajk@ on d-devel@l.d.o
2559 # Ignore exact matches for .orig.tar.gz
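# An orig tarball may legitimately already live in the pool from an
# earlier upload; a byte-identical copy is fine (and dropped from the
# upload below), a differing file of the same name is not.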
2561 if re_is_orig_source.match(dsc_name):
2563 if self.pkg.files.has_key(dsc_name) and \
2564 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2565 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2566 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2567 # TODO: Don't delete the entry, just mark it as not needed
2568 # This would fix the stupidity of changing something we often iterate over
2569 # whilst we're doing it
2570 del self.pkg.files[dsc_name]
2571 dsc_entry["files id"] = i.file_id
2572 if not orig_files.has_key(dsc_name):
2573 orig_files[dsc_name] = {}
2574 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2577 # Don't bitch that we couldn't find this file later
2579 self.later_check_files.remove(dsc_name)
2585 self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2587 elif re_is_orig_source.match(dsc_name):
2589 ql = get_poolfile_like_name(dsc_name, session)
2591 # Strip out anything which doesn't end in the filename we're looking for
2592 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2594 if not i.filename.endswith(dsc_name):
2598 # Unfortunately, we may get more than one match here if,
2599 # for example, the package was in potato but had an -sa
2600 # upload in woody. So we need to choose the right one.
2602 # default to something sane in case we don't match any or have only one
2607 old_file = os.path.join(i.location.path, i.filename)
2608 old_file_fh = utils.open_file(old_file)
2609 actual_md5 = apt_pkg.md5sum(old_file_fh)
2611 actual_size = os.stat(old_file)[stat.ST_SIZE]
2612 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2615 old_file = os.path.join(i.location.path, i.filename)
2616 old_file_fh = utils.open_file(old_file)
2617 actual_md5 = apt_pkg.md5sum(old_file_fh)
2619 actual_size = os.stat(old_file)[stat.ST_SIZE]
2621 suite_type = x.location.archive_type
2622 # need this for updating dsc_files in install()
2623 dsc_entry["files id"] = x.file_id
2624 # See install() in process-accepted...
2625 if not orig_files.has_key(dsc_name):
2626 orig_files[dsc_name] = {}
2627 orig_files[dsc_name]["id"] = x.file_id
2628 orig_files[dsc_name]["path"] = old_file
2629 orig_files[dsc_name]["location"] = x.location.location_id
2631 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2632 # Not there? Check the queue directories...
2633 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2634 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2636 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2637 if os.path.exists(in_otherdir):
2638 in_otherdir_fh = utils.open_file(in_otherdir)
2639 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2640 in_otherdir_fh.close()
2641 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2643 if not orig_files.has_key(dsc_name):
2644 orig_files[dsc_name] = {}
2645 orig_files[dsc_name]["path"] = in_otherdir
2648 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2651 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2653 if actual_md5 != dsc_entry["md5sum"]:
2654 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2655 if actual_size != int(dsc_entry["size"]):
2656 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2658 ################################################################################
2659 # This is used by process-new and process-holding to recheck a changes file
2660 # at the time we're running. It mainly wraps various other internal functions
2661 # and is similar to accepted_checks - these should probably be tidied up
2663 def recheck(self, session):
2665 for f in self.pkg.files.keys():
2666 # The .orig.tar.gz can disappear out from under us if it's a
2667 # duplicate of one in the archive.
2668 if not self.pkg.files.has_key(f):
2671 entry = self.pkg.files[f]
2673 # Check that the source still exists
2674 if entry["type"] == "deb":
2675 source_version = entry["source version"]
2676 source_package = entry["source package"]
2677 if not self.pkg.changes["architecture"].has_key("source") \
2678 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2679 source_epochless_version = re_no_epoch.sub('', source_version)
2680 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2682 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2683 if cnf.has_key("Dir::Queue::%s" % (q)):
2684 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2687 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2689 # Version and file overwrite checks
2690 if entry["type"] == "deb":
2691 self.check_binary_against_db(f, session)
2692 elif entry["type"] == "dsc":
2693 self.check_source_against_db(f, session)
2694 self.check_dsc_against_db(f, session)
2696 ################################################################################
2697 def accepted_checks(self, overwrite_checks, session):
2698 # Recheck anything that relies on the database, since that's not
2699 # frozen between accept and our run time when called from p-a.
2701 # overwrite_checks is set to False when installing to stable/oldstable
2706 # Find the .dsc (again)
2708 for f in self.pkg.files.keys():
2709 if self.pkg.files[f]["type"] == "dsc":
2712 for checkfile in self.pkg.files.keys():
2713 # The .orig.tar.gz can disappear out from under us if it's a
2714 # duplicate of one in the archive.
2715 if not self.pkg.files.has_key(checkfile):
2718 entry = self.pkg.files[checkfile]
2720 # Check that the source still exists
2721 if entry["type"] == "deb":
2722 source_version = entry["source version"]
2723 source_package = entry["source package"]
2724 if not self.pkg.changes["architecture"].has_key("source") \
2725 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2726 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2728 # Version and file overwrite checks
2729 if overwrite_checks:
2730 if entry["type"] == "deb":
2731 self.check_binary_against_db(checkfile, session)
2732 elif entry["type"] == "dsc":
2733 self.check_source_against_db(checkfile, session)
2734 self.check_dsc_against_db(dsc_filename, session)
2736 # Propagate in case it is in the override tables:
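# A suite listed in propdistribution (recorded during the cross-suite
# version checks) only becomes a real target if the package already has
# an override entry there; otherwise it is left out.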
2737 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2738 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2739 propogate[suite] = 1
2741 nopropogate[suite] = 1
2743 for suite in propogate.keys():
2744 if suite in nopropogate:
2746 self.pkg.changes["distribution"][suite] = 1
2748 for checkfile in self.pkg.files.keys():
2749 # Check the package is still in the override tables
2750 for suite in self.pkg.changes["distribution"].keys():
2751 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype", ""), checkfile, session):
2752 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2754 ################################################################################
2755 # If any file of an upload has a recent mtime then chances are good
2756 # the file is still being uploaded.
2758 def upload_too_new(self):
2761 # Move back to the original directory to get accurate time stamps
2763 os.chdir(self.pkg.directory)
2764 file_list = self.pkg.files.keys()
2765 file_list.extend(self.pkg.dsc_files.keys())
2766 file_list.append(self.pkg.changes_file)
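# A file modified less than Dinstall::SkipTime seconds ago is taken as
# evidence that the upload is still in progress.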
2769 last_modified = time.time() - os.path.getmtime(f)
2770 if last_modified < int(cnf["Dinstall::SkipTime"]):
2779 def store_changelog(self):
2781 # Skip binary-only upload if it is not a bin-NMU
2782 if not self.pkg.changes['architecture'].has_key('source'):
2783 from daklib.regexes import re_bin_only_nmu
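# (bin-NMU versions carry a "+bN" suffix, e.g. 1.0-1+b1, which is what
# re_bin_only_nmu matches)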
2784 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2787 session = DBConn().session()
2789 # Check if upload already has a changelog entry
2790 query = """SELECT changelog_id FROM changes WHERE source = :source
2791 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2792 if session.execute(query, {'source': self.pkg.changes['source'], \
2793 'version': self.pkg.changes['version'], \
2794 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2798 # Add current changelog text into changelogs_text table, return created ID
2799 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2800 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2802 # Link ID to the upload available in changes table
2803 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2804 AND version = :version AND architecture = :architecture"""
2805 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2806 'version': self.pkg.changes['version'], \
2807 'architecture': " ".join(self.pkg.changes['architecture'].keys())})