5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63     Get the file type of C{f}
66     @param f: file entry from Changes object
68     @type session: SQLA Session
69     @param session: SQL Alchemy session object
# Classification: an explicit "dbtype" recorded on the file entry wins;
# otherwise source files are recognised by extension (re_source_ext).
# NOTE(review): intermediate lines are elided in this dump — the branch
# assigning file_type for the re_source_ext match is not visible here.
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
# Unknown types are fatal: utils.fubar() aborts the program.
82 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
84 # Validate the override type
# The type must also exist in the override_type table, else abort.
85 type_id = get_override_type(file_type, session)
87 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
91 ################################################################################
93 # Determine what parts in a .changes are NEW
95 def determine_new(filename, changes, files, warn=1, session = None):
97 Determine what parts in a C{changes} file are NEW.
100 @param filename: changes filename
102 @type changes: Upload.Pkg.changes dict
103 @param changes: Changes dictionary
105 @type files: Upload.Pkg.files dict
106 @param files: Files dictionary
109 @param warn: Warn if overrides are added for (old)stable
112 @return: dictionary of NEW components.
115 # TODO: This should all use the database instead of parsing the changes
120 dbchg = get_dbchange(filename, session)
122 print "Warning: cannot find changes file in database; won't check byhand"
124 # Build up a list of potentially new things
125 for name, f in files.items():
126 # Keep a record of byhand elements
127 if f["section"] == "byhand":
132 priority = f["priority"]
133 section = f["section"]
134 file_type = get_type(f, session)
135 component = f["component"]
137 if file_type == "dsc":
140 if not new.has_key(pkg):
142 new[pkg]["priority"] = priority
143 new[pkg]["section"] = section
144 new[pkg]["type"] = file_type
145 new[pkg]["component"] = component
146 new[pkg]["files"] = []
148 old_type = new[pkg]["type"]
149 if old_type != file_type:
150 # source gets trumped by deb or udeb
151 if old_type == "dsc":
152 new[pkg]["priority"] = priority
153 new[pkg]["section"] = section
154 new[pkg]["type"] = file_type
155 new[pkg]["component"] = component
157 new[pkg]["files"].append(name)
159 if f.has_key("othercomponents"):
160 new[pkg]["othercomponents"] = f["othercomponents"]
162 # Fix up the list of target suites
164 for suite in changes["suite"].keys():
165 oldsuite = get_suite(suite, session)
167 print "WARNING: Invalid suite %s found" % suite
170 if oldsuite.overridesuite:
171 newsuite = get_suite(oldsuite.overridesuite, session)
174 print "WARNING: overriding suite %s to suite %s" % (
175 suite, oldsuite.overridesuite)
176 del changes["suite"][suite]
177 changes["suite"][oldsuite.overridesuite] = 1
179 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
180 oldsuite.overridesuite, suite)
182 # Check for unprocessed byhand files
183 if dbchg is not None:
184 for b in byhand.keys():
185 # Find the file entry in the database
187 for f in dbchg.files:
190 # If it's processed, we can ignore it
196 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
198 # Check for new stuff
199 for suite in changes["suite"].keys():
200 for pkg in new.keys():
201 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
203 for file_entry in new[pkg]["files"]:
204 if files[file_entry].has_key("new"):
205 del files[file_entry]["new"]
209 for s in ['stable', 'oldstable']:
210 if changes["suite"].has_key(s):
211 print "WARNING: overrides will be added for %s!" % s
212 for pkg in new.keys():
213 if new[pkg].has_key("othercomponents"):
214 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
218 ################################################################################
220 def check_valid(new, session = None):
222 Check if section and priority for NEW packages exist in database.
223 Additionally does sanity checks:
224 - debian-installer packages have to be udeb (or source)
225 - non debian-installer packages can not be udeb
226 - source priority can only be assigned to dsc file types
229 @param new: Dict of new packages with their section, priority and type.
232 for pkg in new.keys():
233 section_name = new[pkg]["section"]
234 priority_name = new[pkg]["priority"]
235 file_type = new[pkg]["type"]
237 section = get_section(section_name, session)
239 new[pkg]["section id"] = -1
241 new[pkg]["section id"] = section.section_id
243 priority = get_priority(priority_name, session)
245 new[pkg]["priority id"] = -1
247 new[pkg]["priority id"] = priority.priority_id
250 di = section_name.find("debian-installer") != -1
252 # If d-i, we must be udeb and vice-versa
253 if (di and file_type not in ("udeb", "dsc")) or \
254 (not di and file_type == "udeb"):
255 new[pkg]["section id"] = -1
257 # If dsc we need to be source and vice-versa
258 if (priority == "source" and file_type != "dsc") or \
259 (priority != "source" and file_type == "dsc"):
260 new[pkg]["priority id"] = -1
262 ###############################################################################
264 # Used by Upload.check_timestamps
265 class TarTime(object):
# Records tar members whose mtime falls outside [past_cutoff, future_cutoff].
# Used by Upload.check_timestamps (per the comment preceding this class).
# future_files / ancient_files map member name -> offending mtime.
266 def __init__(self, future_cutoff, past_cutoff):
268 self.future_cutoff = future_cutoff
269 self.past_cutoff = past_cutoff
272 self.future_files = {}
273 self.ancient_files = {}
# Callback signature matches a tar-walker's per-member hook; only Name
# and MTime are used here.
275 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
276 if MTime > self.future_cutoff:
277 self.future_files[Name] = MTime
278 if MTime < self.past_cutoff:
279 self.ancient_files[Name] = MTime
281 ###############################################################################
283 def prod_maintainer(notes, upload):
# Interactively compose and send a "prod" mail to a package's maintainer:
# seed a temp file with existing notes, open $EDITOR, confirm via prompt,
# then build the mail from the process-new.prod template and send it.
286 # Here we prepare an editor and get them ready to prod...
287 (fd, temp_filename) = utils.temp_filename()
288 temp_file = os.fdopen(fd, 'w')
# NOTE(review): `note` (singular) is not bound on any visible line — the
# elided lines presumably iterate `for note in notes:`; confirm upstream.
290 temp_file.write(note.comment)
292 editor = os.environ.get("EDITOR","vi")
295 os.system("%s %s" % (editor, temp_filename))
296 temp_fh = utils.open_file(temp_filename)
297 prod_message = "".join(temp_fh.readlines())
299 print "Prod message:"
300 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
301 prompt = "[P]rod, Edit, Abandon, Quit ?"
# Loop until the answer matches a letter present in the prompt.
303 while prompt.find(answer) == -1:
304 answer = utils.our_raw_input(prompt)
305 m = re_default_answer.search(prompt)
308 answer = answer[:1].upper()
309 os.unlink(temp_filename)
312 # Otherwise, do the proding...
316 user_email_address = utils.whoami() + " <%s>" % (
317 cnf["Dinstall::MyAdminAddress"])
# Fill the template substitution map and send.
321 Subst["__FROM_ADDRESS__"] = user_email_address
322 Subst["__PROD_MESSAGE__"] = prod_message
323 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
325 prod_mail_message = utils.TemplateSubst(
326 Subst,cnf["Dir::Templates"]+"/process-new.prod")
329 utils.send_mail(prod_mail_message)
331 print "Sent prodding message"
333 ################################################################################
335 def edit_note(note, upload, session):
# Open $EDITOR on a temp file so the operator can write/update a NEW
# comment for this upload, confirm via prompt, then persist a NewComment
# row (package, version, text, author, trainee flag).
336 # Write the current data to a temporary file
337 (fd, temp_filename) = utils.temp_filename()
338 editor = os.environ.get("EDITOR","vi")
341 os.system("%s %s" % (editor, temp_filename))
342 temp_file = utils.open_file(temp_filename)
343 newnote = temp_file.read().rstrip()
346 print utils.prefix_multi_line_string(newnote," ")
347 prompt = "[D]one, Edit, Abandon, Quit ?"
# Loop until the answer matches a letter present in the prompt.
349 while prompt.find(answer) == -1:
350 answer = utils.our_raw_input(prompt)
351 m = re_default_answer.search(prompt)
354 answer = answer[:1].upper()
355 os.unlink(temp_filename)
# Build the database comment object from the upload's changes data.
362 comment = NewComment()
363 comment.package = upload.pkg.changes["source"]
364 comment.version = upload.pkg.changes["version"]
365 comment.comment = newnote
366 comment.author = utils.whoami()
367 comment.trainee = bool(Options["Trainee"])
371 ###############################################################################
373 class Upload(object):
375 Everything that has to do with an upload processed.
383 ###########################################################################
386 """ Reset a number of internal variables."""
388 # Initialize the substitution template map
391 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
392 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
393 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
394 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
400 self.later_check_files = []
404 def package_info(self):
406 Format various messages from this Upload to send to the maintainer.
# Concatenates the rejects, warnings and notes lists into one titled,
# human-readable report string (used e.g. as __REJECT_MESSAGE__).
410 ('Reject Reasons', self.rejects),
411 ('Warnings', self.warnings),
412 ('Notes', self.notes),
416 for title, messages in msgs:
# Each non-empty category is appended as "Title:\n<one message per line>".
418 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
423 ###########################################################################
424 def update_subst(self):
425 """ Set up the per-package template substitution mappings """
429 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
430 if not self.pkg.changes.has_key("architecture") or not \
431 isinstance(self.pkg.changes["architecture"], dict):
432 self.pkg.changes["architecture"] = { "Unknown" : "" }
434 # and maintainer2047 may not exist.
435 if not self.pkg.changes.has_key("maintainer2047"):
436 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
438 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
439 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
440 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
442 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
443 if self.pkg.changes["architecture"].has_key("source") and \
444 self.pkg.changes["changedby822"] != "" and \
445 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
447 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
448 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
449 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
451 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
452 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
453 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
455 # Process policy doesn't set the fingerprint field and I don't want to make it
456 # do it for now as I don't want to have to deal with the case where we accepted
457 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
458 # the meantime so the package will be remarked as rejectable. Urgh.
459 # TODO: Fix this properly
460 if self.pkg.changes.has_key('fingerprint'):
461 session = DBConn().session()
462 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
# NOTE(review): fpr.uid.uid assumes the fingerprint row has an attached
# uid; a keyring entry without one would raise AttributeError — confirm.
463 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
464 if self.pkg.changes.has_key("sponsoremail"):
465 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
# Bcc the package-tracking server for source uploads, if configured.
468 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
469 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
471 # Apply any global override of the Maintainer field
472 if cnf.get("Dinstall::OverrideMaintainer"):
473 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
474 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
476 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
477 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
478 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
479 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
481 ###########################################################################
482 def load_changes(self, filename):
484 Load a changes file and setup a dictionary around it. Also checks for mandatory
487 @type filename: string
488 @param filename: Changes filename, full path.
491 @return: whether the changes file was valid or not. We may want to
492 reject even if this is True (see what gets put in self.rejects).
493 This is simply to prevent us even trying things later which will
494 fail because we couldn't properly parse the file.
497 self.pkg.changes_file = filename
499 # Parse the .changes field into a dictionary
501 self.pkg.changes.update(parse_changes(filename))
502 except CantOpenError:
503 self.rejects.append("%s: can't read file." % (filename))
505 except ParseChangesError, line:
506 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
508 except ChangesUnicodeError:
509 self.rejects.append("%s: changes file not proper utf-8" % (filename))
512 # Parse the Files field from the .changes into another dictionary
514 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
515 except ParseChangesError, line:
516 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
518 except UnknownFormatError, format:
519 self.rejects.append("%s: unknown format '%s'." % (filename, format))
522 # Check for mandatory fields
523 for i in ("distribution", "source", "binary", "architecture",
524 "version", "maintainer", "files", "changes", "description"):
525 if not self.pkg.changes.has_key(i):
526 # Avoid undefined errors later
527 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
530 # Strip a source version in brackets from the source field
531 if re_strip_srcver.search(self.pkg.changes["source"]):
532 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
534 # Ensure the source field is a valid package name.
535 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
536 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
538 # Split multi-value fields into a lower-level dictionary
# Each listed field is replaced by a dict mapping token -> 1 (a set).
539 for i in ("architecture", "distribution", "binary", "closes"):
540 o = self.pkg.changes.get(i, "")
542 del self.pkg.changes[i]
544 self.pkg.changes[i] = {}
547 self.pkg.changes[i][j] = 1
549 # Fix the Maintainer: field to be RFC822/2047 compatible
551 (self.pkg.changes["maintainer822"],
552 self.pkg.changes["maintainer2047"],
553 self.pkg.changes["maintainername"],
554 self.pkg.changes["maintaineremail"]) = \
555 fix_maintainer (self.pkg.changes["maintainer"])
556 except ParseMaintError, msg:
557 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
558 % (filename, self.pkg.changes["maintainer"], msg))
560 # ...likewise for the Changed-By: field if it exists.
562 (self.pkg.changes["changedby822"],
563 self.pkg.changes["changedby2047"],
564 self.pkg.changes["changedbyname"],
565 self.pkg.changes["changedbyemail"]) = \
566 fix_maintainer (self.pkg.changes.get("changed-by", ""))
567 except ParseMaintError, msg:
# On parse failure all changedby* keys are reset to "" so later code
# can rely on them existing.
568 self.pkg.changes["changedby822"] = ""
569 self.pkg.changes["changedby2047"] = ""
570 self.pkg.changes["changedbyname"] = ""
571 self.pkg.changes["changedbyemail"] = ""
573 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
574 % (filename, self.pkg.changes["changed-by"], msg))
576 # Ensure all the values in Closes: are numbers
577 if self.pkg.changes.has_key("closes"):
578 for i in self.pkg.changes["closes"].keys():
579 if re_isanum.match (i) == None:
580 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
582 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
583 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
584 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
586 # Check the .changes is non-empty
587 if not self.pkg.files:
588 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
591 # Changes was syntactically valid even if we'll reject
594 ###########################################################################
596 def check_distributions(self):
597 "Check and map the Distribution field"
601 # Handle suite mappings
# SuiteMappings entries are whitespace-separated config rules; the first
# word is the mapping type, the rest are its arguments.
602 for m in Cnf.ValueList("SuiteMappings"):
605 if mtype == "map" or mtype == "silent-map":
# "map"/"silent-map": unconditionally rename suite `source` to `dest`
# (silent-map suppresses the note).
606 (source, dest) = args[1:3]
607 if self.pkg.changes["distribution"].has_key(source):
608 del self.pkg.changes["distribution"][source]
609 self.pkg.changes["distribution"][dest] = 1
610 if mtype != "silent-map":
611 self.notes.append("Mapping %s to %s." % (source, dest))
612 if self.pkg.changes.has_key("distribution-version"):
613 if self.pkg.changes["distribution-version"].has_key(source):
614 self.pkg.changes["distribution-version"][source]=dest
615 elif mtype == "map-unreleased":
# "map-unreleased": remap only when the upload carries an architecture
# the source suite does not have.
616 (source, dest) = args[1:3]
617 if self.pkg.changes["distribution"].has_key(source):
618 for arch in self.pkg.changes["architecture"].keys():
619 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
620 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
621 del self.pkg.changes["distribution"][source]
622 self.pkg.changes["distribution"][dest] = 1
624 elif mtype == "ignore":
626 if self.pkg.changes["distribution"].has_key(suite):
627 del self.pkg.changes["distribution"][suite]
628 self.warnings.append("Ignoring %s as a target suite." % (suite))
629 elif mtype == "reject":
631 if self.pkg.changes["distribution"].has_key(suite):
632 self.rejects.append("Uploads to %s are not accepted." % (suite))
633 elif mtype == "propup-version":
634 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
636 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
637 if self.pkg.changes["distribution"].has_key(args[1]):
638 self.pkg.changes.setdefault("distribution-version", {})
639 for suite in args[2:]:
640 self.pkg.changes["distribution-version"][suite] = suite
642 # Ensure there is (still) a target distribution
643 if len(self.pkg.changes["distribution"].keys()) < 1:
644 self.rejects.append("No valid distribution remaining.")
646 # Ensure target distributions exist
647 for suite in self.pkg.changes["distribution"].keys():
648 if not Cnf.has_key("Suite::%s" % (suite)):
649 self.rejects.append("Unknown distribution `%s'." % (suite))
651 ###########################################################################
653 def binary_file_checks(self, f, session):
# Validate a .deb/.udeb upload file `f`: parse its control data, check
# mandatory fields, cross-check against the .changes entry, and verify
# filename components and source availability. Appends to self.rejects /
# self.warnings; mutates self.pkg.files[f] in place.
655 entry = self.pkg.files[f]
657 # Extract package control information
658 deb_file = utils.open_file(f)
660 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
662 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
664 # Can't continue, none of the checks on control would work.
667 # Check for mandatory "Description:"
# Re-extract just to probe for Description; a missing key raises and is
# turned into a reject in the (elided) except branch.
670 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
672 self.rejects.append("%s: Missing Description in binary package" % (f))
677 # Check for mandatory fields
678 for field in [ "Package", "Architecture", "Version" ]:
679 if control.Find(field) == None:
681 self.rejects.append("%s: No %s field in control." % (f, field))
684 # Ensure the package name matches the one given in the .changes
685 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
686 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
688 # Validate the package field
689 package = control.Find("Package")
690 if not re_valid_pkg_name.match(package):
691 self.rejects.append("%s: invalid package name '%s'." % (f, package))
693 # Validate the version field
694 version = control.Find("Version")
695 if not re_valid_version.match(version):
696 self.rejects.append("%s: invalid version number '%s'." % (f, version))
698 # Ensure the architecture of the .deb is one we know about.
699 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
700 architecture = control.Find("Architecture")
701 upload_suite = self.pkg.changes["distribution"].keys()[0]
# Accept the arch if either the default suite or the upload's suite
# carries it.
703 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
704 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
705 self.rejects.append("Unknown architecture '%s'." % (architecture))
707 # Ensure the architecture of the .deb is one of the ones
708 # listed in the .changes.
709 if not self.pkg.changes["architecture"].has_key(architecture):
710 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
712 # Sanity-check the Depends field
713 depends = control.Find("Depends")
715 self.rejects.append("%s: Depends field is empty." % (f))
717 # Sanity-check the Provides field
718 provides = control.Find("Provides")
720 provide = re_spacestrip.sub('', provides)
722 self.rejects.append("%s: Provides field is empty." % (f))
723 prov_list = provide.split(",")
724 for prov in prov_list:
725 if not re_valid_pkg_name.match(prov):
726 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
728 # Check the section & priority match those given in the .changes (non-fatal)
729 if control.Find("Section") and entry["section"] != "" \
730 and entry["section"] != control.Find("Section"):
731 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
732 (f, control.Find("Section", ""), entry["section"]))
733 if control.Find("Priority") and entry["priority"] != "" \
734 and entry["priority"] != control.Find("Priority"):
735 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
736 (f, control.Find("Priority", ""), entry["priority"]))
738 entry["package"] = package
739 entry["architecture"] = architecture
740 entry["version"] = version
741 entry["maintainer"] = control.Find("Maintainer", "")
# dbtype is derived from the filename extension, not the control data.
743 if f.endswith(".udeb"):
744 self.pkg.files[f]["dbtype"] = "udeb"
745 elif f.endswith(".deb"):
746 self.pkg.files[f]["dbtype"] = "deb"
748 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
750 entry["source"] = control.Find("Source", entry["package"])
752 # Get the source version
753 source = entry["source"]
# A Source field may embed its version as "name (version)".
756 if source.find("(") != -1:
757 m = re_extract_src_version.match(source)
759 source_version = m.group(2)
761 if not source_version:
762 source_version = self.pkg.files[f]["version"]
764 entry["source package"] = source
765 entry["source version"] = source_version
767 # Ensure the filename matches the contents of the .deb
768 m = re_isadeb.match(f)
771 file_package = m.group(1)
772 if entry["package"] != file_package:
773 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
774 (f, file_package, entry["dbtype"], entry["package"]))
775 epochless_version = re_no_epoch.sub('', control.Find("Version"))
778 file_version = m.group(2)
779 if epochless_version != file_version:
780 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
781 (f, file_version, entry["dbtype"], epochless_version))
784 file_architecture = m.group(3)
785 if entry["architecture"] != file_architecture:
786 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
787 (f, file_architecture, entry["dbtype"], entry["architecture"]))
789 # Check for existent source
790 source_version = entry["source version"]
791 source_package = entry["source package"]
792 if self.pkg.changes["architecture"].has_key("source"):
793 if source_version != self.pkg.changes["version"]:
794 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
795 (source_version, f, self.pkg.changes["version"]))
797 # Check in the SQL database
798 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
799 # Check in one of the other directories
# Fall back to looking for the .dsc on disk in the various queue dirs.
800 source_epochless_version = re_no_epoch.sub('', source_version)
801 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
802 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
804 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
807 dsc_file_exists = False
808 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
809 if cnf.has_key("Dir::Queue::%s" % (myq)):
810 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
811 dsc_file_exists = True
814 if not dsc_file_exists:
815 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
817 # Check the version and for file overwrites
818 self.check_binary_against_db(f, session)
820 # Temporarily disable contents generation until we change the table storage layout
823 #if len(b.rejects) > 0:
824 # for j in b.rejects:
825 # self.rejects.append(j)
827 def source_file_checks(self, f, session):
# Validate a source upload file `f` (.dsc/.tar/.diff): derive package,
# version and type from the filename and cross-check them against the
# .changes. Appends to self.rejects; mutates self.pkg.files[f].
828 entry = self.pkg.files[f]
830 m = re_issource.match(f)
834 entry["package"] = m.group(1)
835 entry["version"] = m.group(2)
836 entry["type"] = m.group(3)
838 # Ensure the source package name matches the Source field in the .changes
839 if self.pkg.changes["source"] != entry["package"]:
840 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
842 # Ensure the source version matches the version in the .changes file
# orig tarballs carry the upstream version (no Debian revision), so they
# are compared against chopversion2 instead of chopversion.
843 if re_is_orig_source.match(f):
844 changes_version = self.pkg.changes["chopversion2"]
846 changes_version = self.pkg.changes["chopversion"]
848 if changes_version != entry["version"]:
849 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
851 # Ensure the .changes lists source in the Architecture field
852 if not self.pkg.changes["architecture"].has_key("source"):
853 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
855 # Check the signature of a .dsc file
856 if entry["type"] == "dsc":
857 # check_signature returns either:
858 # (None, [list, of, rejects]) or (signature, [])
859 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
861 self.rejects.append(j)
863 entry["architecture"] = "source"
865 def per_suite_file_checks(self, f, suite, session):
867 entry = self.pkg.files[f]
870 if entry.has_key("byhand"):
873 # Check we have fields we need to do these checks
875 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
876 if not entry.has_key(m):
877 self.rejects.append("file '%s' does not have field %s set" % (f, m))
883 # Handle component mappings
884 for m in cnf.ValueList("ComponentMappings"):
885 (source, dest) = m.split()
886 if entry["component"] == source:
887 entry["original component"] = source
888 entry["component"] = dest
890 # Ensure the component is valid for the target suite
891 if cnf.has_key("Suite:%s::Components" % (suite)) and \
892 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
893 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
896 # Validate the component
897 if not get_component(entry["component"], session):
898 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
901 # See if the package is NEW
902 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
905 # Validate the priority
906 if entry["priority"].find('/') != -1:
907 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
909 # Determine the location
910 location = cnf["Dir::Pool"]
911 l = get_location(location, entry["component"], session=session)
913 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
914 entry["location id"] = -1
916 entry["location id"] = l.location_id
918 # Check the md5sum & size against existing files (if any)
919 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
921 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
922 entry["size"], entry["md5sum"], entry["location id"])
925 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
926 elif found is False and poolfile is not None:
927 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
930 entry["files id"] = None
932 entry["files id"] = poolfile.file_id
934 # Check for packages that have moved from one component to another
935 entry['suite'] = suite
936 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
938 entry["othercomponents"] = res.fetchone()[0]
940 def check_files(self, action=True):
# Top-level per-upload file validation: copy files to the holding area,
# reject duplicate .changes already known to dak, classify each file as
# byhand / deb / source, and run the type-specific and per-suite checks.
941 file_keys = self.pkg.files.keys()
947 os.chdir(self.pkg.directory)
949 ret = holding.copy_to_holding(f)
951 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
955 # check we already know the changes file
956 # [NB: this check must be done post-suite mapping]
957 base_filename = os.path.basename(self.pkg.changes_file)
959 session = DBConn().session()
962 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
963 # if in the pool or in a queue other than unchecked, reject
964 if (dbc.in_queue is None) \
965 or (dbc.in_queue is not None
966 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
967 self.rejects.append("%s file already known to dak" % base_filename)
# NoResultFound: an unknown .changes is fine — this is the normal path.
968 except NoResultFound, e:
975 for f, entry in self.pkg.files.items():
976 # Ensure the file does not already exist in one of the accepted directories
977 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
978 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
979 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
980 self.rejects.append("%s file already exists in the %s directory." % (f, d))
982 if not re_taint_free.match(f):
983 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
985 # Check the file is readable
986 if os.access(f, os.R_OK) == 0:
987 # When running in -n, copy_to_holding() won't have
988 # generated the reject_message, so we need to.
990 if os.path.exists(f):
991 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
993 # Don't directly reject, mark to check later to deal with orig's
994 # we can find in the pool
995 self.later_check_files.append(f)
996 entry["type"] = "unreadable"
999 # If it's byhand skip remaining checks
1000 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1002 entry["type"] = "byhand"
1004 # Checks for a binary package...
1005 elif re_isadeb.match(f):
1007 entry["type"] = "deb"
1009 # This routine appends to self.rejects/warnings as appropriate
1010 self.binary_file_checks(f, session)
1012 # Checks for a source package...
1013 elif re_issource.match(f):
1016 # This routine appends to self.rejects/warnings as appropriate
1017 self.source_file_checks(f, session)
1019 # Not a binary or source package? Assume byhand...
1022 entry["type"] = "byhand"
1024 # Per-suite file checks
1025 entry["oldfiles"] = {}
1026 for suite in self.pkg.changes["distribution"].keys():
1027 self.per_suite_file_checks(f, suite, session)
1031 # If the .changes file says it has source, it must have source.
1032 if self.pkg.changes["architecture"].has_key("source"):
1034 self.rejects.append("no source found and Architecture line in changes mention source.")
1036 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1037 self.rejects.append("source only uploads are not supported.")
1039 ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid.

        Parses the .dsc, enforces mandatory fields, validates source name,
        version, format, Maintainer and Build-Depends, cross-checks the .dsc
        version against the .changes, and verifies the file list against the
        database.  Appends problems to self.rejects.
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):

        # Locate the single .dsc entry among the upload's files.
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")

        # Parse the .dsc file
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
                # Have apt try to parse them...
                    apt_pkg.ParseSrcDepends(field)
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # Check if we've already processed this file if we have a dbchg object
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        del self.pkg.files[f]
                self.rejects.append("Could not find file %s references in changes" % f)
    def get_changelog_versions(self, source_dir):
        """Extract the source package and (optionally) grab the
        version history out of debian/changelog for the BTS."""

        # Find the .dsc (again)
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                # Orig tarballs already located in the pool carry a "path"
                # entry and are symlinked separately below.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))

        # BTS version tracking is optional; without the config key there is
        # nothing more to do.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        """Extract the source into a temp dir (for changelog/version checks)
        and clean the tree up afterwards, fixing permissions if needed."""
        # Bail out if:
        # a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
            shutil.rmtree(tmpdir)
            # EACCES means dpkg-source left unreadable/unwritable dirs;
            # anything else is fatal.
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1272 ###########################################################################
    def ensure_hashes(self):
        """Validate the checksum fields of the .changes/.dsc, computing any
        hashes the upload's changes-format predates rather than requiring
        them to be present.  Appends problems to self.rejects."""
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict. This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
1309 def check_hashes(self):
1310 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1311 self.rejects.append(m)
1313 for m in utils.check_size(".changes", self.pkg.files):
1314 self.rejects.append(m)
1316 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1317 self.rejects.append(m)
1319 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1320 self.rejects.append(m)
1322 self.ensure_hashes()
1324 ###########################################################################
    def ensure_orig(self, target_dir='.', session=None):
        # Ensure every orig tarball mentioned in the .dsc is present in
        # target_dir, symlinking candidates found in the pool or in other
        # queue directories after verifying size and md5sum.
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        An list containing the symlinks that were created are returned (so they

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore

            if os.path.exists(filename):
                # File exists, no need to continue

            def symlink_if_valid(path):
                # Symlink `path` into target_dir only if its size and md5sum
                # match what the .dsc entry expects.
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

            session_ = DBConn().session()

            # Look for the file in the pool by (partial) name match.
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue

                if symlink_if_valid(queuefile_path):
1408 ###########################################################################
    def check_lintian(self):
        # Run lintian against the .changes with the tag set configured in
        # Dinstall::LintianTags and turn matching tags into rejects.
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
            # NOTE(review): yaml.load() runs on an operator-controlled config
            # file here, but yaml.safe_load would be safer regardless.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])

        cmd = "lintian --show-overrides --tags-from-file %s %s" % \
            (temp_filename, self.pkg.changes_file)

        result, output = commands.getstatusoutput(cmd)

        # Remove our tempfile and any symlinks we created
        os.unlink(temp_filename)

        for symlink in symlinked:

            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

                [self.pkg.changes_file, "check_lintian"] + list(txt)

        # Generate the rejects from the parsed lintian tags.
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
1485 ###########################################################################
    def check_urgency(self):
        """Normalise the Urgency field of a sourceful upload, falling back to
        Urgency::Default (with a warning) for unknown values."""
        # NOTE(review): `cnf` is presumably Config() bound earlier in this
        # method — confirm against the full source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1497 ###########################################################################
1499 # Sanity check the time stamps of files inside debs.
1500 # [Files in the near future cause ugly warnings and extreme time
1501 # travel can cause errors on extraction]
    def check_timestamps(self):
        """Sanity check the time stamps of files inside each deb: files in
        the near future cause ugly warnings and extreme time travel can
        cause errors on extraction.  Appends rejects per offending deb."""
        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                deb_file = utils.open_file(filename)
                apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                except SystemError, e:
                    # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                    if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                # Report one example of each class of offender.
                future_files = tar.future_files.keys()
                    num_future_files = len(future_files)
                    future_file = future_files[0]
                    future_date = tar.future_files[future_file]
                    self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                        % (filename, num_future_files, future_file, time.ctime(future_date)))

                ancient_files = tar.ancient_files.keys()
                    num_ancient_files = len(ancient_files)
                    ancient_file = ancient_files[0]
                    ancient_date = tar.ancient_files[ancient_file]
                    self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                        % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        """Determine whether the signer differs from both the Maintainer and
        the Changed-By person; for sponsored sourceful uploads the sponsor's
        address is recorded in changes["sponsoremail"]."""
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:

        # debian.org addresses may be aliases: a sourceful upload signed by
        # an alias still counts, so only record the sponsor address when the
        # signer matches neither maintainer nor changed-by.
        if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
            sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
            if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                self.pkg.changes["sponsoremail"] = uid_email
1564 ###########################################################################
1565 # check_signed_by_key checks
1566 ###########################################################################
    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])

        # TODO: Check that import-keyring adds UIDs properly
            self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)
1594 def check_upload_permissions(self, fpr, session):
1595 # Check any one-off upload blocks
1596 self.check_upload_blocks(fpr, session)
1598 # Start with DM as a special case
1599 # DM is a special case unfortunately, so we check it first
1600 # (keys with no source access get more access than DMs in one
1601 # way; DMs can only upload for their packages whether source
1602 # or binary, whereas keys with no access might be able to
1603 # upload some binaries)
1604 if fpr.source_acl.access_level == 'dm':
1605 self.check_dm_upload(fpr, session)
1607 # Check source-based permissions for other types
1608 if self.pkg.changes["architecture"].has_key("source") and \
1609 fpr.source_acl.access_level is None:
1610 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1611 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1612 self.rejects.append(rej)
1614 # If not a DM, we allow full upload rights
1615 uid_email = "%s@debian.org" % (fpr.uid.uid)
1616 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1619 # Check binary upload permissions
1620 # By this point we know that DMs can't have got here unless they
1621 # are allowed to deal with the package concerned so just apply
1623 if fpr.binary_acl.access_level == 'full':
1626 # Otherwise we're in the map case
1627 tmparches = self.pkg.changes["architecture"].copy()
1628 tmparches.pop('source', None)
1630 for bam in fpr.binary_acl_map:
1631 tmparches.pop(bam.architecture.arch_string, None)
1633 if len(tmparches.keys()) > 0:
1634 if fpr.binary_reject:
1635 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1636 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1637 self.rejects.append(rej)
1639 # TODO: This is where we'll implement reject vs throw away binaries later
1640 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1641 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1642 rej += "\nFingerprint: %s", (fpr.fingerprint)
1643 self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
        version, uid / fpr combination"""

        def block_rej_template(fb):
            # Build the base rejection message for one UploadBlock row.
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        """Enforce the Debian Maintainer upload restrictions from GR 2007-003:
        no NEW/BYHAND files, DM-Upload-Allowed must be set in the most recent
        unstable/experimental source, the uploader must be listed in
        Maintainer/Uploaders, and no binary hijacks."""
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    def check_transition(self, session):
        """Reject sourceful unstable uploads of packages frozen by an ongoing
        release-team transition (Dinstall::Reject::ReleaseTransitions)."""
        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:

        # Also only check if there is a file defined (and existant) with
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s). This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance. You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected,t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
1811 ###########################################################################
1812 # End check_signed_by_key checks
1813 ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # announce() with action=0 only returns text; it sends nothing here.
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
1834 ###########################################################################
    def close_bugs(self, summary, action):
        # Mail a close message for every bug listed in the Closes field and
        # extend `summary` with the list of bugs touched.  Substitution
        # variables are cleaned out of self.Subst afterwards.
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @param action: Set to false no real action will be done.

        @return: summary. If action was taken, extended by the list of closed bugs.

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        summary += "Closing bugs: "
            summary += "%s " % (bug)

                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)
1889 ###########################################################################
    def announce(self, short_summary, action):
        # Send an announcement mail to each distribution's announce list for
        # sourceful uploads (changes format >= 1.6), optionally Bcc'ing the
        # package tracking server, then delegate bug closing if configured.
        # Returns a text summary of what was (or would be) announced.
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @param action: Set to false no real action will be done.

        @return: Textstring about action taken.

        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            announce_list = suite.announce
            if announce_list == "" or lists_done.has_key(announce_list):

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]
1948 ###########################################################################
# Install an accepted upload into the archive pool and database.
# NOTE(review): this extract is flattened and internally elided (the leading
# per-line numbers are extraction artifacts; several original lines — try/except
# scaffolding, guards, commits — are missing from this view). Comments below
# describe only what the visible statements show.
1950 def accept (self, summary, short_summary, session=None):
1954 This moves all files referenced from the .changes into the pool,
1955 sends the accepted mail, announces to lists, closes bugs and
1956 also checks for override disparities. If enabled it will write out
1957 the version history for the BTS Version Tracking and will finally call
1960 @type summary: string
1961 @param summary: Summary text
1963 @type short_summary: string
1964 @param short_summary: Short summary
# Shared accept statistics accumulator (bytes/count updated below).
1968 stats = SummaryStats()
1971 self.logger.log(["installing changes", self.pkg.changes_file])
# The .dsc goes into the DB first so that source / dsc_component /
# dsc_location_id are bound before the binary and .orig handling below.
1975 # Add the .dsc file to the DB first
1976 for newfile, entry in self.pkg.files.items():
1977 if entry["type"] == "dsc":
1978 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1982 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1983 for newfile, entry in self.pkg.files.items():
1984 if entry["type"] == "deb":
1985 poolfiles.append(add_deb_to_db(self, newfile, session))
1987 # If this is a sourceful diff only upload that is moving
1988 # cross-component we need to copy the .orig files into the new
1989 # component too for the same reasons as above.
1990 # XXX: mhy: I think this should be in add_dsc_to_db
1991 if self.pkg.changes["architecture"].has_key("source"):
1992 for orig_file in self.pkg.orig_files.keys():
1993 if not self.pkg.orig_files[orig_file].has_key("id"):
1994 continue # Skip if it's not in the pool
1995 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1996 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1997 continue # Skip if the location didn't change
# First move the files to the new location: look up the old pool entry,
# compute the new pool path for the destination component, and copy.
2000 oldf = get_poolfile_by_id(orig_file_id, session)
2001 old_filename = os.path.join(oldf.location.path, oldf.filename)
2002 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2003 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2005 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2007 # TODO: Care about size/md5sum collisions etc
2008 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2010 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2012 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2013 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2017 # Don't reference the old file from this changes
# NOTE(review): loop header for 'p' (presumably over pfs/poolfiles) is elided here.
2019 if p.file_id == oldf.file_id:
2022 poolfiles.append(newf)
2024 # Fix up the DSC references
2027 for df in source.srcfiles:
2028 if df.poolfile.file_id == oldf.file_id:
2029 # Add a new DSC entry and mark the old one for deletion
2030 # Don't do it in the loop so we don't change the thing we're iterating over
# NOTE(review): construction of newdscf (DSCFile()) is elided in this view.
2032 newdscf.source_id = source.source_id
2033 newdscf.poolfile_id = newf.file_id
2034 session.add(newdscf)
2044 # Make sure that our source object is up-to-date
2045 session.expire(source)
2047 # Add changelog information to the database
2048 self.store_changelog()
2050 # Install the files into the pool
2051 for newfile, entry in self.pkg.files.items():
2052 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2053 utils.move(newfile, destination)
2054 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2055 stats.accept_bytes += float(entry["size"])
# Suites may request a copy of the .changes file (Suite.copychanges);
# collect the distinct destinations and copy once per destination.
2057 # Copy the .changes file across for suite which need it.
2058 copy_changes = dict([(x.copychanges, '')
2059 for x in session.query(Suite).filter(Suite.suite_name.in_([self.pkg.changes["distribution"].keys()])).all()
2060 if x.copychanges is not None])
2062 for dest in copy_changes.keys():
2063 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2065 # We're done - commit the database changes
2067 # Our SQL session will automatically start a new transaction after
2070 # Move the .changes into the 'done' directory
2071 utils.move(self.pkg.changes_file,
2072 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2074 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2075 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
# Send the "Accepted" mail and announce to the suite lists / close bugs.
2078 self.Subst["__SUMMARY__"] = summary
2079 mail_message = utils.TemplateSubst(self.Subst,
2080 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2081 utils.send_mail(mail_message)
2082 self.announce(short_summary, 1)
2084 ## Helper stuff for DebBugs Version Tracking
2085 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2086 if self.pkg.changes["architecture"].has_key("source"):
# Write the dsc's "bts changelog" to a temp file, then atomically
# rename it to <changes-basename>.versions ([:-8] strips ".changes").
2087 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2088 version_history = os.fdopen(fd, 'w')
2089 version_history.write(self.pkg.dsc["bts changelog"])
2090 version_history.close()
2091 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2092 self.pkg.changes_file[:-8]+".versions")
2093 os.rename(temp_filename, filename)
2094 os.chmod(filename, 0644)
2096 # Write out the binary -> source mapping.
2097 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2098 debinfo = os.fdopen(fd, 'w')
2099 for name, entry in sorted(self.pkg.files.items()):
2100 if entry["type"] == "deb":
2101 line = " ".join([entry["package"], entry["version"],
2102 entry["architecture"], entry["source package"],
2103 entry["source version"]])
2104 debinfo.write(line+"\n")
2106 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2107 self.pkg.changes_file[:-8]+".debinfo")
2108 os.rename(temp_filename, filename)
2109 os.chmod(filename, 0644)
2113 # Set up our copy queues (e.g. buildd queues)
2114 for suite_name in self.pkg.changes["distribution"].keys():
2115 suite = get_suite(suite_name, session)
2116 for q in suite.copy_queues:
# NOTE(review): loop header binding 'f' (presumably over poolfiles) is elided.
2118 q.add_file_from_pool(f)
2123 stats.accept_count += 1
# Mail an "override disparity" warning when the upload's section/priority
# disagree with the override tables. NOTE(review): this view is elided —
# the early 'return' after the FindB() guard and the mail-enabled check
# are among the missing original lines.
2125 def check_override(self):
2127 Checks override entries for validity. Mails "Override disparity" warnings,
2128 if that feature is enabled.
2130 Abandons the check if
2131 - override disparity checks are disabled
2132 - mail sending is disabled
2137 # Abandon the check if override disparity checks have been disabled
2138 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
# Summary of disparities is computed by the Changes object itself.
2141 summary = self.pkg.check_override()
2146 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
# Fill the template, send the warning mail, then drop the temporary
# substitution so later template expansions don't inherit it.
2149 self.Subst["__SUMMARY__"] = summary
2150 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2151 utils.send_mail(mail_message)
2152 del self.Subst["__SUMMARY__"]
2154 ###########################################################################
# Delete the upload's files (and its .changes) from the source directory
# and from the holding area. NOTE(review): the line binding 'h' (presumably
# h = Holding()) is elided from this view — confirm against the full file.
2156 def remove(self, from_dir=None):
2158 Used (for instance) in p-u to remove the package from unchecked
2160 Also removes the package from holding area.
# Default to the directory the .changes was processed from.
2162 if from_dir is None:
2163 from_dir = self.pkg.directory
# Remove each referenced file, plus any copy held in the holding dir.
2166 for f in self.pkg.files.keys():
2167 os.unlink(os.path.join(from_dir, f))
2168 if os.path.exists(os.path.join(h.holding_dir, f)):
2169 os.unlink(os.path.join(h.holding_dir, f))
# Finally the .changes file itself, in both locations.
2171 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2172 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2173 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2175 ###########################################################################
# Move the .changes and all referenced files from the holding area into a
# policy/build queue, applying the queue's configured octal permissions.
# NOTE(review): the binding of 'h' (holding object) is elided from this view.
2177 def move_to_queue (self, queue):
2179 Move files to a destination queue using the permissions in the table
# queue.change_perms / queue.perms are stored as octal strings, hence int(..., 8).
2182 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2183 queue.path, perms=int(queue.change_perms, 8))
2184 for f in self.pkg.files.keys():
2185 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2187 ###########################################################################
# Claim a destination name in Dir::Queue::Reject for each file (O_EXCL to
# win any race), then move the file there. NOTE(review): the try/except
# lines around the two os.open() calls are elided from this view; the
# 'if e.errno'/'except' fragments below belong to that missing scaffolding.
2189 def force_reject(self, reject_files):
2191 Forcefully move files from the current directory to the
2192 reject directory. If any file already exists in the reject
2193 directory it will be moved to the morgue to make way for
2196 @type reject_files: dict
2197 @param reject_files: file dictionary
2203 for file_entry in reject_files:
2204 # Skip any files which don't exist or which we don't have permission to copy.
2205 if os.access(file_entry, os.R_OK) == 0:
2208 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
# O_CREAT|O_EXCL: creation fails with EEXIST if someone already owns the name.
2211 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2213 # File exists? Let's find a new name by adding a number
2214 if e.errno == errno.EEXIST:
2216 dest_file = utils.find_next_free(dest_file, 255)
2217 except NoFreeFilenameError:
2218 # Something's either gone badly Pete Tong, or
2219 # someone is trying to exploit us.
2220 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2223 # Make sure we really got it
2225 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2228 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2232 # If we got here, we own the destination file, so we can
2233 # safely overwrite it.
2234 utils.move(file_entry, dest_file, 1, perms=0660)
2237 ###########################################################################
# Reject an upload: optionally let the operator edit the reject message in
# $EDITOR, move all files to the reject directory, write a <name>.reason
# file and send the rejection mail. NOTE(review): this view is elided —
# loop headers (e.g. over 'notes'), the answer-initialisation, try/except
# scaffolding and several returns are among the missing original lines.
2238 def do_reject (self, manual=0, reject_message="", notes=""):
2240 Reject an upload. If called without a reject message or C{manual} is
2241 true, spawn an editor so the user can write one.
2244 @param manual: manual or automated rejection
2246 @type reject_message: string
2247 @param reject_message: A reject message
2252 # If we weren't given a manual rejection message, spawn an
2253 # editor so the user can add one in...
2254 if manual and not reject_message:
2255 (fd, temp_filename) = utils.temp_filename()
2256 temp_file = os.fdopen(fd, 'w')
# Pre-populate the editor buffer with any existing notes ('note' is bound
# by an elided loop over 'notes').
2259 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2260 % (note.author, note.version, note.notedate, note.comment))
2262 editor = os.environ.get("EDITOR","vi")
# Loop until the operator stops choosing 'E'dit.
2264 while answer == 'E':
2265 os.system("%s %s" % (editor, temp_filename))
2266 temp_fh = utils.open_file(temp_filename)
2267 reject_message = "".join(temp_fh.readlines())
2269 print "Reject message:"
2270 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2271 prompt = "[R]eject, Edit, Abandon, Quit ?"
2273 while prompt.find(answer) == -1:
2274 answer = utils.our_raw_input(prompt)
2275 m = re_default_answer.search(prompt)
2278 answer = answer[:1].upper()
2279 os.unlink(temp_filename)
2285 print "Rejecting.\n"
# [:-8] strips the ".changes" suffix from the changes filename.
2289 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2290 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2292 # Move all the files into the reject directory
2293 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2294 self.force_reject(reject_files)
2296 # If we fail here someone is probably trying to exploit the race
2297 # so let's just raise an exception ...
2298 if os.path.exists(reason_filename):
2299 os.unlink(reason_filename)
2300 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2302 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
# Automatic rejection: canned rejector address, raw reject_message written
# straight to the .reason file. (The manual/automatic branch split is elided.)
2306 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2307 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2308 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2309 os.write(reason_fd, reject_message)
2310 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2312 # Build up the rejection email
2313 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2314 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2315 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2316 self.Subst["__REJECT_MESSAGE__"] = ""
2317 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2318 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2319 # Write the rejection email out as the <foo>.reason file
2320 os.write(reason_fd, reject_mail_message)
# Drop the temporary substitutions so later mails don't inherit them.
2322 del self.Subst["__REJECTOR_ADDRESS__"]
2323 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2324 del self.Subst["__CC__"]
2328 # Send the rejection mail
2329 utils.send_mail(reject_mail_message)
2332 self.logger.log(["rejected", self.pkg.changes_file])
2336 ################################################################################
# Look up this package's override entry, remembering its section/priority on
# the file entry for later disparity checks. NOTE(review): elided view — the
# 'file_type = "dsc"' assignment for the source branch and the final
# 'return result' are among the missing original lines.
2337 def in_override_p(self, package, component, suite, binary_type, filename, session):
2339 Check if a package already has override entries in the DB
2341 @type package: string
2342 @param package: package name
2344 @type component: string
2345 @param component: database id of the component
2348 @param suite: database id of the suite
2350 @type binary_type: string
2351 @param binary_type: type of the package
2353 @type filename: string
2354 @param filename: filename we check
2356 @return: the database result. But noone cares anyway.
# Empty binary_type means a source (.dsc) upload; otherwise use it verbatim.
2362 if binary_type == "": # must be source
2365 file_type = binary_type
2367 # Override suite name; used for example with proposed-updates
2368 oldsuite = get_suite(suite, session)
2369 if oldsuite.overridesuite:
2370 suite = oldsuite.overridesuite
2372 result = get_override(package, suite, component, file_type, session)
2374 # If checking for a source package fall back on the binary override type
2375 if file_type == "dsc" and len(result) < 1:
2376 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2378 # Remember the section and priority so we can check them later if appropriate
# NOTE(review): the guard (presumably 'if len(result) > 0:') and the
# unpacking of 'result' to a single row are elided here.
2381 self.pkg.files[filename]["override section"] = result.section.section
2382 self.pkg.files[filename]["override priority"] = result.priority.priority
2387 ################################################################################
# Return the highest version of the package present in 'suite' or any suite
# that Enhances it (per Suite::<s>::VersionChecks::Enhances). NOTE(review):
# elided view — the initialisation of 'anyversion', the 'anyversion = v'
# assignment and the final return are missing original lines.
2388 def get_anyversion(self, sv_list, suite):
2391 @param sv_list: list of (suite, version) tuples to check
2394 @param suite: suite name
# Candidate suites: the target suite itself plus everything it enhances.
2400 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2401 for (s, v) in sv_list:
2402 if s in [ x.lower() for x in anysuite ]:
# Keep the maximum version seen across the candidate suites.
2403 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2408 ################################################################################
# Enforce per-suite version ordering rules (MustBeNewerThan / MustBeOlderThan)
# for each target suite of this upload, optionally propagating the upload to a
# mapped suite instead of rejecting. Appends to self.rejects / self.warnings.
# NOTE(review): elided view — several 'continue'/'else' lines and blank lines
# from the original are missing; branch nesting must be read accordingly.
2410 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2413 @param sv_list: list of (suite, version) tuples to check
2415 @type filename: string
2416 @param filename: XXX
2418 @type new_version: string
2419 @param new_version: XXX
2421 Ensure versions are newer than existing packages in target
2422 suites and that cross-suite version checking rules as
2423 set out in the conf file are satisfied.
2428 # Check versions for each target suite
2429 for target_suite in self.pkg.changes["distribution"].keys():
2430 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2431 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2433 # Enforce "must be newer than target suite" even if conffile omits it
2434 if target_suite not in must_be_newer_than:
2435 must_be_newer_than.append(target_suite)
2437 for (suite, existent_version) in sv_list:
# vercmp < 0: new < existing; 0: equal; > 0: new > existing.
2438 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
# Sourceful uploads must be strictly newer than anything already there.
2440 if suite in must_be_newer_than and sourceful and vercmp < 1:
2441 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2443 if suite in must_be_older_than and vercmp > -1:
# distribution-version maps a conflicting suite to one we may prop-up to.
2446 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2447 # we really use the other suite, ignoring the conflicting one ...
2448 addsuite = self.pkg.changes["distribution-version"][suite]
2450 add_version = self.get_anyversion(sv_list, addsuite)
2451 target_version = self.get_anyversion(sv_list, target_suite)
2454 # not add_version can only happen if we map to a suite
2455 # that doesn't enhance the suite we're propup'ing from.
2456 # so "propup-ver x a b c; map a d" is a problem only if
2457 # d doesn't enhance a.
2459 # i think we could always propagate in this case, rather
2460 # than complaining. either way, this isn't a REJECT issue
2462 # And - we really should complain to the dorks who configured dak
2463 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2464 self.pkg.changes.setdefault("propdistribution", {})
2465 self.pkg.changes["propdistribution"][addsuite] = 1
2467 elif not target_version:
2468 # not targets_version is true when the package is NEW
2469 # we could just stick with the "...old version..." REJECT
2470 # for this, I think.
2471 self.rejects.append("Won't propogate NEW packages.")
2472 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2473 # propagation would be redundant. no need to reject though.
2474 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2476 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2477 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2479 self.warnings.append("Propogating upload to %s" % (addsuite))
2480 self.pkg.changes.setdefault("propdistribution", {})
2481 self.pkg.changes["propdistribution"][addsuite] = 1
# Fallback: no mapping configured for the conflicting suite — hard reject.
2485 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2487 ################################################################################
# Validate a binary upload against the database: run the cross-suite version
# rules and reject if an identical package/version/arch already exists.
# NOTE(review): elided view — the guard before the final rejects.append
# (presumably 'if q.count() > 0:') is a missing original line.
2488 def check_binary_against_db(self, filename, session):
2489 # Ensure version is sane
# All (suite, version) pairs for this package on its arch or 'all'.
2490 q = session.query(BinAssociation)
2491 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2492 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2494 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2495 filename, self.pkg.files[filename]["version"], sourceful=False)
2497 # Check for any existing copies of the file
2498 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2499 q = q.filter_by(version=self.pkg.files[filename]["version"])
2500 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2503 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2505 ################################################################################
def check_source_against_db(self, filename, session):
    """Run the cross-suite version rules for the source package in the .dsc.

    Collects every (suite, version) pair known to the archive for this
    source and feeds them to cross_suite_version_check(), which appends
    to self.rejects / self.warnings as appropriate.
    """
    src_name = self.pkg.dsc.get("source")
    src_version = self.pkg.dsc.get("version")

    # Ensure version is sane: gather all suite associations for this source.
    assoc_query = session.query(SrcAssociation).join(DBSource).filter(DBSource.source == src_name)
    known_versions = [(assoc.suite.suite_name, assoc.source.version)
                      for assoc in assoc_query.all()]

    self.cross_suite_version_check(known_versions, filename, src_version,
                                   sourceful=True)
2518 ################################################################################
# Cross-check every file listed in the .dsc against incoming, the pool and
# the queue directories, filling self.pkg.orig_files with where each orig
# was found. WARNING (see docstring below): may delete entries from
# self.pkg.files. NOTE(review): heavily elided view — loop headers over
# query results, 'found = None/x' bookkeeping, 'continue' lines and the
# match-count handling are among the missing original lines.
2519 def check_dsc_against_db(self, filename, session):
2522 @warning: NB: this function can remove entries from the 'files' index [if
2523 the orig tarball is a duplicate of the one in the archive]; if
2524 you're iterating over 'files' and call this function as part of
2525 the loop, be sure to add a check to the top of the loop to
2526 ensure you haven't just tried to dereference the deleted entry.
2531 self.pkg.orig_files = {} # XXX: do we need to clear it?
2532 orig_files = self.pkg.orig_files
2534 # Try and find all files mentioned in the .dsc. This has
2535 # to work harder to cope with the multiple possible
2536 # locations of an .orig.tar.gz.
2537 # The ordering on the select is needed to pick the newest orig
2538 # when it exists in multiple places.
2539 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
# Case 1: the file was uploaded with this .changes (present in incoming).
2541 if self.pkg.files.has_key(dsc_name):
2542 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2543 actual_size = int(self.pkg.files[dsc_name]["size"])
2544 found = "%s in incoming" % (dsc_name)
2546 # Check the file does not already exist in the archive
2547 ql = get_poolfile_like_name(dsc_name, session)
2549 # Strip out anything that isn't '%s' or '/%s$'
2551 if not i.filename.endswith(dsc_name):
2554 # "[dak] has not broken them. [dak] has fixed a
2555 # brokenness. Your crappy hack exploited a bug in
2558 # "(Come on! I thought it was always obvious that
2559 # one just doesn't release different files with
2560 # the same name and version.)"
2561 # -- ajk@ on d-devel@l.d.o
2564 # Ignore exact matches for .orig.tar.gz
2566 if re_is_orig_source.match(dsc_name):
# A byte-identical orig already in the pool: reuse the archived copy
# and drop the uploaded duplicate from self.pkg.files.
2568 if self.pkg.files.has_key(dsc_name) and \
2569 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2570 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2571 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2572 # TODO: Don't delete the entry, just mark it as not needed
2573 # This would fix the stupidity of changing something we often iterate over
2574 # whilst we're doing it
2575 del self.pkg.files[dsc_name]
2576 dsc_entry["files id"] = i.file_id
2577 if not orig_files.has_key(dsc_name):
2578 orig_files[dsc_name] = {}
2579 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2582 # Don't bitch that we couldn't find this file later
2584 self.later_check_files.remove(dsc_name)
# Name collision with a different file already archived — hard reject.
2590 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
# Case 2: an orig tarball not in incoming — look for it in the pool.
2592 elif re_is_orig_source.match(dsc_name):
2594 ql = get_poolfile_like_name(dsc_name, session)
2596 # Strip out anything that isn't '%s' or '/%s$'
2597 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2599 if not i.filename.endswith(dsc_name):
2603 # Unfortunately, we may get more than one match here if,
2604 # for example, the package was in potato but had an -sa
2605 # upload in woody. So we need to choose the right one.
2607 # default to something sane in case we don't match any or have only one
# Multiple pool matches: compare md5/size of each candidate against
# the .dsc entry to pick the right file.
2612 old_file = os.path.join(i.location.path, i.filename)
2613 old_file_fh = utils.open_file(old_file)
2614 actual_md5 = apt_pkg.md5sum(old_file_fh)
2616 actual_size = os.stat(old_file)[stat.ST_SIZE]
2617 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
# Recompute checksums for the chosen match 'x' and record its
# pool identity for later use by install().
2620 old_file = os.path.join(i.location.path, i.filename)
2621 old_file_fh = utils.open_file(old_file)
2622 actual_md5 = apt_pkg.md5sum(old_file_fh)
2624 actual_size = os.stat(old_file)[stat.ST_SIZE]
2626 suite_type = x.location.archive_type
2627 # need this for updating dsc_files in install()
2628 dsc_entry["files id"] = x.file_id
2629 # See install() in process-accepted...
2630 if not orig_files.has_key(dsc_name):
2631 orig_files[dsc_name] = {}
2632 orig_files[dsc_name]["id"] = x.file_id
2633 orig_files[dsc_name]["path"] = old_file
2634 orig_files[dsc_name]["location"] = x.location.location_id
2636 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2637 # Not there? Check the queue directories...
2638 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2639 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2641 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2642 if os.path.exists(in_otherdir):
2643 in_otherdir_fh = utils.open_file(in_otherdir)
2644 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2645 in_otherdir_fh.close()
2646 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2648 if not orig_files.has_key(dsc_name):
2649 orig_files[dsc_name] = {}
2650 orig_files[dsc_name]["path"] = in_otherdir
# Exhausted pool and queues without a match — reject.
2653 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
# Case 3: a non-orig .dsc file (e.g. .diff.gz) missing from the upload.
2656 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
# Finally verify the found file's checksum and size against the .dsc.
2658 if actual_md5 != dsc_entry["md5sum"]:
2659 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2660 if actual_size != int(dsc_entry["size"]):
2661 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2663 ################################################################################
2664 # This is used by process-new and process-holding to recheck a changes file
2665 # at the time we're running. It mainly wraps various other internal functions
2666 # and is similar to accepted_checks - these should probably be tidied up
# Re-run the database-dependent checks for a changes file at processing time
# (used by process-new / process-holding). NOTE(review): elided view —
# 'cnf = Config()' and the 'continue'/break bookkeeping for the queue-dir
# search are among the missing original lines.
2668 def recheck(self, session):
2670 for f in self.pkg.files.keys():
2671 # The .orig.tar.gz can disappear out from under us if it's a
2672 # duplicate of one in the archive.
2673 if not self.pkg.files.has_key(f):
2676 entry = self.pkg.files[f]
2678 # Check that the source still exists
2679 if entry["type"] == "deb":
2680 source_version = entry["source version"]
2681 source_package = entry["source package"]
2682 if not self.pkg.changes["architecture"].has_key("source") \
2683 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
# Binary-only upload whose source isn't in the DB: also accept the
# source if its .dsc is still sitting in one of these queues.
2684 source_epochless_version = re_no_epoch.sub('', source_version)
2685 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2687 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2688 if cnf.has_key("Dir::Queue::%s" % (q)):
2689 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2692 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2694 # Version and file overwrite checks
2695 if entry["type"] == "deb":
2696 self.check_binary_against_db(f, session)
2697 elif entry["type"] == "dsc":
2698 self.check_source_against_db(f, session)
2699 self.check_dsc_against_db(f, session)
2701 ################################################################################
# Re-run DB-dependent checks at install time and handle suite propagation via
# the override tables. NOTE(review): elided view — initialisation of
# 'propogate'/'nopropogate', 'continue' lines, and the else-branch pairing
# for in_override_p are among the missing original lines. Also note for
# review: the final loop reads 'entry' left over from the previous loop
# rather than re-binding it per checkfile — confirm against the full file.
2702 def accepted_checks(self, overwrite_checks, session):
2703 # Recheck anything that relies on the database; since that's not
2704 # frozen between accept and our run time when called from p-a.
2706 # overwrite_checks is set to False when installing to stable/oldstable
2711 # Find the .dsc (again)
2713 for f in self.pkg.files.keys():
2714 if self.pkg.files[f]["type"] == "dsc":
2717 for checkfile in self.pkg.files.keys():
2718 # The .orig.tar.gz can disappear out from under us if it's a
2719 # duplicate of one in the archive.
2720 if not self.pkg.files.has_key(checkfile):
2723 entry = self.pkg.files[checkfile]
2725 # Check that the source still exists
2726 if entry["type"] == "deb":
2727 source_version = entry["source version"]
2728 source_package = entry["source package"]
2729 if not self.pkg.changes["architecture"].has_key("source") \
2730 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2731 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2733 # Version and file overwrite checks
2734 if overwrite_checks:
2735 if entry["type"] == "deb":
2736 self.check_binary_against_db(checkfile, session)
2737 elif entry["type"] == "dsc":
2738 self.check_source_against_db(checkfile, session)
# Uses the dsc_filename found by the "Find the .dsc (again)" loop above.
2739 self.check_dsc_against_db(dsc_filename, session)
2741 # propagate in the case it is in the override tables:
2742 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2743 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2744 propogate[suite] = 1
2746 nopropogate[suite] = 1
# Only propagate to suites where no file voted against it.
2748 for suite in propogate.keys():
2749 if suite in nopropogate:
2751 self.pkg.changes["distribution"][suite] = 1
2753 for checkfile in self.pkg.files.keys():
2754 # Check the package is still in the override tables
2755 for suite in self.pkg.changes["distribution"].keys():
2756 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2757 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2759 ################################################################################
2760 # If any file of an upload has a recent mtime then chances are good
2761 # the file is still being uploaded.
# Heuristic: if any file of the upload was modified within Dinstall::SkipTime
# seconds, assume the upload is still in progress. NOTE(review): elided view —
# the result flag initialisation/assignment, the 'for f in file_list:' header,
# the try/except around getmtime, the chdir back and the return are missing
# original lines.
2763 def upload_too_new(self):
2766 # Move back to the original directory to get accurate time stamps
2768 os.chdir(self.pkg.directory)
# Consider every file involved: payload files, .dsc-referenced files,
# and the .changes itself.
2769 file_list = self.pkg.files.keys()
2770 file_list.extend(self.pkg.dsc_files.keys())
2771 file_list.append(self.pkg.changes_file)
2774 last_modified = time.time()-os.path.getmtime(f)
2775 if last_modified < int(cnf["Dinstall::SkipTime"]):
2784 def store_changelog(self):
2786 # Skip binary-only upload if it is not a bin-NMU
2787 if not self.pkg.changes['architecture'].has_key('source'):
2788 from daklib.regexes import re_bin_only_nmu
2789 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2792 session = DBConn().session()
2794 # Check if upload already has a changelog entry
2795 query = """SELECT changelog_id FROM changes WHERE source = :source
2796 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2797 if session.execute(query, {'source': self.pkg.changes['source'], \
2798 'version': self.pkg.changes['version'], \
2799 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2803 # Add current changelog text into changelogs_text table, return created ID
2804 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2805 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2807 # Link ID to the upload available in changes table
2808 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2809 AND version = :version AND architecture = :architecture"""
2810 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2811 'version': self.pkg.changes['version'], \
2812 'architecture': " ".join(self.pkg.changes['architecture'].keys())})