5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
82 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
84 # Validate the override type
85 type_id = get_override_type(file_type, session)
87 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
91 ################################################################################
93 # Determine what parts in a .changes are NEW
95 def determine_new(filename, changes, files, warn=1, session = None):
97 Determine what parts in a C{changes} file are NEW.
100 @param filename: changes filename
102 @type changes: Upload.Pkg.changes dict
103 @param changes: Changes dictionary
105 @type files: Upload.Pkg.files dict
106 @param files: Files dictionary
109 @param warn: Warn if overrides are added for (old)stable
112 @return: dictionary of NEW components.
115 # TODO: This should all use the database instead of parsing the changes
120 dbchg = get_dbchange(filename, session)
122 print "Warning: cannot find changes file in database; won't check byhand"
124 # Build up a list of potentially new things
125 for name, f in files.items():
126 # Keep a record of byhand elements
127 if f["section"] == "byhand":
132 priority = f["priority"]
133 section = f["section"]
134 file_type = get_type(f, session)
135 component = f["component"]
137 if file_type == "dsc":
140 if not new.has_key(pkg):
142 new[pkg]["priority"] = priority
143 new[pkg]["section"] = section
144 new[pkg]["type"] = file_type
145 new[pkg]["component"] = component
146 new[pkg]["files"] = []
148 old_type = new[pkg]["type"]
149 if old_type != file_type:
150 # source gets trumped by deb or udeb
151 if old_type == "dsc":
152 new[pkg]["priority"] = priority
153 new[pkg]["section"] = section
154 new[pkg]["type"] = file_type
155 new[pkg]["component"] = component
157 new[pkg]["files"].append(name)
159 if f.has_key("othercomponents"):
160 new[pkg]["othercomponents"] = f["othercomponents"]
162 # Fix up the list of target suites
164 for suite in changes["suite"].keys():
165 oldsuite = get_suite(suite, session)
167 print "WARNING: Invalid suite %s found" % suite
170 if oldsuite.overridesuite:
171 newsuite = get_suite(oldsuite.overridesuite, session)
174 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
175 oldsuite.overridesuite, suite)
176 del changes["suite"][suite]
177 changes["suite"][oldsuite.overridesuite] = 1
179 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
180 oldsuite.overridesuite, suite)
182 # Check for unprocessed byhand files
183 if dbchg is not None:
184 for b in byhand.keys():
185 # Find the file entry in the database
187 for f in dbchg.files:
190 # If it's processed, we can ignore it
196 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
198 # Check for new stuff
199 for suite in changes["suite"].keys():
200 for pkg in new.keys():
201 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
203 for file_entry in new[pkg]["files"]:
204 if files[file_entry].has_key("new"):
205 del files[file_entry]["new"]
209 for s in ['stable', 'oldstable']:
210 if changes["suite"].has_key(s):
211 print "WARNING: overrides will be added for %s!" % s
212 for pkg in new.keys():
213 if new[pkg].has_key("othercomponents"):
214 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
218 ################################################################################
220 def check_valid(new, session = None):
222 Check if section and priority for NEW packages exist in database.
223 Additionally does sanity checks:
224 - debian-installer packages have to be udeb (or source)
225 - non debian-installer packages can not be udeb
226 - source priority can only be assigned to dsc file types
229 @param new: Dict of new packages with their section, priority and type.
232 for pkg in new.keys():
233 section_name = new[pkg]["section"]
234 priority_name = new[pkg]["priority"]
235 file_type = new[pkg]["type"]
237 section = get_section(section_name, session)
239 new[pkg]["section id"] = -1
241 new[pkg]["section id"] = section.section_id
243 priority = get_priority(priority_name, session)
245 new[pkg]["priority id"] = -1
247 new[pkg]["priority id"] = priority.priority_id
250 di = section_name.find("debian-installer") != -1
252 # If d-i, we must be udeb and vice-versa
253 if (di and file_type not in ("udeb", "dsc")) or \
254 (not di and file_type == "udeb"):
255 new[pkg]["section id"] = -1
257 # If dsc we need to be source and vice-versa
258 if (priority == "source" and file_type != "dsc") or \
259 (priority != "source" and file_type == "dsc"):
260 new[pkg]["priority id"] = -1
262 ###############################################################################
264 # Used by Upload.check_timestamps
265 class TarTime(object):
266 def __init__(self, future_cutoff, past_cutoff):
268 self.future_cutoff = future_cutoff
269 self.past_cutoff = past_cutoff
272 self.future_files = {}
273 self.ancient_files = {}
275 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
276 if MTime > self.future_cutoff:
277 self.future_files[Name] = MTime
278 if MTime < self.past_cutoff:
279 self.ancient_files[Name] = MTime
281 ###############################################################################
283 def prod_maintainer(notes, upload):
286 # Here we prepare an editor and get them ready to prod...
287 (fd, temp_filename) = utils.temp_filename()
288 temp_file = os.fdopen(fd, 'w')
290 temp_file.write(note.comment)
292 editor = os.environ.get("EDITOR","vi")
295 os.system("%s %s" % (editor, temp_filename))
296 temp_fh = utils.open_file(temp_filename)
297 prod_message = "".join(temp_fh.readlines())
299 print "Prod message:"
300 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
301 prompt = "[P]rod, Edit, Abandon, Quit ?"
303 while prompt.find(answer) == -1:
304 answer = utils.our_raw_input(prompt)
305 m = re_default_answer.search(prompt)
308 answer = answer[:1].upper()
309 os.unlink(temp_filename)
315 # Otherwise, do the proding...
316 user_email_address = utils.whoami() + " <%s>" % (
317 cnf["Dinstall::MyAdminAddress"])
321 Subst["__FROM_ADDRESS__"] = user_email_address
322 Subst["__PROD_MESSAGE__"] = prod_message
323 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
325 prod_mail_message = utils.TemplateSubst(
326 Subst,cnf["Dir::Templates"]+"/process-new.prod")
329 utils.send_mail(prod_mail_message)
331 print "Sent prodding message"
333 ################################################################################
335 def edit_note(note, upload, session, trainee=False):
336 # Write the current data to a temporary file
337 (fd, temp_filename) = utils.temp_filename()
338 editor = os.environ.get("EDITOR","vi")
341 os.system("%s %s" % (editor, temp_filename))
342 temp_file = utils.open_file(temp_filename)
343 newnote = temp_file.read().rstrip()
346 print utils.prefix_multi_line_string(newnote," ")
347 prompt = "[D]one, Edit, Abandon, Quit ?"
349 while prompt.find(answer) == -1:
350 answer = utils.our_raw_input(prompt)
351 m = re_default_answer.search(prompt)
354 answer = answer[:1].upper()
355 os.unlink(temp_filename)
362 comment = NewComment()
363 comment.package = upload.pkg.changes["source"]
364 comment.version = upload.pkg.changes["version"]
365 comment.comment = newnote
366 comment.author = utils.whoami()
367 comment.trainee = trainee
371 ###############################################################################
373 class Upload(object):
375 Everything that has to do with an upload processed.
383 ###########################################################################
386 """ Reset a number of internal variables."""
388 # Initialize the substitution template map
391 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
392 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
393 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
394 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
400 self.later_check_files = []
404 def package_info(self):
406 Format various messages from this Upload to send to the maintainer.
410 ('Reject Reasons', self.rejects),
411 ('Warnings', self.warnings),
412 ('Notes', self.notes),
416 for title, messages in msgs:
418 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
423 ###########################################################################
424 def update_subst(self):
425 """ Set up the per-package template substitution mappings """
429 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
430 if not self.pkg.changes.has_key("architecture") or not \
431 isinstance(self.pkg.changes["architecture"], dict):
432 self.pkg.changes["architecture"] = { "Unknown" : "" }
434 # and maintainer2047 may not exist.
435 if not self.pkg.changes.has_key("maintainer2047"):
436 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
438 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
439 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
440 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
442 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
443 if self.pkg.changes["architecture"].has_key("source") and \
444 self.pkg.changes["changedby822"] != "" and \
445 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
447 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
448 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
449 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
451 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
452 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
453 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
455 # Process policy doesn't set the fingerprint field and I don't want to make it
456 # do it for now as I don't want to have to deal with the case where we accepted
457 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
458 # the meantime so the package will be remarked as rejectable. Urgh.
459 # TODO: Fix this properly
460 if self.pkg.changes.has_key('fingerprint'):
461 session = DBConn().session()
462 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
463 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
464 if self.pkg.changes.has_key("sponsoremail"):
465 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
468 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
469 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
471 # Apply any global override of the Maintainer field
472 if cnf.get("Dinstall::OverrideMaintainer"):
473 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
474 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
476 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
477 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
478 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
479 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
481 ###########################################################################
482 def load_changes(self, filename):
484 Load a changes file and setup a dictionary around it. Also checks for mandantory
487 @type filename: string
488 @param filename: Changes filename, full path.
491 @return: whether the changes file was valid or not. We may want to
492 reject even if this is True (see what gets put in self.rejects).
493 This is simply to prevent us even trying things later which will
494 fail because we couldn't properly parse the file.
497 self.pkg.changes_file = filename
499 # Parse the .changes field into a dictionary
501 self.pkg.changes.update(parse_changes(filename))
502 except CantOpenError:
503 self.rejects.append("%s: can't read file." % (filename))
505 except ParseChangesError, line:
506 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
508 except ChangesUnicodeError:
509 self.rejects.append("%s: changes file not proper utf-8" % (filename))
512 # Parse the Files field from the .changes into another dictionary
514 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
515 except ParseChangesError, line:
516 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
518 except UnknownFormatError, format:
519 self.rejects.append("%s: unknown format '%s'." % (filename, format))
522 # Check for mandatory fields
523 for i in ("distribution", "source", "binary", "architecture",
524 "version", "maintainer", "files", "changes", "description"):
525 if not self.pkg.changes.has_key(i):
526 # Avoid undefined errors later
527 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
530 # Strip a source version in brackets from the source field
531 if re_strip_srcver.search(self.pkg.changes["source"]):
532 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
534 # Ensure the source field is a valid package name.
535 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
536 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
538 # Split multi-value fields into a lower-level dictionary
539 for i in ("architecture", "distribution", "binary", "closes"):
540 o = self.pkg.changes.get(i, "")
542 del self.pkg.changes[i]
544 self.pkg.changes[i] = {}
547 self.pkg.changes[i][j] = 1
549 # Fix the Maintainer: field to be RFC822/2047 compatible
551 (self.pkg.changes["maintainer822"],
552 self.pkg.changes["maintainer2047"],
553 self.pkg.changes["maintainername"],
554 self.pkg.changes["maintaineremail"]) = \
555 fix_maintainer (self.pkg.changes["maintainer"])
556 except ParseMaintError, msg:
557 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
558 % (filename, self.pkg.changes["maintainer"], msg))
560 # ...likewise for the Changed-By: field if it exists.
562 (self.pkg.changes["changedby822"],
563 self.pkg.changes["changedby2047"],
564 self.pkg.changes["changedbyname"],
565 self.pkg.changes["changedbyemail"]) = \
566 fix_maintainer (self.pkg.changes.get("changed-by", ""))
567 except ParseMaintError, msg:
568 self.pkg.changes["changedby822"] = ""
569 self.pkg.changes["changedby2047"] = ""
570 self.pkg.changes["changedbyname"] = ""
571 self.pkg.changes["changedbyemail"] = ""
573 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
574 % (filename, self.pkg.changes["changed-by"], msg))
576 # Ensure all the values in Closes: are numbers
577 if self.pkg.changes.has_key("closes"):
578 for i in self.pkg.changes["closes"].keys():
579 if re_isanum.match (i) == None:
580 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
582 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
583 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
584 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
586 # Check the .changes is non-empty
587 if not self.pkg.files:
588 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
591 # Changes was syntactically valid even if we'll reject
594 ###########################################################################
596 def check_distributions(self):
597 "Check and map the Distribution field"
601 # Handle suite mappings
602 for m in Cnf.ValueList("SuiteMappings"):
605 if mtype == "map" or mtype == "silent-map":
606 (source, dest) = args[1:3]
607 if self.pkg.changes["distribution"].has_key(source):
608 del self.pkg.changes["distribution"][source]
609 self.pkg.changes["distribution"][dest] = 1
610 if mtype != "silent-map":
611 self.notes.append("Mapping %s to %s." % (source, dest))
612 if self.pkg.changes.has_key("distribution-version"):
613 if self.pkg.changes["distribution-version"].has_key(source):
614 self.pkg.changes["distribution-version"][source]=dest
615 elif mtype == "map-unreleased":
616 (source, dest) = args[1:3]
617 if self.pkg.changes["distribution"].has_key(source):
618 for arch in self.pkg.changes["architecture"].keys():
619 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
620 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
621 del self.pkg.changes["distribution"][source]
622 self.pkg.changes["distribution"][dest] = 1
624 elif mtype == "ignore":
626 if self.pkg.changes["distribution"].has_key(suite):
627 del self.pkg.changes["distribution"][suite]
628 self.warnings.append("Ignoring %s as a target suite." % (suite))
629 elif mtype == "reject":
631 if self.pkg.changes["distribution"].has_key(suite):
632 self.rejects.append("Uploads to %s are not accepted." % (suite))
633 elif mtype == "propup-version":
634 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
636 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
637 if self.pkg.changes["distribution"].has_key(args[1]):
638 self.pkg.changes.setdefault("distribution-version", {})
639 for suite in args[2:]:
640 self.pkg.changes["distribution-version"][suite] = suite
642 # Ensure there is (still) a target distribution
643 if len(self.pkg.changes["distribution"].keys()) < 1:
644 self.rejects.append("No valid distribution remaining.")
646 # Ensure target distributions exist
647 for suite in self.pkg.changes["distribution"].keys():
648 if not Cnf.has_key("Suite::%s" % (suite)):
649 self.rejects.append("Unknown distribution `%s'." % (suite))
651 ###########################################################################
653 def binary_file_checks(self, f, session):
655 entry = self.pkg.files[f]
657 # Extract package control information
658 deb_file = utils.open_file(f)
660 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
662 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
664 # Can't continue, none of the checks on control would work.
667 # Check for mandantory "Description:"
670 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
672 self.rejects.append("%s: Missing Description in binary package" % (f))
677 # Check for mandatory fields
678 for field in [ "Package", "Architecture", "Version" ]:
679 if control.Find(field) == None:
681 self.rejects.append("%s: No %s field in control." % (f, field))
684 # Ensure the package name matches the one give in the .changes
685 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
686 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
688 # Validate the package field
689 package = control.Find("Package")
690 if not re_valid_pkg_name.match(package):
691 self.rejects.append("%s: invalid package name '%s'." % (f, package))
693 # Validate the version field
694 version = control.Find("Version")
695 if not re_valid_version.match(version):
696 self.rejects.append("%s: invalid version number '%s'." % (f, version))
698 # Ensure the architecture of the .deb is one we know about.
699 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
700 architecture = control.Find("Architecture")
701 upload_suite = self.pkg.changes["distribution"].keys()[0]
703 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
704 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
705 self.rejects.append("Unknown architecture '%s'." % (architecture))
707 # Ensure the architecture of the .deb is one of the ones
708 # listed in the .changes.
709 if not self.pkg.changes["architecture"].has_key(architecture):
710 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
712 # Sanity-check the Depends field
713 depends = control.Find("Depends")
715 self.rejects.append("%s: Depends field is empty." % (f))
717 # Sanity-check the Provides field
718 provides = control.Find("Provides")
720 provide = re_spacestrip.sub('', provides)
722 self.rejects.append("%s: Provides field is empty." % (f))
723 prov_list = provide.split(",")
724 for prov in prov_list:
725 if not re_valid_pkg_name.match(prov):
726 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
728 # Check the section & priority match those given in the .changes (non-fatal)
729 if control.Find("Section") and entry["section"] != "" \
730 and entry["section"] != control.Find("Section"):
731 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
732 (f, control.Find("Section", ""), entry["section"]))
733 if control.Find("Priority") and entry["priority"] != "" \
734 and entry["priority"] != control.Find("Priority"):
735 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
736 (f, control.Find("Priority", ""), entry["priority"]))
738 entry["package"] = package
739 entry["architecture"] = architecture
740 entry["version"] = version
741 entry["maintainer"] = control.Find("Maintainer", "")
743 if f.endswith(".udeb"):
744 self.pkg.files[f]["dbtype"] = "udeb"
745 elif f.endswith(".deb"):
746 self.pkg.files[f]["dbtype"] = "deb"
748 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
750 entry["source"] = control.Find("Source", entry["package"])
752 # Get the source version
753 source = entry["source"]
756 if source.find("(") != -1:
757 m = re_extract_src_version.match(source)
759 source_version = m.group(2)
761 if not source_version:
762 source_version = self.pkg.files[f]["version"]
764 entry["source package"] = source
765 entry["source version"] = source_version
767 # Ensure the filename matches the contents of the .deb
768 m = re_isadeb.match(f)
771 file_package = m.group(1)
772 if entry["package"] != file_package:
773 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
774 (f, file_package, entry["dbtype"], entry["package"]))
775 epochless_version = re_no_epoch.sub('', control.Find("Version"))
778 file_version = m.group(2)
779 if epochless_version != file_version:
780 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
781 (f, file_version, entry["dbtype"], epochless_version))
784 file_architecture = m.group(3)
785 if entry["architecture"] != file_architecture:
786 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
787 (f, file_architecture, entry["dbtype"], entry["architecture"]))
789 # Check for existent source
790 source_version = entry["source version"]
791 source_package = entry["source package"]
792 if self.pkg.changes["architecture"].has_key("source"):
793 if source_version != self.pkg.changes["version"]:
794 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
795 (source_version, f, self.pkg.changes["version"]))
797 # Check in the SQL database
798 if not source_exists(source_package, source_version, suites = \
799 self.pkg.changes["distribution"].keys(), session = session):
800 # Check in one of the other directories
801 source_epochless_version = re_no_epoch.sub('', source_version)
802 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
803 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
805 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
808 dsc_file_exists = False
809 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
810 if cnf.has_key("Dir::Queue::%s" % (myq)):
811 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
812 dsc_file_exists = True
815 if not dsc_file_exists:
816 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
818 # Check the version and for file overwrites
819 self.check_binary_against_db(f, session)
821 # Temporarily disable contents generation until we change the table storage layout
824 #if len(b.rejects) > 0:
825 # for j in b.rejects:
826 # self.rejects.append(j)
828 def source_file_checks(self, f, session):
829 entry = self.pkg.files[f]
831 m = re_issource.match(f)
835 entry["package"] = m.group(1)
836 entry["version"] = m.group(2)
837 entry["type"] = m.group(3)
839 # Ensure the source package name matches the Source filed in the .changes
840 if self.pkg.changes["source"] != entry["package"]:
841 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
843 # Ensure the source version matches the version in the .changes file
844 if re_is_orig_source.match(f):
845 changes_version = self.pkg.changes["chopversion2"]
847 changes_version = self.pkg.changes["chopversion"]
849 if changes_version != entry["version"]:
850 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
852 # Ensure the .changes lists source in the Architecture field
853 if not self.pkg.changes["architecture"].has_key("source"):
854 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
856 # Check the signature of a .dsc file
857 if entry["type"] == "dsc":
858 # check_signature returns either:
859 # (None, [list, of, rejects]) or (signature, [])
860 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
862 self.rejects.append(j)
864 entry["architecture"] = "source"
866 def per_suite_file_checks(self, f, suite, session):
868 entry = self.pkg.files[f]
871 if entry.has_key("byhand"):
874 # Check we have fields we need to do these checks
876 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
877 if not entry.has_key(m):
878 self.rejects.append("file '%s' does not have field %s set" % (f, m))
884 # Handle component mappings
885 for m in cnf.ValueList("ComponentMappings"):
886 (source, dest) = m.split()
887 if entry["component"] == source:
888 entry["original component"] = source
889 entry["component"] = dest
891 # Ensure the component is valid for the target suite
892 if cnf.has_key("Suite:%s::Components" % (suite)) and \
893 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
894 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
897 # Validate the component
898 if not get_component(entry["component"], session):
899 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
902 # See if the package is NEW
903 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
906 # Validate the priority
907 if entry["priority"].find('/') != -1:
908 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
910 # Determine the location
911 location = cnf["Dir::Pool"]
912 l = get_location(location, entry["component"], session=session)
914 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
915 entry["location id"] = -1
917 entry["location id"] = l.location_id
919 # Check the md5sum & size against existing files (if any)
920 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
922 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
923 entry["size"], entry["md5sum"], entry["location id"])
926 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
927 elif found is False and poolfile is not None:
928 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
931 entry["files id"] = None
933 entry["files id"] = poolfile.file_id
935 # Check for packages that have moved from one component to another
936 entry['suite'] = suite
937 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
939 entry["othercomponents"] = res.fetchone()[0]
941 def check_files(self, action=True):
942 file_keys = self.pkg.files.keys()
948 os.chdir(self.pkg.directory)
950 ret = holding.copy_to_holding(f)
952 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
956 # check we already know the changes file
957 # [NB: this check must be done post-suite mapping]
958 base_filename = os.path.basename(self.pkg.changes_file)
960 session = DBConn().session()
963 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
964 # if in the pool or in a queue other than unchecked, reject
965 if (dbc.in_queue is None) \
966 or (dbc.in_queue is not None
967 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
968 self.rejects.append("%s file already known to dak" % base_filename)
969 except NoResultFound, e:
976 for f, entry in self.pkg.files.items():
977 # Ensure the file does not already exist in one of the accepted directories
978 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
979 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
980 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
981 self.rejects.append("%s file already exists in the %s directory." % (f, d))
983 if not re_taint_free.match(f):
984 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
986 # Check the file is readable
987 if os.access(f, os.R_OK) == 0:
988 # When running in -n, copy_to_holding() won't have
989 # generated the reject_message, so we need to.
991 if os.path.exists(f):
992 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
994 # Don't directly reject, mark to check later to deal with orig's
995 # we can find in the pool
996 self.later_check_files.append(f)
997 entry["type"] = "unreadable"
1000 # If it's byhand skip remaining checks
1001 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1003 entry["type"] = "byhand"
1005 # Checks for a binary package...
1006 elif re_isadeb.match(f):
1008 entry["type"] = "deb"
1010 # This routine appends to self.rejects/warnings as appropriate
1011 self.binary_file_checks(f, session)
1013 # Checks for a source package...
1014 elif re_issource.match(f):
1017 # This routine appends to self.rejects/warnings as appropriate
1018 self.source_file_checks(f, session)
1020 # Not a binary or source package? Assume byhand...
1023 entry["type"] = "byhand"
1025 # Per-suite file checks
1026 entry["oldfiles"] = {}
1027 for suite in self.pkg.changes["distribution"].keys():
1028 self.per_suite_file_checks(f, suite, session)
1032 # If the .changes file says it has source, it must have source.
1033 if self.pkg.changes["architecture"].has_key("source"):
1035 self.rejects.append("no source found and Architecture line in changes mention source.")
1037 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1038 self.rejects.append("source only uploads are not supported.")
1040 ###########################################################################
1041 def check_dsc(self, action=True, session=None):
1042 """Returns bool indicating whether or not the source changes are valid"""
1043 # Ensure there is source to check
1044 if not self.pkg.changes["architecture"].has_key("source"):
1049 for f, entry in self.pkg.files.items():
1050 if entry["type"] == "dsc":
1052 self.rejects.append("can not process a .changes file with multiple .dsc's.")
1057 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1058 if not dsc_filename:
1059 self.rejects.append("source uploads must contain a dsc file")
1062 # Parse the .dsc file
1064 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1065 except CantOpenError:
1066 # if not -n copy_to_holding() will have done this for us...
1068 self.rejects.append("%s: can't read file." % (dsc_filename))
1069 except ParseChangesError, line:
1070 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1071 except InvalidDscError, line:
1072 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1073 except ChangesUnicodeError:
1074 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1076 # Build up the file list of files mentioned by the .dsc
1078 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1079 except NoFilesFieldError:
1080 self.rejects.append("%s: no Files: field." % (dsc_filename))
1082 except UnknownFormatError, format:
1083 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1085 except ParseChangesError, line:
1086 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1089 # Enforce mandatory fields
1090 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1091 if not self.pkg.dsc.has_key(i):
1092 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1095 # Validate the source and version fields
1096 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1097 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1098 if not re_valid_version.match(self.pkg.dsc["version"]):
1099 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1101 # Only a limited list of source formats are allowed in each suite
1102 for dist in self.pkg.changes["distribution"].keys():
1103 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1104 if self.pkg.dsc["format"] not in allowed:
1105 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1107 # Validate the Maintainer field
1109 # We ignore the return value
1110 fix_maintainer(self.pkg.dsc["maintainer"])
1111 except ParseMaintError, msg:
1112 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1113 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1115 # Validate the build-depends field(s)
1116 for field_name in [ "build-depends", "build-depends-indep" ]:
1117 field = self.pkg.dsc.get(field_name)
1119 # Have apt try to parse them...
1121 apt_pkg.ParseSrcDepends(field)
1123 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1125 # Ensure the version number in the .dsc matches the version number in the .changes
1126 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1127 changes_version = self.pkg.files[dsc_filename]["version"]
1129 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1130 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1132 # Ensure the Files field contain only what's expected
1133 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1135 # Ensure source is newer than existing source in target suites
1136 session = DBConn().session()
1137 self.check_source_against_db(dsc_filename, session)
1138 self.check_dsc_against_db(dsc_filename, session)
1140 dbchg = get_dbchange(self.pkg.changes_file, session)
1142 # Finally, check if we're missing any files
1143 for f in self.later_check_files:
1145 # Check if we've already processed this file if we have a dbchg object
1148 for pf in dbchg.files:
1149 if pf.filename == f and pf.processed:
1150 self.notes.append('%s was already processed so we can go ahead' % f)
1152 del self.pkg.files[f]
1154 self.rejects.append("Could not find file %s references in changes" % f)
1160 ###########################################################################
1162 def get_changelog_versions(self, source_dir):
1163 """Extracts a the source package and (optionally) grabs the
1164 version history out of debian/changelog for the BTS."""
1168 # Find the .dsc (again)
1170 for f in self.pkg.files.keys():
1171 if self.pkg.files[f]["type"] == "dsc":
1174 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1175 if not dsc_filename:
1178 # Create a symlink mirror of the source files in our temporary directory
1179 for f in self.pkg.files.keys():
1180 m = re_issource.match(f)
1182 src = os.path.join(source_dir, f)
1183 # If a file is missing for whatever reason, give up.
1184 if not os.path.exists(src):
1187 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1188 self.pkg.orig_files[f].has_key("path"):
1190 dest = os.path.join(os.getcwd(), f)
1191 os.symlink(src, dest)
1193 # If the orig files are not a part of the upload, create symlinks to the
1195 for orig_file in self.pkg.orig_files.keys():
1196 if not self.pkg.orig_files[orig_file].has_key("path"):
1198 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1199 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1201 # Extract the source
1202 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1203 (result, output) = commands.getstatusoutput(cmd)
1205 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1206 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1209 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1212 # Get the upstream version
1213 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1214 if re_strip_revision.search(upstr_version):
1215 upstr_version = re_strip_revision.sub('', upstr_version)
1217 # Ensure the changelog file exists
1218 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1219 if not os.path.exists(changelog_filename):
1220 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1223 # Parse the changelog
1224 self.pkg.dsc["bts changelog"] = ""
1225 changelog_file = utils.open_file(changelog_filename)
1226 for line in changelog_file.readlines():
1227 m = re_changelog_versions.match(line)
1229 self.pkg.dsc["bts changelog"] += line
1230 changelog_file.close()
1232 # Check we found at least one revision in the changelog
1233 if not self.pkg.dsc["bts changelog"]:
1234 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1236 def check_source(self):
1238 # a) there's no source
1239 if not self.pkg.changes["architecture"].has_key("source"):
1242 tmpdir = utils.temp_dirname()
1244 # Move into the temporary directory
1248 # Get the changelog version history
1249 self.get_changelog_versions(cwd)
1251 # Move back and cleanup the temporary tree
1255 shutil.rmtree(tmpdir)
1257 if e.errno != errno.EACCES:
1259 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1261 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1262 # We probably have u-r or u-w directories so chmod everything
1264 cmd = "chmod -R u+rwx %s" % (tmpdir)
1265 result = os.system(cmd)
1267 utils.fubar("'%s' failed with result %s." % (cmd, result))
1268 shutil.rmtree(tmpdir)
1269 except Exception, e:
1270 print "foobar2 (%s)" % e
1271 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1273 ###########################################################################
1274 def ensure_hashes(self):
1275 # Make sure we recognise the format of the Files: field in the .changes
1276 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1277 if len(format) == 2:
1278 format = int(format[0]), int(format[1])
1280 format = int(float(format[0])), 0
1282 # We need to deal with the original changes blob, as the fields we need
1283 # might not be in the changes dict serialised into the .dak anymore.
1284 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1286 # Copy the checksums over to the current changes dict. This will keep
1287 # the existing modifications to it intact.
1288 for field in orig_changes:
1289 if field.startswith('checksums-'):
1290 self.pkg.changes[field] = orig_changes[field]
1292 # Check for unsupported hashes
1293 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1294 self.rejects.append(j)
1296 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1297 self.rejects.append(j)
1299 # We have to calculate the hash if we have an earlier changes version than
1300 # the hash appears in rather than require it exist in the changes file
1301 for hashname, hashfunc, version in utils.known_hashes:
1302 # TODO: Move _ensure_changes_hash into this class
1303 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1304 self.rejects.append(j)
1305 if "source" in self.pkg.changes["architecture"]:
1306 # TODO: Move _ensure_dsc_hash into this class
1307 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1308 self.rejects.append(j)
1310 def check_hashes(self):
1311 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1312 self.rejects.append(m)
1314 for m in utils.check_size(".changes", self.pkg.files):
1315 self.rejects.append(m)
1317 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1318 self.rejects.append(m)
1320 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1321 self.rejects.append(m)
1323 self.ensure_hashes()
1325 ###########################################################################
1327 def ensure_orig(self, target_dir='.', session=None):
1329 Ensures that all orig files mentioned in the changes file are present
1330 in target_dir. If they do not exist, they are symlinked into place.
1332 An list containing the symlinks that were created are returned (so they
1339 for filename, entry in self.pkg.dsc_files.iteritems():
1340 if not re_is_orig_source.match(filename):
1341 # File is not an orig; ignore
1344 if os.path.exists(filename):
1345 # File exists, no need to continue
1348 def symlink_if_valid(path):
1349 f = utils.open_file(path)
1350 md5sum = apt_pkg.md5sum(f)
1353 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1354 expected = (int(entry['size']), entry['md5sum'])
1356 if fingerprint != expected:
1359 dest = os.path.join(target_dir, filename)
1361 os.symlink(path, dest)
1362 symlinked.append(dest)
1368 session_ = DBConn().session()
1373 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1374 poolfile_path = os.path.join(
1375 poolfile.location.path, poolfile.filename
1378 if symlink_if_valid(poolfile_path):
1388 # Look in some other queues for the file
1389 queues = ('New', 'Byhand', 'ProposedUpdates',
1390 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1392 for queue in queues:
1393 if not cnf.get('Dir::Queue::%s' % queue):
1396 queuefile_path = os.path.join(
1397 cnf['Dir::Queue::%s' % queue], filename
1400 if not os.path.exists(queuefile_path):
1401 # Does not exist in this queue
1404 if symlink_if_valid(queuefile_path):
1409 ###########################################################################
1411 def check_lintian(self):
1413 Extends self.rejects by checking the output of lintian against tags
1414 specified in Dinstall::LintianTags.
1419 # Don't reject binary uploads
1420 if not self.pkg.changes['architecture'].has_key('source'):
1423 # Only check some distributions
1424 for dist in ('unstable', 'experimental'):
1425 if dist in self.pkg.changes['distribution']:
1430 # If we do not have a tagfile, don't do anything
1431 tagfile = cnf.get("Dinstall::LintianTags")
1435 # Parse the yaml file
1436 sourcefile = file(tagfile, 'r')
1437 sourcecontent = sourcefile.read()
1441 lintiantags = yaml.load(sourcecontent)['lintian']
1442 except yaml.YAMLError, msg:
1443 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1446 # Try and find all orig mentioned in the .dsc
1447 symlinked = self.ensure_orig()
1449 # Setup the input file for lintian
1450 fd, temp_filename = utils.temp_filename()
1451 temptagfile = os.fdopen(fd, 'w')
1452 for tags in lintiantags.values():
1453 temptagfile.writelines(['%s\n' % x for x in tags])
1457 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1458 (temp_filename, self.pkg.changes_file)
1460 result, output = commands.getstatusoutput(cmd)
1462 # Remove our tempfile and any symlinks we created
1463 os.unlink(temp_filename)
1465 for symlink in symlinked:
1469 utils.warn("lintian failed for %s [return code: %s]." % \
1470 (self.pkg.changes_file, result))
1471 utils.warn(utils.prefix_multi_line_string(output, \
1472 " [possible output:] "))
1477 [self.pkg.changes_file, "check_lintian"] + list(txt)
1481 parsed_tags = parse_lintian_output(output)
1482 self.rejects.extend(
1483 generate_reject_messages(parsed_tags, lintiantags, log=log)
1486 ###########################################################################
1487 def check_urgency(self):
1489 if self.pkg.changes["architecture"].has_key("source"):
1490 if not self.pkg.changes.has_key("urgency"):
1491 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1492 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1493 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1494 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1495 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1496 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1498 ###########################################################################
1500 # Sanity check the time stamps of files inside debs.
1501 # [Files in the near future cause ugly warnings and extreme time
1502 # travel can cause errors on extraction]
1504 def check_timestamps(self):
1507 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1508 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1509 tar = TarTime(future_cutoff, past_cutoff)
1511 for filename, entry in self.pkg.files.items():
1512 if entry["type"] == "deb":
1515 deb_file = utils.open_file(filename)
1516 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1519 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1520 except SystemError, e:
1521 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1522 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1525 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1529 future_files = tar.future_files.keys()
1531 num_future_files = len(future_files)
1532 future_file = future_files[0]
1533 future_date = tar.future_files[future_file]
1534 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1535 % (filename, num_future_files, future_file, time.ctime(future_date)))
1537 ancient_files = tar.ancient_files.keys()
1539 num_ancient_files = len(ancient_files)
1540 ancient_file = ancient_files[0]
1541 ancient_date = tar.ancient_files[ancient_file]
1542 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1543 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1545 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1547 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1548 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1550 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1556 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1557 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1558 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1559 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1560 self.pkg.changes["sponsoremail"] = uid_email
1565 ###########################################################################
1566 # check_signed_by_key checks
1567 ###########################################################################
1569 def check_signed_by_key(self):
1570 """Ensure the .changes is signed by an authorized uploader."""
1571 session = DBConn().session()
1573 # First of all we check that the person has proper upload permissions
1574 # and that this upload isn't blocked
1575 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1578 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1581 # TODO: Check that import-keyring adds UIDs properly
1583 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1586 # Check that the fingerprint which uploaded has permission to do so
1587 self.check_upload_permissions(fpr, session)
1589 # Check that this package is not in a transition
1590 self.check_transition(session)
1595 def check_upload_permissions(self, fpr, session):
1596 # Check any one-off upload blocks
1597 self.check_upload_blocks(fpr, session)
1599 # Start with DM as a special case
1600 # DM is a special case unfortunately, so we check it first
1601 # (keys with no source access get more access than DMs in one
1602 # way; DMs can only upload for their packages whether source
1603 # or binary, whereas keys with no access might be able to
1604 # upload some binaries)
1605 if fpr.source_acl.access_level == 'dm':
1606 self.check_dm_upload(fpr, session)
1608 # Check source-based permissions for other types
1609 if self.pkg.changes["architecture"].has_key("source") and \
1610 fpr.source_acl.access_level is None:
1611 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1612 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1613 self.rejects.append(rej)
1615 # If not a DM, we allow full upload rights
1616 uid_email = "%s@debian.org" % (fpr.uid.uid)
1617 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1620 # Check binary upload permissions
1621 # By this point we know that DMs can't have got here unless they
1622 # are allowed to deal with the package concerned so just apply
1624 if fpr.binary_acl.access_level == 'full':
1627 # Otherwise we're in the map case
1628 tmparches = self.pkg.changes["architecture"].copy()
1629 tmparches.pop('source', None)
1631 for bam in fpr.binary_acl_map:
1632 tmparches.pop(bam.architecture.arch_string, None)
1634 if len(tmparches.keys()) > 0:
1635 if fpr.binary_reject:
1636 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1637 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1638 self.rejects.append(rej)
1640 # TODO: This is where we'll implement reject vs throw away binaries later
1641 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1642 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1643 rej += "\nFingerprint: %s", (fpr.fingerprint)
1644 self.rejects.append(rej)
1647 def check_upload_blocks(self, fpr, session):
1648 """Check whether any upload blocks apply to this source, source
1649 version, uid / fpr combination"""
1651 def block_rej_template(fb):
1652 rej = 'Manual upload block in place for package %s' % fb.source
1653 if fb.version is not None:
1654 rej += ', version %s' % fb.version
1657 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1658 # version is None if the block applies to all versions
1659 if fb.version is None or fb.version == self.pkg.changes['version']:
1660 # Check both fpr and uid - either is enough to cause a reject
1661 if fb.fpr is not None:
1662 if fb.fpr.fingerprint == fpr.fingerprint:
1663 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1664 if fb.uid is not None:
1665 if fb.uid == fpr.uid:
1666 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1669 def check_dm_upload(self, fpr, session):
1670 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1671 ## none of the uploaded packages are NEW
1673 for f in self.pkg.files.keys():
1674 if self.pkg.files[f].has_key("byhand"):
1675 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1677 if self.pkg.files[f].has_key("new"):
1678 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1684 ## the most recent version of the package uploaded to unstable or
1685 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1686 ## section of its control file
1687 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1688 q = q.join(SrcAssociation)
1689 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1690 q = q.order_by(desc('source.version')).limit(1)
1695 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1696 self.rejects.append(rej)
1700 if not r.dm_upload_allowed:
1701 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1702 self.rejects.append(rej)
1705 ## the Maintainer: field of the uploaded .changes file corresponds with
1706 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1708 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1709 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1711 ## the most recent version of the package uploaded to unstable or
1712 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1713 ## non-developer maintainers cannot NMU or hijack packages)
1715 # srcuploaders includes the maintainer
1717 for sup in r.srcuploaders:
1718 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1719 # Eww - I hope we never have two people with the same name in Debian
1720 if email == fpr.uid.uid or name == fpr.uid.name:
1725 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1728 ## none of the packages are being taken over from other source packages
1729 for b in self.pkg.changes["binary"].keys():
1730 for suite in self.pkg.changes["distribution"].keys():
1731 q = session.query(DBSource)
1732 q = q.join(DBBinary).filter_by(package=b)
1733 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1736 if s.source != self.pkg.changes["source"]:
1737 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1741 def check_transition(self, session):
1744 sourcepkg = self.pkg.changes["source"]
1746 # No sourceful upload -> no need to do anything else, direct return
1747 # We also work with unstable uploads, not experimental or those going to some
1748 # proposed-updates queue
1749 if "source" not in self.pkg.changes["architecture"] or \
1750 "unstable" not in self.pkg.changes["distribution"]:
1753 # Also only check if there is a file defined (and existant) with
1755 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1756 if transpath == "" or not os.path.exists(transpath):
1759 # Parse the yaml file
1760 sourcefile = file(transpath, 'r')
1761 sourcecontent = sourcefile.read()
1763 transitions = yaml.load(sourcecontent)
1764 except yaml.YAMLError, msg:
1765 # This shouldn't happen, there is a wrapper to edit the file which
1766 # checks it, but we prefer to be safe than ending up rejecting
1768 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1771 # Now look through all defined transitions
1772 for trans in transitions:
1773 t = transitions[trans]
1774 source = t["source"]
1777 # Will be None if nothing is in testing.
1778 current = get_source_in_suite(source, "testing", session)
1779 if current is not None:
1780 compare = apt_pkg.VersionCompare(current.version, expected)
1782 if current is None or compare < 0:
1783 # This is still valid, the current version in testing is older than
1784 # the new version we wait for, or there is none in testing yet
1786 # Check if the source we look at is affected by this.
1787 if sourcepkg in t['packages']:
1788 # The source is affected, lets reject it.
1790 rejectmsg = "%s: part of the %s transition.\n\n" % (
1793 if current is not None:
1794 currentlymsg = "at version %s" % (current.version)
1796 currentlymsg = "not present in testing"
1798 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1800 rejectmsg += "\n".join(textwrap.wrap("""Your package
1801 is part of a testing transition designed to get %s migrated (it is
1802 currently %s, we need version %s). This transition is managed by the
1803 Release Team, and %s is the Release-Team member responsible for it.
1804 Please mail debian-release@lists.debian.org or contact %s directly if you
1805 need further assistance. You might want to upload to experimental until this
1806 transition is done."""
1807 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1809 self.rejects.append(rejectmsg)
1812 ###########################################################################
1813 # End check_signed_by_key checks
1814 ###########################################################################
1816 def build_summaries(self):
1817 """ Build a summary of changes the upload introduces. """
1819 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1821 short_summary = summary
1823 # This is for direport's benefit...
1824 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1827 summary += "Changes: " + f
1829 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1831 summary += self.announce(short_summary, 0)
1833 return (summary, short_summary)
1835 ###########################################################################
1837 def close_bugs(self, summary, action):
1839 Send mail to close bugs as instructed by the closes field in the changes file.
1840 Also add a line to summary if any work was done.
1842 @type summary: string
1843 @param summary: summary text, as given by L{build_summaries}
1846 @param action: Set to false no real action will be done.
1849 @return: summary. If action was taken, extended by the list of closed bugs.
1853 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1855 bugs = self.pkg.changes["closes"].keys()
1861 summary += "Closing bugs: "
1863 summary += "%s " % (bug)
1866 self.Subst["__BUG_NUMBER__"] = bug
1867 if self.pkg.changes["distribution"].has_key("stable"):
1868 self.Subst["__STABLE_WARNING__"] = """
1869 Note that this package is not part of the released stable Debian
1870 distribution. It may have dependencies on other unreleased software,
1871 or other instabilities. Please take care if you wish to install it.
1872 The update will eventually make its way into the next released Debian
1875 self.Subst["__STABLE_WARNING__"] = ""
1876 mail_message = utils.TemplateSubst(self.Subst, template)
1877 utils.send_mail(mail_message)
1879 # Clear up after ourselves
1880 del self.Subst["__BUG_NUMBER__"]
1881 del self.Subst["__STABLE_WARNING__"]
1883 if action and self.logger:
1884 self.logger.log(["closing bugs"] + bugs)
1890 ###########################################################################
1892 def announce(self, short_summary, action):
1894 Send an announce mail about a new upload.
1896 @type short_summary: string
1897 @param short_summary: Short summary text to include in the mail
1900 @param action: Set to false no real action will be done.
1903 @return: Textstring about action taken.
1908 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1910 # Only do announcements for source uploads with a recent dpkg-dev installed
1911 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1912 self.pkg.changes["architecture"].has_key("source"):
1918 self.Subst["__SHORT_SUMMARY__"] = short_summary
1920 for dist in self.pkg.changes["distribution"].keys():
1921 suite = get_suite(dist)
1922 if suite is None: continue
1923 announce_list = suite.announce
1924 if announce_list == "" or lists_done.has_key(announce_list):
1927 lists_done[announce_list] = 1
1928 summary += "Announcing to %s\n" % (announce_list)
1932 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1933 if cnf.get("Dinstall::TrackingServer") and \
1934 self.pkg.changes["architecture"].has_key("source"):
1935 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1936 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1938 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1939 utils.send_mail(mail_message)
1941 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1943 if cnf.FindB("Dinstall::CloseBugs"):
1944 summary = self.close_bugs(summary, action)
1946 del self.Subst["__SHORT_SUMMARY__"]
1950 ###########################################################################
1952 def accept (self, summary, short_summary, session=None):
1956 This moves all files referenced from the .changes into the pool,
1957 sends the accepted mail, announces to lists, closes bugs and
1958 also checks for override disparities. If enabled it will write out
1959 the version history for the BTS Version Tracking and will finally call
1962 @type summary: string
1963 @param summary: Summary text
1965 @type short_summary: string
1966 @param short_summary: Short summary
1970 stats = SummaryStats()
1973 self.logger.log(["installing changes", self.pkg.changes_file])
1977 # Add the .dsc file to the DB first
1978 for newfile, entry in self.pkg.files.items():
1979 if entry["type"] == "dsc":
1980 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1984 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1985 for newfile, entry in self.pkg.files.items():
1986 if entry["type"] == "deb":
1987 poolfiles.append(add_deb_to_db(self, newfile, session))
1989 # If this is a sourceful diff only upload that is moving
1990 # cross-component we need to copy the .orig files into the new
1991 # component too for the same reasons as above.
1992 # XXX: mhy: I think this should be in add_dsc_to_db
1993 if self.pkg.changes["architecture"].has_key("source"):
1994 for orig_file in self.pkg.orig_files.keys():
1995 if not self.pkg.orig_files[orig_file].has_key("id"):
1996 continue # Skip if it's not in the pool
1997 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1998 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1999 continue # Skip if the location didn't change
2002 oldf = get_poolfile_by_id(orig_file_id, session)
2003 old_filename = os.path.join(oldf.location.path, oldf.filename)
2004 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2005 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2007 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2009 # TODO: Care about size/md5sum collisions etc
2010 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2012 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2014 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2015 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2019 # Don't reference the old file from this changes
2021 if p.file_id == oldf.file_id:
2024 poolfiles.append(newf)
2026 # Fix up the DSC references
2029 for df in source.srcfiles:
2030 if df.poolfile.file_id == oldf.file_id:
2031 # Add a new DSC entry and mark the old one for deletion
2032 # Don't do it in the loop so we don't change the thing we're iterating over
2034 newdscf.source_id = source.source_id
2035 newdscf.poolfile_id = newf.file_id
2036 session.add(newdscf)
2046 # Make sure that our source object is up-to-date
2047 session.expire(source)
2049 # Add changelog information to the database
2050 self.store_changelog()
2052 # Install the files into the pool
2053 for newfile, entry in self.pkg.files.items():
2054 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2055 utils.move(newfile, destination)
2056 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2057 stats.accept_bytes += float(entry["size"])
2059 # Copy the .changes file across for suite which need it.
2060 copy_changes = dict([(x.copychanges, '')
2061 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2062 if x.copychanges is not None])
2064 for dest in copy_changes.keys():
2065 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2067 # We're done - commit the database changes
2069 # Our SQL session will automatically start a new transaction after
2072 # Move the .changes into the 'done' directory
2073 utils.move(self.pkg.changes_file,
2074 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2076 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2077 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2080 self.Subst["__SUMMARY__"] = summary
2081 mail_message = utils.TemplateSubst(self.Subst,
2082 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2083 utils.send_mail(mail_message)
2084 self.announce(short_summary, 1)
2086 ## Helper stuff for DebBugs Version Tracking
2087 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2088 if self.pkg.changes["architecture"].has_key("source"):
2089 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2090 version_history = os.fdopen(fd, 'w')
2091 version_history.write(self.pkg.dsc["bts changelog"])
2092 version_history.close()
2093 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2094 self.pkg.changes_file[:-8]+".versions")
2095 os.rename(temp_filename, filename)
2096 os.chmod(filename, 0644)
2098 # Write out the binary -> source mapping.
2099 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2100 debinfo = os.fdopen(fd, 'w')
2101 for name, entry in sorted(self.pkg.files.items()):
2102 if entry["type"] == "deb":
2103 line = " ".join([entry["package"], entry["version"],
2104 entry["architecture"], entry["source package"],
2105 entry["source version"]])
2106 debinfo.write(line+"\n")
2108 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2109 self.pkg.changes_file[:-8]+".debinfo")
2110 os.rename(temp_filename, filename)
2111 os.chmod(filename, 0644)
2115 # Set up our copy queues (e.g. buildd queues)
2116 for suite_name in self.pkg.changes["distribution"].keys():
2117 suite = get_suite(suite_name, session)
2118 for q in suite.copy_queues:
2120 q.add_file_from_pool(f)
2125 stats.accept_count += 1
2127 def check_override(self):
2129 Checks override entries for validity. Mails "Override disparity" warnings,
2130 if that feature is enabled.
2132 Abandons the check if
2133 - override disparity checks are disabled
2134 - mail sending is disabled
2139 # Abandon the check if override disparity checks have been disabled
2140 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2143 summary = self.pkg.check_override()
2148 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2151 self.Subst["__SUMMARY__"] = summary
2152 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2153 utils.send_mail(mail_message)
2154 del self.Subst["__SUMMARY__"]
2156 ###########################################################################
2158 def remove(self, from_dir=None):
2160 Used (for instance) in p-u to remove the package from unchecked
2162 Also removes the package from holding area.
2164 if from_dir is None:
2165 from_dir = self.pkg.directory
2168 for f in self.pkg.files.keys():
2169 os.unlink(os.path.join(from_dir, f))
2170 if os.path.exists(os.path.join(h.holding_dir, f)):
2171 os.unlink(os.path.join(h.holding_dir, f))
2173 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2174 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2175 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2177 ###########################################################################
2179 def move_to_queue (self, queue):
2181 Move files to a destination queue using the permissions in the table
2184 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2185 queue.path, perms=int(queue.change_perms, 8))
2186 for f in self.pkg.files.keys():
2187 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2189 ###########################################################################
2191 def force_reject(self, reject_files):
2193 Forcefully move files from the current directory to the
2194 reject directory. If any file already exists in the reject
2195 directory it will be moved to the morgue to make way for
2198 @type reject_files: dict
2199 @param reject_files: file dictionary
2205 for file_entry in reject_files:
2206 # Skip any files which don't exist or which we don't have permission to copy.
2207 if os.access(file_entry, os.R_OK) == 0:
2210 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2213 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2215 # File exists? Let's find a new name by adding a number
2216 if e.errno == errno.EEXIST:
2218 dest_file = utils.find_next_free(dest_file, 255)
2219 except NoFreeFilenameError:
2220 # Something's either gone badly Pete Tong, or
2221 # someone is trying to exploit us.
2222 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2225 # Make sure we really got it
2227 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2230 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2234 # If we got here, we own the destination file, so we can
2235 # safely overwrite it.
2236 utils.move(file_entry, dest_file, 1, perms=0660)
2239 ###########################################################################
2240 def do_reject (self, manual=0, reject_message="", notes=""):
2242 Reject an upload. If called without a reject message or C{manual} is
2243 true, spawn an editor so the user can write one.
2246 @param manual: manual or automated rejection
2248 @type reject_message: string
2249 @param reject_message: A reject message
2254 # If we weren't given a manual rejection message, spawn an
2255 # editor so the user can add one in...
2256 if manual and not reject_message:
2257 (fd, temp_filename) = utils.temp_filename()
2258 temp_file = os.fdopen(fd, 'w')
2261 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2262 % (note.author, note.version, note.notedate, note.comment))
2264 editor = os.environ.get("EDITOR","vi")
2266 while answer == 'E':
2267 os.system("%s %s" % (editor, temp_filename))
2268 temp_fh = utils.open_file(temp_filename)
2269 reject_message = "".join(temp_fh.readlines())
2271 print "Reject message:"
2272 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2273 prompt = "[R]eject, Edit, Abandon, Quit ?"
2275 while prompt.find(answer) == -1:
2276 answer = utils.our_raw_input(prompt)
2277 m = re_default_answer.search(prompt)
2280 answer = answer[:1].upper()
2281 os.unlink(temp_filename)
2287 print "Rejecting.\n"
2291 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2292 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2294 # Move all the files into the reject directory
2295 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2296 self.force_reject(reject_files)
2298 # If we fail here someone is probably trying to exploit the race
2299 # so let's just raise an exception ...
2300 if os.path.exists(reason_filename):
2301 os.unlink(reason_filename)
2302 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2304 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2308 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2309 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2310 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2311 os.write(reason_fd, reject_message)
2312 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2314 # Build up the rejection email
2315 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2316 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2317 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2318 self.Subst["__REJECT_MESSAGE__"] = ""
2319 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2320 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2321 # Write the rejection email out as the <foo>.reason file
2322 os.write(reason_fd, reject_mail_message)
2324 del self.Subst["__REJECTOR_ADDRESS__"]
2325 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2326 del self.Subst["__CC__"]
2330 # Send the rejection mail
2331 utils.send_mail(reject_mail_message)
2334 self.logger.log(["rejected", self.pkg.changes_file])
2338 ################################################################################
2339 def in_override_p(self, package, component, suite, binary_type, filename, session):
2341 Check if a package already has override entries in the DB
2343 @type package: string
2344 @param package: package name
2346 @type component: string
2347 @param component: database id of the component
2350 @param suite: database id of the suite
2352 @type binary_type: string
2353 @param binary_type: type of the package
2355 @type filename: string
2356 @param filename: filename we check
2358 @return: the database result. But noone cares anyway.
2364 if binary_type == "": # must be source
2367 file_type = binary_type
2369 # Override suite name; used for example with proposed-updates
2370 oldsuite = get_suite(suite, session)
2371 if (not oldsuite is None) and oldsuite.overridesuite:
2372 suite = oldsuite.overridesuite
2374 result = get_override(package, suite, component, file_type, session)
2376 # If checking for a source package fall back on the binary override type
2377 if file_type == "dsc" and len(result) < 1:
2378 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2380 # Remember the section and priority so we can check them later if appropriate
2383 self.pkg.files[filename]["override section"] = result.section.section
2384 self.pkg.files[filename]["override priority"] = result.priority.priority
2389 ################################################################################
2390 def get_anyversion(self, sv_list, suite):
2393 @param sv_list: list of (suite, version) tuples to check
2396 @param suite: suite name
2402 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2403 for (s, v) in sv_list:
2404 if s in [ x.lower() for x in anysuite ]:
2405 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2410 ################################################################################
2412 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2415 @param sv_list: list of (suite, version) tuples to check
2417 @type filename: string
2418 @param filename: XXX
2420 @type new_version: string
2421 @param new_version: XXX
2423 Ensure versions are newer than existing packages in target
2424 suites and that cross-suite version checking rules as
2425 set out in the conf file are satisfied.
2430 # Check versions for each target suite
2431 for target_suite in self.pkg.changes["distribution"].keys():
2432 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2433 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2435 # Enforce "must be newer than target suite" even if conffile omits it
2436 if target_suite not in must_be_newer_than:
2437 must_be_newer_than.append(target_suite)
2439 for (suite, existent_version) in sv_list:
2440 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2442 if suite in must_be_newer_than and sourceful and vercmp < 1:
2443 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2445 if suite in must_be_older_than and vercmp > -1:
2448 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2449 # we really use the other suite, ignoring the conflicting one ...
2450 addsuite = self.pkg.changes["distribution-version"][suite]
2452 add_version = self.get_anyversion(sv_list, addsuite)
2453 target_version = self.get_anyversion(sv_list, target_suite)
2456 # not add_version can only happen if we map to a suite
2457 # that doesn't enhance the suite we're propup'ing from.
2458 # so "propup-ver x a b c; map a d" is a problem only if
2459 # d doesn't enhance a.
2461 # i think we could always propagate in this case, rather
2462 # than complaining. either way, this isn't a REJECT issue
2464 # And - we really should complain to the dorks who configured dak
2465 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2466 self.pkg.changes.setdefault("propdistribution", {})
2467 self.pkg.changes["propdistribution"][addsuite] = 1
2469 elif not target_version:
2470 # not targets_version is true when the package is NEW
2471 # we could just stick with the "...old version..." REJECT
2472 # for this, I think.
2473 self.rejects.append("Won't propogate NEW packages.")
2474 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2475 # propogation would be redundant. no need to reject though.
2476 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2478 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2479 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2481 self.warnings.append("Propogating upload to %s" % (addsuite))
2482 self.pkg.changes.setdefault("propdistribution", {})
2483 self.pkg.changes["propdistribution"][addsuite] = 1
2487 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2489 ################################################################################
2490 def check_binary_against_db(self, filename, session):
2491 # Ensure version is sane
2492 q = session.query(BinAssociation)
2493 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2494 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2496 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2497 filename, self.pkg.files[filename]["version"], sourceful=False)
2499 # Check for any existing copies of the file
2500 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2501 q = q.filter_by(version=self.pkg.files[filename]["version"])
2502 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2505 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2507 ################################################################################
2509 def check_source_against_db(self, filename, session):
2510 source = self.pkg.dsc.get("source")
2511 version = self.pkg.dsc.get("version")
2513 # Ensure version is sane
2514 q = session.query(SrcAssociation)
2515 q = q.join(DBSource).filter(DBSource.source==source)
2517 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2518 filename, version, sourceful=True)
2520 ################################################################################
2521 def check_dsc_against_db(self, filename, session):
2524 @warning: NB: this function can remove entries from the 'files' index [if
2525 the orig tarball is a duplicate of the one in the archive]; if
2526 you're iterating over 'files' and call this function as part of
2527 the loop, be sure to add a check to the top of the loop to
2528 ensure you haven't just tried to dereference the deleted entry.
2533 self.pkg.orig_files = {} # XXX: do we need to clear it?
2534 orig_files = self.pkg.orig_files
2536 # Try and find all files mentioned in the .dsc. This has
2537 # to work harder to cope with the multiple possible
2538 # locations of an .orig.tar.gz.
2539 # The ordering on the select is needed to pick the newest orig
2540 # when it exists in multiple places.
2541 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2543 if self.pkg.files.has_key(dsc_name):
2544 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2545 actual_size = int(self.pkg.files[dsc_name]["size"])
2546 found = "%s in incoming" % (dsc_name)
2548 # Check the file does not already exist in the archive
2549 ql = get_poolfile_like_name(dsc_name, session)
2551 # Strip out anything that isn't '%s' or '/%s$'
2553 if not i.filename.endswith(dsc_name):
2556 # "[dak] has not broken them. [dak] has fixed a
2557 # brokenness. Your crappy hack exploited a bug in
2560 # "(Come on! I thought it was always obvious that
2561 # one just doesn't release different files with
2562 # the same name and version.)"
2563 # -- ajk@ on d-devel@l.d.o
2566 # Ignore exact matches for .orig.tar.gz
2568 if re_is_orig_source.match(dsc_name):
2570 if self.pkg.files.has_key(dsc_name) and \
2571 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2572 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2573 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2574 # TODO: Don't delete the entry, just mark it as not needed
2575 # This would fix the stupidity of changing something we often iterate over
2576 # whilst we're doing it
2577 del self.pkg.files[dsc_name]
2578 dsc_entry["files id"] = i.file_id
2579 if not orig_files.has_key(dsc_name):
2580 orig_files[dsc_name] = {}
2581 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2584 # Don't bitch that we couldn't find this file later
2586 self.later_check_files.remove(dsc_name)
2592 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2594 elif re_is_orig_source.match(dsc_name):
2596 ql = get_poolfile_like_name(dsc_name, session)
2598 # Strip out anything that isn't '%s' or '/%s$'
2599 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2601 if not i.filename.endswith(dsc_name):
2605 # Unfortunately, we may get more than one match here if,
2606 # for example, the package was in potato but had an -sa
2607 # upload in woody. So we need to choose the right one.
2609 # default to something sane in case we don't match any or have only one
2614 old_file = os.path.join(i.location.path, i.filename)
2615 old_file_fh = utils.open_file(old_file)
2616 actual_md5 = apt_pkg.md5sum(old_file_fh)
2618 actual_size = os.stat(old_file)[stat.ST_SIZE]
2619 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2622 old_file = os.path.join(i.location.path, i.filename)
2623 old_file_fh = utils.open_file(old_file)
2624 actual_md5 = apt_pkg.md5sum(old_file_fh)
2626 actual_size = os.stat(old_file)[stat.ST_SIZE]
2628 suite_type = x.location.archive_type
2629 # need this for updating dsc_files in install()
2630 dsc_entry["files id"] = x.file_id
2631 # See install() in process-accepted...
2632 if not orig_files.has_key(dsc_name):
2633 orig_files[dsc_name] = {}
2634 orig_files[dsc_name]["id"] = x.file_id
2635 orig_files[dsc_name]["path"] = old_file
2636 orig_files[dsc_name]["location"] = x.location.location_id
2638 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2639 # Not there? Check the queue directories...
2640 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2641 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2643 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2644 if os.path.exists(in_otherdir):
2645 in_otherdir_fh = utils.open_file(in_otherdir)
2646 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2647 in_otherdir_fh.close()
2648 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2650 if not orig_files.has_key(dsc_name):
2651 orig_files[dsc_name] = {}
2652 orig_files[dsc_name]["path"] = in_otherdir
2655 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2658 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2660 if actual_md5 != dsc_entry["md5sum"]:
2661 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2662 if actual_size != int(dsc_entry["size"]):
2663 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2665 ################################################################################
2666 # This is used by process-new and process-holding to recheck a changes file
2667 # at the time we're running. It mainly wraps various other internal functions
2668 # and is similar to accepted_checks - these should probably be tidied up
2670 def recheck(self, session):
2672 for f in self.pkg.files.keys():
2673 # The .orig.tar.gz can disappear out from under us is it's a
2674 # duplicate of one in the archive.
2675 if not self.pkg.files.has_key(f):
2678 entry = self.pkg.files[f]
2680 # Check that the source still exists
2681 if entry["type"] == "deb":
2682 source_version = entry["source version"]
2683 source_package = entry["source package"]
2684 if not self.pkg.changes["architecture"].has_key("source") \
2685 and not source_exists(source_package, source_version, \
2686 suites = self.pkg.changes["distribution"].keys(), session = session):
2687 source_epochless_version = re_no_epoch.sub('', source_version)
2688 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2690 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2691 if cnf.has_key("Dir::Queue::%s" % (q)):
2692 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2695 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2697 # Version and file overwrite checks
2698 if entry["type"] == "deb":
2699 self.check_binary_against_db(f, session)
2700 elif entry["type"] == "dsc":
2701 self.check_source_against_db(f, session)
2702 self.check_dsc_against_db(f, session)
2704 ################################################################################
2705 def accepted_checks(self, overwrite_checks, session):
2706 # Recheck anything that relies on the database; since that's not
2707 # frozen between accept and our run time when called from p-a.
2709 # overwrite_checks is set to False when installing to stable/oldstable
2714 # Find the .dsc (again)
2716 for f in self.pkg.files.keys():
2717 if self.pkg.files[f]["type"] == "dsc":
2720 for checkfile in self.pkg.files.keys():
2721 # The .orig.tar.gz can disappear out from under us is it's a
2722 # duplicate of one in the archive.
2723 if not self.pkg.files.has_key(checkfile):
2726 entry = self.pkg.files[checkfile]
2728 # Check that the source still exists
2729 if entry["type"] == "deb":
2730 source_version = entry["source version"]
2731 source_package = entry["source package"]
2732 if not self.pkg.changes["architecture"].has_key("source") \
2733 and not source_exists(source_package, source_version, \
2734 suites = self.pkg.changes["distribution"].keys(), \
2736 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2738 # Version and file overwrite checks
2739 if overwrite_checks:
2740 if entry["type"] == "deb":
2741 self.check_binary_against_db(checkfile, session)
2742 elif entry["type"] == "dsc":
2743 self.check_source_against_db(checkfile, session)
2744 self.check_dsc_against_db(dsc_filename, session)
2746 # propogate in the case it is in the override tables:
2747 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2748 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2749 propogate[suite] = 1
2751 nopropogate[suite] = 1
2753 for suite in propogate.keys():
2754 if suite in nopropogate:
2756 self.pkg.changes["distribution"][suite] = 1
2758 for checkfile in self.pkg.files.keys():
2759 # Check the package is still in the override tables
2760 for suite in self.pkg.changes["distribution"].keys():
2761 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2762 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2764 ################################################################################
2765 # If any file of an upload has a recent mtime then chances are good
2766 # the file is still being uploaded.
2768 def upload_too_new(self):
2771 # Move back to the original directory to get accurate time stamps
2773 os.chdir(self.pkg.directory)
2774 file_list = self.pkg.files.keys()
2775 file_list.extend(self.pkg.dsc_files.keys())
2776 file_list.append(self.pkg.changes_file)
2779 last_modified = time.time()-os.path.getmtime(f)
2780 if last_modified < int(cnf["Dinstall::SkipTime"]):
2789 def store_changelog(self):
2791 # Skip binary-only upload if it is not a bin-NMU
2792 if not self.pkg.changes['architecture'].has_key('source'):
2793 from daklib.regexes import re_bin_only_nmu
2794 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2797 session = DBConn().session()
2799 # Check if upload already has a changelog entry
2800 query = """SELECT changelog_id FROM changes WHERE source = :source
2801 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2802 if session.execute(query, {'source': self.pkg.changes['source'], \
2803 'version': self.pkg.changes['version'], \
2804 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2808 # Add current changelog text into changelogs_text table, return created ID
2809 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2810 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2812 # Link ID to the upload available in changes table
2813 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2814 AND version = :version AND architecture = :architecture"""
2815 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2816 'version': self.pkg.changes['version'], \
2817 'architecture': " ".join(self.pkg.changes['architecture'].keys())})