Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
###############################################################################

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *

from config import Config
from holding import Holding
from urgencylog import UrgencyLog

from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: Upload.Pkg.files dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
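# Illustrative call, sketched under assumptions (the entry dict below is a
# made-up minimal Upload.Pkg.files value; a session bound to the dak database
# is required):
#
#   session = DBConn().session()
#   entry = {"dbtype": "deb"}
#   assert get_type(entry, session) == "deb"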
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    byhand = {}
    new = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]
    # Fix up the list of target suites
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)
    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b
    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand
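# A sketch of how this is typically driven (hypothetical filename; the changes
# and files dicts come from a parsed .changes, as in Upload.load_changes):
#
#   session = DBConn().session()
#   new, byhand = determine_new("dak_1.0-1_amd64.changes", changes, files,
#                               warn=0, session=session)
#   for pkg in new.keys():
#       print "%s: %s/%s is NEW" % (pkg, new[pkg]["component"], new[pkg]["section"])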
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
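# Shape of the dict this mutates, sketched with hypothetical values:
#
#   new = {"dak": {"section": "devel", "priority": "optional",
#                  "type": "dsc", "component": "main", "files": []}}
#   check_valid(new, session)
#   # new["dak"]["section id"] and ["priority id"] now hold database ids,
#   # or -1 where the section/priority is unknown or fails a sanity check.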
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
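# The callback signature matches the per-member callback of the old
# apt_inst.debExtract() API; a usage sketch (hypothetical deb filename):
#
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1975", "%Y")))
#   apt_inst.debExtract(utils.open_file("some.deb"), tar.callback, "data.tar.gz")
#   print tar.future_files, tar.ancient_files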
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, "  ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote, "  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()
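# For example (hypothetical source package name; q.first() returns None when
# no version of the source lives in unstable or experimental):
#
#   src = get_newest_source("dak", session)
#   if src is not None:
#       print src.version, src.dm_upload_allowed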
class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    ###########################################################################
    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """
        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False
        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1
        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                                % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
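    # A driver sketch (hypothetical path; assumes an Upload that has been
    # reset() as above):
    #
    #   u = Upload()
    #   if u.load_changes("/srv/queue/dak_1.0-1_amd64.changes"):
    #       print u.pkg.changes["source"], u.pkg.changes["version"]
    #   else:
    #       print "\n".join(u.rejects)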
    ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
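    # SuiteMappings entries are whitespace-separated config values; sketched
    # examples of the forms handled above (illustrative, not live dak config):
    #
    #   "map stable proposed-updates"
    #   "map-unreleased unstable experimental"
    #   "ignore breezy"
    #   "reject stable-security"
    #   "propup-version testing-security testing"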
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))
        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))
        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
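    # What the filename checks above see, as a worked example (hypothetical
    # filename):
    #
    #   m = re_isadeb.match("dak_1.0-1_amd64.deb")
    #   m.group(1), m.group(2), m.group(3)   # -> "dak", "1.0-1", "amd64"
    #
    # These are compared against the control fields Package, the epochless
    # Version and Architecture respectively.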
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
               or (dbc.in_queue is not None
                   and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass
        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s)" % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            ok = False
            # Check if we've already processed this file if we have a dbchg object
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

    ###########################################################################
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
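    # Worked examples of the Format parsing above (not dak output):
    #   "1.8" -> ["1", "8"] -> (1, 8)
    #   "2"   -> ["2"]      -> (2, 0)   # via the int(float(...)) fallback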
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed later).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
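    # The tag file is YAML keyed by severity group; a sketch of the expected
    # shape (illustrative tag names, not Debian's live configuration):
    #
    #   lintian:
    #     nonfatal:
    #       - some-overridable-tag
    #     fatal:
    #       - some-always-reject-tag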
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]
1750 # No sourceful upload -> no need to do anything else, direct return
1751 # We also work with unstable uploads, not experimental or those going to some
1752 # proposed-updates queue
1753 if "source" not in self.pkg.changes["architecture"] or \
1754 "unstable" not in self.pkg.changes["distribution"]:
        # Also only check if there is a file defined (and existent) with
        # checks.
1759 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return
1763 # Parse the yaml file
1764 sourcefile = file(transpath, 'r')
1765 sourcecontent = sourcefile.read()
1767 transitions = yaml.load(sourcecontent)
1768 except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return
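        # A minimal sketch of a transitions entry, inferred from the keys read
        # below (names and versions are hypothetical; the real file is
        # maintained via the wrapper mentioned above):
        #
        #   apt:
        #     reason: "Waiting for the new apt to migrate to testing"
        #     source: apt
        #     new: 0.7.20
        #     rm: "Some Release-Team Member"
        #     packages:
        #       - apt
        #       - python-apt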
1775 # Now look through all defined transitions
1776 for trans in transitions:
1777 t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
1783 if current is not None:
1784 compare = apt_pkg.VersionCompare(current.version, expected)
1786 if current is None or compare < 0:
1787 # This is still valid, the current version in testing is older than
1788 # the new version we wait for, or there is none in testing yet
1790 # Check if the source we look at is affected by this.
1791 if sourcepkg in t['packages']:
1792 # The source is affected, lets reject it.
                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, source)
                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"
1802 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1804 rejectmsg += "\n".join(textwrap.wrap("""Your package
1805 is part of a testing transition designed to get %s migrated (it is
1806 currently %s, we need version %s). This transition is managed by the
1807 Release Team, and %s is the Release-Team member responsible for it.
1808 Please mail debian-release@lists.debian.org or contact %s directly if you
1809 need further assistance. You might want to upload to experimental until this
1810 transition is done."""
                        % (source, currentlymsg, expected, t["rm"], t["rm"])))
                    self.rejects.append(rejectmsg)
                    return
1816 ###########################################################################
1817 # End check_signed_by_key checks
1818 ###########################################################################
1820 def build_summaries(self):
1821 """ Build a summary of changes the upload introduces. """
1823 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1825 short_summary = summary
1827 # This is for direport's benefit...
1828 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
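        # For example (assuming re_fdnic matches the empty line between
        # changelog paragraphs, as in daklib/regexes.py):
        #   "fixed foo\n\nfixed bar" -> "fixed foo\n .\nfixed bar"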
        if byhand or new:
            summary += "Changes: " + f
1833 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1835 summary += self.announce(short_summary, 0)
1837 return (summary, short_summary)
1839 ###########################################################################
1841 def close_bugs(self, summary, action):
1843 Send mail to close bugs as instructed by the closes field in the changes file.
1844 Also add a line to summary if any work was done.
1846 @type summary: string
1847 @param summary: summary text, as given by L{build_summaries}
        @type action: boolean
        @param action: If set to false, no real action will be done.
1853 @return: summary. If action was taken, extended by the list of closed bugs.
1857 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
        bugs = self.pkg.changes["closes"].keys()
        if len(bugs) == 0:
            return summary
        bugs.sort()

        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.Subst["__BUG_NUMBER__"] = bug
1871 if self.pkg.changes["distribution"].has_key("stable"):
1872 self.Subst["__STABLE_WARNING__"] = """
1873 Note that this package is not part of the released stable Debian
1874 distribution. It may have dependencies on other unreleased software,
1875 or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
1880 mail_message = utils.TemplateSubst(self.Subst, template)
1881 utils.send_mail(mail_message)
1883 # Clear up after ourselves
1884 del self.Subst["__BUG_NUMBER__"]
1885 del self.Subst["__STABLE_WARNING__"]
1887 if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        return summary
1894 ###########################################################################
1896 def announce(self, short_summary, action):
1898 Send an announce mail about a new upload.
1900 @type short_summary: string
1901 @param short_summary: Short summary text to include in the mail
        @type action: boolean
        @param action: If set to false, no real action will be done.
1907 @return: Textstring about action taken.
        cnf = Config()

        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1914 # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary
1924 for dist in self.pkg.changes["distribution"].keys():
1925 suite = get_suite(dist)
1926 if suite is None: continue
1927 announce_list = suite.announce
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1937 if cnf.get("Dinstall::TrackingServer") and \
1938 self.pkg.changes["architecture"].has_key("source"):
1939 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1940 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1942 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1943 utils.send_mail(mail_message)
1945 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1947 if cnf.FindB("Dinstall::CloseBugs"):
1948 summary = self.close_bugs(summary, action)
        del self.Subst["__SHORT_SUMMARY__"]

        return summary
1954 ###########################################################################
1956 def accept (self, summary, short_summary, session=None):
1960 This moves all files referenced from the .changes into the pool,
1961 sends the accepted mail, announces to lists, closes bugs and
1962 also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally fill
        the copy queues (e.g. the buildd queues).
1966 @type summary: string
1967 @param summary: Summary text
1969 @type short_summary: string
1970 @param short_summary: Short summary
        cnf = Config()
        stats = SummaryStats()

        self.logger.log(["installing changes", self.pkg.changes_file])

        poolfiles = []

        # Add the .dsc file to the DB first
1982 for newfile, entry in self.pkg.files.items():
1983 if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for f in pfs:
                    poolfiles.append(f)
1988 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1989 for newfile, entry in self.pkg.files.items():
1990 if entry["type"] == "deb":
1991 poolfiles.append(add_deb_to_db(self, newfile, session))
1993 # If this is a sourceful diff only upload that is moving
1994 # cross-component we need to copy the .orig files into the new
1995 # component too for the same reasons as above.
1996 # XXX: mhy: I think this should be in add_dsc_to_db
1997 if self.pkg.changes["architecture"].has_key("source"):
1998 for orig_file in self.pkg.orig_files.keys():
1999 if not self.pkg.orig_files[orig_file].has_key("id"):
2000 continue # Skip if it's not in the pool
2001 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2002 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2003 continue # Skip if the location didn't change
2006 oldf = get_poolfile_by_id(orig_file_id, session)
2007 old_filename = os.path.join(oldf.location.path, oldf.filename)
2008 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2009 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2011 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
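                # utils.poolify maps (source, component) to the pool
                # subdirectory, e.g. (hypothetically) ("dak", "main") ->
                # "main/d/dak/", with lib* sources keyed on a four-character
                # prefix ("libfoo" -> "main/libf/libfoo/").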
2013 # TODO: Care about size/md5sum collisions etc
2014 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
                    # Don't reference the old file from this changes
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)
                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)
2050 # Make sure that our source object is up-to-date
2051 session.expire(source)
2053 # Add changelog information to the database
2054 self.store_changelog()
2056 # Install the files into the pool
2057 for newfile, entry in self.pkg.files.items():
2058 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2059 utils.move(newfile, destination)
2060 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2061 stats.accept_bytes += float(entry["size"])
        # Copy the .changes file across for suites which need it.
2064 copy_changes = dict([(x.copychanges, '')
2065 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2066 if x.copychanges is not None])
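        # Keying the dict on copychanges deduplicates the destinations: if two
        # target suites share the same copychanges path, the .changes file is
        # only copied there once below.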
2068 for dest in copy_changes.keys():
2069 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # this commit
2076 # Move the .changes into the 'done' directory
2077 utils.move(self.pkg.changes_file,
2078 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2080 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2081 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2084 self.Subst["__SUMMARY__"] = summary
2085 mail_message = utils.TemplateSubst(self.Subst,
2086 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2087 utils.send_mail(mail_message)
2088 self.announce(short_summary, 1)
2090 ## Helper stuff for DebBugs Version Tracking
2091 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2092 if self.pkg.changes["architecture"].has_key("source"):
2093 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2094 version_history = os.fdopen(fd, 'w')
2095 version_history.write(self.pkg.dsc["bts changelog"])
2096 version_history.close()
2097 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2098 self.pkg.changes_file[:-8]+".versions")
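                # changes_file[:-8] strips the trailing ".changes" (8 chars),
                # e.g. "foo_1.0-1_amd64.changes" -> "foo_1.0-1_amd64.versions"
                # (hypothetical filename).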
2099 os.rename(temp_filename, filename)
2100 os.chmod(filename, 0644)
2102 # Write out the binary -> source mapping.
2103 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2104 debinfo = os.fdopen(fd, 'w')
2105 for name, entry in sorted(self.pkg.files.items()):
2106 if entry["type"] == "deb":
2107 line = " ".join([entry["package"], entry["version"],
2108 entry["architecture"], entry["source package"],
2109 entry["source version"]])
2110 debinfo.write(line+"\n")
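                    # Each line is "package version architecture source
                    # source-version", e.g. (hypothetically):
                    #   dak 1.0-8 all dak 1.0-8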
2112 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2113 self.pkg.changes_file[:-8]+".debinfo")
2114 os.rename(temp_filename, filename)
2115 os.chmod(filename, 0644)
2119 # Set up our copy queues (e.g. buildd queues)
2120 for suite_name in self.pkg.changes["distribution"].keys():
2121 suite = get_suite(suite_name, session)
2122 for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()
2129 stats.accept_count += 1
2131 def check_override(self):
2133 Checks override entries for validity. Mails "Override disparity" warnings,
2134 if that feature is enabled.
2136 Abandons the check if
2137 - override disparity checks are disabled
2138 - mail sending is disabled
        cnf = Config()

        # Abandon the check if override disparity checks have been disabled
        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
            return

        summary = self.pkg.check_override()
        if summary == "":
            return
2152 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2155 self.Subst["__SUMMARY__"] = summary
2156 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2157 utils.send_mail(mail_message)
2158 del self.Subst["__SUMMARY__"]
2160 ###########################################################################
2162 def remove(self, from_dir=None):
2164 Used (for instance) in p-u to remove the package from unchecked
        Also removes the package from the holding area.
2168 if from_dir is None:
            from_dir = self.pkg.directory

        h = Holding()
2172 for f in self.pkg.files.keys():
2173 os.unlink(os.path.join(from_dir, f))
2174 if os.path.exists(os.path.join(h.holding_dir, f)):
2175 os.unlink(os.path.join(h.holding_dir, f))
2177 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2178 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2179 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2181 ###########################################################################
2183 def move_to_queue (self, queue):
        Move files to a destination queue using the permissions in the table
        h = Holding()
        utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2189 queue.path, perms=int(queue.change_perms, 8))
2190 for f in self.pkg.files.keys():
2191 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
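        # queue.perms / queue.change_perms are stored as octal strings such as
        # "0660"; int(x, 8) turns them into the numeric mode utils.move needs.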
2193 ###########################################################################
2195 def force_reject(self, reject_files):
2197 Forcefully move files from the current directory to the
2198 reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.
2202 @type reject_files: dict
2203 @param reject_files: file dictionary
        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return
                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError:
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
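            # Design note: the O_CREAT|O_EXCL open is what makes claiming the
            # reject filename atomic - two racing processes can never both
            # succeed in creating the same name.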
2243 ###########################################################################
2244 def do_reject (self, manual=0, reject_message="", notes=""):
2246 Reject an upload. If called without a reject message or C{manual} is
2247 true, spawn an editor so the user can write one.
2250 @param manual: manual or automated rejection
2252 @type reject_message: string
2253 @param reject_message: A reject message
2258 # If we weren't given a manual rejection message, spawn an
2259 # editor so the user can add one in...
2260 if manual and not reject_message:
2261 (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR", "vi")
            answer = 'E'
            while answer == 'E':
2271 os.system("%s %s" % (editor, temp_filename))
2272 temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)
2291 print "Rejecting.\n"
2295 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2296 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2298 # Move all the files into the reject directory
2299 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2300 self.force_reject(reject_files)
2302 # If we fail here someone is probably trying to exploit the race
2303 # so let's just raise an exception ...
2304 if os.path.exists(reason_filename):
2305 os.unlink(reason_filename)
2306 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2308 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
2318 # Build up the rejection email
2319 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2320 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2321 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2322 self.Subst["__REJECT_MESSAGE__"] = ""
2323 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2324 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2325 # Write the rejection email out as the <foo>.reason file
2326 os.write(reason_fd, reject_mail_message)
2328 del self.Subst["__REJECTOR_ADDRESS__"]
2329 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2342 ################################################################################
2343 def in_override_p(self, package, component, suite, binary_type, filename, session):
2345 Check if a package already has override entries in the DB
2347 @type package: string
2348 @param package: package name
        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name
2356 @type binary_type: string
2357 @param binary_type: type of the package
2359 @type filename: string
2360 @param filename: filename we check
        @return: the database result. But no one cares anyway.
        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type
2373 # Override suite name; used for example with proposed-updates
2374 oldsuite = get_suite(suite, session)
        if oldsuite is not None and oldsuite.overridesuite:
2376 suite = oldsuite.overridesuite
2378 result = get_override(package, suite, component, file_type, session)
2380 # If checking for a source package fall back on the binary override type
2381 if file_type == "dsc" and len(result) < 1:
2382 result = get_override(package, suite, component, ['deb', 'udeb'], session)
        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[filename]["override section"] = result.section.section
            self.pkg.files[filename]["override priority"] = result.priority.priority

        return result
2393 ################################################################################
2394 def get_anyversion(self, sv_list, suite):
2397 @param sv_list: list of (suite, version) tuples to check
2400 @param suite: suite name
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion
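        # For example (hypothetical data): with sv_list =
        # [("unstable", "1.0-1"), ("experimental", "1.1-1")] and experimental
        # listed in Suite::unstable::VersionChecks::Enhances, asking for
        # "unstable" yields "1.1-1" - the highest version across the suite and
        # everything that enhances it.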
2414 ################################################################################
2416 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2419 @param sv_list: list of (suite, version) tuples to check
2421 @type filename: string
        @param filename: filename of the file being checked (used in reject messages)

        @type new_version: string
        @param new_version: version of the file being checked
2427 Ensure versions are newer than existing packages in target
2428 suites and that cross-suite version checking rules as
2429 set out in the conf file are satisfied.
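        # A hypothetical dak.conf stanza driving these checks:
        #   Suite::unstable::VersionChecks::MustBeNewerThan { Stable; Testing; };
        #   Suite::unstable::VersionChecks::MustBeOlderThan { Experimental; };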
        cnf = Config()

        # Check versions for each target suite
2435 for target_suite in self.pkg.changes["distribution"].keys():
2436 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2437 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2439 # Enforce "must be newer than target suite" even if conffile omits it
2440 if target_suite not in must_be_newer_than:
2441 must_be_newer_than.append(target_suite)
2443 for (suite, existent_version) in sv_list:
2444 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
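                # apt_pkg.VersionCompare(a, b) follows cmp() conventions:
                # negative if a < b, zero if equal, positive if a > b, e.g.
                # VersionCompare("1.0-2", "1.0-1") > 0.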
2446 if suite in must_be_newer_than and sourceful and vercmp < 1:
2447 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2493 ################################################################################
2494 def check_binary_against_db(self, filename, session):
2495 # Ensure version is sane
2496 q = session.query(BinAssociation)
2497 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2498 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2500 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2501 filename, self.pkg.files[filename]["version"], sourceful=False)
2503 # Check for any existing copies of the file
2504 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2505 q = q.filter_by(version=self.pkg.files[filename]["version"])
2506 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2511 ################################################################################
2513 def check_source_against_db(self, filename, session):
2514 source = self.pkg.dsc.get("source")
2515 version = self.pkg.dsc.get("version")
2517 # Ensure version is sane
2518 q = session.query(SrcAssociation)
2519 q = q.join(DBSource).filter(DBSource.source==source)
2521 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2522 filename, version, sourceful=True)
2524 ################################################################################
2525 def check_dsc_against_db(self, filename, session):
2528 @warning: NB: this function can remove entries from the 'files' index [if
2529 the orig tarball is a duplicate of the one in the archive]; if
2530 you're iterating over 'files' and call this function as part of
2531 the loop, be sure to add a check to the top of the loop to
2532 ensure you haven't just tried to dereference the deleted entry.
        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files
2540 # Try and find all files mentioned in the .dsc. This has
2541 # to work harder to cope with the multiple possible
2542 # locations of an .orig.tar.gz.
2543 # The ordering on the select is needed to pick the newest orig
2544 # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
2548 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2549 actual_size = int(self.pkg.files[dsc_name]["size"])
2550 found = "%s in incoming" % (dsc_name)
2552 # Check the file does not already exist in the archive
2553 ql = get_poolfile_like_name(dsc_name, session)
                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql[:]:  # iterate over a copy, since we remove entries
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)
2560 # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # the old dak.
                #
2564 # "(Come on! I thought it was always obvious that
2565 # one just doesn't release different files with
2566 # the same name and version.)"
2567 # -- ajk@ on d-devel@l.d.o
                # Ignore exact matches for .orig.tar.gz
                match = 0
                if re_is_orig_source.match(dsc_name):
                    for i in ql:
2574 if self.pkg.files.has_key(dsc_name) and \
2575 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2576 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2577 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2578 # TODO: Don't delete the entry, just mark it as not needed
2579 # This would fix the stupidity of changing something we often iterate over
2580 # whilst we're doing it
2581 del self.pkg.files[dsc_name]
2582 dsc_entry["files id"] = i.file_id
2583 if not orig_files.has_key(dsc_name):
2584 orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                            match = 1

                            # Don't bitch that we couldn't find this file later
                            try:
                                self.later_check_files.remove(dsc_name)
                            except ValueError:
                                pass

                if not match:
                    self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2598 elif re_is_orig_source.match(dsc_name):
2600 ql = get_poolfile_like_name(dsc_name, session)
2602 # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                for i in ql[:]:  # iterate over a copy, since we remove entries
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                if len(ql) > 0:
2609 # Unfortunately, we may get more than one match here if,
2610 # for example, the package was in potato but had an -sa
2611 # upload in woody. So we need to choose the right one.
                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
2619 old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
2633 # need this for updating dsc_files in install()
2634 dsc_entry["files id"] = x.file_id
2635 # See install() in process-accepted...
2636 if not orig_files.has_key(dsc_name):
2637 orig_files[dsc_name] = {}
2638 orig_files[dsc_name]["id"] = x.file_id
2639 orig_files[dsc_name]["path"] = old_file
2640 orig_files[dsc_name]["location"] = x.location.location_id
2642 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2643 # Not there? Check the queue directories...
2644 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                    continue
                in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2648 if os.path.exists(in_otherdir):
2649 in_otherdir_fh = utils.open_file(in_otherdir)
2650 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2651 in_otherdir_fh.close()
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                    found = in_otherdir

                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["path"] = in_otherdir

            if not found:
                self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                continue
        else:
            self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
            continue
2664 if actual_md5 != dsc_entry["md5sum"]:
2665 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2666 if actual_size != int(dsc_entry["size"]):
2667 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2669 ################################################################################
2670 # This is used by process-new and process-holding to recheck a changes file
2671 # at the time we're running. It mainly wraps various other internal functions
2672 # and is similar to accepted_checks - these should probably be tidied up
    def recheck(self, session):
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue
2682 entry = self.pkg.files[f]
2684 # Check that the source still exists
2685 if entry["type"] == "deb":
2686 source_version = entry["source version"]
2687 source_package = entry["source package"]
2688 if not self.pkg.changes["architecture"].has_key("source") \
2689 and not source_exists(source_package, source_version, \
2690 suites = self.pkg.changes["distribution"].keys(), session = session):
2691 source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                found = False

                for q in ["Embargoed", "Unembargoed", "Newstage"]:
                    if cnf.has_key("Dir::Queue::%s" % (q)):
                        if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                            found = True
                if not found:
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2701 # Version and file overwrite checks
2702 if entry["type"] == "deb":
2703 self.check_binary_against_db(f, session)
2704 elif entry["type"] == "dsc":
2705 self.check_source_against_db(f, session)
2706 self.check_dsc_against_db(f, session)
2708 ################################################################################
2709 def accepted_checks(self, overwrite_checks, session):
2710 # Recheck anything that relies on the database; since that's not
2711 # frozen between accept and our run time when called from p-a.
2713 # overwrite_checks is set to False when installing to stable/oldstable
        propogate = {}
        nopropogate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f
2724 for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue
2730 entry = self.pkg.files[checkfile]
2732 # Check that the source still exists
2733 if entry["type"] == "deb":
2734 source_version = entry["source version"]
2735 source_package = entry["source package"]
2736 if not self.pkg.changes["architecture"].has_key("source") \
2737 and not source_exists(source_package, source_version, \
2738 suites = self.pkg.changes["distribution"].keys(), \
2740 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2742 # Version and file overwrite checks
2743 if overwrite_checks:
2744 if entry["type"] == "deb":
2745 self.check_binary_against_db(checkfile, session)
2746 elif entry["type"] == "dsc":
2747 self.check_source_against_db(checkfile, session)
2748 self.check_dsc_against_db(dsc_filename, session)
        # Propagate in the case it is in the override tables:
2751 for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propogate[suite] = 1
                else:
                    nopropogate[suite] = 1
2757 for suite in propogate.keys():
            if suite in nopropogate:
                continue
            self.pkg.changes["distribution"][suite] = 1
        for checkfile in self.pkg.files.keys():
            entry = self.pkg.files[checkfile]

            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2766 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2768 ################################################################################
2769 # If any file of an upload has a recent mtime then chances are good
2770 # the file is still being uploaded.
    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have been moved already; ignore it.
                pass

        os.chdir(cwd)
        return too_new
2793 def store_changelog(self):
2795 # Skip binary-only upload if it is not a bin-NMU
2796 if not self.pkg.changes['architecture'].has_key('source'):
2797 from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return
2801 session = DBConn().session()
2803 # Check if upload already has a changelog entry
2804 query = """SELECT changelog_id FROM changes WHERE source = :source
2805 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2806 if session.execute(query, {'source': self.pkg.changes['source'], \
2807 'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            return
2812 # Add current changelog text into changelogs_text table, return created ID
2813 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2814 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
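        # "INSERT ... RETURNING id" is PostgreSQL syntax (the database dak
        # runs on), so the new changelog id comes back without a second
        # round-trip.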
2816 # Link ID to the upload available in changes table
2817 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2818 AND version = :version AND architecture = :architecture"""
2819 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2820 'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})
        session.commit()