5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
82 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
84 # Validate the override type
85 type_id = get_override_type(file_type, session)
87 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
91 ################################################################################
93 # Determine what parts in a .changes are NEW
95 def determine_new(filename, changes, files, warn=1, session = None):
97 Determine what parts in a C{changes} file are NEW.
100 @param filename: changes filename
102 @type changes: Upload.Pkg.changes dict
103 @param changes: Changes dictionary
105 @type files: Upload.Pkg.files dict
106 @param files: Files dictionary
109 @param warn: Warn if overrides are added for (old)stable
112 @return: dictionary of NEW components.
115 # TODO: This should all use the database instead of parsing the changes
120 dbchg = get_dbchange(filename, session)
122 print "Warning: cannot find changes file in database; won't check byhand"
124 # Build up a list of potentially new things
125 for name, f in files.items():
126 # Keep a record of byhand elements
127 if f["section"] == "byhand":
132 priority = f["priority"]
133 section = f["section"]
134 file_type = get_type(f, session)
135 component = f["component"]
137 if file_type == "dsc":
140 if not new.has_key(pkg):
142 new[pkg]["priority"] = priority
143 new[pkg]["section"] = section
144 new[pkg]["type"] = file_type
145 new[pkg]["component"] = component
146 new[pkg]["files"] = []
148 old_type = new[pkg]["type"]
149 if old_type != file_type:
150 # source gets trumped by deb or udeb
151 if old_type == "dsc":
152 new[pkg]["priority"] = priority
153 new[pkg]["section"] = section
154 new[pkg]["type"] = file_type
155 new[pkg]["component"] = component
157 new[pkg]["files"].append(name)
159 if f.has_key("othercomponents"):
160 new[pkg]["othercomponents"] = f["othercomponents"]
162 # Fix up the list of target suites
164 for suite in changes["suite"].keys():
165 oldsuite = get_suite(suite, session)
167 print "WARNING: Invalid suite %s found" % suite
170 if oldsuite.overridesuite:
171 newsuite = get_suite(oldsuite.overridesuite, session)
174 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
175 oldsuite.overridesuite, suite)
176 del changes["suite"][suite]
177 changes["suite"][oldsuite.overridesuite] = 1
179 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
180 oldsuite.overridesuite, suite)
182 # Check for unprocessed byhand files
183 if dbchg is not None:
184 for b in byhand.keys():
185 # Find the file entry in the database
187 for f in dbchg.files:
190 # If it's processed, we can ignore it
196 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
198 # Check for new stuff
199 for suite in changes["suite"].keys():
200 for pkg in new.keys():
201 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
203 for file_entry in new[pkg]["files"]:
204 if files[file_entry].has_key("new"):
205 del files[file_entry]["new"]
209 for s in ['stable', 'oldstable']:
210 if changes["suite"].has_key(s):
211 print "WARNING: overrides will be added for %s!" % s
212 for pkg in new.keys():
213 if new[pkg].has_key("othercomponents"):
214 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
218 ################################################################################
220 def check_valid(new, session = None):
222 Check if section and priority for NEW packages exist in database.
223 Additionally does sanity checks:
224 - debian-installer packages have to be udeb (or source)
225 - non-debian-installer packages cannot be udeb
226 - source priority can only be assigned to dsc file types
229 @param new: Dict of new packages with their section, priority and type.
232 for pkg in new.keys():
233 section_name = new[pkg]["section"]
234 priority_name = new[pkg]["priority"]
235 file_type = new[pkg]["type"]
237 section = get_section(section_name, session)
239 new[pkg]["section id"] = -1
241 new[pkg]["section id"] = section.section_id
243 priority = get_priority(priority_name, session)
245 new[pkg]["priority id"] = -1
247 new[pkg]["priority id"] = priority.priority_id
250 di = section_name.find("debian-installer") != -1
252 # If d-i, we must be udeb and vice-versa
253 if (di and file_type not in ("udeb", "dsc")) or \
254 (not di and file_type == "udeb"):
255 new[pkg]["section id"] = -1
257 # A "source" priority requires the dsc file type and vice-versa
258 if (priority_name == "source" and file_type != "dsc") or \
259 (priority_name != "source" and file_type == "dsc"):
260 new[pkg]["priority id"] = -1
262 ###############################################################################
264 # Used by Upload.check_timestamps
265 class TarTime(object):
266 def __init__(self, future_cutoff, past_cutoff):
268 self.future_cutoff = future_cutoff
269 self.past_cutoff = past_cutoff
272 self.future_files = {}
273 self.ancient_files = {}
275 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
276 if MTime > self.future_cutoff:
277 self.future_files[Name] = MTime
278 if MTime < self.past_cutoff:
279 self.ancient_files[Name] = MTime
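
# Illustrative sketch (not part of dak): TarTime collects member names whose
# mtime falls outside [past_cutoff, future_cutoff] while apt_inst walks a tar
# member of a .deb, as Upload.check_timestamps() does below.  The filename and
# cutoffs are assumptions made up for this example.
def _example_tartime(filename):
    now = time.time()
    tar = TarTime(future_cutoff=now + 24 * 3600, past_cutoff=0)
    deb_file = utils.open_file(filename)
    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
    deb_file.close()
    return (tar.future_files, tar.ancient_files)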
281 ###############################################################################
283 def prod_maintainer(notes, upload):
286 # Here we prepare an editor and get them ready to prod...
287 (fd, temp_filename) = utils.temp_filename()
288 temp_file = os.fdopen(fd, 'w')
290 temp_file.write(note.comment)
292 editor = os.environ.get("EDITOR","vi")
295 os.system("%s %s" % (editor, temp_filename))
296 temp_fh = utils.open_file(temp_filename)
297 prod_message = "".join(temp_fh.readlines())
299 print "Prod message:"
300 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
301 prompt = "[P]rod, Edit, Abandon, Quit ?"
303 while prompt.find(answer) == -1:
304 answer = utils.our_raw_input(prompt)
305 m = re_default_answer.search(prompt)
308 answer = answer[:1].upper()
309 os.unlink(temp_filename)
315 # Otherwise, do the prodding...
316 user_email_address = utils.whoami() + " <%s>" % (
317 cnf["Dinstall::MyAdminAddress"])
321 Subst["__FROM_ADDRESS__"] = user_email_address
322 Subst["__PROD_MESSAGE__"] = prod_message
323 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
325 prod_mail_message = utils.TemplateSubst(
326 Subst,cnf["Dir::Templates"]+"/process-new.prod")
329 utils.send_mail(prod_mail_message)
331 print "Sent prodding message"
333 ################################################################################
335 def edit_note(note, upload, session, trainee=False):
336 # Write the current data to a temporary file
337 (fd, temp_filename) = utils.temp_filename()
338 editor = os.environ.get("EDITOR","vi")
341 os.system("%s %s" % (editor, temp_filename))
342 temp_file = utils.open_file(temp_filename)
343 newnote = temp_file.read().rstrip()
346 print utils.prefix_multi_line_string(newnote," ")
347 prompt = "[D]one, Edit, Abandon, Quit ?"
349 while prompt.find(answer) == -1:
350 answer = utils.our_raw_input(prompt)
351 m = re_default_answer.search(prompt)
354 answer = answer[:1].upper()
355 os.unlink(temp_filename)
362 comment = NewComment()
363 comment.package = upload.pkg.changes["source"]
364 comment.version = upload.pkg.changes["version"]
365 comment.comment = newnote
366 comment.author = utils.whoami()
367 comment.trainee = trainee
371 ###############################################################################
373 # suite names DMs can upload to
374 dm_suites = ['unstable', 'experimental']
376 def get_newest_source(source, session):
377 'returns the newest DBSource object in dm_suites'
378 ## the most recent version of the package uploaded to unstable or
379 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
380 ## section of its control file
381 q = session.query(DBSource).filter_by(source = source). \
382 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
383 order_by(desc('source.version'))
386 def get_suite_version(source, session):
387 'returns a list of tuples (suite_name, version) for source package'
388 q = session.query(Suite.suite_name, DBSource.version). \
389 join(Suite.sources).filter_by(source = source)
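
# Illustrative sketch (not part of dak): the two helpers above answer "what is
# the newest source in the DM-uploadable suites?" and "which suites carry which
# version?".  The package name is an assumption made up for this example.
def _example_source_lookups(session):
    newest = get_newest_source("hello", session)
    if newest is not None:
        print "newest uploadable version: %s" % newest.version
    for suite_name, version in get_suite_version("hello", session):
        print "%s has %s" % (suite_name, version)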
392 class Upload(object):
394 Everything that has to do with processing an upload.
402 ###########################################################################
405 """ Reset a number of internal variables."""
407 # Initialize the substitution template map
410 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
411 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
412 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
413 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
419 self.later_check_files = []
423 def package_info(self):
425 Format various messages from this Upload to send to the maintainer.
429 ('Reject Reasons', self.rejects),
430 ('Warnings', self.warnings),
431 ('Notes', self.notes),
435 for title, messages in msgs:
437 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
442 ###########################################################################
443 def update_subst(self):
444 """ Set up the per-package template substitution mappings """
448 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
449 if not self.pkg.changes.has_key("architecture") or not \
450 isinstance(self.pkg.changes["architecture"], dict):
451 self.pkg.changes["architecture"] = { "Unknown" : "" }
453 # and maintainer2047 may not exist.
454 if not self.pkg.changes.has_key("maintainer2047"):
455 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
457 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
458 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
459 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
461 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
462 if self.pkg.changes["architecture"].has_key("source") and \
463 self.pkg.changes["changedby822"] != "" and \
464 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
466 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
467 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
468 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
470 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
471 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
472 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
474 # Process policy doesn't set the fingerprint field and I don't want to make it
475 # do it for now as I don't want to have to deal with the case where we accepted
476 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
477 # the meantime so the package will be remarked as rejectable. Urgh.
478 # TODO: Fix this properly
479 if self.pkg.changes.has_key('fingerprint'):
480 session = DBConn().session()
481 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
482 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
483 if self.pkg.changes.has_key("sponsoremail"):
484 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
487 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
488 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
490 # Apply any global override of the Maintainer field
491 if cnf.get("Dinstall::OverrideMaintainer"):
492 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
493 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
495 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
496 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
497 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
498 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
500 ###########################################################################
501 def load_changes(self, filename):
503 Load a changes file and set up a dictionary around it. Also checks for mandatory
506 @type filename: string
507 @param filename: Changes filename, full path.
510 @return: whether the changes file was valid or not. We may want to
511 reject even if this is True (see what gets put in self.rejects).
512 This is simply to prevent us even trying things later which will
513 fail because we couldn't properly parse the file.
516 self.pkg.changes_file = filename
518 # Parse the .changes file into a dictionary
520 self.pkg.changes.update(parse_changes(filename))
521 except CantOpenError:
522 self.rejects.append("%s: can't read file." % (filename))
524 except ParseChangesError, line:
525 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
527 except ChangesUnicodeError:
528 self.rejects.append("%s: changes file not proper utf-8" % (filename))
531 # Parse the Files field from the .changes into another dictionary
533 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
534 except ParseChangesError, line:
535 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
537 except UnknownFormatError, format:
538 self.rejects.append("%s: unknown format '%s'." % (filename, format))
541 # Check for mandatory fields
542 for i in ("distribution", "source", "binary", "architecture",
543 "version", "maintainer", "files", "changes", "description"):
544 if not self.pkg.changes.has_key(i):
545 # Avoid undefined errors later
546 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
549 # Strip a source version in brackets from the source field
550 if re_strip_srcver.search(self.pkg.changes["source"]):
551 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
553 # Ensure the source field is a valid package name.
554 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
555 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
557 # Split multi-value fields into a lower-level dictionary
558 for i in ("architecture", "distribution", "binary", "closes"):
559 o = self.pkg.changes.get(i, "")
561 del self.pkg.changes[i]
563 self.pkg.changes[i] = {}
566 self.pkg.changes[i][j] = 1
568 # Fix the Maintainer: field to be RFC822/2047 compatible
570 (self.pkg.changes["maintainer822"],
571 self.pkg.changes["maintainer2047"],
572 self.pkg.changes["maintainername"],
573 self.pkg.changes["maintaineremail"]) = \
574 fix_maintainer (self.pkg.changes["maintainer"])
575 except ParseMaintError, msg:
576 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
577 % (filename, self.pkg.changes["maintainer"], msg))
579 # ...likewise for the Changed-By: field if it exists.
581 (self.pkg.changes["changedby822"],
582 self.pkg.changes["changedby2047"],
583 self.pkg.changes["changedbyname"],
584 self.pkg.changes["changedbyemail"]) = \
585 fix_maintainer (self.pkg.changes.get("changed-by", ""))
586 except ParseMaintError, msg:
587 self.pkg.changes["changedby822"] = ""
588 self.pkg.changes["changedby2047"] = ""
589 self.pkg.changes["changedbyname"] = ""
590 self.pkg.changes["changedbyemail"] = ""
592 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
593 % (filename, self.pkg.changes["changed-by"], msg))
595 # Ensure all the values in Closes: are numbers
596 if self.pkg.changes.has_key("closes"):
597 for i in self.pkg.changes["closes"].keys():
598 if re_isanum.match(i) is None:
599 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
601 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
602 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
603 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
605 # Check the .changes is non-empty
606 if not self.pkg.files:
607 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
610 # Changes was syntactically valid even if we'll reject
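# Illustrative sketch (not part of dak): typical driver code loads the .changes
# first and only continues with further checks when parsing succeeded, e.g.
#
#   u = Upload()
#   if u.load_changes("/srv/queue/foo_1.0-1_amd64.changes"):
#       u.check_files(action=False)
#   print u.package_info()
#
# The path and the follow-up calls are assumptions made up for this example.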
613 ###########################################################################
615 def check_distributions(self):
616 "Check and map the Distribution field"
620 # Handle suite mappings
621 for m in Cnf.ValueList("SuiteMappings"):
624 if mtype == "map" or mtype == "silent-map":
625 (source, dest) = args[1:3]
626 if self.pkg.changes["distribution"].has_key(source):
627 del self.pkg.changes["distribution"][source]
628 self.pkg.changes["distribution"][dest] = 1
629 if mtype != "silent-map":
630 self.notes.append("Mapping %s to %s." % (source, dest))
631 if self.pkg.changes.has_key("distribution-version"):
632 if self.pkg.changes["distribution-version"].has_key(source):
633 self.pkg.changes["distribution-version"][source]=dest
634 elif mtype == "map-unreleased":
635 (source, dest) = args[1:3]
636 if self.pkg.changes["distribution"].has_key(source):
637 for arch in self.pkg.changes["architecture"].keys():
638 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
639 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
640 del self.pkg.changes["distribution"][source]
641 self.pkg.changes["distribution"][dest] = 1
643 elif mtype == "ignore":
645 if self.pkg.changes["distribution"].has_key(suite):
646 del self.pkg.changes["distribution"][suite]
647 self.warnings.append("Ignoring %s as a target suite." % (suite))
648 elif mtype == "reject":
650 if self.pkg.changes["distribution"].has_key(suite):
651 self.rejects.append("Uploads to %s are not accepted." % (suite))
652 elif mtype == "propup-version":
653 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
655 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
656 if self.pkg.changes["distribution"].has_key(args[1]):
657 self.pkg.changes.setdefault("distribution-version", {})
658 for suite in args[2:]:
659 self.pkg.changes["distribution-version"][suite] = suite
661 # Ensure there is (still) a target distribution
662 if len(self.pkg.changes["distribution"].keys()) < 1:
663 self.rejects.append("No valid distribution remaining.")
665 # Ensure target distributions exist
666 for suite in self.pkg.changes["distribution"].keys():
667 if not Cnf.has_key("Suite::%s" % (suite)):
668 self.rejects.append("Unknown distribution `%s'." % (suite))
670 ###########################################################################
672 def binary_file_checks(self, f, session):
674 entry = self.pkg.files[f]
676 # Extract package control information
677 deb_file = utils.open_file(f)
679 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
681 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
683 # Can't continue, none of the checks on control would work.
686 # Check for mandatory "Description:"
689 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
691 self.rejects.append("%s: Missing Description in binary package" % (f))
696 # Check for mandatory fields
697 for field in [ "Package", "Architecture", "Version" ]:
698 if control.Find(field) is None:
700 self.rejects.append("%s: No %s field in control." % (f, field))
703 # Ensure the package name matches the one given in the .changes
704 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
705 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
707 # Validate the package field
708 package = control.Find("Package")
709 if not re_valid_pkg_name.match(package):
710 self.rejects.append("%s: invalid package name '%s'." % (f, package))
712 # Validate the version field
713 version = control.Find("Version")
714 if not re_valid_version.match(version):
715 self.rejects.append("%s: invalid version number '%s'." % (f, version))
717 # Ensure the architecture of the .deb is one we know about.
718 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
719 architecture = control.Find("Architecture")
720 upload_suite = self.pkg.changes["distribution"].keys()[0]
722 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
723 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
724 self.rejects.append("Unknown architecture '%s'." % (architecture))
726 # Ensure the architecture of the .deb is one of the ones
727 # listed in the .changes.
728 if not self.pkg.changes["architecture"].has_key(architecture):
729 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
731 # Sanity-check the Depends field
732 depends = control.Find("Depends")
734 self.rejects.append("%s: Depends field is empty." % (f))
736 # Sanity-check the Provides field
737 provides = control.Find("Provides")
739 provide = re_spacestrip.sub('', provides)
741 self.rejects.append("%s: Provides field is empty." % (f))
742 prov_list = provide.split(",")
743 for prov in prov_list:
744 if not re_valid_pkg_name.match(prov):
745 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
747 # Check the section & priority match those given in the .changes (non-fatal)
748 if control.Find("Section") and entry["section"] != "" \
749 and entry["section"] != control.Find("Section"):
750 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
751 (f, control.Find("Section", ""), entry["section"]))
752 if control.Find("Priority") and entry["priority"] != "" \
753 and entry["priority"] != control.Find("Priority"):
754 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
755 (f, control.Find("Priority", ""), entry["priority"]))
757 entry["package"] = package
758 entry["architecture"] = architecture
759 entry["version"] = version
760 entry["maintainer"] = control.Find("Maintainer", "")
762 if f.endswith(".udeb"):
763 self.pkg.files[f]["dbtype"] = "udeb"
764 elif f.endswith(".deb"):
765 self.pkg.files[f]["dbtype"] = "deb"
767 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
769 entry["source"] = control.Find("Source", entry["package"])
771 # Get the source version
772 source = entry["source"]
775 if source.find("(") != -1:
776 m = re_extract_src_version.match(source)
778 source_version = m.group(2)
780 if not source_version:
781 source_version = self.pkg.files[f]["version"]
783 entry["source package"] = source
784 entry["source version"] = source_version
786 # Ensure the filename matches the contents of the .deb
787 m = re_isadeb.match(f)
790 file_package = m.group(1)
791 if entry["package"] != file_package:
792 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
793 (f, file_package, entry["dbtype"], entry["package"]))
794 epochless_version = re_no_epoch.sub('', control.Find("Version"))
797 file_version = m.group(2)
798 if epochless_version != file_version:
799 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
800 (f, file_version, entry["dbtype"], epochless_version))
803 file_architecture = m.group(3)
804 if entry["architecture"] != file_architecture:
805 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
806 (f, file_architecture, entry["dbtype"], entry["architecture"]))
808 # Check that the corresponding source exists
809 source_version = entry["source version"]
810 source_package = entry["source package"]
811 if self.pkg.changes["architecture"].has_key("source"):
812 if source_version != self.pkg.changes["version"]:
813 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
814 (source_version, f, self.pkg.changes["version"]))
816 # Check in the SQL database
817 if not source_exists(source_package, source_version, suites = \
818 self.pkg.changes["distribution"].keys(), session = session):
819 # Check in one of the other directories
820 source_epochless_version = re_no_epoch.sub('', source_version)
821 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
822 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
824 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
827 dsc_file_exists = False
828 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
829 if cnf.has_key("Dir::Queue::%s" % (myq)):
830 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
831 dsc_file_exists = True
834 if not dsc_file_exists:
835 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
837 # Check the version and for file overwrites
838 self.check_binary_against_db(f, session)
840 # Temporarily disable contents generation until we change the table storage layout
843 #if len(b.rejects) > 0:
844 # for j in b.rejects:
845 # self.rejects.append(j)
847 def source_file_checks(self, f, session):
848 entry = self.pkg.files[f]
850 m = re_issource.match(f)
854 entry["package"] = m.group(1)
855 entry["version"] = m.group(2)
856 entry["type"] = m.group(3)
858 # Ensure the source package name matches the Source field in the .changes
859 if self.pkg.changes["source"] != entry["package"]:
860 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
862 # Ensure the source version matches the version in the .changes file
863 if re_is_orig_source.match(f):
864 changes_version = self.pkg.changes["chopversion2"]
866 changes_version = self.pkg.changes["chopversion"]
868 if changes_version != entry["version"]:
869 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
871 # Ensure the .changes lists source in the Architecture field
872 if not self.pkg.changes["architecture"].has_key("source"):
873 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
875 # Check the signature of a .dsc file
876 if entry["type"] == "dsc":
877 # check_signature returns either:
878 # (None, [list, of, rejects]) or (signature, [])
879 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
881 self.rejects.append(j)
883 entry["architecture"] = "source"
885 def per_suite_file_checks(self, f, suite, session):
887 entry = self.pkg.files[f]
890 if entry.has_key("byhand"):
893 # Check we have fields we need to do these checks
895 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
896 if not entry.has_key(m):
897 self.rejects.append("file '%s' does not have field %s set" % (f, m))
903 # Handle component mappings
904 for m in cnf.ValueList("ComponentMappings"):
905 (source, dest) = m.split()
906 if entry["component"] == source:
907 entry["original component"] = source
908 entry["component"] = dest
910 # Ensure the component is valid for the target suite
911 if cnf.has_key("Suite:%s::Components" % (suite)) and \
912 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
913 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
916 # Validate the component
917 if not get_component(entry["component"], session):
918 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
921 # See if the package is NEW
922 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
925 # Validate the priority
926 if entry["priority"].find('/') != -1:
927 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
929 # Determine the location
930 location = cnf["Dir::Pool"]
931 l = get_location(location, entry["component"], session=session)
933 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
934 entry["location id"] = -1
936 entry["location id"] = l.location_id
938 # Check the md5sum & size against existing files (if any)
939 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
941 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
942 entry["size"], entry["md5sum"], entry["location id"])
945 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
946 elif found is False and poolfile is not None:
947 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
950 entry["files id"] = None
952 entry["files id"] = poolfile.file_id
954 # Check for packages that have moved from one component to another
955 entry['suite'] = suite
956 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
958 entry["othercomponents"] = res.fetchone()[0]
960 def check_files(self, action=True):
961 file_keys = self.pkg.files.keys()
967 os.chdir(self.pkg.directory)
969 ret = holding.copy_to_holding(f)
971 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
975 # Check whether the changes file is already known to dak
976 # [NB: this check must be done post-suite mapping]
977 base_filename = os.path.basename(self.pkg.changes_file)
979 session = DBConn().session()
982 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
983 # if in the pool or in a queue other than unchecked, reject
984 if (dbc.in_queue is None) \
985 or (dbc.in_queue is not None
986 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
987 self.rejects.append("%s file already known to dak" % base_filename)
988 except NoResultFound, e:
995 for f, entry in self.pkg.files.items():
996 # Ensure the file does not already exist in one of the accepted directories
997 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
998 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
999 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1000 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1002 if not re_taint_free.match(f):
1003 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1005 # Check the file is readable
1006 if os.access(f, os.R_OK) == 0:
1007 # When running in -n, copy_to_holding() won't have
1008 # generated the reject_message, so we need to.
1010 if os.path.exists(f):
1011 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1013 # Don't directly reject; mark it to check later to deal with origs
1014 # we can find in the pool
1015 self.later_check_files.append(f)
1016 entry["type"] = "unreadable"
1019 # If it's byhand skip remaining checks
1020 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1022 entry["type"] = "byhand"
1024 # Checks for a binary package...
1025 elif re_isadeb.match(f):
1027 entry["type"] = "deb"
1029 # This routine appends to self.rejects/warnings as appropriate
1030 self.binary_file_checks(f, session)
1032 # Checks for a source package...
1033 elif re_issource.match(f):
1036 # This routine appends to self.rejects/warnings as appropriate
1037 self.source_file_checks(f, session)
1039 # Not a binary or source package? Assume byhand...
1042 entry["type"] = "byhand"
1044 # Per-suite file checks
1045 entry["oldfiles"] = {}
1046 for suite in self.pkg.changes["distribution"].keys():
1047 self.per_suite_file_checks(f, suite, session)
1051 # If the .changes file says it has source, it must have source.
1052 if self.pkg.changes["architecture"].has_key("source"):
1054 self.rejects.append("no source found and Architecture line in changes mention source.")
1056 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1057 self.rejects.append("source only uploads are not supported.")
1059 ###########################################################################
1060 def check_dsc(self, action=True, session=None):
1061 """Returns bool indicating whether or not the source changes are valid"""
1062 # Ensure there is source to check
1063 if not self.pkg.changes["architecture"].has_key("source"):
1068 for f, entry in self.pkg.files.items():
1069 if entry["type"] == "dsc":
1071 self.rejects.append("can not process a .changes file with multiple .dsc's.")
1076 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1077 if not dsc_filename:
1078 self.rejects.append("source uploads must contain a dsc file")
1081 # Parse the .dsc file
1083 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1084 except CantOpenError:
1085 # if not -n copy_to_holding() will have done this for us...
1087 self.rejects.append("%s: can't read file." % (dsc_filename))
1088 except ParseChangesError, line:
1089 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1090 except InvalidDscError, line:
1091 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1092 except ChangesUnicodeError:
1093 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1095 # Build up the file list of files mentioned by the .dsc
1097 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1098 except NoFilesFieldError:
1099 self.rejects.append("%s: no Files: field." % (dsc_filename))
1101 except UnknownFormatError, format:
1102 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1104 except ParseChangesError, line:
1105 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1108 # Enforce mandatory fields
1109 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1110 if not self.pkg.dsc.has_key(i):
1111 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1114 # Validate the source and version fields
1115 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1116 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1117 if not re_valid_version.match(self.pkg.dsc["version"]):
1118 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1120 # Only a limited list of source formats are allowed in each suite
1121 for dist in self.pkg.changes["distribution"].keys():
1122 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1123 if self.pkg.dsc["format"] not in allowed:
1124 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1126 # Validate the Maintainer field
1128 # We ignore the return value
1129 fix_maintainer(self.pkg.dsc["maintainer"])
1130 except ParseMaintError, msg:
1131 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1132 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1134 # Validate the build-depends field(s)
1135 for field_name in [ "build-depends", "build-depends-indep" ]:
1136 field = self.pkg.dsc.get(field_name)
1138 # Have apt try to parse them...
1140 apt_pkg.ParseSrcDepends(field)
1142 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1144 # Ensure the version number in the .dsc matches the version number in the .changes
1145 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1146 changes_version = self.pkg.files[dsc_filename]["version"]
1148 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1149 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1151 # Ensure the Files field contain only what's expected
1152 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1154 # Ensure source is newer than existing source in target suites
1155 session = DBConn().session()
1156 self.check_source_against_db(dsc_filename, session)
1157 self.check_dsc_against_db(dsc_filename, session)
1159 dbchg = get_dbchange(self.pkg.changes_file, session)
1161 # Finally, check if we're missing any files
1162 for f in self.later_check_files:
1164 # Check if we've already processed this file if we have a dbchg object
1167 for pf in dbchg.files:
1168 if pf.filename == f and pf.processed:
1169 self.notes.append('%s was already processed so we can go ahead' % f)
1171 del self.pkg.files[f]
1173 self.rejects.append("Could not find file %s references in changes" % f)
1179 ###########################################################################
1181 def get_changelog_versions(self, source_dir):
1182 """Extracts a the source package and (optionally) grabs the
1183 version history out of debian/changelog for the BTS."""
1187 # Find the .dsc (again)
1189 for f in self.pkg.files.keys():
1190 if self.pkg.files[f]["type"] == "dsc":
1193 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1194 if not dsc_filename:
1197 # Create a symlink mirror of the source files in our temporary directory
1198 for f in self.pkg.files.keys():
1199 m = re_issource.match(f)
1201 src = os.path.join(source_dir, f)
1202 # If a file is missing for whatever reason, give up.
1203 if not os.path.exists(src):
1206 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1207 self.pkg.orig_files[f].has_key("path"):
1209 dest = os.path.join(os.getcwd(), f)
1210 os.symlink(src, dest)
1212 # If the orig files are not a part of the upload, create symlinks to the
1214 for orig_file in self.pkg.orig_files.keys():
1215 if not self.pkg.orig_files[orig_file].has_key("path"):
1217 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1218 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1220 # Extract the source
1221 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1222 (result, output) = commands.getstatusoutput(cmd)
1224 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1225 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1228 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1231 # Get the upstream version
1232 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1233 if re_strip_revision.search(upstr_version):
1234 upstr_version = re_strip_revision.sub('', upstr_version)
1236 # Ensure the changelog file exists
1237 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1238 if not os.path.exists(changelog_filename):
1239 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1242 # Parse the changelog
1243 self.pkg.dsc["bts changelog"] = ""
1244 changelog_file = utils.open_file(changelog_filename)
1245 for line in changelog_file.readlines():
1246 m = re_changelog_versions.match(line)
1248 self.pkg.dsc["bts changelog"] += line
1249 changelog_file.close()
1251 # Check we found at least one revision in the changelog
1252 if not self.pkg.dsc["bts changelog"]:
1253 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1255 def check_source(self):
1257 # a) there's no source
1258 if not self.pkg.changes["architecture"].has_key("source"):
1261 tmpdir = utils.temp_dirname()
1263 # Move into the temporary directory
1267 # Get the changelog version history
1268 self.get_changelog_versions(cwd)
1270 # Move back and cleanup the temporary tree
1274 shutil.rmtree(tmpdir)
1276 if e.errno != errno.EACCES:
1278 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1280 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1281 # We probably have u-r or u-w directories so chmod everything
1283 cmd = "chmod -R u+rwx %s" % (tmpdir)
1284 result = os.system(cmd)
1286 utils.fubar("'%s' failed with result %s." % (cmd, result))
1287 shutil.rmtree(tmpdir)
1288 except Exception, e:
1289 print "foobar2 (%s)" % e
1290 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1292 ###########################################################################
1293 def ensure_hashes(self):
1294 # Make sure we recognise the format of the Files: field in the .changes
1295 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1296 if len(format) == 2:
1297 format = int(format[0]), int(format[1])
1299 format = int(float(format[0])), 0
1301 # We need to deal with the original changes blob, as the fields we need
1302 # might not be in the changes dict serialised into the .dak anymore.
1303 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1305 # Copy the checksums over to the current changes dict. This will keep
1306 # the existing modifications to it intact.
1307 for field in orig_changes:
1308 if field.startswith('checksums-'):
1309 self.pkg.changes[field] = orig_changes[field]
1311 # Check for unsupported hashes
1312 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1313 self.rejects.append(j)
1315 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1316 self.rejects.append(j)
1318 # We have to calculate the hash ourselves if the changes format version is older than
1319 # the one the hash first appears in, rather than requiring it to exist in the changes file
1320 for hashname, hashfunc, version in utils.known_hashes:
1321 # TODO: Move _ensure_changes_hash into this class
1322 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1323 self.rejects.append(j)
1324 if "source" in self.pkg.changes["architecture"]:
1325 # TODO: Move _ensure_dsc_hash into this class
1326 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1327 self.rejects.append(j)
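# Illustrative sketch (not part of dak): the Format parsing above turns e.g.
# "1.8" into the tuple (1, 8) so it can be compared against the version in
# which each hash field (utils.known_hashes) was introduced:
#
#   format = "1.8".split(".", 1)              # ["1", "8"]
#   format = int(format[0]), int(format[1])   # (1, 8)
#
# Changes files older than that version get the hash calculated locally instead
# of it being required in the file.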
1329 def check_hashes(self):
1330 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1331 self.rejects.append(m)
1333 for m in utils.check_size(".changes", self.pkg.files):
1334 self.rejects.append(m)
1336 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1337 self.rejects.append(m)
1339 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1340 self.rejects.append(m)
1342 self.ensure_hashes()
1344 ###########################################################################
1346 def ensure_orig(self, target_dir='.', session=None):
1348 Ensures that all orig files mentioned in the changes file are present
1349 in target_dir. If they do not exist, they are symlinked into place.
1351 A list containing the symlinks that were created is returned (so they
1358 for filename, entry in self.pkg.dsc_files.iteritems():
1359 if not re_is_orig_source.match(filename):
1360 # File is not an orig; ignore
1363 if os.path.exists(filename):
1364 # File exists, no need to continue
1367 def symlink_if_valid(path):
1368 f = utils.open_file(path)
1369 md5sum = apt_pkg.md5sum(f)
1372 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1373 expected = (int(entry['size']), entry['md5sum'])
1375 if fingerprint != expected:
1378 dest = os.path.join(target_dir, filename)
1380 os.symlink(path, dest)
1381 symlinked.append(dest)
1387 session_ = DBConn().session()
1392 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1393 poolfile_path = os.path.join(
1394 poolfile.location.path, poolfile.filename
1397 if symlink_if_valid(poolfile_path):
1407 # Look in some other queues for the file
1408 queues = ('New', 'Byhand', 'ProposedUpdates',
1409 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1411 for queue in queues:
1412 if not cnf.get('Dir::Queue::%s' % queue):
1415 queuefile_path = os.path.join(
1416 cnf['Dir::Queue::%s' % queue], filename
1419 if not os.path.exists(queuefile_path):
1420 # Does not exist in this queue
1423 if symlink_if_valid(queuefile_path):
1428 ###########################################################################
1430 def check_lintian(self):
1432 Extends self.rejects by checking the output of lintian against tags
1433 specified in Dinstall::LintianTags.
1438 # Don't reject binary uploads
1439 if not self.pkg.changes['architecture'].has_key('source'):
1442 # Only check some distributions
1443 for dist in ('unstable', 'experimental'):
1444 if dist in self.pkg.changes['distribution']:
1449 # If we do not have a tagfile, don't do anything
1450 tagfile = cnf.get("Dinstall::LintianTags")
1454 # Parse the yaml file
1455 sourcefile = file(tagfile, 'r')
1456 sourcecontent = sourcefile.read()
1460 lintiantags = yaml.load(sourcecontent)['lintian']
1461 except yaml.YAMLError, msg:
1462 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1465 # Try to find all origs mentioned in the .dsc
1466 symlinked = self.ensure_orig()
1468 # Setup the input file for lintian
1469 fd, temp_filename = utils.temp_filename()
1470 temptagfile = os.fdopen(fd, 'w')
1471 for tags in lintiantags.values():
1472 temptagfile.writelines(['%s\n' % x for x in tags])
1476 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1477 (temp_filename, self.pkg.changes_file)
1479 result, output = commands.getstatusoutput(cmd)
1481 # Remove our tempfile and any symlinks we created
1482 os.unlink(temp_filename)
1484 for symlink in symlinked:
1488 utils.warn("lintian failed for %s [return code: %s]." % \
1489 (self.pkg.changes_file, result))
1490 utils.warn(utils.prefix_multi_line_string(output, \
1491 " [possible output:] "))
1496 [self.pkg.changes_file, "check_lintian"] + list(txt)
1500 parsed_tags = parse_lintian_output(output)
1501 self.rejects.extend(
1502 generate_reject_messages(parsed_tags, lintiantags, log=log)
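# Illustrative sketch (not part of dak): the file behind Dinstall::LintianTags
# is YAML of roughly this shape -- the category names and tags below are
# examples only:
#
#   lintian:
#     fatal:
#       - binary-with-bad-dynamic-table
#     nonfatal:
#       - debian-changelog-file-missing
#
# Every listed tag is written to the temporary --tags-from-file input above;
# generate_reject_messages() then turns any hits into reject messages.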
1505 ###########################################################################
1506 def check_urgency(self):
1508 if self.pkg.changes["architecture"].has_key("source"):
1509 if not self.pkg.changes.has_key("urgency"):
1510 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1511 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1512 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1513 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1514 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1515 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1517 ###########################################################################
1519 # Sanity check the time stamps of files inside debs.
1520 # [Files in the near future cause ugly warnings and extreme time
1521 # travel can cause errors on extraction]
1523 def check_timestamps(self):
1526 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1527 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1528 tar = TarTime(future_cutoff, past_cutoff)
1530 for filename, entry in self.pkg.files.items():
1531 if entry["type"] == "deb":
1534 deb_file = utils.open_file(filename)
1535 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1538 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1539 except SystemError, e:
1540 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1541 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1544 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1548 future_files = tar.future_files.keys()
1550 num_future_files = len(future_files)
1551 future_file = future_files[0]
1552 future_date = tar.future_files[future_file]
1553 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1554 % (filename, num_future_files, future_file, time.ctime(future_date)))
1556 ancient_files = tar.ancient_files.keys()
1558 num_ancient_files = len(ancient_files)
1559 ancient_file = ancient_files[0]
1560 ancient_date = tar.ancient_files[ancient_file]
1561 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1562 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1564 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1566 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1567 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1569 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1575 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1576 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1577 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1578 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1579 self.pkg.changes["sponsoremail"] = uid_email
1584 ###########################################################################
1585 # check_signed_by_key checks
1586 ###########################################################################
1588 def check_signed_by_key(self):
1589 """Ensure the .changes is signed by an authorized uploader."""
1590 session = DBConn().session()
1592 # First of all we check that the person has proper upload permissions
1593 # and that this upload isn't blocked
1594 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1597 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1600 # TODO: Check that import-keyring adds UIDs properly
1602 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1605 # Check that the fingerprint which uploaded has permission to do so
1606 self.check_upload_permissions(fpr, session)
1608 # Check that this package is not in a transition
1609 self.check_transition(session)
1614 def check_upload_permissions(self, fpr, session):
1615 # Check any one-off upload blocks
1616 self.check_upload_blocks(fpr, session)
1618 # Start with DM as a special case
1619 # DM is a special case unfortunately, so we check it first
1620 # (keys with no source access get more access than DMs in one
1621 # way; DMs can only upload for their packages whether source
1622 # or binary, whereas keys with no access might be able to
1623 # upload some binaries)
1624 if fpr.source_acl.access_level == 'dm':
1625 self.check_dm_upload(fpr, session)
1627 # Check source-based permissions for other types
1628 if self.pkg.changes["architecture"].has_key("source") and \
1629 fpr.source_acl.access_level is None:
1630 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1631 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1632 self.rejects.append(rej)
1634 # If not a DM, we allow full upload rights
1635 uid_email = "%s@debian.org" % (fpr.uid.uid)
1636 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1639 # Check binary upload permissions
1640 # By this point we know that DMs can't have got here unless they
1641 # are allowed to deal with the package concerned so just apply
1643 if fpr.binary_acl.access_level == 'full':
1646 # Otherwise we're in the map case
1647 tmparches = self.pkg.changes["architecture"].copy()
1648 tmparches.pop('source', None)
1650 for bam in fpr.binary_acl_map:
1651 tmparches.pop(bam.architecture.arch_string, None)
1653 if len(tmparches.keys()) > 0:
1654 if fpr.binary_reject:
1655 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1656 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1657 self.rejects.append(rej)
1659 # TODO: This is where we'll implement reject vs throw away binaries later
1660 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1661 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1662 rej += "\nFingerprint: %s", (fpr.fingerprint)
1663 self.rejects.append(rej)
1666 def check_upload_blocks(self, fpr, session):
1667 """Check whether any upload blocks apply to this source, source
1668 version, uid / fpr combination"""
1670 def block_rej_template(fb):
1671 rej = 'Manual upload block in place for package %s' % fb.source
1672 if fb.version is not None:
1673 rej += ', version %s' % fb.version
1676 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1677 # version is None if the block applies to all versions
1678 if fb.version is None or fb.version == self.pkg.changes['version']:
1679 # Check both fpr and uid - either is enough to cause a reject
1680 if fb.fpr is not None:
1681 if fb.fpr.fingerprint == fpr.fingerprint:
1682 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1683 if fb.uid is not None:
1684 if fb.uid == fpr.uid:
1685 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1688 def check_dm_upload(self, fpr, session):
1689 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1690 ## none of the uploaded packages are NEW
1692 for f in self.pkg.files.keys():
1693 if self.pkg.files[f].has_key("byhand"):
1694 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1696 if self.pkg.files[f].has_key("new"):
1697 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1703 r = get_newest_source(self.pkg.changes["source"], session)
1706 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1707 self.rejects.append(rej)
1710 if not r.dm_upload_allowed:
1711 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1712 self.rejects.append(rej)
1715 ## the Maintainer: field of the uploaded .changes file corresponds with
1716 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1718 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1719 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1721 ## the most recent version of the package uploaded to unstable or
1722 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1723 ## non-developer maintainers cannot NMU or hijack packages)
1725 # srcuploaders includes the maintainer
1727 for sup in r.srcuploaders:
1728 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1729 # Eww - I hope we never have two people with the same name in Debian
1730 if email == fpr.uid.uid or name == fpr.uid.name:
1735 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1738 ## none of the packages are being taken over from other source packages
1739 for b in self.pkg.changes["binary"].keys():
1740 for suite in self.pkg.changes["distribution"].keys():
1741 q = session.query(DBSource)
1742 q = q.join(DBBinary).filter_by(package=b)
1743 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1746 if s.source != self.pkg.changes["source"]:
1747 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1751 def check_transition(self, session):
1754 sourcepkg = self.pkg.changes["source"]
1756 # No sourceful upload -> no need to do anything else, direct return
1757 # We also only check uploads to unstable, not experimental ones or those
1758 # going to some proposed-updates queue
1759 if "source" not in self.pkg.changes["architecture"] or \
1760 "unstable" not in self.pkg.changes["distribution"]:
1763 # Also only check if there is a file defined (and existent) with
1765 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1766 if transpath == "" or not os.path.exists(transpath):
1769 # Parse the yaml file
1770 sourcefile = file(transpath, 'r')
1771 sourcecontent = sourcefile.read()
1773 transitions = yaml.load(sourcecontent)
1774 except yaml.YAMLError, msg:
1775 # This shouldn't happen, there is a wrapper to edit the file which
1776 # checks it, but we would rather be safe than end up rejecting
1778 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
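# For illustration only: each entry in the transitions mapping iterated below
# is assumed to look roughly like the sketch here. The field names are taken
# from the accesses in this function (t["source"], t["packages"], t["reason"],
# t["rm"]); the version we wait for ("expected") also comes from the same
# record. All concrete values are made up:
#
#   libfoo2:
#     reason: "libfoo 2 transition"
#     source: libfoo
#     rm: "A. Release-Manager"
#     packages:
#       - libfoo
#       - foo-tools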
1781 # Now look through all defined transitions
1782 for trans in transitions:
1783 t = transitions[trans]
1784 source = t["source"]
1787 # Will be None if nothing is in testing.
1788 current = get_source_in_suite(source, "testing", session)
1789 if current is not None:
1790 compare = apt_pkg.VersionCompare(current.version, expected)
1792 if current is None or compare < 0:
1793 # This is still valid, the current version in testing is older than
1794 # the new version we wait for, or there is none in testing yet
1796 # Check if the source we look at is affected by this.
1797 if sourcepkg in t['packages']:
1798 # The source is affected, lets reject it.
1800 rejectmsg = "%s: part of the %s transition.\n\n" % (
1803 if current is not None:
1804 currentlymsg = "at version %s" % (current.version)
1806 currentlymsg = "not present in testing"
1808 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1810 rejectmsg += "\n".join(textwrap.wrap("""Your package
1811 is part of a testing transition designed to get %s migrated (it is
1812 currently %s, we need version %s). This transition is managed by the
1813 Release Team, and %s is the Release-Team member responsible for it.
1814 Please mail debian-release@lists.debian.org or contact %s directly if you
1815 need further assistance. You might want to upload to experimental until this
1816 transition is done."""
1817 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1819 self.rejects.append(rejectmsg)
1822 ###########################################################################
1823 # End check_signed_by_key checks
1824 ###########################################################################
1826 def build_summaries(self):
1827 """ Build a summary of changes the upload introduces. """
1829 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1831 short_summary = summary
1833 # This is for direport's benefit...
1834 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1837 summary += "Changes: " + f
1839 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1841 summary += self.announce(short_summary, 0)
1843 return (summary, short_summary)
1845 ###########################################################################
1847 def close_bugs(self, summary, action):
1849 Send mail to close bugs as instructed by the closes field in the changes file.
1850 Also add a line to summary if any work was done.
1852 @type summary: string
1853 @param summary: summary text, as given by L{build_summaries}
1856 @param action: If set to false, no real action will be taken.
1859 @return: summary. If action was taken, extended by the list of closed bugs.
1863 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1865 bugs = self.pkg.changes["closes"].keys()
1871 summary += "Closing bugs: "
1873 summary += "%s " % (bug)
1876 self.Subst["__BUG_NUMBER__"] = bug
1877 if self.pkg.changes["distribution"].has_key("stable"):
1878 self.Subst["__STABLE_WARNING__"] = """
1879 Note that this package is not part of the released stable Debian
1880 distribution. It may have dependencies on other unreleased software,
1881 or other instabilities. Please take care if you wish to install it.
1882 The update will eventually make its way into the next released Debian
1885 self.Subst["__STABLE_WARNING__"] = ""
1886 mail_message = utils.TemplateSubst(self.Subst, template)
1887 utils.send_mail(mail_message)
1889 # Clear up after ourselves
1890 del self.Subst["__BUG_NUMBER__"]
1891 del self.Subst["__STABLE_WARNING__"]
1893 if action and self.logger:
1894 self.logger.log(["closing bugs"] + bugs)
1900 ###########################################################################
1902 def announce(self, short_summary, action):
1904 Send an announce mail about a new upload.
1906 @type short_summary: string
1907 @param short_summary: Short summary text to include in the mail
1910 @param action: If set to false, no real action will be taken.
1913 @return: Text string describing the action taken.
1918 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1920 # Only do announcements for source uploads with a recent dpkg-dev installed
1921 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1922 self.pkg.changes["architecture"].has_key("source"):
1928 self.Subst["__SHORT_SUMMARY__"] = short_summary
1930 for dist in self.pkg.changes["distribution"].keys():
1931 suite = get_suite(dist)
1932 if suite is None: continue
1933 announce_list = suite.announce
1934 if announce_list == "" or lists_done.has_key(announce_list):
1937 lists_done[announce_list] = 1
1938 summary += "Announcing to %s\n" % (announce_list)
1942 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1943 if cnf.get("Dinstall::TrackingServer") and \
1944 self.pkg.changes["architecture"].has_key("source"):
1945 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1946 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
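# (Illustration with a made-up value: were Dinstall::TrackingServer set to
# "tracker.example.org", this would Bcc <sourcepackage>@tracker.example.org.)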
1948 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1949 utils.send_mail(mail_message)
1951 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1953 if cnf.FindB("Dinstall::CloseBugs"):
1954 summary = self.close_bugs(summary, action)
1956 del self.Subst["__SHORT_SUMMARY__"]
1960 ###########################################################################
1962 def accept (self, summary, short_summary, session=None):
1966 This moves all files referenced from the .changes into the pool,
1967 sends the accepted mail, announces to lists, closes bugs and
1968 also checks for override disparities. If enabled it will write out
1969 the version history for the BTS Version Tracking and will finally call
1972 @type summary: string
1973 @param summary: Summary text
1975 @type short_summary: string
1976 @param short_summary: Short summary
1980 stats = SummaryStats()
1983 self.logger.log(["installing changes", self.pkg.changes_file])
1987 # Add the .dsc file to the DB first
1988 for newfile, entry in self.pkg.files.items():
1989 if entry["type"] == "dsc":
1990 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1994 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1995 for newfile, entry in self.pkg.files.items():
1996 if entry["type"] == "deb":
1997 poolfiles.append(add_deb_to_db(self, newfile, session))
1999 # If this is a sourceful diff only upload that is moving
2000 # cross-component we need to copy the .orig files into the new
2001 # component too for the same reasons as above.
2002 # XXX: mhy: I think this should be in add_dsc_to_db
2003 if self.pkg.changes["architecture"].has_key("source"):
2004 for orig_file in self.pkg.orig_files.keys():
2005 if not self.pkg.orig_files[orig_file].has_key("id"):
2006 continue # Skip if it's not in the pool
2007 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2008 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2009 continue # Skip if the location didn't change
2012 oldf = get_poolfile_by_id(orig_file_id, session)
2013 old_filename = os.path.join(oldf.location.path, oldf.filename)
2014 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2015 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2017 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2019 # TODO: Care about size/md5sum collisions etc
2020 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2022 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2024 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2025 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2029 # Don't reference the old file from this changes
2031 if p.file_id == oldf.file_id:
2034 poolfiles.append(newf)
2036 # Fix up the DSC references
2039 for df in source.srcfiles:
2040 if df.poolfile.file_id == oldf.file_id:
2041 # Add a new DSC entry and mark the old one for deletion
2042 # Don't do it in the loop so we don't change the thing we're iterating over
2044 newdscf.source_id = source.source_id
2045 newdscf.poolfile_id = newf.file_id
2046 session.add(newdscf)
2056 # Make sure that our source object is up-to-date
2057 session.expire(source)
2059 # Add changelog information to the database
2060 self.store_changelog()
2062 # Install the files into the pool
2063 for newfile, entry in self.pkg.files.items():
2064 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2065 utils.move(newfile, destination)
2066 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2067 stats.accept_bytes += float(entry["size"])
2069 # Copy the .changes file across for suites which need it.
2070 copy_changes = dict([(x.copychanges, '')
2071 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2072 if x.copychanges is not None])
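# Illustration with a made-up value: a suite whose copychanges column is
# "dists/foo-proposed-updates" causes the .changes file to be copied to
# <Dir::Root>/dists/foo-proposed-updates below.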
2074 for dest in copy_changes.keys():
2075 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2077 # We're done - commit the database changes
2079 # Our SQL session will automatically start a new transaction after the last commit
2082 # Move the .changes into the 'done' directory
2083 utils.move(self.pkg.changes_file,
2084 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2086 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2087 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2090 self.Subst["__SUMMARY__"] = summary
2091 mail_message = utils.TemplateSubst(self.Subst,
2092 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2093 utils.send_mail(mail_message)
2094 self.announce(short_summary, 1)
2096 ## Helper stuff for DebBugs Version Tracking
2097 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2098 if self.pkg.changes["architecture"].has_key("source"):
2099 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2100 version_history = os.fdopen(fd, 'w')
2101 version_history.write(self.pkg.dsc["bts changelog"])
2102 version_history.close()
2103 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2104 self.pkg.changes_file[:-8]+".versions")
2105 os.rename(temp_filename, filename)
2106 os.chmod(filename, 0644)
2108 # Write out the binary -> source mapping.
2109 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2110 debinfo = os.fdopen(fd, 'w')
2111 for name, entry in sorted(self.pkg.files.items()):
2112 if entry["type"] == "deb":
2113 line = " ".join([entry["package"], entry["version"],
2114 entry["architecture"], entry["source package"],
2115 entry["source version"]])
2116 debinfo.write(line+"\n")
2118 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2119 self.pkg.changes_file[:-8]+".debinfo")
2120 os.rename(temp_filename, filename)
2121 os.chmod(filename, 0644)
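# Each line written to the .debinfo file above has the form
#   <package> <version> <architecture> <source package> <source version>
# e.g. (made-up values): "libfoo1 1.2-3 amd64 foo 1.2-3"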
2125 # Set up our copy queues (e.g. buildd queues)
2126 for suite_name in self.pkg.changes["distribution"].keys():
2127 suite = get_suite(suite_name, session)
2128 for q in suite.copy_queues:
2130 q.add_file_from_pool(f)
2135 stats.accept_count += 1
2137 def check_override(self):
2139 Checks override entries for validity. Mails "Override disparity" warnings,
2140 if that feature is enabled.
2142 Abandons the check if
2143 - override disparity checks are disabled
2144 - mail sending is disabled
2149 # Abandon the check if override disparity checks have been disabled
2150 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2153 summary = self.pkg.check_override()
2158 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2161 self.Subst["__SUMMARY__"] = summary
2162 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2163 utils.send_mail(mail_message)
2164 del self.Subst["__SUMMARY__"]
2166 ###########################################################################
2168 def remove(self, from_dir=None):
2170 Used (for instance) in p-u to remove the package from unchecked
2172 Also removes the package from the holding area.
2174 if from_dir is None:
2175 from_dir = self.pkg.directory
2178 for f in self.pkg.files.keys():
2179 os.unlink(os.path.join(from_dir, f))
2180 if os.path.exists(os.path.join(h.holding_dir, f)):
2181 os.unlink(os.path.join(h.holding_dir, f))
2183 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2184 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2185 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2187 ###########################################################################
2189 def move_to_queue (self, queue):
2191 Move files to a destination queue using the permissions in the table
2194 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2195 queue.path, perms=int(queue.change_perms, 8))
2196 for f in self.pkg.files.keys():
2197 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
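# The queue permissions come from the database as octal strings; e.g. (made-up
# value) a change_perms of "0644" becomes mode 0644 via int("0644", 8).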
2199 ###########################################################################
2201 def force_reject(self, reject_files):
2203 Forcefully move files from the current directory to the
2204 reject directory. If any file already exists in the reject
2205 directory it will be moved to the morgue to make way for
2208 @type reject_files: dict
2209 @param reject_files: file dictionary
2215 for file_entry in reject_files:
2216 # Skip any files which don't exist or which we don't have permission to copy.
2217 if os.access(file_entry, os.R_OK) == 0:
2220 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2223 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2225 # File exists? Let's find a new name by adding a number
2226 if e.errno == errno.EEXIST:
2228 dest_file = utils.find_next_free(dest_file, 255)
2229 except NoFreeFilenameError:
2230 # Something's either gone badly Pete Tong, or
2231 # someone is trying to exploit us.
2232 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2235 # Make sure we really got it
2237 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2240 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2244 # If we got here, we own the destination file, so we can
2245 # safely overwrite it.
2246 utils.move(file_entry, dest_file, 1, perms=0660)
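# The O_CREAT|O_EXCL open above is what makes claiming the destination name
# race-free: it only succeeds for whoever actually created the file, so by the
# time we move the upload there we know the name is ours.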
2249 ###########################################################################
2250 def do_reject (self, manual=0, reject_message="", notes=""):
2252 Reject an upload. If called without a reject message or C{manual} is
2253 true, spawn an editor so the user can write one.
2256 @param manual: manual or automated rejection
2258 @type reject_message: string
2259 @param reject_message: A reject message
2264 # If we weren't given a manual rejection message, spawn an
2265 # editor so the user can add one in...
2266 if manual and not reject_message:
2267 (fd, temp_filename) = utils.temp_filename()
2268 temp_file = os.fdopen(fd, 'w')
2271 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2272 % (note.author, note.version, note.notedate, note.comment))
2274 editor = os.environ.get("EDITOR","vi")
2276 while answer == 'E':
2277 os.system("%s %s" % (editor, temp_filename))
2278 temp_fh = utils.open_file(temp_filename)
2279 reject_message = "".join(temp_fh.readlines())
2281 print "Reject message:"
2282 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2283 prompt = "[R]eject, Edit, Abandon, Quit ?"
2285 while prompt.find(answer) == -1:
2286 answer = utils.our_raw_input(prompt)
2287 m = re_default_answer.search(prompt)
2290 answer = answer[:1].upper()
2291 os.unlink(temp_filename)
2297 print "Rejecting.\n"
2301 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2302 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2304 # Move all the files into the reject directory
2305 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2306 self.force_reject(reject_files)
2308 # If we fail here someone is probably trying to exploit the race
2309 # so let's just raise an exception ...
2310 if os.path.exists(reason_filename):
2311 os.unlink(reason_filename)
2312 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2314 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2318 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2319 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2320 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2321 os.write(reason_fd, reject_message)
2322 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2324 # Build up the rejection email
2325 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2326 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2327 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2328 self.Subst["__REJECT_MESSAGE__"] = ""
2329 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2330 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2331 # Write the rejection email out as the <foo>.reason file
2332 os.write(reason_fd, reject_mail_message)
2334 del self.Subst["__REJECTOR_ADDRESS__"]
2335 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2336 del self.Subst["__CC__"]
2340 # Send the rejection mail
2341 utils.send_mail(reject_mail_message)
2344 self.logger.log(["rejected", self.pkg.changes_file])
2348 ################################################################################
2349 def in_override_p(self, package, component, suite, binary_type, filename, session):
2351 Check if a package already has override entries in the DB
2353 @type package: string
2354 @param package: package name
2356 @type component: string
2357 @param component: component name
2360 @param suite: suite name
2362 @type binary_type: string
2363 @param binary_type: type of the package
2365 @type filename: string
2366 @param filename: filename we check
2368 @return: the database result. But no one cares anyway.
2374 if binary_type == "": # must be source
2377 file_type = binary_type
2379 # Override suite name; used for example with proposed-updates
2380 oldsuite = get_suite(suite, session)
2381 if oldsuite is not None and oldsuite.overridesuite:
2382 suite = oldsuite.overridesuite
2384 result = get_override(package, suite, component, file_type, session)
2386 # If checking for a source package fall back on the binary override type
2387 if file_type == "dsc" and len(result) < 1:
2388 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2390 # Remember the section and priority so we can check them later if appropriate
2393 self.pkg.files[filename]["override section"] = result.section.section
2394 self.pkg.files[filename]["override priority"] = result.priority.priority
2399 ################################################################################
2400 def get_anyversion(self, sv_list, suite):
2403 @param sv_list: list of (suite, version) tuples to check
2406 @param suite: suite name
2412 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2413 for (s, v) in sv_list:
2414 if s in [ x.lower() for x in anysuite ]:
2415 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2420 ################################################################################
2422 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2425 @param sv_list: list of (suite, version) tuples to check
2427 @type filename: string
2428 @param filename: name of the file being checked (used in reject messages)
2430 @type new_version: string
2431 @param new_version: version of the package being uploaded
2433 Ensure versions are newer than existing packages in target
2434 suites and that cross-suite version checking rules as
2435 set out in the conf file are satisfied.
2440 # Check versions for each target suite
2441 for target_suite in self.pkg.changes["distribution"].keys():
2442 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2443 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2445 # Enforce "must be newer than target suite" even if conffile omits it
2446 if target_suite not in must_be_newer_than:
2447 must_be_newer_than.append(target_suite)
2449 for (suite, existent_version) in sv_list:
2450 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2452 if suite in must_be_newer_than and sourceful and vercmp < 1:
2453 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2455 if suite in must_be_older_than and vercmp > -1:
2458 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2459 # we really use the other suite, ignoring the conflicting one ...
2460 addsuite = self.pkg.changes["distribution-version"][suite]
2462 add_version = self.get_anyversion(sv_list, addsuite)
2463 target_version = self.get_anyversion(sv_list, target_suite)
2466 # not add_version can only happen if we map to a suite
2467 # that doesn't enhance the suite we're propup'ing from.
2468 # so "propup-ver x a b c; map a d" is a problem only if
2469 # d doesn't enhance a.
2471 # i think we could always propagate in this case, rather
2472 # than complaining. either way, this isn't a REJECT issue
2474 # And - we really should complain to the dorks who configured dak
2475 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2476 self.pkg.changes.setdefault("propdistribution", {})
2477 self.pkg.changes["propdistribution"][addsuite] = 1
2479 elif not target_version:
2480 # not targets_version is true when the package is NEW
2481 # we could just stick with the "...old version..." REJECT
2482 # for this, I think.
2483 self.rejects.append("Won't propogate NEW packages.")
2484 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2485 # propagation would be redundant. no need to reject though.
2486 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2488 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2489 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2491 self.warnings.append("Propogating upload to %s" % (addsuite))
2492 self.pkg.changes.setdefault("propdistribution", {})
2493 self.pkg.changes["propdistribution"][addsuite] = 1
2497 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
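# Worked example with made-up versions: a sourceful upload of foo 1.0-2
# targeted at unstable while unstable already carries 1.0-3 gives a
# VersionCompare() result below 1, so the "must be newer than" reject above
# fires; conversely, if a MustBeOlderThan suite already carries 1.0-1 the
# result is above -1 and the propagation / reject handling above is used.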
2499 ################################################################################
2500 def check_binary_against_db(self, filename, session):
2501 # Ensure version is sane
2502 q = session.query(BinAssociation)
2503 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2504 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2506 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2507 filename, self.pkg.files[filename]["version"], sourceful=False)
2509 # Check for any existing copies of the file
2510 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2511 q = q.filter_by(version=self.pkg.files[filename]["version"])
2512 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2515 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2517 ################################################################################
2519 def check_source_against_db(self, filename, session):
2520 source = self.pkg.dsc.get("source")
2521 version = self.pkg.dsc.get("version")
2523 # Ensure version is sane
2524 self.cross_suite_version_check(get_suite_version(source, session),
2525 filename, version, sourceful=True)
2527 ################################################################################
2528 def check_dsc_against_db(self, filename, session):
2531 @warning: NB: this function can remove entries from the 'files' index [if
2532 the orig tarball is a duplicate of the one in the archive]; if
2533 you're iterating over 'files' and call this function as part of
2534 the loop, be sure to add a check to the top of the loop to
2535 ensure you haven't just tried to dereference the deleted entry.
2540 self.pkg.orig_files = {} # XXX: do we need to clear it?
2541 orig_files = self.pkg.orig_files
2543 # Try and find all files mentioned in the .dsc. This has
2544 # to work harder to cope with the multiple possible
2545 # locations of an .orig.tar.gz.
2546 # The ordering on the select is needed to pick the newest orig
2547 # when it exists in multiple places.
2548 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2550 if self.pkg.files.has_key(dsc_name):
2551 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2552 actual_size = int(self.pkg.files[dsc_name]["size"])
2553 found = "%s in incoming" % (dsc_name)
2555 # Check the file does not already exist in the archive
2556 ql = get_poolfile_like_name(dsc_name, session)
2558 # Strip out anything that isn't '%s' or '/%s$'
2560 if not i.filename.endswith(dsc_name):
2563 # "[dak] has not broken them. [dak] has fixed a
2564 # brokenness. Your crappy hack exploited a bug in
2567 # "(Come on! I thought it was always obvious that
2568 # one just doesn't release different files with
2569 # the same name and version.)"
2570 # -- ajk@ on d-devel@l.d.o
2573 # Ignore exact matches for .orig.tar.gz
2575 if re_is_orig_source.match(dsc_name):
2577 if self.pkg.files.has_key(dsc_name) and \
2578 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2579 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2580 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2581 # TODO: Don't delete the entry, just mark it as not needed
2582 # This would fix the stupidity of changing something we often iterate over
2583 # whilst we're doing it
2584 del self.pkg.files[dsc_name]
2585 dsc_entry["files id"] = i.file_id
2586 if not orig_files.has_key(dsc_name):
2587 orig_files[dsc_name] = {}
2588 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2591 # Don't bitch that we couldn't find this file later
2593 self.later_check_files.remove(dsc_name)
2599 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2601 elif re_is_orig_source.match(dsc_name):
2603 ql = get_poolfile_like_name(dsc_name, session)
2605 # Strip out anything that isn't '%s' or '/%s$'
2606 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2608 if not i.filename.endswith(dsc_name):
2612 # Unfortunately, we may get more than one match here if,
2613 # for example, the package was in potato but had an -sa
2614 # upload in woody. So we need to choose the right one.
2616 # default to something sane in case we don't match any or have only one
2621 old_file = os.path.join(i.location.path, i.filename)
2622 old_file_fh = utils.open_file(old_file)
2623 actual_md5 = apt_pkg.md5sum(old_file_fh)
2625 actual_size = os.stat(old_file)[stat.ST_SIZE]
2626 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2629 old_file = os.path.join(x.location.path, x.filename)
2630 old_file_fh = utils.open_file(old_file)
2631 actual_md5 = apt_pkg.md5sum(old_file_fh)
2633 actual_size = os.stat(old_file)[stat.ST_SIZE]
2635 suite_type = x.location.archive_type
2636 # need this for updating dsc_files in install()
2637 dsc_entry["files id"] = x.file_id
2638 # See install() in process-accepted...
2639 if not orig_files.has_key(dsc_name):
2640 orig_files[dsc_name] = {}
2641 orig_files[dsc_name]["id"] = x.file_id
2642 orig_files[dsc_name]["path"] = old_file
2643 orig_files[dsc_name]["location"] = x.location.location_id
2645 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2646 # Not there? Check the queue directories...
2647 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2648 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2650 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2651 if os.path.exists(in_otherdir):
2652 in_otherdir_fh = utils.open_file(in_otherdir)
2653 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2654 in_otherdir_fh.close()
2655 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2657 if not orig_files.has_key(dsc_name):
2658 orig_files[dsc_name] = {}
2659 orig_files[dsc_name]["path"] = in_otherdir
2662 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2665 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2667 if actual_md5 != dsc_entry["md5sum"]:
2668 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2669 if actual_size != int(dsc_entry["size"]):
2670 self.rejects.append("size for %s doesn't match %s." % (found, filename))
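# By this point `found` names where the file was located (incoming, the pool
# or one of the queue directories) and actual_md5/actual_size were measured
# from that copy; they only feed the two reject messages above.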
2672 ################################################################################
2673 # This is used by process-new and process-holding to recheck a changes file
2674 # at the time we're running. It mainly wraps various other internal functions
2675 # and is similar to accepted_checks - these should probably be tidied up
2677 def recheck(self, session):
2679 for f in self.pkg.files.keys():
2680 # The .orig.tar.gz can disappear out from under us if it's a
2681 # duplicate of one in the archive.
2682 if not self.pkg.files.has_key(f):
2685 entry = self.pkg.files[f]
2687 # Check that the source still exists
2688 if entry["type"] == "deb":
2689 source_version = entry["source version"]
2690 source_package = entry["source package"]
2691 if not self.pkg.changes["architecture"].has_key("source") \
2692 and not source_exists(source_package, source_version, \
2693 suites = self.pkg.changes["distribution"].keys(), session = session):
2694 source_epochless_version = re_no_epoch.sub('', source_version)
2695 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2697 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2698 if cnf.has_key("Dir::Queue::%s" % (q)):
2699 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2702 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2704 # Version and file overwrite checks
2705 if entry["type"] == "deb":
2706 self.check_binary_against_db(f, session)
2707 elif entry["type"] == "dsc":
2708 self.check_source_against_db(f, session)
2709 self.check_dsc_against_db(f, session)
2711 ################################################################################
2712 def accepted_checks(self, overwrite_checks, session):
2713 # Recheck anything that relies on the database, since that's not
2714 # frozen between accept and our run time when called from p-a.
2716 # overwrite_checks is set to False when installing to stable/oldstable
2721 # Find the .dsc (again)
2723 for f in self.pkg.files.keys():
2724 if self.pkg.files[f]["type"] == "dsc":
2727 for checkfile in self.pkg.files.keys():
2728 # The .orig.tar.gz can disappear out from under us if it's a
2729 # duplicate of one in the archive.
2730 if not self.pkg.files.has_key(checkfile):
2733 entry = self.pkg.files[checkfile]
2735 # Check that the source still exists
2736 if entry["type"] == "deb":
2737 source_version = entry["source version"]
2738 source_package = entry["source package"]
2739 if not self.pkg.changes["architecture"].has_key("source") \
2740 and not source_exists(source_package, source_version, \
2741 suites = self.pkg.changes["distribution"].keys(), \
2743 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2745 # Version and file overwrite checks
2746 if overwrite_checks:
2747 if entry["type"] == "deb":
2748 self.check_binary_against_db(checkfile, session)
2749 elif entry["type"] == "dsc":
2750 self.check_source_against_db(checkfile, session)
2751 self.check_dsc_against_db(dsc_filename, session)
2753 # propagate in case it is in the override tables:
2754 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2755 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2756 propogate[suite] = 1
2758 nopropogate[suite] = 1
2760 for suite in propogate.keys():
2761 if suite in nopropogate:
2763 self.pkg.changes["distribution"][suite] = 1
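# A suite noted in "propdistribution" is added to the target distributions
# here only if no file was missing an override entry for it (i.e. it never
# ended up in nopropogate above).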
2765 for checkfile in self.pkg.files.keys():
2766 # Check the package is still in the override tables
entry = self.pkg.files[checkfile]
2767 for suite in self.pkg.changes["distribution"].keys():
2768 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2769 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2771 ################################################################################
2772 # If any file of an upload has a recent mtime then chances are good
2773 # the file is still being uploaded.
2775 def upload_too_new(self):
2778 # Move back to the original directory to get accurate time stamps
2780 os.chdir(self.pkg.directory)
2781 file_list = self.pkg.files.keys()
2782 file_list.extend(self.pkg.dsc_files.keys())
2783 file_list.append(self.pkg.changes_file)
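# Dinstall::SkipTime is treated as a number of seconds; any file whose mtime
# is more recent than that is assumed to still be in transit.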
2786 last_modified = time.time()-os.path.getmtime(f)
2787 if last_modified < int(cnf["Dinstall::SkipTime"]):
2796 def store_changelog(self):
2798 # Skip binary-only upload if it is not a bin-NMU
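# (A binNMU is assumed to be recognisable purely from the version string via
# re_bin_only_nmu, e.g. a made-up "1.2-3+b1" style suffix.)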
2799 if not self.pkg.changes['architecture'].has_key('source'):
2800 from daklib.regexes import re_bin_only_nmu
2801 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2804 session = DBConn().session()
2806 # Check if upload already has a changelog entry
2807 query = """SELECT changelog_id FROM changes WHERE source = :source
2808 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2809 if session.execute(query, {'source': self.pkg.changes['source'], \
2810 'version': self.pkg.changes['version'], \
2811 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2815 # Add current changelog text into changelogs_text table, return created ID
2816 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2817 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2819 # Link ID to the upload available in changes table
2820 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2821 AND version = :version AND architecture = :architecture"""
2822 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2823 'version': self.pkg.changes['version'], \
2824 'architecture': " ".join(self.pkg.changes['architecture'].keys())})