5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
58 # suppress some deprecation warnings in squeeze related to apt_pkg
61 warnings.filterwarnings('ignore', \
62 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
64 warnings.filterwarnings('ignore', \
65 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
68 ###############################################################################
70 def get_type(f, session):
72 Get the file type of C{f}
75 @param f: file entry from Changes object
77 @type session: SQLA Session
78 @param session: SQLAlchemy session object
85 if f.has_key("dbtype"):
86 file_type = f["dbtype"]
87 elif re_source_ext.match(f["type"]):
91 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
93 # Validate the override type
94 type_id = get_override_type(file_type, session)
96 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
100 ################################################################################
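# Illustrative sketch (not part of the original module): get_type() above maps a
# files entry from a parsed .changes to the override type used in the database,
# roughly like this (hypothetical entries, assuming an open SQLAlchemy session):
#
#   >>> get_type({"dbtype": "udeb", "type": "udeb"}, session)
#   'udeb'
#   >>> get_type({"type": "dsc"}, session)   # matched by re_source_ext
#   'dsc'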
102 # Determine what parts in a .changes are NEW
104 def determine_new(filename, changes, files, warn=1, session = None):
106 Determine what parts in a C{changes} file are NEW.
109 @param filename: changes filename
111 @type changes: Upload.Pkg.changes dict
112 @param changes: Changes dictionary
114 @type files: Upload.Pkg.files dict
115 @param files: Files dictionary
118 @param warn: Warn if overrides are added for (old)stable
121 @return: dictionary of NEW components.
124 # TODO: This should all use the database instead of parsing the changes
129 dbchg = get_dbchange(filename, session)
131 print "Warning: cannot find changes file in database; won't check byhand"
133 # Build up a list of potentially new things
134 for name, f in files.items():
135 # Keep a record of byhand elements
136 if f["section"] == "byhand":
141 priority = f["priority"]
142 section = f["section"]
143 file_type = get_type(f, session)
144 component = f["component"]
146 if file_type == "dsc":
149 if not new.has_key(pkg):
151 new[pkg]["priority"] = priority
152 new[pkg]["section"] = section
153 new[pkg]["type"] = file_type
154 new[pkg]["component"] = component
155 new[pkg]["files"] = []
157 old_type = new[pkg]["type"]
158 if old_type != file_type:
159 # source gets trumped by deb or udeb
160 if old_type == "dsc":
161 new[pkg]["priority"] = priority
162 new[pkg]["section"] = section
163 new[pkg]["type"] = file_type
164 new[pkg]["component"] = component
166 new[pkg]["files"].append(name)
168 if f.has_key("othercomponents"):
169 new[pkg]["othercomponents"] = f["othercomponents"]
171 # Fix up the list of target suites
173 for suite in changes["suite"].keys():
174 oldsuite = get_suite(suite, session)
176 print "WARNING: Invalid suite %s found" % suite
179 if oldsuite.overridesuite:
180 newsuite = get_suite(oldsuite.overridesuite, session)
183 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
184 oldsuite.overridesuite, suite)
185 del changes["suite"][suite]
186 changes["suite"][oldsuite.overridesuite] = 1
188 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
189 oldsuite.overridesuite, suite)
191 # Check for unprocessed byhand files
192 if dbchg is not None:
193 for b in byhand.keys():
194 # Find the file entry in the database
196 for f in dbchg.files:
199 # If it's processed, we can ignore it
205 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
207 # Check for new stuff
208 for suite in changes["suite"].keys():
209 for pkg in new.keys():
210 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
212 for file_entry in new[pkg]["files"]:
213 if files[file_entry].has_key("new"):
214 del files[file_entry]["new"]
218 for s in ['stable', 'oldstable']:
219 if changes["suite"].has_key(s):
220 print "WARNING: overrides will be added for %s!" % s
221 for pkg in new.keys():
222 if new[pkg].has_key("othercomponents"):
223 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
227 ################################################################################
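# Illustrative sketch (hypothetical call, not part of the original module):
# determine_new() above is fed a parsed .changes plus its files dict and returns
# the packages that lack overrides, roughly:
#
#   new = determine_new(changes_filename, changes, files, warn=0, session=session)
#   # new might look like (values hypothetical):
#   # {'foo': {'priority': 'optional', 'section': 'utils', 'type': 'deb',
#   #          'component': 'main', 'files': ['foo_1.0-1_amd64.deb']}}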
229 def check_valid(new, session = None):
231 Check if section and priority for NEW packages exist in database.
232 Additionally does sanity checks:
233 - debian-installer packages have to be udeb (or source)
234 - non debian-installer packages can not be udeb
235 - source priority can only be assigned to dsc file types
238 @param new: Dict of new packages with their section, priority and type.
241 for pkg in new.keys():
242 section_name = new[pkg]["section"]
243 priority_name = new[pkg]["priority"]
244 file_type = new[pkg]["type"]
246 section = get_section(section_name, session)
248 new[pkg]["section id"] = -1
250 new[pkg]["section id"] = section.section_id
252 priority = get_priority(priority_name, session)
254 new[pkg]["priority id"] = -1
256 new[pkg]["priority id"] = priority.priority_id
259 di = section_name.find("debian-installer") != -1
261 # If d-i, we must be udeb and vice-versa
262 if (di and file_type not in ("udeb", "dsc")) or \
263 (not di and file_type == "udeb"):
264 new[pkg]["section id"] = -1
266 # If dsc we need to be source and vice-versa
267 if (priority_name == "source" and file_type != "dsc") or \
268 (priority_name != "source" and file_type == "dsc"):
269 new[pkg]["priority id"] = -1
271 ###############################################################################
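# Illustrative sketch (hypothetical data): check_valid() above annotates each
# entry with "section id"/"priority id", using -1 to flag unknown or
# inconsistent values, roughly:
#
#   new = {'foo-udeb': {'section': 'debian-installer', 'priority': 'optional',
#                       'type': 'udeb', 'component': 'main', 'files': []}}
#   check_valid(new, session=session)
#   # new['foo-udeb']['section id'] / ['priority id'] now hold database ids,
#   # or -1 if the section/priority is unknown or fails the sanity checks.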
273 # Used by Upload.check_timestamps
274 class TarTime(object):
275 def __init__(self, future_cutoff, past_cutoff):
277 self.future_cutoff = future_cutoff
278 self.past_cutoff = past_cutoff
281 self.future_files = {}
282 self.ancient_files = {}
284 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
285 if MTime > self.future_cutoff:
286 self.future_files[Name] = MTime
287 if MTime < self.past_cutoff:
288 self.ancient_files[Name] = MTime
290 ###############################################################################
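# Illustrative sketch: TarTime is used as an apt_inst extraction callback to
# collect member timestamps outside the allowed window, much as
# Upload.check_timestamps() does further down (filename and cutoffs hypothetical):
#
#   tar = TarTime(future_cutoff=time.time() + 86400, past_cutoff=0)
#   apt_inst.debExtract(utils.open_file("foo_1.0-1_amd64.deb"), tar.callback, "control.tar.gz")
#   # tar.future_files / tar.ancient_files map offending member names to mtimes.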
292 def prod_maintainer(notes, upload):
295 # Here we prepare an editor and get them ready to prod...
296 (fd, temp_filename) = utils.temp_filename()
297 temp_file = os.fdopen(fd, 'w')
299 temp_file.write(note.comment)
301 editor = os.environ.get("EDITOR","vi")
304 os.system("%s %s" % (editor, temp_filename))
305 temp_fh = utils.open_file(temp_filename)
306 prod_message = "".join(temp_fh.readlines())
308 print "Prod message:"
309 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
310 prompt = "[P]rod, Edit, Abandon, Quit ?"
312 while prompt.find(answer) == -1:
313 answer = utils.our_raw_input(prompt)
314 m = re_default_answer.search(prompt)
317 answer = answer[:1].upper()
318 os.unlink(temp_filename)
324 # Otherwise, do the prodding...
325 user_email_address = utils.whoami() + " <%s>" % (
326 cnf["Dinstall::MyAdminAddress"])
330 Subst["__FROM_ADDRESS__"] = user_email_address
331 Subst["__PROD_MESSAGE__"] = prod_message
332 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
334 prod_mail_message = utils.TemplateSubst(
335 Subst,cnf["Dir::Templates"]+"/process-new.prod")
338 utils.send_mail(prod_mail_message)
340 print "Sent prodding message"
342 ################################################################################
344 def edit_note(note, upload, session, trainee=False):
345 # Write the current data to a temporary file
346 (fd, temp_filename) = utils.temp_filename()
347 editor = os.environ.get("EDITOR","vi")
350 os.system("%s %s" % (editor, temp_filename))
351 temp_file = utils.open_file(temp_filename)
352 newnote = temp_file.read().rstrip()
355 print utils.prefix_multi_line_string(newnote," ")
356 prompt = "[D]one, Edit, Abandon, Quit ?"
358 while prompt.find(answer) == -1:
359 answer = utils.our_raw_input(prompt)
360 m = re_default_answer.search(prompt)
363 answer = answer[:1].upper()
364 os.unlink(temp_filename)
371 comment = NewComment()
372 comment.package = upload.pkg.changes["source"]
373 comment.version = upload.pkg.changes["version"]
374 comment.comment = newnote
375 comment.author = utils.whoami()
376 comment.trainee = trainee
380 ###############################################################################
382 # suite names DMs can upload to
383 dm_suites = ['unstable', 'experimental']
385 def get_newest_source(source, session):
386 'returns the newest DBSource object in dm_suites'
387 ## the most recent version of the package uploaded to unstable or
388 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
389 ## section of its control file
390 q = session.query(DBSource).filter_by(source = source). \
391 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
392 order_by(desc('source.version'))
395 def get_suite_version_by_source(source, session):
396 'returns a list of tuples (suite_name, version) for source package'
397 q = session.query(Suite.suite_name, DBSource.version). \
398 join(Suite.sources).filter_by(source = source)
401 def get_source_by_package_and_suite(package, suite_name, session):
403 returns a DBSource query filtered by DBBinary.package and this package's
406 return session.query(DBSource). \
407 join(DBSource.binaries).filter_by(package = package). \
408 join(DBBinary.suites).filter_by(suite_name = suite_name)
410 def get_suite_version_by_package(package, arch_string, session):
412 returns a list of tuples (suite_name, version) for binary package and
415 return session.query(Suite.suite_name, DBBinary.version). \
416 join(Suite.binaries).filter_by(package = package). \
417 join(DBBinary.architecture). \
418 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
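# Illustrative usage sketch for the query helpers above (package names are
# hypothetical; results depend on the archive database):
#
#   get_newest_source("dpkg", session)                     # newest DBSource in dm_suites
#   get_suite_version_by_source("dpkg", session)           # [(suite_name, version), ...]
#   get_suite_version_by_package("dpkg", "amd64", session) # binary versions per suite
#   get_source_by_package_and_suite("dpkg", "unstable", session).all()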
420 class Upload(object):
422 Everything that has to do with processing an upload.
430 ###########################################################################
433 """ Reset a number of internal variables."""
435 # Initialize the substitution template map
438 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
439 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
440 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
441 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
447 self.later_check_files = []
451 def package_info(self):
453 Format various messages from this Upload to send to the maintainer.
457 ('Reject Reasons', self.rejects),
458 ('Warnings', self.warnings),
459 ('Notes', self.notes),
463 for title, messages in msgs:
465 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
470 ###########################################################################
471 def update_subst(self):
472 """ Set up the per-package template substitution mappings """
476 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
477 if not self.pkg.changes.has_key("architecture") or not \
478 isinstance(self.pkg.changes["architecture"], dict):
479 self.pkg.changes["architecture"] = { "Unknown" : "" }
481 # and maintainer2047 may not exist.
482 if not self.pkg.changes.has_key("maintainer2047"):
483 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
485 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
486 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
487 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
489 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
490 if self.pkg.changes["architecture"].has_key("source") and \
491 self.pkg.changes["changedby822"] != "" and \
492 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
494 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
495 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
496 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
498 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
499 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
500 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
502 # Process policy doesn't set the fingerprint field and I don't want to make it
503 # do it for now as I don't want to have to deal with the case where we accepted
504 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
505 # the meantime so the package will be remarked as rejectable. Urgh.
506 # TODO: Fix this properly
507 if self.pkg.changes.has_key('fingerprint'):
508 session = DBConn().session()
509 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
510 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
511 if self.pkg.changes.has_key("sponsoremail"):
512 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
515 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
516 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
518 # Apply any global override of the Maintainer field
519 if cnf.get("Dinstall::OverrideMaintainer"):
520 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
521 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
523 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
524 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
525 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
526 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
528 ###########################################################################
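# Illustrative sketch (hypothetical values): after update_subst() above runs,
# self.Subst carries per-package template substitutions roughly like:
#
#   {'__ARCHITECTURE__': 'source amd64',
#    '__CHANGES_FILENAME__': 'foo_1.0-1_amd64.changes',
#    '__MAINTAINER_TO__': 'Changer <changer@example.org>, Maint <maint@example.org>',
#    '__SOURCE__': 'foo', '__VERSION__': '1.0-1', '__SUITE__': 'unstable'}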
529 def load_changes(self, filename):
531 Load a changes file and set up a dictionary around it. Also checks for mandatory
534 @type filename: string
535 @param filename: Changes filename, full path.
538 @return: whether the changes file was valid or not. We may want to
539 reject even if this is True (see what gets put in self.rejects).
540 This is simply to prevent us even trying things later which will
541 fail because we couldn't properly parse the file.
544 self.pkg.changes_file = filename
546 # Parse the .changes field into a dictionary
548 self.pkg.changes.update(parse_changes(filename))
549 except CantOpenError:
550 self.rejects.append("%s: can't read file." % (filename))
552 except ParseChangesError, line:
553 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
555 except ChangesUnicodeError:
556 self.rejects.append("%s: changes file not proper utf-8" % (filename))
559 # Parse the Files field from the .changes into another dictionary
561 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
562 except ParseChangesError, line:
563 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
565 except UnknownFormatError, format:
566 self.rejects.append("%s: unknown format '%s'." % (filename, format))
569 # Check for mandatory fields
570 for i in ("distribution", "source", "binary", "architecture",
571 "version", "maintainer", "files", "changes", "description"):
572 if not self.pkg.changes.has_key(i):
573 # Avoid undefined errors later
574 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
577 # Strip a source version in brackets from the source field
578 if re_strip_srcver.search(self.pkg.changes["source"]):
579 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
581 # Ensure the source field is a valid package name.
582 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
583 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
585 # Split multi-value fields into a lower-level dictionary
586 for i in ("architecture", "distribution", "binary", "closes"):
587 o = self.pkg.changes.get(i, "")
589 del self.pkg.changes[i]
591 self.pkg.changes[i] = {}
594 self.pkg.changes[i][j] = 1
596 # Fix the Maintainer: field to be RFC822/2047 compatible
598 (self.pkg.changes["maintainer822"],
599 self.pkg.changes["maintainer2047"],
600 self.pkg.changes["maintainername"],
601 self.pkg.changes["maintaineremail"]) = \
602 fix_maintainer (self.pkg.changes["maintainer"])
603 except ParseMaintError, msg:
604 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
605 % (filename, self.pkg.changes["maintainer"], msg))
607 # ...likewise for the Changed-By: field if it exists.
609 (self.pkg.changes["changedby822"],
610 self.pkg.changes["changedby2047"],
611 self.pkg.changes["changedbyname"],
612 self.pkg.changes["changedbyemail"]) = \
613 fix_maintainer (self.pkg.changes.get("changed-by", ""))
614 except ParseMaintError, msg:
615 self.pkg.changes["changedby822"] = ""
616 self.pkg.changes["changedby2047"] = ""
617 self.pkg.changes["changedbyname"] = ""
618 self.pkg.changes["changedbyemail"] = ""
620 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
621 % (filename, self.pkg.changes["changed-by"], msg))
623 # Ensure all the values in Closes: are numbers
624 if self.pkg.changes.has_key("closes"):
625 for i in self.pkg.changes["closes"].keys():
626 if re_isanum.match(i) is None:
627 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
629 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
630 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
631 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
633 # Check the .changes is non-empty
634 if not self.pkg.files:
635 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
638 # The changes file was syntactically valid, even if we may still reject it
641 ###########################################################################
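# Illustrative sketch (hypothetical path): load_changes() above is the usual
# entry point before the other checks, roughly:
#
#   u = Upload()
#   if u.load_changes("/srv/queue/unchecked/foo_1.0-1_amd64.changes"):
#       u.update_subst()
#   # u.rejects collects any problems found while parsing.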
643 def check_distributions(self):
644 "Check and map the Distribution field"
648 # Handle suite mappings
649 for m in Cnf.ValueList("SuiteMappings"):
652 if mtype == "map" or mtype == "silent-map":
653 (source, dest) = args[1:3]
654 if self.pkg.changes["distribution"].has_key(source):
655 del self.pkg.changes["distribution"][source]
656 self.pkg.changes["distribution"][dest] = 1
657 if mtype != "silent-map":
658 self.notes.append("Mapping %s to %s." % (source, dest))
659 if self.pkg.changes.has_key("distribution-version"):
660 if self.pkg.changes["distribution-version"].has_key(source):
661 self.pkg.changes["distribution-version"][source]=dest
662 elif mtype == "map-unreleased":
663 (source, dest) = args[1:3]
664 if self.pkg.changes["distribution"].has_key(source):
665 for arch in self.pkg.changes["architecture"].keys():
666 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
667 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
668 del self.pkg.changes["distribution"][source]
669 self.pkg.changes["distribution"][dest] = 1
671 elif mtype == "ignore":
673 if self.pkg.changes["distribution"].has_key(suite):
674 del self.pkg.changes["distribution"][suite]
675 self.warnings.append("Ignoring %s as a target suite." % (suite))
676 elif mtype == "reject":
678 if self.pkg.changes["distribution"].has_key(suite):
679 self.rejects.append("Uploads to %s are not accepted." % (suite))
680 elif mtype == "propup-version":
681 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
683 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
684 if self.pkg.changes["distribution"].has_key(args[1]):
685 self.pkg.changes.setdefault("distribution-version", {})
686 for suite in args[2:]:
687 self.pkg.changes["distribution-version"][suite] = suite
689 # Ensure there is (still) a target distribution
690 if len(self.pkg.changes["distribution"].keys()) < 1:
691 self.rejects.append("No valid distribution remaining.")
693 # Ensure target distributions exist
694 for suite in self.pkg.changes["distribution"].keys():
695 if not Cnf.has_key("Suite::%s" % (suite)):
696 self.rejects.append("Unknown distribution `%s'." % (suite))
698 ###########################################################################
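# Illustrative SuiteMappings entries (values are hypothetical and shown only to
# document the format parsed by check_distributions() above; the real list
# lives in the dak configuration):
#
#   "silent-map stable-security stable"
#   "map-unreleased stable unstable"
#   "ignore oldstable"
#   "reject frozen"
#   "propup-version stable-security testing testing-proposed-updates"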
700 def binary_file_checks(self, f, session):
702 entry = self.pkg.files[f]
704 # Extract package control information
705 deb_file = utils.open_file(f)
707 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
709 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
711 # Can't continue, none of the checks on control would work.
714 # Check for mandatory "Description:"
717 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
719 self.rejects.append("%s: Missing Description in binary package" % (f))
724 # Check for mandatory fields
725 for field in [ "Package", "Architecture", "Version" ]:
726 if control.Find(field) == None:
728 self.rejects.append("%s: No %s field in control." % (f, field))
731 # Ensure the package name matches the one given in the .changes
732 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
733 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
735 # Validate the package field
736 package = control.Find("Package")
737 if not re_valid_pkg_name.match(package):
738 self.rejects.append("%s: invalid package name '%s'." % (f, package))
740 # Validate the version field
741 version = control.Find("Version")
742 if not re_valid_version.match(version):
743 self.rejects.append("%s: invalid version number '%s'." % (f, version))
745 # Ensure the architecture of the .deb is one we know about.
746 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
747 architecture = control.Find("Architecture")
748 upload_suite = self.pkg.changes["distribution"].keys()[0]
750 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
751 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
752 self.rejects.append("Unknown architecture '%s'." % (architecture))
754 # Ensure the architecture of the .deb is one of the ones
755 # listed in the .changes.
756 if not self.pkg.changes["architecture"].has_key(architecture):
757 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
759 # Sanity-check the Depends field
760 depends = control.Find("Depends")
762 self.rejects.append("%s: Depends field is empty." % (f))
764 # Sanity-check the Provides field
765 provides = control.Find("Provides")
767 provide = re_spacestrip.sub('', provides)
769 self.rejects.append("%s: Provides field is empty." % (f))
770 prov_list = provide.split(",")
771 for prov in prov_list:
772 if not re_valid_pkg_name.match(prov):
773 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
775 # Check the section & priority match those given in the .changes (non-fatal)
776 if control.Find("Section") and entry["section"] != "" \
777 and entry["section"] != control.Find("Section"):
778 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
779 (f, control.Find("Section", ""), entry["section"]))
780 if control.Find("Priority") and entry["priority"] != "" \
781 and entry["priority"] != control.Find("Priority"):
782 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
783 (f, control.Find("Priority", ""), entry["priority"]))
785 entry["package"] = package
786 entry["architecture"] = architecture
787 entry["version"] = version
788 entry["maintainer"] = control.Find("Maintainer", "")
790 if f.endswith(".udeb"):
791 self.pkg.files[f]["dbtype"] = "udeb"
792 elif f.endswith(".deb"):
793 self.pkg.files[f]["dbtype"] = "deb"
795 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
797 entry["source"] = control.Find("Source", entry["package"])
799 # Get the source version
800 source = entry["source"]
803 if source.find("(") != -1:
804 m = re_extract_src_version.match(source)
806 source_version = m.group(2)
808 if not source_version:
809 source_version = self.pkg.files[f]["version"]
811 entry["source package"] = source
812 entry["source version"] = source_version
814 # Ensure the filename matches the contents of the .deb
815 m = re_isadeb.match(f)
818 file_package = m.group(1)
819 if entry["package"] != file_package:
820 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
821 (f, file_package, entry["dbtype"], entry["package"]))
822 epochless_version = re_no_epoch.sub('', control.Find("Version"))
825 file_version = m.group(2)
826 if epochless_version != file_version:
827 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
828 (f, file_version, entry["dbtype"], epochless_version))
831 file_architecture = m.group(3)
832 if entry["architecture"] != file_architecture:
833 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
834 (f, file_architecture, entry["dbtype"], entry["architecture"]))
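# Illustrative worked example: re_isadeb captures (package, version, architecture)
# from the filename, so for a hypothetical "foo_1.0-1_amd64.deb" the checks above
# compare "foo", "1.0-1" and "amd64" against the corresponding control fields
# (the version comparison uses the epochless form of Version).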
836 # Check for existent source
837 source_version = entry["source version"]
838 source_package = entry["source package"]
839 if self.pkg.changes["architecture"].has_key("source"):
840 if source_version != self.pkg.changes["version"]:
841 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
842 (source_version, f, self.pkg.changes["version"]))
844 # Check in the SQL database
845 if not source_exists(source_package, source_version, suites = \
846 self.pkg.changes["distribution"].keys(), session = session):
847 # Check in one of the other directories
848 source_epochless_version = re_no_epoch.sub('', source_version)
849 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
850 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
852 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
855 dsc_file_exists = False
856 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
857 if cnf.has_key("Dir::Queue::%s" % (myq)):
858 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
859 dsc_file_exists = True
862 if not dsc_file_exists:
863 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
865 # Check the version and for file overwrites
866 self.check_binary_against_db(f, session)
868 def source_file_checks(self, f, session):
869 entry = self.pkg.files[f]
871 m = re_issource.match(f)
875 entry["package"] = m.group(1)
876 entry["version"] = m.group(2)
877 entry["type"] = m.group(3)
879 # Ensure the source package name matches the Source field in the .changes
880 if self.pkg.changes["source"] != entry["package"]:
881 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
883 # Ensure the source version matches the version in the .changes file
884 if re_is_orig_source.match(f):
885 changes_version = self.pkg.changes["chopversion2"]
887 changes_version = self.pkg.changes["chopversion"]
889 if changes_version != entry["version"]:
890 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
892 # Ensure the .changes lists source in the Architecture field
893 if not self.pkg.changes["architecture"].has_key("source"):
894 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
896 # Check the signature of a .dsc file
897 if entry["type"] == "dsc":
898 # check_signature returns either:
899 # (None, [list, of, rejects]) or (signature, [])
900 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
902 self.rejects.append(j)
904 entry["architecture"] = "source"
906 def per_suite_file_checks(self, f, suite, session):
908 entry = self.pkg.files[f]
911 if entry.has_key("byhand"):
914 # Check we have fields we need to do these checks
916 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
917 if not entry.has_key(m):
918 self.rejects.append("file '%s' does not have field %s set" % (f, m))
924 # Handle component mappings
925 for m in cnf.ValueList("ComponentMappings"):
926 (source, dest) = m.split()
927 if entry["component"] == source:
928 entry["original component"] = source
929 entry["component"] = dest
931 # Ensure the component is valid for the target suite
932 if cnf.has_key("Suite:%s::Components" % (suite)) and \
933 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
934 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
937 # Validate the component
938 if not get_component(entry["component"], session):
939 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
942 # See if the package is NEW
943 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
946 # Validate the priority
947 if entry["priority"].find('/') != -1:
948 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
950 # Determine the location
951 location = cnf["Dir::Pool"]
952 l = get_location(location, entry["component"], session=session)
954 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
955 entry["location id"] = -1
957 entry["location id"] = l.location_id
959 # Check the md5sum & size against existing files (if any)
960 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
962 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
963 entry["size"], entry["md5sum"], entry["location id"])
966 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
967 elif found is False and poolfile is not None:
968 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
971 entry["files id"] = None
973 entry["files id"] = poolfile.file_id
975 # Check for packages that have moved from one component to another
976 entry['suite'] = suite
977 arch_list = [entry["architecture"], 'all']
978 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
979 [suite], arch_list = arch_list, session = session)
980 if component is not None:
981 entry["othercomponents"] = component
983 def check_files(self, action=True):
984 file_keys = self.pkg.files.keys()
990 os.chdir(self.pkg.directory)
992 ret = holding.copy_to_holding(f)
994 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
998 # check we already know the changes file
999 # [NB: this check must be done post-suite mapping]
1000 base_filename = os.path.basename(self.pkg.changes_file)
1002 session = DBConn().session()
1005 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1006 # if in the pool or in a queue other than unchecked, reject
1007 if (dbc.in_queue is None) \
1008 or (dbc.in_queue is not None
1009 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1010 self.rejects.append("%s file already known to dak" % base_filename)
1011 except NoResultFound, e:
1015 has_binaries = False
1018 for f, entry in self.pkg.files.items():
1019 # Ensure the file does not already exist in one of the accepted directories
1020 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1021 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1022 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1023 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1025 if not re_taint_free.match(f):
1026 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1028 # Check the file is readable
1029 if os.access(f, os.R_OK) == 0:
1030 # When running in -n, copy_to_holding() won't have
1031 # generated the reject_message, so we need to.
1033 if os.path.exists(f):
1034 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1036 # Don't directly reject, mark to check later to deal with orig's
1037 # we can find in the pool
1038 self.later_check_files.append(f)
1039 entry["type"] = "unreadable"
1042 # If it's byhand skip remaining checks
1043 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1045 entry["type"] = "byhand"
1047 # Checks for a binary package...
1048 elif re_isadeb.match(f):
1050 entry["type"] = "deb"
1052 # This routine appends to self.rejects/warnings as appropriate
1053 self.binary_file_checks(f, session)
1055 # Checks for a source package...
1056 elif re_issource.match(f):
1059 # This routine appends to self.rejects/warnings as appropriate
1060 self.source_file_checks(f, session)
1062 # Not a binary or source package? Assume byhand...
1065 entry["type"] = "byhand"
1067 # Per-suite file checks
1068 entry["oldfiles"] = {}
1069 for suite in self.pkg.changes["distribution"].keys():
1070 self.per_suite_file_checks(f, suite, session)
1074 # If the .changes file says it has source, it must have source.
1075 if self.pkg.changes["architecture"].has_key("source"):
1077 self.rejects.append("no source found and Architecture line in changes mention source.")
1079 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1080 self.rejects.append("source only uploads are not supported.")
1082 ###########################################################################
1083 def check_dsc(self, action=True, session=None):
1084 """Returns bool indicating whether or not the source changes are valid"""
1085 # Ensure there is source to check
1086 if not self.pkg.changes["architecture"].has_key("source"):
1091 for f, entry in self.pkg.files.items():
1092 if entry["type"] == "dsc":
1094 self.rejects.append("can not process a .changes file with multiple .dsc's.")
1099 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1100 if not dsc_filename:
1101 self.rejects.append("source uploads must contain a dsc file")
1104 # Parse the .dsc file
1106 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1107 except CantOpenError:
1108 # if not -n copy_to_holding() will have done this for us...
1110 self.rejects.append("%s: can't read file." % (dsc_filename))
1111 except ParseChangesError, line:
1112 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1113 except InvalidDscError, line:
1114 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1115 except ChangesUnicodeError:
1116 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1118 # Build up the file list of files mentioned by the .dsc
1120 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1121 except NoFilesFieldError:
1122 self.rejects.append("%s: no Files: field." % (dsc_filename))
1124 except UnknownFormatError, format:
1125 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1127 except ParseChangesError, line:
1128 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1131 # Enforce mandatory fields
1132 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1133 if not self.pkg.dsc.has_key(i):
1134 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1137 # Validate the source and version fields
1138 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1139 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1140 if not re_valid_version.match(self.pkg.dsc["version"]):
1141 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1143 # Only a limited list of source formats are allowed in each suite
1144 for dist in self.pkg.changes["distribution"].keys():
1145 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1146 if self.pkg.dsc["format"] not in allowed:
1147 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1149 # Validate the Maintainer field
1151 # We ignore the return value
1152 fix_maintainer(self.pkg.dsc["maintainer"])
1153 except ParseMaintError, msg:
1154 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1155 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1157 # Validate the build-depends field(s)
1158 for field_name in [ "build-depends", "build-depends-indep" ]:
1159 field = self.pkg.dsc.get(field_name)
1161 # Have apt try to parse them...
1163 apt_pkg.ParseSrcDepends(field)
1165 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1167 # Ensure the version number in the .dsc matches the version number in the .changes
1168 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1169 changes_version = self.pkg.files[dsc_filename]["version"]
1171 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1172 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1174 # Ensure the Files field contain only what's expected
1175 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1177 # Ensure source is newer than existing source in target suites
1178 session = DBConn().session()
1179 self.check_source_against_db(dsc_filename, session)
1180 self.check_dsc_against_db(dsc_filename, session)
1182 dbchg = get_dbchange(self.pkg.changes_file, session)
1184 # Finally, check if we're missing any files
1185 for f in self.later_check_files:
1187 # Check if we've already processed this file if we have a dbchg object
1190 for pf in dbchg.files:
1191 if pf.filename == f and pf.processed:
1192 self.notes.append('%s was already processed so we can go ahead' % f)
1194 del self.pkg.files[f]
1196 self.rejects.append("Could not find file %s references in changes" % f)
1202 ###########################################################################
1204 def get_changelog_versions(self, source_dir):
1205 """Extracts a the source package and (optionally) grabs the
1206 version history out of debian/changelog for the BTS."""
1210 # Find the .dsc (again)
1212 for f in self.pkg.files.keys():
1213 if self.pkg.files[f]["type"] == "dsc":
1216 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1217 if not dsc_filename:
1220 # Create a symlink mirror of the source files in our temporary directory
1221 for f in self.pkg.files.keys():
1222 m = re_issource.match(f)
1224 src = os.path.join(source_dir, f)
1225 # If a file is missing for whatever reason, give up.
1226 if not os.path.exists(src):
1229 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1230 self.pkg.orig_files[f].has_key("path"):
1232 dest = os.path.join(os.getcwd(), f)
1233 os.symlink(src, dest)
1235 # If the orig files are not a part of the upload, create symlinks to the
1237 for orig_file in self.pkg.orig_files.keys():
1238 if not self.pkg.orig_files[orig_file].has_key("path"):
1240 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1241 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1243 # Extract the source
1244 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1245 (result, output) = commands.getstatusoutput(cmd)
1247 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1248 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1251 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1254 # Get the upstream version
1255 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1256 if re_strip_revision.search(upstr_version):
1257 upstr_version = re_strip_revision.sub('', upstr_version)
1259 # Ensure the changelog file exists
1260 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1261 if not os.path.exists(changelog_filename):
1262 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1265 # Parse the changelog
1266 self.pkg.dsc["bts changelog"] = ""
1267 changelog_file = utils.open_file(changelog_filename)
1268 for line in changelog_file.readlines():
1269 m = re_changelog_versions.match(line)
1271 self.pkg.dsc["bts changelog"] += line
1272 changelog_file.close()
1274 # Check we found at least one revision in the changelog
1275 if not self.pkg.dsc["bts changelog"]:
1276 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
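# Illustrative worked example: for a hypothetical .dsc version "1:1.2-3",
# upstr_version above becomes "1.2" (epoch and revision stripped), so the
# changelog is read from "<source>-1.2/debian/changelog" and every line matched
# by re_changelog_versions is appended to self.pkg.dsc["bts changelog"].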
1278 def check_source(self):
1280 # a) there's no source
1281 if not self.pkg.changes["architecture"].has_key("source"):
1284 tmpdir = utils.temp_dirname()
1286 # Move into the temporary directory
1290 # Get the changelog version history
1291 self.get_changelog_versions(cwd)
1293 # Move back and cleanup the temporary tree
1297 shutil.rmtree(tmpdir)
1299 if e.errno != errno.EACCES:
1301 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1303 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1304 # We probably have u-r or u-w directories so chmod everything
1306 cmd = "chmod -R u+rwx %s" % (tmpdir)
1307 result = os.system(cmd)
1309 utils.fubar("'%s' failed with result %s." % (cmd, result))
1310 shutil.rmtree(tmpdir)
1311 except Exception, e:
1312 print "foobar2 (%s)" % e
1313 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1315 ###########################################################################
1316 def ensure_hashes(self):
1317 # Make sure we recognise the format of the Files: field in the .changes
1318 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1319 if len(format) == 2:
1320 format = int(format[0]), int(format[1])
1322 format = int(float(format[0])), 0
1324 # We need to deal with the original changes blob, as the fields we need
1325 # might not be in the changes dict serialised into the .dak anymore.
1326 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1328 # Copy the checksums over to the current changes dict. This will keep
1329 # the existing modifications to it intact.
1330 for field in orig_changes:
1331 if field.startswith('checksums-'):
1332 self.pkg.changes[field] = orig_changes[field]
1334 # Check for unsupported hashes
1335 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1336 self.rejects.append(j)
1338 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1339 self.rejects.append(j)
1341 # We have to calculate the hash if we have an earlier changes version than
1342 # the hash appears in rather than require it exist in the changes file
1343 for hashname, hashfunc, version in utils.known_hashes:
1344 # TODO: Move _ensure_changes_hash into this class
1345 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1346 self.rejects.append(j)
1347 if "source" in self.pkg.changes["architecture"]:
1348 # TODO: Move _ensure_dsc_hash into this class
1349 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1350 self.rejects.append(j)
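# Illustrative worked example: the Format parsing at the top of ensure_hashes()
# turns e.g. "1.8" into the tuple (1, 8) and a bare "1" into (1, 0); the per-hash
# helpers use that tuple to decide whether a checksum field must already be
# present in the .changes or has to be calculated here.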
1352 def check_hashes(self):
1353 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1354 self.rejects.append(m)
1356 for m in utils.check_size(".changes", self.pkg.files):
1357 self.rejects.append(m)
1359 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1360 self.rejects.append(m)
1362 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1363 self.rejects.append(m)
1365 self.ensure_hashes()
1367 ###########################################################################
1369 def ensure_orig(self, target_dir='.', session=None):
1371 Ensures that all orig files mentioned in the changes file are present
1372 in target_dir. If they do not exist, they are symlinked into place.
1374 A list containing the symlinks that were created is returned (so they
1381 for filename, entry in self.pkg.dsc_files.iteritems():
1382 if not re_is_orig_source.match(filename):
1383 # File is not an orig; ignore
1386 if os.path.exists(filename):
1387 # File exists, no need to continue
1390 def symlink_if_valid(path):
1391 f = utils.open_file(path)
1392 md5sum = apt_pkg.md5sum(f)
1395 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1396 expected = (int(entry['size']), entry['md5sum'])
1398 if fingerprint != expected:
1401 dest = os.path.join(target_dir, filename)
1403 os.symlink(path, dest)
1404 symlinked.append(dest)
1410 session_ = DBConn().session()
1415 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1416 poolfile_path = os.path.join(
1417 poolfile.location.path, poolfile.filename
1420 if symlink_if_valid(poolfile_path):
1430 # Look in some other queues for the file
1431 queues = ('New', 'Byhand', 'ProposedUpdates',
1432 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1434 for queue in queues:
1435 if not cnf.get('Dir::Queue::%s' % queue):
1438 queuefile_path = os.path.join(
1439 cnf['Dir::Queue::%s' % queue], filename
1442 if not os.path.exists(queuefile_path):
1443 # Does not exist in this queue
1446 if symlink_if_valid(queuefile_path):
1451 ###########################################################################
1453 def check_lintian(self):
1455 Extends self.rejects by checking the output of lintian against tags
1456 specified in Dinstall::LintianTags.
1461 # Don't reject binary uploads
1462 if not self.pkg.changes['architecture'].has_key('source'):
1465 # Only check some distributions
1466 for dist in ('unstable', 'experimental'):
1467 if dist in self.pkg.changes['distribution']:
1472 # If we do not have a tagfile, don't do anything
1473 tagfile = cnf.get("Dinstall::LintianTags")
1477 # Parse the yaml file
1478 sourcefile = file(tagfile, 'r')
1479 sourcecontent = sourcefile.read()
1483 lintiantags = yaml.load(sourcecontent)['lintian']
1484 except yaml.YAMLError, msg:
1485 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1488 # Try and find all orig mentioned in the .dsc
1489 symlinked = self.ensure_orig()
1491 # Setup the input file for lintian
1492 fd, temp_filename = utils.temp_filename()
1493 temptagfile = os.fdopen(fd, 'w')
1494 for tags in lintiantags.values():
1495 temptagfile.writelines(['%s\n' % x for x in tags])
1499 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1500 (temp_filename, self.pkg.changes_file)
1502 result, output = commands.getstatusoutput(cmd)
1504 # Remove our tempfile and any symlinks we created
1505 os.unlink(temp_filename)
1507 for symlink in symlinked:
1511 utils.warn("lintian failed for %s [return code: %s]." % \
1512 (self.pkg.changes_file, result))
1513 utils.warn(utils.prefix_multi_line_string(output, \
1514 " [possible output:] "))
1519 [self.pkg.changes_file, "check_lintian"] + list(txt)
1523 parsed_tags = parse_lintian_output(output)
1524 self.rejects.extend(
1525 generate_reject_messages(parsed_tags, lintiantags, log=log)
1528 ###########################################################################
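# Illustrative sketch of a Dinstall::LintianTags file as consumed by
# check_lintian() above (category names and tags are hypothetical; the real
# file is maintained by ftpmaster):
#
#   lintian:
#     fatal:
#       - some-tag-that-causes-a-reject
#     nonfatal:
#       - some-other-tag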
1529 def check_urgency(self):
1531 if self.pkg.changes["architecture"].has_key("source"):
1532 if not self.pkg.changes.has_key("urgency"):
1533 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1534 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1535 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1536 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1537 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1538 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1540 ###########################################################################
1542 # Sanity check the time stamps of files inside debs.
1543 # [Files in the near future cause ugly warnings and extreme time
1544 # travel can cause errors on extraction]
1546 def check_timestamps(self):
1549 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1550 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1551 tar = TarTime(future_cutoff, past_cutoff)
1553 for filename, entry in self.pkg.files.items():
1554 if entry["type"] == "deb":
1557 deb_file = utils.open_file(filename)
1558 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1561 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1562 except SystemError, e:
1563 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1564 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1567 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1571 future_files = tar.future_files.keys()
1573 num_future_files = len(future_files)
1574 future_file = future_files[0]
1575 future_date = tar.future_files[future_file]
1576 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1577 % (filename, num_future_files, future_file, time.ctime(future_date)))
1579 ancient_files = tar.ancient_files.keys()
1581 num_ancient_files = len(ancient_files)
1582 ancient_file = ancient_files[0]
1583 ancient_date = tar.ancient_files[ancient_file]
1584 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1585 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1587 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1589 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1590 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1592 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1598 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1599 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1600 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1601 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1602 self.pkg.changes["sponsoremail"] = uid_email
1607 ###########################################################################
1608 # check_signed_by_key checks
1609 ###########################################################################
1611 def check_signed_by_key(self):
1612 """Ensure the .changes is signed by an authorized uploader."""
1613 session = DBConn().session()
1615 # First of all we check that the person has proper upload permissions
1616 # and that this upload isn't blocked
1617 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1620 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1623 # TODO: Check that import-keyring adds UIDs properly
1625 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1628 # Check that the fingerprint which uploaded has permission to do so
1629 self.check_upload_permissions(fpr, session)
1631 # Check that this package is not in a transition
1632 self.check_transition(session)
1637 def check_upload_permissions(self, fpr, session):
1638 # Check any one-off upload blocks
1639 self.check_upload_blocks(fpr, session)
1641 # Start with DM as a special case
1642 # DM is a special case unfortunately, so we check it first
1643 # (keys with no source access get more access than DMs in one
1644 # way; DMs can only upload for their packages whether source
1645 # or binary, whereas keys with no access might be able to
1646 # upload some binaries)
1647 if fpr.source_acl.access_level == 'dm':
1648 self.check_dm_upload(fpr, session)
1650 # Check source-based permissions for other types
1651 if self.pkg.changes["architecture"].has_key("source") and \
1652 fpr.source_acl.access_level is None:
1653 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1654 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1655 self.rejects.append(rej)
1657 # If not a DM, we allow full upload rights
1658 uid_email = "%s@debian.org" % (fpr.uid.uid)
1659 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1662 # Check binary upload permissions
1663 # By this point we know that DMs can't have got here unless they
1664 # are allowed to deal with the package concerned so just apply
1666 if fpr.binary_acl.access_level == 'full':
1669 # Otherwise we're in the map case
1670 tmparches = self.pkg.changes["architecture"].copy()
1671 tmparches.pop('source', None)
1673 for bam in fpr.binary_acl_map:
1674 tmparches.pop(bam.architecture.arch_string, None)
1676 if len(tmparches.keys()) > 0:
1677 if fpr.binary_reject:
1678 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1679 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1680 self.rejects.append(rej)
1682 # TODO: This is where we'll implement reject vs throw away binaries later
1683 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1684 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1685 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1686 self.rejects.append(rej)
1689 def check_upload_blocks(self, fpr, session):
1690 """Check whether any upload blocks apply to this source, source
1691 version, uid / fpr combination"""
1693 def block_rej_template(fb):
1694 rej = 'Manual upload block in place for package %s' % fb.source
1695 if fb.version is not None:
1696 rej += ', version %s' % fb.version
1699 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1700 # version is None if the block applies to all versions
1701 if fb.version is None or fb.version == self.pkg.changes['version']:
1702 # Check both fpr and uid - either is enough to cause a reject
1703 if fb.fpr is not None:
1704 if fb.fpr.fingerprint == fpr.fingerprint:
1705 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1706 if fb.uid is not None:
1707 if fb.uid == fpr.uid:
1708 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1711 def check_dm_upload(self, fpr, session):
1712 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1713 ## none of the uploaded packages are NEW
1715 for f in self.pkg.files.keys():
1716 if self.pkg.files[f].has_key("byhand"):
1717 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1719 if self.pkg.files[f].has_key("new"):
1720 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1726 r = get_newest_source(self.pkg.changes["source"], session)
1729 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1730 self.rejects.append(rej)
1733 if not r.dm_upload_allowed:
1734 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1735 self.rejects.append(rej)
1738 ## the Maintainer: field of the uploaded .changes file corresponds with
1739 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1741 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1742 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1744 ## the most recent version of the package uploaded to unstable or
1745 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1746 ## non-developer maintainers cannot NMU or hijack packages)
1748 # srcuploaders includes the maintainer
1750 for sup in r.srcuploaders:
1751 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1752 # Eww - I hope we never have two people with the same name in Debian
1753 if email == fpr.uid.uid or name == fpr.uid.name:
1758 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1761 ## none of the packages are being taken over from other source packages
1762 for b in self.pkg.changes["binary"].keys():
1763 for suite in self.pkg.changes["distribution"].keys():
1764 for s in get_source_by_package_and_suite(b, suite, session):
1765 if s.source != self.pkg.changes["source"]:
1766 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1770 def check_transition(self, session):
1773 sourcepkg = self.pkg.changes["source"]
1775 # No sourceful upload -> no need to do anything else, direct return
1776 # We also work with unstable uploads, not experimental or those going to some
1777 # proposed-updates queue
1778 if "source" not in self.pkg.changes["architecture"] or \
1779 "unstable" not in self.pkg.changes["distribution"]:
1782 # Also only check if there is a file defined (and existant) with
1784 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1785 if transpath == "" or not os.path.exists(transpath):
1788 # Parse the yaml file
1789 sourcefile = file(transpath, 'r')
1790 sourcecontent = sourcefile.read()
1792 transitions = yaml.load(sourcecontent)
1793 except yaml.YAMLError, msg:
1794 # This shouldn't happen, there is a wrapper to edit the file which
1795 # checks it, but we would rather be safe than end up rejecting
1797 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1800 # Now look through all defined transitions
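# A rough sketch of one transition entry as this loop consumes it; only the
# keys actually referenced below (source, packages, reason, rm) are certain,
# the rest of the shape is an assumption about the file the Release Team edits:
#
#   libfoo-transition:
#     source: libfoo
#     rm: "Responsible Release Team member"
#     reason: "Why the transition is in place"
#     packages:
#       - bar
#       - baz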
1801 for trans in transitions:
1802 t = transitions[trans]
1803 source = t["source"]
1806 # Will be None if nothing is in testing.
1807 current = get_source_in_suite(source, "testing", session)
1808 if current is not None:
1809 compare = apt_pkg.VersionCompare(current.version, expected)
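# apt_pkg.VersionCompare(a, b) is negative when a is older than b, so a
# negative result here means testing still carries something older than the
# version the transition is waiting for.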
1811 if current is None or compare < 0:
1812 # This is still valid, the current version in testing is older than
1813 # the new version we wait for, or there is none in testing yet
1815 # Check if the source we look at is affected by this.
1816 if sourcepkg in t['packages']:
1817 # The source is affected, lets reject it.
1819 rejectmsg = "%s: part of the %s transition.\n\n" % (
1822 if current is not None:
1823 currentlymsg = "at version %s" % (current.version)
1825 currentlymsg = "not present in testing"
1827 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1829 rejectmsg += "\n".join(textwrap.wrap("""Your package
1830 is part of a testing transition designed to get %s migrated (it is
1831 currently %s, we need version %s). This transition is managed by the
1832 Release Team, and %s is the Release-Team member responsible for it.
1833 Please mail debian-release@lists.debian.org or contact %s directly if you
1834 need further assistance. You might want to upload to experimental until this
1835 transition is done."""
1836 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1838 self.rejects.append(rejectmsg)
1841 ###########################################################################
1842 # End check_signed_by_key checks
1843 ###########################################################################
1845 def build_summaries(self):
1846 """ Build a summary of changes the upload introduces. """
1848 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1850 short_summary = summary
1852 # This is for direport's benefit...
1853 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
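# re_fdnic presumably matches blank lines in the changelog, which are turned
# into " ." continuation lines so the Changes: block below stays intact.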
1856 summary += "Changes: " + f
1858 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1860 summary += self.announce(short_summary, 0)
1862 return (summary, short_summary)
1864 ###########################################################################
1866 def close_bugs(self, summary, action):
1868 Send mail to close bugs as instructed by the closes field in the changes file.
1869 Also add a line to summary if any work was done.
1871 @type summary: string
1872 @param summary: summary text, as given by L{build_summaries}
1875 @param action: If set to false, no real action will be done.
1878 @return: summary. If action was taken, extended by the list of closed bugs.
1882 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1884 bugs = self.pkg.changes["closes"].keys()
1890 summary += "Closing bugs: "
1892 summary += "%s " % (bug)
1895 self.Subst["__BUG_NUMBER__"] = bug
1896 if self.pkg.changes["distribution"].has_key("stable"):
1897 self.Subst["__STABLE_WARNING__"] = """
1898 Note that this package is not part of the released stable Debian
1899 distribution. It may have dependencies on other unreleased software,
1900 or other instabilities. Please take care if you wish to install it.
1901 The update will eventually make its way into the next released Debian
1904 self.Subst["__STABLE_WARNING__"] = ""
1905 mail_message = utils.TemplateSubst(self.Subst, template)
1906 utils.send_mail(mail_message)
1908 # Clear up after ourselves
1909 del self.Subst["__BUG_NUMBER__"]
1910 del self.Subst["__STABLE_WARNING__"]
1912 if action and self.logger:
1913 self.logger.log(["closing bugs"] + bugs)
1919 ###########################################################################
1921 def announce(self, short_summary, action):
1923 Send an announce mail about a new upload.
1925 @type short_summary: string
1926 @param short_summary: Short summary text to include in the mail
1929 @param action: If set to false, no real action will be done.
1932 @return: Text string describing the action taken.
1937 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1939 # Only do announcements for source uploads with a recent dpkg-dev installed
1940 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1941 self.pkg.changes["architecture"].has_key("source"):
1947 self.Subst["__SHORT_SUMMARY__"] = short_summary
1949 for dist in self.pkg.changes["distribution"].keys():
1950 suite = get_suite(dist)
1951 if suite is None: continue
1952 announce_list = suite.announce
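# Skip suites without an announce list, and make sure each list address is
# mailed only once even when several target suites share it.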
1953 if announce_list == "" or lists_done.has_key(announce_list):
1956 lists_done[announce_list] = 1
1957 summary += "Announcing to %s\n" % (announce_list)
1961 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1962 if cnf.get("Dinstall::TrackingServer") and \
1963 self.pkg.changes["architecture"].has_key("source"):
1964 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1965 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1967 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1968 utils.send_mail(mail_message)
1970 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1972 if cnf.FindB("Dinstall::CloseBugs"):
1973 summary = self.close_bugs(summary, action)
1975 del self.Subst["__SHORT_SUMMARY__"]
1979 ###########################################################################
1981 def accept (self, summary, short_summary, session=None):
1985 This moves all files referenced from the .changes into the pool,
1986 sends the accepted mail, announces to lists, closes bugs and
1987 also checks for override disparities. If enabled it will write out
1988 the version history for the BTS Version Tracking and will finally call
1991 @type summary: string
1992 @param summary: Summary text
1994 @type short_summary: string
1995 @param short_summary: Short summary
1999 stats = SummaryStats()
2002 self.logger.log(["installing changes", self.pkg.changes_file])
2006 # Add the .dsc file to the DB first
2007 for newfile, entry in self.pkg.files.items():
2008 if entry["type"] == "dsc":
2009 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2013 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2014 for newfile, entry in self.pkg.files.items():
2015 if entry["type"] == "deb":
2016 poolfiles.append(add_deb_to_db(self, newfile, session))
2018 # If this is a sourceful diff only upload that is moving
2019 # cross-component we need to copy the .orig files into the new
2020 # component too for the same reasons as above.
2021 # XXX: mhy: I think this should be in add_dsc_to_db
2022 if self.pkg.changes["architecture"].has_key("source"):
2023 for orig_file in self.pkg.orig_files.keys():
2024 if not self.pkg.orig_files[orig_file].has_key("id"):
2025 continue # Skip if it's not in the pool
2026 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2027 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2028 continue # Skip if the location didn't change
2031 oldf = get_poolfile_by_id(orig_file_id, session)
2032 old_filename = os.path.join(oldf.location.path, oldf.filename)
2033 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2034 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2036 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2038 # TODO: Care about size/md5sum collisions etc
2039 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2041 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2043 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2044 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2048 # Don't reference the old file from this changes
2050 if p.file_id == oldf.file_id:
2053 poolfiles.append(newf)
2055 # Fix up the DSC references
2058 for df in source.srcfiles:
2059 if df.poolfile.file_id == oldf.file_id:
2060 # Add a new DSC entry and mark the old one for deletion
2061 # Don't do it in the loop so we don't change the thing we're iterating over
2063 newdscf.source_id = source.source_id
2064 newdscf.poolfile_id = newf.file_id
2065 session.add(newdscf)
2075 # Make sure that our source object is up-to-date
2076 session.expire(source)
2078 # Add changelog information to the database
2079 self.store_changelog()
2081 # Install the files into the pool
2082 for newfile, entry in self.pkg.files.items():
2083 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2084 utils.move(newfile, destination)
2085 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2086 stats.accept_bytes += float(entry["size"])
2088 # Copy the .changes file across for suites which need it.
2089 copy_changes = dict([(x.copychanges, '')
2090 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2091 if x.copychanges is not None])
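# Keying the dict on the destination path gives us each distinct copychanges
# target exactly once, even when several suites share it.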
2093 for dest in copy_changes.keys():
2094 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2096 # We're done - commit the database changes
2098 # Our SQL session will automatically start a new transaction after
2101 # Move the .changes into the 'done' directory
2102 utils.move(self.pkg.changes_file,
2103 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2105 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2106 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2109 self.Subst["__SUMMARY__"] = summary
2110 mail_message = utils.TemplateSubst(self.Subst,
2111 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2112 utils.send_mail(mail_message)
2113 self.announce(short_summary, 1)
2115 ## Helper stuff for DebBugs Version Tracking
2116 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2117 if self.pkg.changes["architecture"].has_key("source"):
2118 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2119 version_history = os.fdopen(fd, 'w')
2120 version_history.write(self.pkg.dsc["bts changelog"])
2121 version_history.close()
2122 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2123 self.pkg.changes_file[:-8]+".versions")
2124 os.rename(temp_filename, filename)
2125 os.chmod(filename, 0644)
2127 # Write out the binary -> source mapping.
2128 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2129 debinfo = os.fdopen(fd, 'w')
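# One line per binary: "<package> <version> <architecture> <source package>
# <source version>", e.g. (values purely illustrative) "foo 1.2-3 amd64 foo 1.2-3".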
2130 for name, entry in sorted(self.pkg.files.items()):
2131 if entry["type"] == "deb":
2132 line = " ".join([entry["package"], entry["version"],
2133 entry["architecture"], entry["source package"],
2134 entry["source version"]])
2135 debinfo.write(line+"\n")
2137 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2138 self.pkg.changes_file[:-8]+".debinfo")
2139 os.rename(temp_filename, filename)
2140 os.chmod(filename, 0644)
2144 # Set up our copy queues (e.g. buildd queues)
2145 for suite_name in self.pkg.changes["distribution"].keys():
2146 suite = get_suite(suite_name, session)
2147 for q in suite.copy_queues:
2149 q.add_file_from_pool(f)
2154 stats.accept_count += 1
2156 def check_override(self):
2158 Checks override entries for validity. Mails "Override disparity" warnings,
2159 if that feature is enabled.
2161 Abandons the check if
2162 - override disparity checks are disabled
2163 - mail sending is disabled
2168 # Abandon the check if override disparity checks have been disabled
2169 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2172 summary = self.pkg.check_override()
2177 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2180 self.Subst["__SUMMARY__"] = summary
2181 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2182 utils.send_mail(mail_message)
2183 del self.Subst["__SUMMARY__"]
2185 ###########################################################################
2187 def remove(self, from_dir=None):
2189 Used (for instance) in p-u to remove the package from unchecked
2191 Also removes the package from the holding area.
2193 if from_dir is None:
2194 from_dir = self.pkg.directory
2197 for f in self.pkg.files.keys():
2198 os.unlink(os.path.join(from_dir, f))
2199 if os.path.exists(os.path.join(h.holding_dir, f)):
2200 os.unlink(os.path.join(h.holding_dir, f))
2202 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2203 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2204 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2206 ###########################################################################
2208 def move_to_queue (self, queue):
2210 Move files to a destination queue using the permissions in the table
2213 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2214 queue.path, perms=int(queue.change_perms, 8))
2215 for f in self.pkg.files.keys():
2216 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2218 ###########################################################################
2220 def force_reject(self, reject_files):
2222 Forcefully move files from the current directory to the
2223 reject directory. If any file already exists in the reject
2224 directory it will be moved to the morgue to make way for
2227 @type reject_files: list
2228 @param reject_files: list of file names to move to the reject directory
2234 for file_entry in reject_files:
2235 # Skip any files which don't exist or which we don't have permission to copy.
2236 if os.access(file_entry, os.R_OK) == 0:
2239 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
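# O_CREAT|O_EXCL means the open only succeeds if we are the first to claim
# this name, so concurrent rejects cannot silently overwrite each other.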
2242 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2244 # File exists? Let's find a new name by adding a number
2245 if e.errno == errno.EEXIST:
2247 dest_file = utils.find_next_free(dest_file, 255)
2248 except NoFreeFilenameError:
2249 # Something's either gone badly Pete Tong, or
2250 # someone is trying to exploit us.
2251 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2254 # Make sure we really got it
2256 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2259 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2263 # If we got here, we own the destination file, so we can
2264 # safely overwrite it.
2265 utils.move(file_entry, dest_file, 1, perms=0660)
2268 ###########################################################################
2269 def do_reject (self, manual=0, reject_message="", notes=""):
2271 Reject an upload. If called without a reject message or C{manual} is
2272 true, spawn an editor so the user can write one.
2275 @param manual: manual or automated rejection
2277 @type reject_message: string
2278 @param reject_message: A reject message
2283 # If we weren't given a manual rejection message, spawn an
2284 # editor so the user can add one in...
2285 if manual and not reject_message:
2286 (fd, temp_filename) = utils.temp_filename()
2287 temp_file = os.fdopen(fd, 'w')
2290 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2291 % (note.author, note.version, note.notedate, note.comment))
2293 editor = os.environ.get("EDITOR","vi")
2295 while answer == 'E':
2296 os.system("%s %s" % (editor, temp_filename))
2297 temp_fh = utils.open_file(temp_filename)
2298 reject_message = "".join(temp_fh.readlines())
2300 print "Reject message:"
2301 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2302 prompt = "[R]eject, Edit, Abandon, Quit ?"
2304 while prompt.find(answer) == -1:
2305 answer = utils.our_raw_input(prompt)
2306 m = re_default_answer.search(prompt)
2309 answer = answer[:1].upper()
2310 os.unlink(temp_filename)
2316 print "Rejecting.\n"
2320 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2321 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2323 # Move all the files into the reject directory
2324 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2325 self.force_reject(reject_files)
2327 # If we fail here someone is probably trying to exploit the race
2328 # so let's just raise an exception ...
2329 if os.path.exists(reason_filename):
2330 os.unlink(reason_filename)
2331 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2333 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2337 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2338 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2339 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2340 os.write(reason_fd, reject_message)
2341 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2343 # Build up the rejection email
2344 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2345 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2346 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2347 self.Subst["__REJECT_MESSAGE__"] = ""
2348 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2349 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2350 # Write the rejection email out as the <foo>.reason file
2351 os.write(reason_fd, reject_mail_message)
2353 del self.Subst["__REJECTOR_ADDRESS__"]
2354 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2355 del self.Subst["__CC__"]
2359 # Send the rejection mail
2360 utils.send_mail(reject_mail_message)
2363 self.logger.log(["rejected", self.pkg.changes_file])
2367 ################################################################################
2368 def in_override_p(self, package, component, suite, binary_type, filename, session):
2370 Check if a package already has override entries in the DB
2372 @type package: string
2373 @param package: package name
2375 @type component: string
2376 @param component: component name
2379 @param suite: suite name
2381 @type binary_type: string
2382 @param binary_type: type of the package
2384 @type filename: string
2385 @param filename: filename we check
2387 @return: the database result. But no one cares anyway.
2393 if binary_type == "": # must be source
2396 file_type = binary_type
2398 # Override suite name; used for example with proposed-updates
2399 oldsuite = get_suite(suite, session)
2400 if (not oldsuite is None) and oldsuite.overridesuite:
2401 suite = oldsuite.overridesuite
2403 result = get_override(package, suite, component, file_type, session)
2405 # If checking for a source package fall back on the binary override type
2406 if file_type == "dsc" and len(result) < 1:
2407 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2409 # Remember the section and priority so we can check them later if appropriate
2412 self.pkg.files[filename]["override section"] = result.section.section
2413 self.pkg.files[filename]["override priority"] = result.priority.priority
2418 ################################################################################
2419 def get_anyversion(self, sv_list, suite):
2422 @param sv_list: list of (suite, version) tuples to check
2425 @param suite: suite name
2431 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
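# Consider the suite itself plus every suite listed under its
# VersionChecks::Enhances setting, presumably keeping the newest version
# seen in any of them.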
2432 for (s, v) in sv_list:
2433 if s in [ x.lower() for x in anysuite ]:
2434 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2439 ################################################################################
2441 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2444 @param sv_list: list of (suite, version) tuples to check
2446 @type filename: string
2447 @param filename: filename of the uploaded file being checked (used in reject messages)
2449 @type new_version: string
2450 @param new_version: version of the upload being checked against the archive
2452 Ensure versions are newer than existing packages in target
2453 suites and that cross-suite version checking rules as
2454 set out in the conf file are satisfied.
2459 # Check versions for each target suite
2460 for target_suite in self.pkg.changes["distribution"].keys():
2461 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2462 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2464 # Enforce "must be newer than target suite" even if conffile omits it
2465 if target_suite not in must_be_newer_than:
2466 must_be_newer_than.append(target_suite)
2468 for (suite, existent_version) in sv_list:
2469 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
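# apt_pkg.VersionCompare is positive when new_version is newer than
# existent_version, negative when it is older and zero when equal; the two
# checks below read accordingly.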
2471 if suite in must_be_newer_than and sourceful and vercmp < 1:
2472 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2474 if suite in must_be_older_than and vercmp > -1:
2477 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2478 # we really use the other suite, ignoring the conflicting one ...
2479 addsuite = self.pkg.changes["distribution-version"][suite]
2481 add_version = self.get_anyversion(sv_list, addsuite)
2482 target_version = self.get_anyversion(sv_list, target_suite)
2485 # not add_version can only happen if we map to a suite
2486 # that doesn't enhance the suite we're propup'ing from.
2487 # so "propup-ver x a b c; map a d" is a problem only if
2488 # d doesn't enhance a.
2490 # i think we could always propagate in this case, rather
2491 # than complaining. either way, this isn't a REJECT issue
2493 # And - we really should complain to the dorks who configured dak
2494 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2495 self.pkg.changes.setdefault("propdistribution", {})
2496 self.pkg.changes["propdistribution"][addsuite] = 1
2498 elif not target_version:
2499 # not target_version is true when the package is NEW
2500 # we could just stick with the "...old version..." REJECT
2501 # for this, I think.
2502 self.rejects.append("Won't propogate NEW packages.")
2503 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2504 # propagation would be redundant. no need to reject though.
2505 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2507 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2508 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2510 self.warnings.append("Propogating upload to %s" % (addsuite))
2511 self.pkg.changes.setdefault("propdistribution", {})
2512 self.pkg.changes["propdistribution"][addsuite] = 1
2516 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2518 ################################################################################
2519 def check_binary_against_db(self, filename, session):
2520 # Ensure version is sane
2521 self.cross_suite_version_check( \
2522 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2523 self.pkg.files[filename]["architecture"], session),
2524 filename, self.pkg.files[filename]["version"], sourceful=False)
2526 # Check for any existing copies of the file
2527 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2528 q = q.filter_by(version=self.pkg.files[filename]["version"])
2529 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2532 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2534 ################################################################################
2536 def check_source_against_db(self, filename, session):
2537 source = self.pkg.dsc.get("source")
2538 version = self.pkg.dsc.get("version")
2540 # Ensure version is sane
2541 self.cross_suite_version_check( \
2542 get_suite_version_by_source(source, session), filename, version,
2545 ################################################################################
2546 def check_dsc_against_db(self, filename, session):
2549 @warning: NB: this function can remove entries from the 'files' index [if
2550 the orig tarball is a duplicate of the one in the archive]; if
2551 you're iterating over 'files' and call this function as part of
2552 the loop, be sure to add a check to the top of the loop to
2553 ensure you haven't just tried to dereference the deleted entry.
2558 self.pkg.orig_files = {} # XXX: do we need to clear it?
2559 orig_files = self.pkg.orig_files
2561 # Try and find all files mentioned in the .dsc. This has
2562 # to work harder to cope with the multiple possible
2563 # locations of an .orig.tar.gz.
2564 # The ordering on the select is needed to pick the newest orig
2565 # when it exists in multiple places.
2566 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2568 if self.pkg.files.has_key(dsc_name):
2569 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2570 actual_size = int(self.pkg.files[dsc_name]["size"])
2571 found = "%s in incoming" % (dsc_name)
2573 # Check the file does not already exist in the archive
2574 ql = get_poolfile_like_name(dsc_name, session)
2576 # Strip out anything that isn't '%s' or '/%s$'
2578 if not i.filename.endswith(dsc_name):
2581 # "[dak] has not broken them. [dak] has fixed a
2582 # brokenness. Your crappy hack exploited a bug in
2585 # "(Come on! I thought it was always obvious that
2586 # one just doesn't release different files with
2587 # the same name and version.)"
2588 # -- ajk@ on d-devel@l.d.o
2591 # Ignore exact matches for .orig.tar.gz
2593 if re_is_orig_source.match(dsc_name):
2595 if self.pkg.files.has_key(dsc_name) and \
2596 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2597 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2598 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2599 # TODO: Don't delete the entry, just mark it as not needed
2600 # This would fix the stupidity of changing something we often iterate over
2601 # whilst we're doing it
2602 del self.pkg.files[dsc_name]
2603 dsc_entry["files id"] = i.file_id
2604 if not orig_files.has_key(dsc_name):
2605 orig_files[dsc_name] = {}
2606 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2609 # Don't bitch that we couldn't find this file later
2611 self.later_check_files.remove(dsc_name)
2617 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2619 elif re_is_orig_source.match(dsc_name):
2621 ql = get_poolfile_like_name(dsc_name, session)
2623 # Strip out anything that isn't '%s' or '/%s$'
2624 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2626 if not i.filename.endswith(dsc_name):
2630 # Unfortunately, we may get more than one match here if,
2631 # for example, the package was in potato but had an -sa
2632 # upload in woody. So we need to choose the right one.
2634 # default to something sane in case we don't match any or have only one
2639 old_file = os.path.join(i.location.path, i.filename)
2640 old_file_fh = utils.open_file(old_file)
2641 actual_md5 = apt_pkg.md5sum(old_file_fh)
2643 actual_size = os.stat(old_file)[stat.ST_SIZE]
2644 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2647 old_file = os.path.join(i.location.path, i.filename)
2648 old_file_fh = utils.open_file(old_file)
2649 actual_md5 = apt_pkg.md5sum(old_file_fh)
2651 actual_size = os.stat(old_file)[stat.ST_SIZE]
2653 suite_type = x.location.archive_type
2654 # need this for updating dsc_files in install()
2655 dsc_entry["files id"] = x.file_id
2656 # See install() in process-accepted...
2657 if not orig_files.has_key(dsc_name):
2658 orig_files[dsc_name] = {}
2659 orig_files[dsc_name]["id"] = x.file_id
2660 orig_files[dsc_name]["path"] = old_file
2661 orig_files[dsc_name]["location"] = x.location.location_id
2663 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2664 # Not there? Check the queue directories...
2665 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2666 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2668 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2669 if os.path.exists(in_otherdir):
2670 in_otherdir_fh = utils.open_file(in_otherdir)
2671 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2672 in_otherdir_fh.close()
2673 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2675 if not orig_files.has_key(dsc_name):
2676 orig_files[dsc_name] = {}
2677 orig_files[dsc_name]["path"] = in_otherdir
2680 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2683 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2685 if actual_md5 != dsc_entry["md5sum"]:
2686 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2687 if actual_size != int(dsc_entry["size"]):
2688 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2690 ################################################################################
2691 # This is used by process-new and process-holding to recheck a changes file
2692 # at the time we're running. It mainly wraps various other internal functions
2693 # and is similar to accepted_checks - these should probably be tidied up
2695 def recheck(self, session):
2697 for f in self.pkg.files.keys():
2698 # The .orig.tar.gz can disappear out from under us if it's a
2699 # duplicate of one in the archive.
2700 if not self.pkg.files.has_key(f):
2703 entry = self.pkg.files[f]
2705 # Check that the source still exists
2706 if entry["type"] == "deb":
2707 source_version = entry["source version"]
2708 source_package = entry["source package"]
2709 if not self.pkg.changes["architecture"].has_key("source") \
2710 and not source_exists(source_package, source_version, \
2711 suites = self.pkg.changes["distribution"].keys(), session = session):
2712 source_epochless_version = re_no_epoch.sub('', source_version)
2713 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
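# The source may not be in the archive yet but could still be waiting in one
# of the policy queues; finding the .dsc there presumably counts as the
# source existing for this check.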
2715 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2716 if cnf.has_key("Dir::Queue::%s" % (q)):
2717 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2720 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2722 # Version and file overwrite checks
2723 if entry["type"] == "deb":
2724 self.check_binary_against_db(f, session)
2725 elif entry["type"] == "dsc":
2726 self.check_source_against_db(f, session)
2727 self.check_dsc_against_db(f, session)
2729 ################################################################################
2730 def accepted_checks(self, overwrite_checks, session):
2731 # Recheck anything that relies on the database; since that's not
2732 # frozen between accept and our run time when called from p-a.
2734 # overwrite_checks is set to False when installing to stable/oldstable
2739 # Find the .dsc (again)
2741 for f in self.pkg.files.keys():
2742 if self.pkg.files[f]["type"] == "dsc":
2745 for checkfile in self.pkg.files.keys():
2746 # The .orig.tar.gz can disappear out from under us if it's a
2747 # duplicate of one in the archive.
2748 if not self.pkg.files.has_key(checkfile):
2751 entry = self.pkg.files[checkfile]
2753 # Check that the source still exists
2754 if entry["type"] == "deb":
2755 source_version = entry["source version"]
2756 source_package = entry["source package"]
2757 if not self.pkg.changes["architecture"].has_key("source") \
2758 and not source_exists(source_package, source_version, \
2759 suites = self.pkg.changes["distribution"].keys(), \
2761 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2763 # Version and file overwrite checks
2764 if overwrite_checks:
2765 if entry["type"] == "deb":
2766 self.check_binary_against_db(checkfile, session)
2767 elif entry["type"] == "dsc":
2768 self.check_source_against_db(checkfile, session)
2769 self.check_dsc_against_db(dsc_filename, session)
2771 # Propagate in the case it is in the override tables:
2772 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2773 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2774 propogate[suite] = 1
2776 nopropogate[suite] = 1
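# A suite is only added to the target distributions below if every file
# passed the override check for it; a single miss sets nopropogate, which
# presumably vetoes the whole suite.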
2778 for suite in propogate.keys():
2779 if suite in nopropogate:
2781 self.pkg.changes["distribution"][suite] = 1
2783 for checkfile in self.pkg.files.keys():
2784 # Check the package is still in the override tables
2785 for suite in self.pkg.changes["distribution"].keys():
2786 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype",""), checkfile, session):
2787 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2789 ################################################################################
2790 # If any file of an upload has a recent mtime then chances are good
2791 # the file is still being uploaded.
2793 def upload_too_new(self):
2796 # Move back to the original directory to get accurate time stamps
2798 os.chdir(self.pkg.directory)
2799 file_list = self.pkg.files.keys()
2800 file_list.extend(self.pkg.dsc_files.keys())
2801 file_list.append(self.pkg.changes_file)
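# Any file modified more recently than Dinstall::SkipTime seconds ago is
# taken as a sign that the upload is probably still in progress.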
2804 last_modified = time.time()-os.path.getmtime(f)
2805 if last_modified < int(cnf["Dinstall::SkipTime"]):
2814 def store_changelog(self):
2816 # Skip binary-only upload if it is not a bin-NMU
2817 if not self.pkg.changes['architecture'].has_key('source'):
2818 from daklib.regexes import re_bin_only_nmu
2819 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2822 session = DBConn().session()
2824 # Check if upload already has a changelog entry
2825 query = """SELECT changelog_id FROM changes WHERE source = :source
2826 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2827 if session.execute(query, {'source': self.pkg.changes['source'], \
2828 'version': self.pkg.changes['version'], \
2829 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2833 # Add current changelog text into changelogs_text table, return created ID
2834 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2835 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
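# RETURNING id hands back the primary key of the freshly inserted changelog
# text in the same round trip, so it can be linked to the changes row below.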
2837 # Link ID to the upload available in changes table
2838 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2839 AND version = :version AND architecture = :architecture"""
2840 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2841 'version': self.pkg.changes['version'], \
2842 'architecture': " ".join(self.pkg.changes['architecture'].keys())})