4 """ Queue utility functions for dak """
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
27 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
31 ###############################################################################
33 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Work out which packages in an upload are NEW, i.e. lack an
    override entry in the target suite(s).

    NOTE(review): several statements are missing from this copy — the
    `new = {}` initialisation, the `f = files[file_entry]` binding, the
    `pkg` assignment, `new[pkg] = {}`, the `if warn:` guard and the
    final `return new` — restore from VCS before relying on this block.
    """
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":  # NOTE(review): `continue` body elided
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]
        if file_type == "dsc":  # NOTE(review): body elided (upstream re-maps section/priority for source)
        # First sighting of this package: record its override data.
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop packages that already carry an override in a target suite and
    # clear the "new" flag on their files.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): `q` is queried but its result is never
            # inspected in this copy — upstream checks q.getresult()
            # before deleting the package from `new`.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # Operator warnings about override side effects; upstream these are
    # guarded by `if warn:`, which is absent from this copy.
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
96 ################################################################################
    # NOTE(review): the `def get_type(f):` header of this function is
    # missing from this copy; the fragment below maps a file entry to its
    # override type ("deb"/"udeb"/"dsc").
    if f.has_key("dbtype"):
        # Binary packages carry their override type in "dbtype".
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        # NOTE(review): upstream sets file_type = "dsc" in this branch;
        # the fubar() below belongs to a now-missing `else:` branch.
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    # NOTE(review): upstream guards this fubar with `if type_id == -1:`
    # and ends with `return file_type`; both are absent from this copy.
    utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
114 ################################################################################
116 # check if section/priority values are valid
def check_valid(new):
    """Validate the section/priority of every pending NEW entry.

    Resolves each entry's section and priority to database ids, then
    overwrites the id with -1 when the combination is invalid:
    debian-installer sections may only contain udebs (or source), udebs
    may only live in debian-installer sections, and the "source"
    priority is reserved for (and required by) .dsc entries.
    """
    for pkg, entry in new.items():
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections and udebs must occur together.
        is_di = "debian-installer" in section
        if (is_di and file_type not in ("udeb", "dsc")) or (not is_di and file_type == "udeb"):
            entry["section id"] = -1
        # "source" priority iff the entry is a .dsc (logical XOR).
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
134 ###############################################################################
136 # Convenience wrapper to carry around all the package information in
139 def __init__(self, **kwds):
140 self.__dict__.update(kwds)
142 def update(self, **kwds):
143 self.__dict__.update(kwds)
145 ###############################################################################
    def __init__(self, Cnf):
        """Set up per-run state: accept counters, the rejection message
        buffer, the Pkg attribute bag, the mail substitution map and the
        database connection.

        NOTE(review): upstream also stores `self.Cnf = Cnf` here; that
        assignment is absent from this copy although other methods read
        self.Cnf.
        """
        self.accept_count = 0
        self.accept_bytes = 0L
        self.reject_message = ""
        # Pkg carries everything known about the upload being processed.
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
167 ###########################################################################
169 def init_vars (self):
170 self.pkg.changes.clear()
172 self.pkg.files.clear()
173 self.pkg.dsc_files.clear()
174 self.pkg.legacy_source_untouchable.clear()
175 self.pkg.orig_tar_id = None
176 self.pkg.orig_tar_location = ""
177 self.pkg.orig_tar_gz = None
179 ###########################################################################
    def update_vars (self):
        """Re-load the pickled ".dak" metadata that accompanies the
        .changes file back into self.pkg (the inverse of dump_vars).

        The load order below must match the dump order in dump_vars().
        """
        # The .dak file sits next to the .changes file: strip ".changes".
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        # NOTE(review): cPickle.load on a file dak wrote itself; do not
        # point this at untrusted input.
        p = cPickle.Unpickler(dump_file)

        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())

        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()
        # NOTE(review): upstream closes dump_file here; the close() call
        # is absent from this copy.
197 ###########################################################################
199 # This could just dump the dictionaries as is, but I'd like to
200 # avoid this so there's some idea of what process-accepted &
201 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Serialise a curated subset of self.pkg into a ".dak" pickle in
        `dest_dir`, so process-accepted / process-new can reload it.

        NOTE(review): heavily elided copy — missing pieces include the
        `dsc = self.pkg.dsc` binding, the try/except OSError wrapper
        around os.chmod (so `e` below is unbound), the continuation of
        the fubar() call, the `d_changes`/`d_dsc`/`d_files`/`d_dsc_files`
        dict initialisations, the close of the truncated optional-fields
        list, the dsc field-copy loop body and the final `p.dump(i)` /
        `dump_file.close()` lines.  Restore from VCS before use.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        os.chmod(dump_filename, 0664)
        # chmod may fail when the dumpfile is not owned by the user
        # invoking dak (like e.g. when NEW is processed by a member
        # of a different group)
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
            # security precaution, should never happen unless a weird
            # umask is set anywhere
            if perms & stat.S_IWOTH:
                utils.fubar("%s is world writable and chmod failed." % \
            # ignore the failed chmod otherwise as the file should
            # already have the right privileges and is just, at worst,
            # unreadable for world

        # Protocol 1 keeps the dump compact while staying py2-readable.
        p = cPickle.Pickler(dump_file, 1)

        # Copy only the whitelisted file fields (optional ones skipped
        # when absent).
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]

        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]

        # dsc fields to preserve (copy loop body elided in this copy).
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:

        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump everything in a fixed order (must match update_vars()).
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
285 ###########################################################################
287 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh self.Subst with per-upload values (architecture,
        maintainer addresses, reject message, source/version) consumed
        by the mail templates.

        NOTE(review): upstream binds `Subst = self.Subst` at the top;
        the bare `Subst` used below has no binding in this copy.
        `DictType` comes from `types` (import not visible here).
        """
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # NOTE(review): an `else:` introducing the next three lines has
        # been lost from this copy; as written they unconditionally
        # clobber the Changed-By values set above.
        Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
        Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package-tracking server for sourceful uploads.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
329 ###########################################################################
    def build_summaries(self):
        """Build and return (summary, short_summary) strings describing
        the upload, for use in accept/announce mails.

        NOTE(review): elided copy — missing pieces include the
        `override_summary = ""` initialisation, the byhand/new flag
        assignments, the close of the deb control file handle, the
        `else:` before the pool-destination lines, and the guards
        upstream places around the Changes/Override/announce sections.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        file_keys = files.keys()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # Pull the long description out of binary packages.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
            # NOTE(review): upstream puts the next three lines under an
            # `else:` (known files only); the header is missing here.
            files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
            summary += file_entry + "\n to " + destination + "\n"
            if not files[file_entry].has_key("type"):
                files[file_entry]["type"] = "unknown"
            if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                # (queue/unchecked), there we have override entries already, use them
                # (process-new), there we dont have override entries, use the newly generated ones.
                override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
385 ###########################################################################
    def close_bugs (self, summary, action):
        """Mail bug-close messages for every bug listed in the upload's
        Closes field; returns the updated summary text.

        NOTE(review): heavily elided copy — the `Cnf`/`Subst` bindings,
        the `for bug in bugs:` loop header, the `if action:` guards, the
        closing quotes of the stable-warning template, the `else:`
        before the empty-warning assignment and the final
        `return summary` are all missing.  Restore from VCS.
        """
        changes = self.pkg.changes

        bugs = changes["closes"].keys()

        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
        Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
420 ###########################################################################
    def announce (self, short_summary, action):
        """Send per-distribution announcement mails for a sourceful
        upload and (optionally) close bugs; returns a textual summary of
        what was announced.

        NOTE(review): elided copy — the `Cnf`/`Subst` bindings, the
        `lists_done = {}` / `summary = ""` initialisations, the bodies
        of the two early-skip conditionals (`return ""` / `continue`),
        the `if action:` guards around the mailing and the final
        `return summary` are missing.
        """
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Skip suites with no list, and lists already announced to.
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
454 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept an upload: move the .changes and its files into the
        accepted queue, bump counters, send the accepted mail, write the
        BTS version-tracking files and feed the build queues.

        NOTE(review): elided copy — the local `Cnf`/`Subst`/`dsc`
        bindings this body relies on are missing, as is the close of the
        debinfo file handle before its rename.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        # Snapshot the metadata alongside the queue entry.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped (NOTE(review): original comment truncated here).

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write via a hidden temp file, then rename into place so
                # readers never see a partial file.
                (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            # NOTE(review): upstream closes debinfo here before renaming.
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
524 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files with the auto-build queue for each
        affected suite: copy (security) or symlink them into the queue
        build directory and record them in the queue_build table.

        NOTE(review): elided copy — the local `Cnf` binding, the
        `continue` for non-QueueBuild suites, the `else:` between the
        copy/symlink branches, the `filename = dsc_file` binding, the
        `ql = q.getresult()` / empty-result guard around the fubar, and
        the `else:` before the final UPDATE are missing.
        NOTE(review): file names are interpolated into SQL unescaped;
        verify they are sanitised upstream.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
582 ###########################################################################
    def check_override (self):
        """Compare a binary upload's section/priority with its override
        entries and mail the maintainer about any disparities.

        NOTE(review): elided copy — the `Cnf`/`Subst` bindings, the
        early `return`, the `summary = ""` initialisation, the
        empty-summary short-circuit and the update_subst()/mail header
        lines before sending are missing.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        file_keys = files.keys()

        for file_entry in file_keys:
            # Only known (non-NEW) binary packages have override data to
            # compare against; "-" means "no opinion" in the package.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
620 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        NOTE(review): elided copy — the local `Cnf` binding, the
        try/except OSError wrapper around the first os.open (so `e`
        below is unbound), the `continue` statements after the warnings
        and the close of dest_fd are missing.
        """
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            # O_EXCL claims the destination atomically so two dak
            # processes cannot both write the same reject file.
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # File exists?  Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                morgue_file = utils.find_next_free(morgue_file)
                except NoFreeFilenameError:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
                # Retry the claim now that the old file is out of the way.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
662 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally gather a manual reject
        message via $EDITOR, move everything into the reject directory,
        write a <changes>.reason file and mail the rejection.

        NOTE(review): heavily elided copy — missing pieces include the
        editor retry loop / answer initialisation and the A(bandon) /
        Q(uit) handling, the `pkg`/`Cnf`/`Subst` bindings, the
        `if not manual:` / `else:` split between the automatic and
        manual mail-header sections, the close of reason_fd and the
        final `return 0`.  Restore from VCS.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())

            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
                answer = answer[:1].upper()
            os.unlink(temp_filename)

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        # Automatic rejection: canned headers, message written as-is.
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
737 ################################################################################
739 # Ensure that source exists somewhere in the archive for the binary
740 # upload being processed.
742 # (1) exact match => 1.0-3
743 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that the source for a binary upload exists in the
        archive, either exactly (1.0-3) or as the base of a binary-only
        NMU (1.0-3+b1 -> 1.0-3).

        NOTE(review): elided copy — the `okay` accumulator, the
        `(package)` continuation of the first query string, the suite
        list build-up (`s = [suite]` and the map-expansion loop body),
        the match/no-match bookkeeping and the final `return` are
        missing.  Also note the mutable default `suites=["any"]`; it is
        only read here, but beware if it is ever mutated.
        """
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # Expand the suite set with every suite mapped onto one already
        # in the set (loop header elided in this copy).
        if x[1] in s and x[0] not in s:

        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]

        # (1) exact version match
        if source_version in ql:

        # (2) binary-only NMU: strip the +bN suffix and retry
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
785 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry (section, priority) for `package`
        in `suite`; as a side effect records "override section" and
        "override priority" on self.pkg.files[file].

        NOTE(review): elided copy — the `file_type = "dsc"` assignment
        for the source branch (with its `else:`), the `suite_id == -1`
        bail-out and the trailing `if result:` guard plus the final
        `return result` are missing.
        """
        files = self.pkg.files

        if binary_type == "": # must be source
        file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        files[file]["override section"] = result[0][0]
        files[file]["override priority"] = result[0][1]
824 ################################################################################
826 def reject (self, str, prefix="Rejected: "):
828 # Unlike other rejects we add new lines first to avoid trailing
829 # new lines when this message is passed back up to a caller.
830 if self.reject_message:
831 self.reject_message += "\n"
832 self.reject_message += prefix + str
834 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version found in `query_result` (rows of
        (version, suite)) for `suite` or any suite it Enhances.

        NOTE(review): upstream initialises `anyversion = None` before
        the loop, assigns `anyversion = v` in the innermost branch and
        ends with `return anyversion`; all three lines are absent from
        this copy.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                # Keep the maximum according to Debian version ordering.
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
845 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version,
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        NOTE(review): the def line above is truncated (upstream it
        continues with `sourceful=False):`); further elisions below
        include the `if not ch.has_key(...)` / `else:` structure around
        the distribution-version handling and the `else:` before the
        final must-be-older reject.  Restore from VCS.
        """
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                # NOTE(review): `suite` below is read but never bound in
                # this copy (upstream: `suite = entry[1]`).
                if suite in must_be_newer_than and sourceful and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        # NOTE(review): upstream an `if not add_version:`
                        # guard introduces the branch below.
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1

                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1

                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
911 ################################################################################
913 def check_binary_against_db(self, file):
# Validate one uploaded binary package (keyed by `file` into
# self.pkg.files) against what the archive database already holds.
# Problems are accumulated via self.reject(); the collected text is
# returned -- an empty string means the binary passed.
914 self.reject_message = ""
915 files = self.pkg.files
917 # Ensure version is sane
# Fetch every (version, suite_name) pair already archived for this
# package name on this architecture (or arch 'all'), then hand them
# to cross_suite_version_check() for the per-suite newer/older rules.
# sourceful=False: binary-only ordering checks apply.
# NOTE(review): package/architecture are %-interpolated straight into
# SQL; safe only if these fields were sanitised upstream -- confirm.
918 q = self.projectB.query("""
919 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
921 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
922 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
923 % (files[file]["package"],
924 files[file]["architecture"]))
925 self.cross_suite_version_check(q.getresult(), file,
926 files[file]["version"], sourceful=False)
928 # Check for any existing copies of the file
# An exact (package, version, architecture) row in `binaries` means
# this very .deb is already archived; presumably a non-empty query
# result triggers the overwrite rejection below -- verify the guard.
929 q = self.projectB.query("""
930 SELECT b.id FROM binaries b, architecture a
931 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
932 AND a.id = b.architecture"""
933 % (files[file]["package"],
934 files[file]["version"],
935 files[file]["architecture"]))
937 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
939 return self.reject_message
941 ################################################################################
943 def check_source_against_db(self, file):
# Cross-check the uploaded source package's version against every
# version of the same source name already in the database, using the
# suite must-be-newer-than / must-be-older-than rules.
# Returns the accumulated reject message ("" == no problems).
944 self.reject_message = ""
# NOTE(review): `dsc` is read below but never assigned in this view;
# presumably it is bound to self.pkg.dsc nearby -- confirm.
947 # Ensure version is sane
948 q = self.projectB.query("""
949 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
950 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
# Delegate the cross-suite ordering checks; this is a source upload,
# so presumably sourceful=True is passed in the continuation -- verify.
951 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
954 return self.reject_message
956 ################################################################################
959 # NB: this function can remove entries from the 'files' index [if
960 # the .orig.tar.gz is a duplicate of the one in the archive]; if
961 # you're iterating over 'files' and call this function as part of
962 # the loop, be sure to add a check to the top of the loop to
963 # ensure you haven't just tried to dereference the deleted entry.
966 def check_dsc_against_db(self, file):
# Verify every file listed in the uploaded .dsc against the archive
# database and the queue directories, locating the .orig.tar.gz when
# it is not part of this upload. Side effects on self.pkg:
# orig_tar_gz (path, None, or -1 as a "missing" sentinel),
# orig_tar_id, orig_tar_location, and possibly entries removed from
# `files` (see the caller-facing warning above this method).
# Returns a tuple (reject_message, in_unchecked_path_or_None).
967 self.reject_message = ""
968 files = self.pkg.files
969 dsc_files = self.pkg.dsc_files
970 legacy_source_untouchable = self.pkg.legacy_source_untouchable
971 self.pkg.orig_tar_gz = None
973 # Try and find all files mentioned in the .dsc. This has
974 # to work harder to cope with the multiple possible
975 # locations of an .orig.tar.gz.
976 # The ordering on the select is needed to pick the newest orig
977 # when it exists in multiple places.
978 for dsc_file in dsc_files.keys():
# Case 1: the file is part of this upload -- use its checksum and
# make sure the archive does not already hold a file by that name.
980 if files.has_key(dsc_file):
981 actual_md5 = files[dsc_file]["md5sum"]
982 actual_size = int(files[dsc_file]["size"])
983 found = "%s in incoming" % (dsc_file)
984 # Check the file does not already exist in the archive
# NOTE(review): dsc_file is %-interpolated into a LIKE pattern;
# safe only if the filename was validated upstream -- confirm.
985 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
987 # Strip out anything that isn't '%s' or '/%s$'
# Keep only rows whose filename is exactly dsc_file or ends in
# "/" + dsc_file (the LIKE above is a loose substring match).
989 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
992 # "[dak] has not broken them. [dak] has fixed a
993 # brokenness. Your crappy hack exploited a bug in
996 # "(Come on! I thought it was always obvious that
997 # one just doesn't release different files with
998 # the same name and version.)"
999 # -- ajk@ on d-devel@l.d.o
1002 # Ignore exact matches for .orig.tar.gz
# An .orig.tar.gz identical (size and md5) to the archived copy
# is only warned about and the archived path is recorded; any
# other name clash is a hard overwrite rejection below.
1004 if dsc_file.endswith(".orig.tar.gz"):
1006 if files.has_key(dsc_file) and \
1007 int(files[dsc_file]["size"]) == int(i[0]) and \
1008 files[dsc_file]["md5sum"] == i[1]:
1009 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
# Record the pool path (location path + filename) of the
# already-archived orig tarball.
1011 self.pkg.orig_tar_gz = i[2] + i[3]
1015 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz not in this upload -- look for it in the
# pool, coping with multiple historical locations.
1016 elif dsc_file.endswith(".orig.tar.gz"):
1018 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1020 # Strip out anything that isn't '%s' or '/%s$'
1022 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1026 # Unfortunately, we may get more than one match here if,
1027 # for example, the package was in potato but had an -sa
1028 # upload in woody. So we need to choose the right one.
1030 # default to something sane in case we don't match any or have only one
1035 old_file = i[0] + i[1]
1036 old_file_fh = utils.open_file(old_file)
1037 actual_md5 = apt_pkg.md5sum(old_file_fh)
# NOTE(review): no close() of old_file_fh is visible on this
# path -- verify the handle is not leaked.
1039 actual_size = os.stat(old_file)[stat.ST_SIZE]
# A candidate matching the .dsc's md5sum and size is the one
# we want; its files-table id is blacklisted from legacy-source
# cleanup via legacy_source_untouchable.
1040 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1043 legacy_source_untouchable[i[3]] = ""
# Re-read checksum/size from the chosen match `x` (presumably
# selected from the candidates above -- selection logic elided).
1045 old_file = x[0] + x[1]
1046 old_file_fh = utils.open_file(old_file)
1047 actual_md5 = apt_pkg.md5sum(old_file_fh)
1049 actual_size = os.stat(old_file)[stat.ST_SIZE]
1052 # need this for updating dsc_files in install()
1053 dsc_files[dsc_file]["files id"] = x[3]
1054 # See install() in process-accepted...
1055 self.pkg.orig_tar_id = x[3]
1056 self.pkg.orig_tar_gz = old_file
# Legacy(-mixed) locations are flagged specially; otherwise the
# location id itself is recorded.
1057 if suite_type == "legacy" or suite_type == "legacy-mixed":
1058 self.pkg.orig_tar_location = "legacy"
1060 self.pkg.orig_tar_location = x[4]
1062 # Not there? Check the queue directories...
1064 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1065 # See process_it() in 'dak process-unchecked' for explanation of this
1066 # in_unchecked check dropped by ajt 2007-08-28, how did that
# The "and False" makes this early return unreachable -- the
# Unchecked-dir shortcut was deliberately disabled (see above).
1068 if os.path.exists(in_unchecked) and False:
1069 return (self.reject_message, in_unchecked)
# Fall back to scanning the other queue directories for the file.
1071 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1072 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1073 if os.path.exists(in_otherdir):
1074 in_otherdir_fh = utils.open_file(in_otherdir)
1075 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1076 in_otherdir_fh.close()
1077 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1079 self.pkg.orig_tar_gz = in_otherdir
# Nowhere to be found: reject, and mark orig_tar_gz with the
# -1 sentinel meaning "referenced but missing".
1082 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1083 self.pkg.orig_tar_gz = -1
1086 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Whatever source supplied actual_md5/actual_size, it must agree
# with what the .dsc declares.
1088 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1089 self.reject("md5sum for %s doesn't match %s." % (found, file))
1090 if actual_size != int(dsc_files[dsc_file]["size"]):
1091 self.reject("size for %s doesn't match %s." % (found, file))
1093 return (self.reject_message, None)
1095 def do_query(self, q):
1096 sys.stderr.write("query: \"%s\" ... " % (q))
1097 before = time.time()
1098 r = self.projectB.query(q)
1099 time_diff = time.time()-before
1100 sys.stderr.write("took %.3f seconds.\n" % (time_diff))