4 # Queue utility functions for dak
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
27 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
31 ###############################################################################
33 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Work out which packages in this upload are NEW, i.e. lack an
    override entry in every target suite, and collect their
    priority/section/type/component/files info per package.

    changes  -- parsed .changes mapping (must contain "suite")
    files    -- per-file info mapping from the upload
    projectB -- open pg database connection for override lookups
    warn     -- when true, print warnings (stable/oldstable targets,
                packages already present in another component)

    NOTE(review): this excerpt elides several statements of the original
    (e.g. "new = {}", the "f"/"pkg" bindings, some branch bodies and the
    final "return new"); the comments below describe only what is visible.
    """
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        # NOTE(review): 'f' is used below without a visible binding in
        # this excerpt -- presumably the per-file dict for file_entry;
        # confirm against the full source.
        if f["type"] == "byhand":
            # (skip body elided in this excerpt)
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]
        if file_type == "dsc":
            # (dsc-specific handling elided in this excerpt)
        # First sighting of this package: record its override data.
        if not new.has_key(pkg):
            # NOTE(review): "new[pkg] = {}" appears elided here.
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop anything that already has an override in the target suites.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): SQL built via %-interpolation; values come from
            # the parsed changes / archive database.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            # NOTE(review): the check of q's result (and removal of the
            # package from 'new') appears elided in this excerpt.
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # NOTE(review): an enclosing "if warn:" guard appears elided here.
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
96 ################################################################################
# NOTE(review): the enclosing "def get_type(f):" header is not visible in
# this excerpt; these lines classify a file entry into an override type
# ("deb"/"udeb"/"dsc") and validate it against the database.
    if f.has_key("dbtype"):
        # Binary uploads carry their database type directly.
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        # (assignment of the source type appears elided here)
        # NOTE(review): an "else:" arm appears elided before this call.
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    # NOTE(review): the guard on type_id (e.g. "== -1") appears elided
    # before this call; "return file_type" is also not visible.
    utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
114 ################################################################################
116 # check if section/priority values are valid
def check_valid(new):
    """Resolve every NEW entry's section/priority to database ids,
    flagging invalid combinations by forcing the id to -1.

    A debian-installer section is only valid for udeb/dsc entries, and
    the "source" priority pairs exactly with the dsc type.
    """
    for entry in new.values():
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # d-i sections may only hold udebs (or the source package itself)
        is_di = "debian-installer" in section
        bad_section = (is_di and file_type not in ("udeb", "dsc")) or \
                      (not is_di and file_type == "udeb")
        if bad_section:
            entry["section id"] = -1
        # "source" priority and the dsc type must occur together
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
134 ###############################################################################
136 # Convenience wrapper to carry around all the package information in
    def __init__(self, **kwds):
        """Store every keyword argument as an attribute of the instance."""
        self.__dict__.update(kwds)
142 def update(self, **kwds):
143 self.__dict__.update(kwds)
145 ###############################################################################
    def __init__(self, Cnf):
        """Set up an Upload: accept counters, the per-upload package
        holder, the template substitution map and the database handle.

        Cnf -- apt_pkg-style configuration object.
        NOTE(review): some statements (e.g. storing Cnf on self) are not
        visible in this excerpt, although self.Cnf is read elsewhere.
        """
        self.accept_count = 0
        # Long literal: accumulated byte totals can exceed plain int range.
        self.accept_bytes = 0L
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # Open the archive database and prime the database helper module.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
166 ###########################################################################
    def init_vars (self):
        """Reset all per-upload state on self.pkg, ready for the next
        .changes file to be processed."""
        self.pkg.changes.clear()
        # NOTE(review): "self.pkg.dsc.clear()" is not visible in this
        # excerpt although dsc is initialised in __init__ -- confirm.
        self.pkg.files.clear()
        self.pkg.dsc_files.clear()
        self.pkg.legacy_source_untouchable.clear()
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None
178 ###########################################################################
    def update_vars (self):
        """Restore per-upload state from the ".dak" pickle written next
        to the .changes file by dump_vars().

        The load order here must match the dump order in dump_vars().
        NOTE(review): the trailing loads/close are not visible in this
        excerpt.  Unpickling is only safe because .dak files are
        produced locally by dump_vars(), never supplied by uploaders.
        """
        # ".dak" replaces the ".changes" suffix (8 characters).
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)
        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())
        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()
196 ###########################################################################
198 # This could just dump the dictionaries as is, but I'd like to
199 # avoid this so there's some idea of what process-accepted &
200 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Serialise the per-upload state to <changes-basename>.dak in
        dest_dir with cPickle, writing only a whitelisted subset of each
        dictionary (see the comment above this method).

        NOTE(review): this excerpt elides the d_changes/d_dsc/d_files/
        d_dsc_files initialisations, the try: around os.chmod, the dsc
        whitelist copy body and the final p.dump()/close calls.
        """
        changes = self.pkg.changes
        # NOTE(review): a "dsc = self.pkg.dsc" binding appears elided here.
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        # ".dak" replaces the ".changes" suffix (8 characters).
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # NOTE(review): the "try:" wrapping this chmod (binding 'e' via
        # an except clause) appears elided in this excerpt.
        os.chmod(dump_filename, 0664)
        # chmod may fail when the dumpfile is not owned by the user
        # invoking dak (like e.g. when NEW is processed by a member
        # of the team)
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
            # security precaution, should never happen unless a weird
            # umask is set anywhere
            if perms & stat.S_IWOTH:
                # NOTE(review): the argument/closing paren of this call
                # falls on an elided line.
                utils.fubar("%s is world writable and chmod failed." % \
            # ignore the failed chmod otherwise as the file should
            # already have the right privileges and is just, at worst,
            # unreadable for world

        # Protocol 1: binary pickle format (more compact than protocol 0).
        p = cPickle.Pickler(dump_file, 1)

        # Copy only the whitelisted per-file fields.
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        # NOTE(review): the tail of this list literal falls on an elided line.
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]
        # Whitelisted dsc fields.
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
            # NOTE(review): the loop body (d_dsc copy) is elided here.
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump order must match the load order in update_vars().
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            # NOTE(review): "p.dump(i)" and dump_file.close() are elided here.
284 ###########################################################################
286 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh self.Subst with the per-package values (__SOURCE__,
        __VERSION__, maintainer addresses, ...) used by the mail
        templates.

        reject_message -- text installed as __REJECT_MESSAGE__.
        NOTE(review): a "Subst = self.Subst" binding (and the DictType
        import) are not visible in this excerpt.
        """
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # NOTE(review): an "else:" appears elided before these three lines.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        # Sponsored uploads get the sponsor copied in as well.
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package-tracking service for sourceful uploads, if configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
328 ###########################################################################
    def build_summaries(self):
        """Build and return (summary, short_summary) texts describing
        this upload: per-file destinations, NEW/byhand markers, override
        entries and the changelog excerpt.

        NOTE(review): several branch bodies ("byhand"/"new" flag setting,
        the deb file-handle close, the "else:" before the pool-destination
        lines and guards around the trailing appends) are elided in this
        excerpt.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                if files[file_entry]["type"] == "deb":
                    # Pull the long description straight out of the .deb control file.
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
            # NOTE(review): an "else:" (ordinary, known files) appears
            # elided before the following lines.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        # NOTE(review): guards appear elided around the next appends.
        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
384 ###########################################################################
    def close_bugs (self, summary, action):
        """Append the bug numbers this upload closes to summary and,
        when acting, mail a bug-close message per bug; logs the closures.

        NOTE(review): the Subst/Cnf bindings, the empty-closes early
        return, the "for bug in bugs:" loop header, the "if action:"
        guard and the final "return summary" are elided in this excerpt.
        """
        changes = self.pkg.changes

        bugs = changes["closes"].keys()

        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        # NOTE(review): the closing quotes of the stable-warning string
        # and the "else:" arm are elided in this excerpt.
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
419 ###########################################################################
    def announce (self, short_summary, action):
        """Send the per-suite announcement mails for a sourceful upload
        and hand bug closing on to close_bugs() when configured.

        NOTE(review): the Subst/Cnf bindings, summary/lists_done
        initialisation, early-return and continue bodies, an "if action:"
        guard and the final "return summary" are elided in this excerpt.
        """
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            # (early return elided in this excerpt)

        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Announce to each list at most once per upload.
            if announce_list == "" or lists_done.has_key(announce_list):
                # (continue elided in this excerpt)
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            # Bcc the package-tracking service when configured.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
453 ###########################################################################
    def accept (self, summary, short_summary):
        """Move the upload into Dir::Queue::Accepted, update the accept
        counters, send accepted mail/announcements and write the DebBugs
        version-tracking files.

        NOTE(review): the Cnf/Subst/dsc local bindings and some guards
        are elided in this excerpt.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        # Snapshot state so process-accepted can reload it later.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # removed.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                # BUG(review): os.fdopen() takes the file descriptor, not
                # the path -- this should almost certainly be
                # os.fdopen(fd, 'w'); as written it raises TypeError.
                version_history = os.fdopen(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                # BUG(review): os.chmod() needs a numeric mode; passing
                # the string "0644" raises TypeError -- should be 0644.
                os.chmod(filename, "0644")

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            # BUG(review): same fd-vs-path mixup as above.
            debinfo = os.fdopen(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            # NOTE(review): debinfo.close() is not visible in this excerpt.
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            # BUG(review): string mode again -- should be 0644.
            os.chmod(filename, "0644")

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
523 ###########################################################################
    def queue_build (self, queue, path):
        """Register this upload's files with the named build queue:
        copy or symlink each file into Dir::QueueBuild and record it in
        the queue_build table for later apt-ftparchive processing.

        NOTE(review): the Cnf binding, a continue, two "else:" arms and
        the orig.tar.gz filename binding are elided in this excerpt.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # All inserts/updates for this upload go in one transaction.
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                # (continue elided in this excerpt)
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            # Security queues keep a per-suite subdirectory.
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                # NOTE(review): an "else:" appears elided before this branch.
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): SQL built via %-interpolation; values come
                # from local config and upload filenames.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        # NOTE(review): the filename assignment is elided here.
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    # NOTE(review): "ql = q.getresult()" and its emptiness
                    # check appear elided before this fubar call.
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                # NOTE(review): an "else:" appears elided before this update.
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
581 ###########################################################################
    def check_override (self):
        """Mail the maintainer when the upload's section/priority fields
        disagree with the archive's override table.

        NOTE(review): the Cnf/Subst bindings, summary initialisation,
        the early return body and the empty-summary guard before the
        mail block are elided in this excerpt.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            # (early return elided in this excerpt)

        file_keys = files.keys()
        for file_entry in file_keys:
            # Only check non-NEW .debs; NEW files have no override yet.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                # "-" means the package declined to state a section.
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
619 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        files -- iterable of filenames relative to the current directory.
        NOTE(review): the Cnf binding, the try/except wrappers around
        both os.open calls and some continue/close statements are elided
        in this excerpt.
        """
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                # (continue elided in this excerpt)
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            # O_EXCL: fail instead of silently clobbering an existing reject.
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # File exists?  Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                # NOTE(review): the "try:" for this call appears elided.
                morgue_file = utils.find_next_free(morgue_file)
            except NoFreeFilenameError:
                # Something's either gone badly Pete Tong, or
                # someone is trying to exploit us.
                utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
            # Retry claiming the destination after clearing the way.
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
661 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally collect a rejection
        message via $EDITOR, move everything into the reject directory
        and write/send the rejection mail.

        manual         -- prompt interactively for the message
        reject_message -- preformatted rejection text (skips the editor)

        NOTE(review): parts of the editor/answer loop, the "if not
        manual:"/"else:" arms around the two template expansions, and
        os.close(reason_fd) are elided in this excerpt.  'pkg' and 'Cnf'
        are used without visible local bindings (presumably self.pkg and
        self.Cnf).
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
                answer = answer[:1].upper()
            os.unlink(temp_filename)

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        # Automatic rejection path: canned rejector address + X-DAK headers.
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
736 ################################################################################
738 # Ensure that source exists somewhere in the archive for the binary
739 # upload being processed.
741 # (1) exact match => 1.0-3
742 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that source for a binary upload exists in the archive:
        either (1) the exact version (1.0-3) or (2) a binary-only NMU
        whose +bN suffix strips back to an archived version.

        NOTE(review): suites=["any"] is a shared mutable default -- safe
        only while callers never mutate it.  The "any"-vs-specific suite
        branching, the suite-set expansion loop and the return-value
        handling are elided in this excerpt.
        """
        # NOTE(review): the interpolation argument of this query falls
        # on an elided line (backslash continuation).
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # NOTE(review): the suite-set copy and the loop header iterating
        # over maps appear elided before this condition.
        if x[1] in s and x[0] not in s:
            # (appending the mapped suite is elided in this excerpt)

        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]

        # (1) exact match
        if source_version in ql:
            # (success handling elided in this excerpt)

        # (2) binary-only NMU: strip the +bN suffix and re-check.
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
            # (success handling elided in this excerpt)
784 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry for (package, suite, component,
        type); on a hit, remember the override section/priority on the
        file's entry for the later disparity check.

        NOTE(review): 'file' shadows the builtin name.  The source-type
        assignment, an "else:", the unknown-suite bail-out, component
        normalisation, the "if result:" guard and the return value are
        elided in this excerpt.
        """
        files = self.pkg.files

        if binary_type == "": # must be source
            # (source file_type assignment elided in this excerpt)
        # NOTE(review): an "else:" appears elided before this line.
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        # NOTE(review): an "if result:" guard appears elided here.
        files[file]["override section"] = result[0][0]
        files[file]["override priority"] = result[0][1]
823 ################################################################################
825 def reject (self, str, prefix="Rejected: "):
827 # Unlike other rejects we add new lines first to avoid trailing
828 # new lines when this message is passed back up to a caller.
829 if self.reject_message:
830 self.reject_message += "\n"
831 self.reject_message += prefix + str
833 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version among query_result rows whose
        suite is 'suite' or one of the suites that Enhances it.

        query_result -- iterable of (version, suite_name) rows
        NOTE(review): the "anyversion" initialisation, the winning
        assignment and the final return are elided in this excerpt.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                # Keep the maximum: replace when the current best is <= v.
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    # (assignment of the new best version elided here)
844 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version,
        # NOTE(review): the trailing parameter line of this signature
        # (presumably "sourceful=False):") is elided in this excerpt.
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                # NOTE(review): the binding of 'suite' (presumably
                # entry[1]) is elided in this excerpt.
                if suite in must_be_newer_than and sourceful and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    # Version conflict: see whether a distribution-version
                    # mapping lets us propagate to another suite instead.
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        # NOTE(review): an "if not add_version:" guard
                        # appears elided before this branch.
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                    # NOTE(review): an "else:" (no usable mapping) appears
                    # elided before this final reject.
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
910 ################################################################################
def check_binary_against_db(self, file):
    """Check a binary (.deb) upload against the archive database.

    First runs the cross-suite version checks against every suite that
    already carries this package for the upload's architecture (or
    'all'); then rejects outright if a binary with the same
    (package, version, architecture) already exists in the archive.

    @param file: key into self.pkg.files for the binary being checked
    @return: accumulated reject message ("" if the upload is acceptable)
    """
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane.
    # NOTE(review): the FROM clause was missing the "architecture a"
    # relation that the WHERE clause references; restored here.
    # SECURITY NOTE: values come from the untrusted upload and are
    # interpolated straight into SQL -- the old pg interface offers no
    # parameterized queries, so this relies on earlier field validation.
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                     architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file,
                                   files[file]["version"], sourceful=False)

    # Check for any existing copies of the file.
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    # Only reject when a matching binary really exists; the broken
    # original rejected unconditionally, failing every upload.
    if q.getresult():
        self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
940 ################################################################################
def check_source_against_db(self, file):
    """Check a source (.dsc) upload against the archive database.

    Runs the cross-suite version checks for the source package named in
    the .dsc against every suite that already contains it.

    @param file: key into self.pkg.files for the .dsc being checked
    @return: accumulated reject message ("" if the upload is acceptable)
    """
    self.reject_message = ""
    # The .dsc fields drive the lookup; 'dsc' was read but never bound
    # in the broken original.
    dsc = self.pkg.dsc

    # Ensure version is sane.
    # SECURITY NOTE: dsc fields are untrusted input interpolated into
    # SQL -- the old pg interface offers no parameterized queries.
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    # sourceful=True: a .dsc upload carries source, mirroring the
    # sourceful=False used by check_binary_against_db.  (The original
    # call was left unterminated after the version argument.)
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
                                   sourceful=True)

    return self.reject_message
955 ################################################################################
958 # NB: this function can remove entries from the 'files' index [if
959 # the .orig.tar.gz is a duplicate of the one in the archive]; if
960 # you're iterating over 'files' and call this function as part of
961 # the loop, be sure to add a check to the top of the loop to
962 # ensure you haven't just tried to dereference the deleted entry.
# Verify every file listed in the .dsc: files present in incoming are
# checked for collisions with the archive pool; a referenced
# .orig.tar.gz that is NOT in incoming is hunted down in the pool and
# then in the queue directories, recording where it was found in
# self.pkg.orig_tar_gz / orig_tar_id / orig_tar_location.  Returns a
# (reject_message, path-or-None) tuple.
#
# NOTE(review): this span has lost several original statements -- loop
# headers for the orphaned "if i[...]" tests, the assignments that bind
# 'ql' and 'suite_type', and some 'else:' branches.  The code below is
# preserved byte-for-byte and will NOT run as-is; recover the missing
# lines from VCS history before touching the logic.
965 def check_dsc_against_db(self, file):
966 self.reject_message = ""
967 files = self.pkg.files
968 dsc_files = self.pkg.dsc_files
969 legacy_source_untouchable = self.pkg.legacy_source_untouchable
# Reset: only set to a real path (or -1 on failure) if the .dsc names one.
970 self.pkg.orig_tar_gz = None
972 # Try and find all files mentioned in the .dsc. This has
973 # to work harder to cope with the multiple possible
974 # locations of an .orig.tar.gz.
975 # The ordering on the select is needed to pick the newest orig
976 # when it exists in multiple places.
977 for dsc_file in dsc_files.keys():
# Case 1: the file was uploaded alongside the .dsc (it is in incoming).
979 if files.has_key(dsc_file):
980 actual_md5 = files[dsc_file]["md5sum"]
981 actual_size = int(files[dsc_file]["size"])
982 found = "%s in incoming" % (dsc_file)
983 # Check the file does not already exist in the archive
# LIKE '%name%' over-matches on purpose; exact-name filtering follows.
984 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
986 # Strip out anything that isn't '%s' or '/%s$'
# NOTE(review): the enclosing "for i in q.getresult():" loop header was
# lost from this span -- 'i' is unbound as written.
988 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
991 # "[dak] has not broken them. [dak] has fixed a
992 # brokenness. Your crappy hack exploited a bug in
995 # "(Come on! I thought it was always obvious that
996 # one just doesn't release different files with
997 # the same name and version.)"
998 # -- ajk@ on d-devel@l.d.o
1001 # Ignore exact matches for .orig.tar.gz
# A re-uploaded orig tarball identical (size+md5) to the archived copy
# is tolerated: warn, remember the pool path, and carry on.
1003 if dsc_file.endswith(".orig.tar.gz"):
1005 if files.has_key(dsc_file) and \
1006 int(files[dsc_file]["size"]) == int(i[0]) and \
1007 files[dsc_file]["md5sum"] == i[1]:
1008 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1010 self.pkg.orig_tar_gz = i[2] + i[3]
# Any other name collision with an archived file is a hard reject.
1014 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz referenced by the .dsc but not uploaded --
# find it in the pool.
1015 elif dsc_file.endswith(".orig.tar.gz"):
1017 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1019 # Strip out anything that isn't '%s' or '/%s$'
# NOTE(review): loop header over the query result lost here as well.
1021 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1025 # Unfortunately, we may get more than one match here if,
1026 # for example, the package was in potato but had an -sa
1027 # upload in woody. So we need to choose the right one.
# NOTE(review): 'ql' is never assigned in the visible span -- presumably
# the filtered query-result list; confirm against VCS history.
1029 x = ql[0]; # default to something sane in case we don't match any or have only one
1033 old_file = i[0] + i[1]
1034 old_file_fh = utils.open_file(old_file)
1035 actual_md5 = apt_pkg.md5sum(old_file_fh)
1037 actual_size = os.stat(old_file)[stat.ST_SIZE]
# A size+md5 match against the .dsc marks this candidate as the keeper.
1038 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1041 legacy_source_untouchable[i[3]] = ""
# Re-checksum the chosen candidate 'x' for the final md5/size check below.
1043 old_file = x[0] + x[1]
1044 old_file_fh = utils.open_file(old_file)
1045 actual_md5 = apt_pkg.md5sum(old_file_fh)
1047 actual_size = os.stat(old_file)[stat.ST_SIZE]
1050 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1051 # See install() in process-accepted...
1052 self.pkg.orig_tar_id = x[3]
1053 self.pkg.orig_tar_gz = old_file
# NOTE(review): 'suite_type' is never assigned in the visible span --
# presumably the location type x[2]; confirm before relying on it.
1054 if suite_type == "legacy" or suite_type == "legacy-mixed":
1055 self.pkg.orig_tar_location = "legacy"
1057 self.pkg.orig_tar_location = x[4]
1059 # Not there? Check the queue directories...
1061 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1062 # See process_it() in 'dak process-unchecked' for explanation of this
1063 # in_unchecked check dropped by ajt 2007-08-28, how did that
# 'and False' deliberately disables this branch (dead code kept for the
# historical record referenced in the comment above).
1065 if os.path.exists(in_unchecked) and False:
1066 return (self.reject_message, in_unchecked)
1068 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1069 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1070 if os.path.exists(in_otherdir):
1071 in_otherdir_fh = utils.open_file(in_otherdir)
1072 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1073 in_otherdir_fh.close()
1074 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1076 self.pkg.orig_tar_gz = in_otherdir
# Exhausted pool and queues: reject.  -1 flags "referenced but missing".
1079 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1080 self.pkg.orig_tar_gz = -1
# Non-orig files may only come from the queue; no pool fallback.
1083 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Final integrity check of whatever copy was located vs the .dsc fields.
1085 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1086 self.reject("md5sum for %s doesn't match %s." % (found, file))
1087 if actual_size != int(dsc_files[dsc_file]["size"]):
1088 self.reject("size for %s doesn't match %s." % (found, file))
1090 return (self.reject_message, None)
# Debug wrapper around projectB.query(): logs the SQL text and the
# wall-clock time the query took to stderr.
1092 def do_query(self, q):
1093 sys.stderr.write("query: \"%s\" ... " % (q))
# Time the query with wall-clock timestamps around the call.
1094 before = time.time()
1095 r = self.projectB.query(q)
1096 time_diff = time.time()-before
1097 sys.stderr.write("took %.3f seconds.\n" % (time_diff))
# NOTE(review): 'r' is unused in this visible span -- the method almost
# certainly continues with "return r" just past this chunk; confirm.