3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
29 ###############################################################################
# Matches a string consisting solely of digits (e.g. a bug number from Closes:).
re_isanum = re.compile (r"^\d+$")
# Extracts the default answer shown in square brackets in an interactive
# prompt, e.g. "[R]eject, Edit, ..." -> "R".
re_default_answer = re.compile(r"\[(.*)\]")
# A blank line; used to reformat free-form "Changes:" text.
re_fdnic = re.compile(r"\n\n")
# Matches the "+bN" version suffix of a binary-only NMU.
re_bin_only_nmu = re.compile(r"\+b\d+$")
36 ################################################################################
38 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Work out which parts of this upload are NEW, i.e. have no override
    entry yet.  Builds a per-package map of override data (priority,
    section, type, component, contributing files) and strips the "new"
    marker from files whose package already has an override in one of
    the target suites.

    NOTE(review): several statements appear to be missing from this
    excerpt (initialisation of `new`, assignment of `f`/`pkg`, some
    control-flow lines such as `continue`/`else`, and the `if warn:`
    guard implied by the parameter); comments describe visible logic only.
    """
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]
        if file_type == "dsc":
        if not new.has_key(pkg):
            # First file seen for this package: record its override data.
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop the "new" marker from files whose package already has an
    # override entry in a target suite.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # Warn about suspicious target suites and cross-component duplicates.
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
101 ################################################################################
# NOTE(review): the enclosing `def` line for this helper (apparently the
# get_type(f) used by determine_new above) is not visible in this excerpt;
# only part of its body is shown.  It maps a file-entry dict to an
# override type name and validates it against the override_type table.
if f.has_key("dbtype"):
    # Binary packages carry their override type directly in "dbtype".
    file_type = f["dbtype"]
elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
    # NOTE(review): this branch's assignment (presumably file_type = "dsc")
    # is missing from the excerpt; the fubar below is likely the `else`.
    utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

# Validate the override type
type_id = database.get_override_type_id(file_type)
    # NOTE(review): guard line (presumably `if type_id == -1:`) not visible.
    utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
119 ################################################################################
121 # check if section/priority values are valid
def check_valid(new):
    """Validate the section/priority of every NEW package.

    Looks up the database ids for each package's section and priority
    and forces the id to -1 (== invalid, so process-new will flag it)
    when the section/priority does not fit the file type.
    """
    for pkg, entry in new.items():
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections may only contain udebs (or the source
        # package itself), and udebs may not live anywhere else.
        is_di = "debian-installer" in section
        if (is_di and file_type not in ("udeb", "dsc")) or \
           (not is_di and file_type == "udeb"):
            entry["section id"] = -1
        # Priority "source" and type "dsc" must occur together (XOR check).
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
139 ###############################################################################
141 # Convenience wrapper to carry around all the package information in
    def __init__(self, **kwds):
        # Store every keyword argument as an instance attribute, so
        # Pkg(changes={}, dsc={}, ...) behaves as a plain record/bag of
        # fields (see the Upload constructor below for typical usage).
        self.__dict__.update(kwds)
147 def update(self, **kwds):
148 self.__dict__.update(kwds)
150 ###############################################################################
    def __init__(self, Cnf):
        """Set up an upload-processing context: acceptance counters, the
        per-upload Pkg state bag, the mail-template substitution map and
        the projectB database connection.

        NOTE(review): some lines appear to be missing from this excerpt
        (e.g. `self.Cnf = Cnf` -- later methods read self.Cnf -- and
        possibly further __XXX__ substitution entries).
        """
        self.accept_count = 0
        self.accept_bytes = 0L   # running byte total, updated in accept()
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})
        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        # Open the PostgreSQL connection and hand it to the database
        # helper module so its lookup functions can use it.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
171 ###########################################################################
173 def init_vars (self):
174 self.pkg.changes.clear()
176 self.pkg.files.clear()
177 self.pkg.dsc_files.clear()
178 self.pkg.legacy_source_untouchable.clear()
179 self.pkg.orig_tar_id = None
180 self.pkg.orig_tar_location = ""
181 self.pkg.orig_tar_gz = None
183 ###########################################################################
    def update_vars (self):
        """Restore the per-upload state previously serialised by
        dump_vars() from the <changes-basename>.dak file.

        The .dak file is a sequence of pickled objects, read back in
        exactly the order dump_vars() wrote them.
        """
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)
        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())
        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()
        # NOTE(review): dump_file is not closed in the visible code, and
        # cPickle is only safe because .dak files are generated locally
        # by dump_vars() -- never unpickle untrusted data.
201 ###########################################################################
203 # This could just dump the dictionaries as is, but I'd like to
204 # avoid this so there's some idea of what process-accepted &
205 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Serialise the interesting per-upload state to
        <dest_dir>/<changes-basename>.dak, for later restoration by
        update_vars() in process-accepted / process-new.

        Only a whitelisted subset of each dictionary is dumped, so the
        .dak format documents what those tools actually consume.

        NOTE(review): this excerpt appears to be missing several lines
        (the d_changes/d_dsc/d_files/d_dsc_files initialisations, the
        try/except around chmod that binds `e`, the p.dump() calls and
        the file close); comments describe visible logic only.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # Restrict the dump to group read/write; a world-readable dump
        # that we cannot chmod is a hard error.
        os.chmod(dump_filename, 0660)
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
            if perms & stat.S_IROTH:
                utils.fubar("%s is world readable and chmod failed." % (dump_filename))
        # Binary pickle protocol 1.
        p = cPickle.Pickler(dump_file, 1)
        # Copy only the whitelisted keys of each file entry.
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        # NOTE(review): the closing of this list literal is not visible in
        # the excerpt.
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]
        # .dsc fields (loop body not visible in this excerpt).
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
        # Dump each object in the fixed order update_vars() reads them.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
280 ###########################################################################
282 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the per-package entries of the mail-template
        substitution map (self.Subst) from the current changes data.

        NOTE(review): lines appear to be missing from this excerpt (e.g.
        a local `Subst = self.Subst` binding and the `else:` separating
        the Changed-By/Maintainer branches); comments describe visible
        logic only.  DictType presumably comes from the `types` module
        (import not visible here) -- confirm.
        """
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
            # NOTE(review): the following three lines are presumably the
            # `else:` branch (Maintainer wins); the else line itself is
            # not visible in this excerpt.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        # Sponsored uploads also get a copy to the sponsor.
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
        # Bcc the package-tracking system if one is configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
324 ###########################################################################
    def build_summaries(self):
        """Build the human-readable (summary, short_summary) pair used in
        the acceptance mails: per-file lines (byhand / new / destination
        in the pool), the reformatted Changes: text and any override
        disparity summary.

        NOTE(review): this excerpt appears to be missing lines (e.g. the
        byhand/new flag assignments, an `else:` before the pool-name
        section, closing of deb_fh, and the conditionals guarding the
        final summary additions); comments describe visible logic only.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        byhand = summary = new = ""
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}
        override_summary ="";
        file_keys = files.keys()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For new debs, include the package description in the mail.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
            # Known files: record where they will land in the pool.
            files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
            summary += file_entry + "\n  to " + destination + "\n"
            if not files[file_entry].has_key("type"):
                files[file_entry]["type"] = "unknown"
            if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                # (queue/unchecked), there we have override entries already, use them
                # (process-new), there we dont have override entries, use the newly generated ones.
                override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
        # short_summary omits the Changes:/override text appended below.
        short_summary = summary
        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))
        summary += "Changes: " + f
        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
        summary += self.announce(short_summary, 0)
        return (summary, short_summary)
380 ###########################################################################
    def close_bugs (self, summary, action):
        """Send a bug-close mail for every bug listed in the upload's
        Closes: field; returns the summary extended with the list of
        closed bugs.

        NOTE(review): lines appear to be missing from this excerpt (the
        `for bug in bugs:` loop head, the Subst/Cnf local bindings, the
        `if action:` guards, the return, and the closing line of the
        stable-warning string literal below); comments describe visible
        logic only.
        """
        changes = self.pkg.changes
        bugs = changes["closes"].keys()
        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        # Uploads targeted at (proposed-)stable get an extra caveat in
        # the bug-close mail.
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
415 ###########################################################################
    def announce (self, short_summary, action):
        """Announce the upload to each target suite's configured
        announcement list (once per list) and, if configured, hand off
        to close_bugs(); returns a textual summary of what was done.

        NOTE(review): lines appear to be missing from this excerpt (the
        Subst/Cnf/lists_done/summary initialisations, early return,
        `continue` bodies and the `if action:` guards); comments describe
        visible logic only.
        """
        changes = self.pkg.changes
        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
        Subst["__SHORT_SUMMARY__"] = short_summary
        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Skip suites without a list, and don't mail any list twice.
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            # Bcc the package-tracking system if one is configured.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)
        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
449 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept an upload: move the .changes and all its files into the
        accepted queue, update the acceptance counters, send the accept
        mail / announcements, write the DebBugs version-tracking files
        and enqueue the package for auto-building.

        NOTE(review): lines appear to be missing from this excerpt (local
        Cnf/Subst/dsc bindings, `debinfo.close()`, and some comment
        continuation lines); comments describe visible logic only.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        self.Logger.log(["Accepting changes",changes_file])
        # Persist the upload state so process-accepted can restore it.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])
        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1
        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)
        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):
                # Write to a dot-prefixed temp file first, then rename into
                # place so readers never see a partial file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
519 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files for clean auto-building: for each
        target suite listed in Dinstall::QueueBuildSuites, copy (security
        setup) or symlink the files into the queue-build directory and
        record them in the queue_build table, including the pool
        .orig.tar.gz when one is referenced.

        NOTE(review): lines appear to be missing from this excerpt (the
        local Cnf binding, several `else:`/`continue` lines, `filename`
        assignment for the orig.tar.gz and the ql/getresult handling);
        comments describe visible logic only.
        NOTE(review): SQL below is built by string interpolation; the
        values come from the archive configuration and database rather
        than end users, but parameterised queries would still be safer.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        file_keys = files.keys()
        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            # Security setups keep one queue-build tree per suite.
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                    dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
        self.projectB.query("COMMIT WORK")
577 ###########################################################################
    def check_override (self):
        """Compare each binary's declared section/priority against the
        override table and mail the maintainer about any disparities.

        NOTE(review): lines appear to be missing from this excerpt (the
        early return body, `summary` initialisation, the "no disparities"
        short-circuit and the Subst/Cnf local bindings); comments describe
        visible logic only.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
        file_keys = files.keys()
        for file_entry in file_keys:
            # NEW files have no override entry yet; only debs are checked.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                # "-" means the package declined to state a value.
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
615 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        NOTE(review): lines appear to be missing from this excerpt (the
        `continue` bodies, the try/except pairs around os.open that bind
        `e`, and the morgue retry's failure branch); comments describe
        visible logic only.
        """
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            # O_EXCL: creating the file atomically claims the name, so a
            # racing attacker cannot swap in a symlink/file underneath us.
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # File exists?  Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                morgue_file = utils.find_next_free(morgue_file)
                except NoFreeFilenameError:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
                # Retry the exclusive create now the old file is out of the way.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
657 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally let the operator compose
        a rejection message in $EDITOR, move all the files into the
        reject directory, write a <changes-basename>.reason file and send
        the rejection mail.

        NOTE(review): lines appear to be missing from this excerpt (the
        edit/confirm loop's exits, the local Cnf/Subst/pkg bindings, the
        `if not manual:`/`else:` split between the automatic and manual
        mail branches, and the close of reason_fd); comments describe
        visible logic only.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            # Loop until an answer matching the prompt is given; the
            # bracketed letter is the default on empty input.
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
            os.unlink(temp_filename)
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)
        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        # Automatic rejection: canned headers, message body is the
        # caller-supplied reject_message.
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)
        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)
        self.Logger.log(["rejected", pkg.changes_file])
732 ################################################################################
734 # Ensure that source exists somewhere in the archive for the binary
735 # upload being processed.
737 # (1) exact match => 1.0-3
738 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that the source for a binary upload exists in the
        archive, in the given suites (or any suite): either an exact
        version match, or -- for binary-only NMUs -- the version with
        its "+bN" suffix stripped.

        NOTE(review): mutable default argument (suites = ["any"]) -- safe
        only while no caller mutates it; a None default would be safer.
        NOTE(review): several lines are missing from this excerpt (the
        completion of the first query, the suite-expansion loop around
        the `if x[1] in s` test, and the return statements).
        """
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        # Keep only the (from, to) pairs of "map"/"silent-map" entries.
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        if x[1] in s and x[0] not in s:
        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)
        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]
        # (1) exact version match
        if source_version in ql:
        # (2) binary-only NMU: strip the +bN suffix and try again
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
780 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry for a package in a suite; on a hit,
        stash the override section/priority on the file entry (for
        check_override) and return the result.

        NOTE(review): lines appear to be missing from this excerpt (the
        source/binary file_type branches, the unknown-suite/component/
        type guards and the return statements); comments describe visible
        logic only.
        """
        files = self.pkg.files
        if binary_type == "": # must be source
        file_type = binary_type
        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()
        # Remember the section and priority so we can check them later if appropriate
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]
819 ################################################################################
821 def reject (self, str, prefix="Rejected: "):
823 # Unlike other rejects we add new lines first to avoid trailing
824 # new lines when this message is passed back up to a caller.
825 if self.reject_message:
826 self.reject_message += "\n"
827 self.reject_message += prefix + str
829 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version among the (version, suite_name)
        rows in query_result whose suite is `suite` or one of the suites
        configured as enhancing it.

        NOTE(review): lines are missing from this excerpt -- the
        initialisation of `anyversion`, the body of the inner `if`
        (presumably `anyversion = v`) and the return statement are not
        visible.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            # Suite names from the database are compared case-insensitively.
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
840 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        query_result holds (version, suite_name) rows for the existing
        package; rejections are accumulated via self.reject().

        NOTE(review): lines appear to be missing from this excerpt (e.g.
        the assignment of `suite` from `entry`, an `if not add_version:`
        guard, and the final `else:`); comments describe visible logic
        only.
        """
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # NOTE(review): `i.lower` lacks call parentheses -- as written
            # these lists hold bound methods, not lowercased names
            # (contrast x.lower() in get_anyversion); confirm intent.
            must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    # "distribution-version" maps a conflicting suite to
                    # the suite this upload should be propagated to instead.
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]
                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        self.reject("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                    # NOTE(review): the `else:` for the plain-reject case is
                    # not visible in this excerpt.
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
905 ################################################################################
def check_binary_against_db(self, file):
    """Validate the binary 'file' (a key into self.pkg.files) against the
    projectB database.

    Two checks are performed:
      1. cross-suite version sanity for the package on its architecture
         (or 'all'), via self.cross_suite_version_check();
      2. that this exact package/version/architecture is not already in
         the archive (overwrites are rejected).

    Returns the accumulated self.reject_message string ("" if clean).
    """
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane
    # NOTE(review): the 'architecture a' alias line was missing from the
    # listing (the FROM list ended with a trailing comma while WHERE
    # references a.arch_string); restored here so the query is valid SQL.
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                     architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    # Only reject when a matching binary actually exists; the listing had
    # lost this guard, which would have rejected every upload.
    if q.getresult():
        self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
934 ################################################################################
def check_source_against_db(self, file):
    """Validate the source package named in self.pkg.dsc against the
    projectB database: look up every (version, suite) pair already known
    for this source and run self.cross_suite_version_check() over them.

    Returns the accumulated self.reject_message string ("" if clean).
    """
    self.reject_message = ""
    # NOTE(review): the listing used 'dsc' without ever binding it, which
    # would raise NameError; bind it from the parsed .dsc fields held on
    # the upload object, matching how sibling checks read self.pkg.*.
    dsc = self.pkg.dsc

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

    return self.reject_message
948 ################################################################################
951 # NB: this function can remove entries from the 'files' index [if
952 # the .orig.tar.gz is a duplicate of the one in the archive]; if
953 # you're iterating over 'files' and call this function as part of
954 # the loop, be sure to add a check to the top of the loop to
955 # ensure you haven't just tried to dereference the deleted entry.
#
# Check every file named in the .dsc against the archive database and
# the queue directories, and try to locate the .orig.tar.gz when it is
# not part of the upload.  Returns a (reject_message, path) tuple; the
# path is non-None only on the early return at listing line 1059 below,
# otherwise None.
#
# NOTE(review): the embedded listing numbers jump (978, 980, 982-983,
# 986-987, 992-993, ...), so several statements are NOT visible in this
# chunk -- e.g. the `ql = q.getresult()` assignments, the `for i in ql:`
# headers that bind `i`, the binding of `suite_type`, and some else
# branches.  Comments below describe only what is actually shown.
958 def check_dsc_against_db(self, file):
959 self.reject_message = ""
960 files = self.pkg.files
961 dsc_files = self.pkg.dsc_files
962 legacy_source_untouchable = self.pkg.legacy_source_untouchable
# Cleared here; set further down when an orig tarball is located (or -1
# when it cannot be found anywhere).
963 self.pkg.orig_tar_gz = None
965 # Try and find all files mentioned in the .dsc. This has
966 # to work harder to cope with the multiple possible
967 # locations of an .orig.tar.gz.
968 # The ordering on the select is needed to pick the newest orig
969 # when it exists in multiple places.
970 for dsc_file in dsc_files.keys():
# Case 1: the file is part of this upload.
972 if files.has_key(dsc_file):
973 actual_md5 = files[dsc_file]["md5sum"]
974 actual_size = int(files[dsc_file]["size"])
975 found = "%s in incoming" % (dsc_file)
976 # Check the file does not already exist in the archive
# LIKE '%name%' over-matches; the filter just below narrows the rows to
# exact or '/name'-suffixed filenames.  (The loop binding `i` to each
# result row is one of the lines missing from this listing.)
977 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
979 # Strip out anything that isn't '%s' or '/%s$'
981 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
984 # "[dak] has not broken them. [dak] has fixed a
985 # brokenness. Your crappy hack exploited a bug in
988 # "(Come on! I thought it was always obvious that
989 # one just doesn't release different files with
990 # the same name and version.)"
991 # -- ajk@ on d-devel@l.d.o
994 # Ignore exact matches for .orig.tar.gz
# An .orig.tar.gz whose size and md5sum match the archive copy is only
# warned about and the pooled path (l.path + f.filename) is reused.
# (Per the NB above, its removal from 'files' -- listing line 1002 --
# is presumably nearby but not visible here; confirm in the full file.)
996 if dsc_file.endswith(".orig.tar.gz"):
998 if files.has_key(dsc_file) and \
999 int(files[dsc_file]["size"]) == int(i[0]) and \
1000 files[dsc_file]["md5sum"] == i[1]:
1001 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1003 self.pkg.orig_tar_gz = i[2] + i[3]
# Any other archive hit with the same name is a hard reject.
1007 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz referenced by the .dsc but not uploaded --
# search the pool for it.
1008 elif dsc_file.endswith(".orig.tar.gz"):
1010 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1012 # Strip out anything that isn't '%s' or '/%s$'
1014 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1018 # Unfortunately, we may get more than one match here if,
1019 # for example, the package was in potato but had an -sa
1020 # upload in woody. So we need to choose the right one.
1022 x = ql[0]; # default to something sane in case we don't match any or have only one
# Hash/size each candidate; a row matching the .dsc's md5sum and size
# is both selected and marked untouchable (by its files id, i[3]).
1026 old_file = i[0] + i[1]
1027 old_file_fh = utils.open_file(old_file)
1028 actual_md5 = apt_pkg.md5sum(old_file_fh)
1030 actual_size = os.stat(old_file)[stat.ST_SIZE]
1031 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1034 legacy_source_untouchable[i[3]] = ""
# Recompute md5/size from the chosen row `x` so the final checks at the
# bottom of the loop compare against the file actually selected.
1036 old_file = x[0] + x[1]
1037 old_file_fh = utils.open_file(old_file)
1038 actual_md5 = apt_pkg.md5sum(old_file_fh)
1040 actual_size = os.stat(old_file)[stat.ST_SIZE]
1043 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1044 # See install() in process-accepted...
1045 self.pkg.orig_tar_id = x[3]
1046 self.pkg.orig_tar_gz = old_file
# NOTE(review): `suite_type` is bound on a line not shown in this
# listing (presumably from the location row x[2]); verify in the full file.
1047 if suite_type == "legacy" or suite_type == "legacy-mixed":
1048 self.pkg.orig_tar_location = "legacy"
1050 self.pkg.orig_tar_location = x[4]
1052 # Not there? Check the queue directories...
1054 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1055 # See process_it() in 'dak process-unchecked' for explanation of this
1056 # in_unchecked check dropped by ajt 2007-08-28, how did that
# `and False` deliberately disables this branch (see the 2007-08-28
# note above); the early return is dead code kept for reference.
1058 if os.path.exists(in_unchecked) and False:
1059 return (self.reject_message, in_unchecked)
1061 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1062 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1063 if os.path.exists(in_otherdir):
1064 in_otherdir_fh = utils.open_file(in_otherdir)
1065 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1066 in_otherdir_fh.close()
1067 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1069 self.pkg.orig_tar_gz = in_otherdir
# Not in the pool and not in any queue: reject.  orig_tar_gz = -1 is
# the sentinel for "referenced but unfindable".
1072 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1073 self.pkg.orig_tar_gz = -1
# Case 3: a non-orig file that was not uploaded -- it can only be in
# the queue, so failing to find it there is a reject.
1076 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Final consistency check: whatever copy was found must match the
# md5sum and size declared in the .dsc itself.
1078 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1079 self.reject("md5sum for %s doesn't match %s." % (found, file))
1080 if actual_size != int(dsc_files[dsc_file]["size"]):
1081 self.reject("size for %s doesn't match %s." % (found, file))
1083 return (self.reject_message, None)
1085 def do_query(self, q):
1086 sys.stderr.write("query: \"%s\" ... " % (q))
1087 before = time.time()
1088 r = self.projectB.query(q)
1089 time_diff = time.time()-before
1090 sys.stderr.write("took %.3f seconds.\n" % (time_diff))