4 # Queue utility functions for dak
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
30 ###############################################################################
# Precompiled regular expressions used throughout this module.
re_isanum = re.compile (r"^\d+$")           # a string consisting only of digits
re_default_answer = re.compile(r"\[(.*)\]") # default choice in a "[R]eject, ..." prompt
re_fdnic = re.compile(r"\n\n")              # blank-line separator (folded for direport)
re_bin_only_nmu = re.compile(r"\+b\d+$")    # binary-only NMU version suffix, e.g. "+b1"
37 ################################################################################
39 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Work out which parts of the upload are NEW, i.e. lack an override
    entry in the database for every suite the .changes targets.

    changes  -- parsed .changes mapping (must contain a "suite" dict)
    files    -- per-file mapping from the .changes
    projectB -- open pg database connection used for the override lookup
    warn     -- when true, print warnings about (old)stable uploads and
                packages already present in another component

    NOTE(review): this excerpt omits several original lines (the
    initialisation of ``new``, the ``f = files[file_entry]`` and
    ``pkg = f["package"]`` assignments, the ``continue`` under the byhand
    check, an ``else:``/inner ``if``, the ``q.getresult()`` handling and
    the final ``return new``) -- it is not runnable as shown; confirm
    against the complete source before changing it.
    """
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if file_type == "dsc":

        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop anything that already has an override in every targeted suite.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]

        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
        if changes["suite"].has_key("oldstable"):
            print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
102 ################################################################################
    # NOTE(review): this excerpt shows only the interior of a helper
    # (presumably ``get_type(f)``) -- its ``def`` line, the assignment in
    # the elif branch, the surrounding ``else:`` / ``if type_id == -1:``
    # guards and the final ``return file_type`` are missing; confirm
    # against the full source.
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
120 ################################################################################
122 # check if section/priority values are valid
def check_valid(new):
    """Resolve and sanity-check the section/priority of every NEW entry.

    For each package in *new*, look up the database ids of its section
    and priority, storing them under "section id" / "priority id".  An
    id of -1 (from a failed lookup or from the checks below) marks the
    corresponding value as invalid.
    """
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections may only hold udebs (or source), and
        # udebs may only live in a debian-installer section.
        is_di_section = section.find("debian-installer") != -1
        if (is_di_section and file_type not in ("udeb", "dsc")) or \
           (not is_di_section and file_type == "udeb"):
            entry["section id"] = -1
        # only source packages carry the "source" priority, and vice versa
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            entry["priority id"] = -1
140 ###############################################################################
142 # Convenience wrapper to carry around all the package information in
    def __init__(self, **kwds):
        """Store every keyword argument directly as an instance attribute."""
        self.__dict__.update(kwds)
148 def update(self, **kwds):
149 self.__dict__.update(kwds)
151 ###############################################################################
    def __init__(self, Cnf):
        """Set up per-run upload state: counters, an empty Pkg holder, the
        global template-substitution map and the database connection.

        NOTE(review): this excerpt omits a few original lines here --
        presumably including ``self.Cnf = Cnf`` (other methods read
        ``self.Cnf``); confirm against the full source.
        """
        self.accept_count = 0
        self.accept_bytes = 0L
        # Container for all per-upload parsed data (see class Pkg).
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
172 ###########################################################################
    def init_vars (self):
        """Reset the per-upload state in self.pkg to empty/default values.

        NOTE(review): one line is missing from this excerpt between the
        first two ``clear()`` calls -- presumably ``self.pkg.dsc.clear()``;
        confirm against the full source.
        """
        self.pkg.changes.clear()
        self.pkg.files.clear()
        self.pkg.dsc_files.clear()
        self.pkg.legacy_source_untouchable.clear()
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None
184 ###########################################################################
    def update_vars (self):
        """Reload per-upload state from the ".dak" dump file written by
        dump_vars() next to self.pkg.changes_file."""
        # ".changes" is 8 characters; swap the extension for ".dak".
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)
        # Load order must mirror the dump order in dump_vars().
        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())
        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()
        # NOTE(review): the excerpt ends here -- the original presumably
        # also closes dump_file; confirm against the full source.
202 ###########################################################################
    # This could just dump the dictionaries as is, but I'd like to
    # avoid this so there's some idea of what process-accepted &
    # process-new use from process-unchecked

    def dump_vars(self, dest_dir):
        """Pickle a curated subset of the per-upload state into
        ``<changes>.dak`` under *dest_dir*, for later reload by
        update_vars().

        NOTE(review): this excerpt omits several original lines (the
        ``dsc`` local, the ``try:``/``except`` around chmod, the
        initialisation of the d_* dicts, a list continuation in the
        optional-changes loop, the dsc copy loop body and the final
        ``p.dump(i)``/close) -- not runnable as shown; confirm against
        the full source.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
            os.chmod(dump_filename, 0664)
        # chmod may fail when the dumpfile is not owned by the user
        # invoking dak (like e.g. when NEW is processed by a member
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                # security precaution, should never happen unless a weird
                # umask is set anywhere
                if perms & stat.S_IWOTH:
                    utils.fubar("%s is world writable and chmod failed." % \
                # ignore the failed chmod otherwise as the file should
                # already have the right privileges and is just, at worst,
                # unreadable for world

        # Protocol 1 (binary) pickle of the curated dicts below.
        p = cPickle.Pickler(dump_file, 1)

        # Copy only the whitelisted per-file keys.
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]

        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        # NOTE(review): the closing bracket/continuation of this list is
        # missing from the excerpt.
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]

        # Optional dsc fields (loop body missing from excerpt).
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:

        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump each object in the fixed order update_vars() reloads them
        # (loop body -- presumably ``p.dump(i)`` -- missing from excerpt).
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
290 ###########################################################################
292 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh self.Subst with the per-package template substitutions
        (architecture, filename, maintainer addressing, reject message).

        NOTE(review): this excerpt omits the ``Subst = self.Subst`` local
        and an ``else:`` line (see inline note); not runnable as shown.
        """
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # NOTE(review): the matching ``else:`` is missing from this
        # excerpt; the next three assignments are its body.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
334 ###########################################################################
    def build_summaries(self):
        """Build and return ``(summary, short_summary)`` strings describing
        the upload (byhand/new files, pool destinations, override entries).

        NOTE(review): this excerpt omits several original lines (the
        byhand/new flag assignments, an ``else:`` before the pool-move
        lines, an ``if byhand or new:`` guard before the Changes section);
        some indentation below reflects that missing structure.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                if files[file_entry]["type"] == "deb":
                    # Pull the package Description out of the .deb's control file.
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
390 ###########################################################################
    def close_bugs (self, summary, action):
        """Append a "Closing bugs: ..." line to *summary* and, when
        *action* is true, send a bug-close mail for each closed bug.

        NOTE(review): this excerpt omits several original lines (the
        Subst/Cnf locals, the ``for bug in bugs:`` loop header, the
        ``if action:`` guards, the closing quotes of the stable-warning
        string literal and the final ``return``); not runnable as shown.
        """
        changes = self.pkg.changes
        bugs = changes["closes"].keys()
        summary += "Closing bugs: "
            summary += "%s " % (bug)
                Subst["__BUG_NUMBER__"] = bug
                if changes["distribution"].has_key("stable"):
                    Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
                    Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
                utils.send_mail (mail_message)
            self.Logger.log(["closing bugs"]+bugs)
425 ###########################################################################
    def announce (self, short_summary, action):
        """Announce the upload to the per-suite announce lists; when
        *action* is true actually send the mails.  Returns the updated
        summary text (via close_bugs when bug-closing is enabled).

        NOTE(review): this excerpt omits several original lines (the
        Subst/Cnf/lists_done/summary locals, the early ``return``s, the
        ``continue`` and the ``if action:`` guard); not runnable as shown.
        """
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

                Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
459 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept the upload: dump state, move the .changes and its files
        into the accepted queue, send the accepted mail / announcements,
        write DebBugs version-tracking files and feed the build queues.

        NOTE(review): this excerpt omits several original lines (the
        Cnf/Subst/dsc locals among others); not runnable as shown.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        # Persist the parsed state so process-accepted/process-new can reuse it.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a temp file first, then rename into place
                # (atomic on the same filesystem).
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
529 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files with the auto-build queue *queue*:
        copy or symlink each file into the per-suite queue-build directory
        and record it in the queue_build table.

        NOTE(review): this excerpt omits several original lines (Cnf
        local, ``continue``/``else:`` branches, ``filename = dsc_file``,
        the ``ql = q.getresult()`` handling); not runnable as shown.
        NOTE(review): the SQL below is built by string interpolation; the
        values come from dak's own config/database rather than user input,
        but parameterized queries would still be safer -- confirm before
        changing.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
587 ###########################################################################
    def check_override (self):
        """Mail the maintainer when the package's section/priority differ
        from the override database entries (sourceful uploads only).

        NOTE(review): this excerpt omits several original lines (the
        Subst/Cnf locals, the ``return`` under the abandon check, the
        ``summary = ""`` initialisation and the empty-summary early
        return); not runnable as shown.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        file_keys = files.keys()
        for file_entry in file_keys:
            # Only binaries with an existing override entry are compared.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
625 ###########################################################################
    def force_reject (self, files):
        # NOTE(review): the docstring below is truncated in this excerpt
        # (its closing quotes and the Cnf/Subst locals are missing), as
        # are the try/except lines around both os.open calls and the
        # ``continue``/``return`` statements; not runnable as shown.
        """Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
                # O_EXCL ensures we claim the destination atomically.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                # File exists? Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                    utils.move(dest_file, morgue_file, perms=0660)
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
667 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the upload: optionally let the operator edit the reject
        message, move all files into the reject queue, write the
        ``<foo>.reason`` file and send the rejection mail.

        NOTE(review): this excerpt omits several original lines (the
        edit/answer loop plumbing, the Abandon/Quit handling, the
        pkg/Cnf/Subst locals, the ``if not manual:``/``else:`` guards and
        the fd close/return); not runnable as shown.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

            # Automatic rejection branch (``if not manual:`` missing above).
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
742 ################################################################################
744 # Ensure that source exists somewhere in the archive for the binary
745 # upload being processed.
747 # (1) exact match => 1.0-3
748 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that a matching source package exists in the archive for
        a binary upload -- either an exact version match or the base
        version of a binary-only NMU (``+bN`` suffix stripped).

        NOTE(review): this excerpt omits several original lines (the
        ``okay`` flag, the ``for suite in suites:``/``if suite == "any":``
        headers, the ``else:`` branch structure, the ``continue``s and the
        final return); not runnable as shown.  Also note the mutable
        default argument ``suites = ["any"]`` -- harmless while callers
        never mutate it, but fragile.
        """
                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
                maps = self.Cnf.ValueList("SuiteMappings")[:]
                maps = [ m.split() for m in maps ]
                maps = [ (x[1], x[2]) for x in maps
                                if x[0] == "map" or x[0] == "silent-map" ]
                    if x[1] in s and x[0] not in s:

                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
            q = self.projectB.query(que)

            # Reduce the query results to a list of version numbers
            ql = [ i[0] for i in q.getresult() ]

            # (1) exact match
            if source_version in ql:

            # (2) binary-only NMU: strip the "+bN" suffix and retry
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:
790 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up *package*'s override entry (section, priority) for the
        given suite/component/type; a source lookup falls back to the
        binary override types.  Side effect: stores "override section" /
        "override priority" on files[file] when an entry is found.

        NOTE(review): this excerpt omits several original lines (the
        ``file_type = "dsc"``/``else:`` pair, the unknown-suite guard and
        the ``if result:``/return plumbing); not runnable as shown.
        """
        files = self.pkg.files

        if binary_type == "": # must be source
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                           % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                               % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]
829 ################################################################################
831 def reject (self, str, prefix="Rejected: "):
833 # Unlike other rejects we add new lines first to avoid trailing
834 # new lines when this message is passed back up to a caller.
835 if self.reject_message:
836 self.reject_message += "\n"
837 self.reject_message += prefix + str
839 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version among the (version, suite) rows of
        *query_result* whose suite is *suite* or a suite that "Enhances"
        it (per the VersionChecks config).

        NOTE(review): this excerpt omits the ``anyversion`` initialisation,
        the ``anyversion = v`` assignment and the final ``return``; not
        runnable as shown.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
850 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version,
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""
        # NOTE(review): this excerpt omits several original lines (the
        # signature's continuation -- presumably ``sourceful=False``) --
        # plus ``suite = entry[1]``, the ``cansave`` flag and the
        # ``cansave = 1``/``if not cansave:`` plumbing in the
        # distribution-version branch; not runnable as shown.

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                if suite in must_be_newer_than and sourceful and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1

                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
916 ################################################################################
918 def check_binary_against_db(self, file):
919 self.reject_message = ""
920 files = self.pkg.files
922 # Ensure version is sane
923 q = self.projectB.query("""
924 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
926 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
927 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
928 % (files[file]["package"],
929 files[file]["architecture"]))
930 self.cross_suite_version_check(q.getresult(), file,
931 files[file]["version"], sourceful=False)
933 # Check for any existing copies of the file
934 q = self.projectB.query("""
935 SELECT b.id FROM binaries b, architecture a
936 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
937 AND a.id = b.architecture"""
938 % (files[file]["package"],
939 files[file]["version"],
940 files[file]["architecture"]))
942 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
944 return self.reject_message
946 ################################################################################
948 def check_source_against_db(self, file):
949 self.reject_message = ""
952 # Ensure version is sane
953 q = self.projectB.query("""
954 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
955 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
956 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
959 return self.reject_message
961 ################################################################################
964 # NB: this function can remove entries from the 'files' index [if
965 # the .orig.tar.gz is a duplicate of the one in the archive]; if
966 # you're iterating over 'files' and call this function as part of
967 # the loop, be sure to add a check to the top of the loop to
968 # ensure you haven't just tried to dereference the deleted entry.
    def check_dsc_against_db(self, file):
        """Cross-check every file listed in the .dsc against the archive,
        the pool, and the queue directories.

        Side effects on self.pkg: sets orig_tar_gz (a path when the orig
        tarball is located, None initially, or -1 when it cannot be found),
        and, when a pooled copy is reused, orig_tar_id / orig_tar_location
        and a "files id" entry in dsc_files; records non-matching legacy
        pool copies in legacy_source_untouchable.

        NB: per the comment preceding this method in the file, it can
        remove entries from the 'files' index, so callers iterating over
        'files' must re-check keys after calling it.

        @param file: name of the .dsc file (used in reject messages)
        @return: tuple (reject_message, in_unchecked-path or None)

        NOTE(review): several structural lines appear to have been lost
        from this copy of the block (the query-result bindings such as
        'ql = q.getresult()', the 'for i in ql:' filter loops, and some
        if/else guards around the reject calls) -- 'ql', 'i' and
        'suite_type' below are otherwise unbound. Compare with upstream
        dak before relying on the exact control flow as written here.
        """
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            if files.has_key(dsc_file):
                # The file is part of this upload: remember its md5/size so
                # the declared-vs-actual checks at the bottom apply to it.
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                # (the LIKE above matches any substring, so keep only exact
                # basename or pool-path '/<name>' suffix matches).
                if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dak.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                # Ignore exact matches for .orig.tar.gz
                if dsc_file.endswith(".orig.tar.gz"):
                    if files.has_key(dsc_file) and \
                       int(files[dsc_file]["size"]) == int(i[0]) and \
                       files[dsc_file]["md5sum"] == i[1]:
                        # Same size and md5sum as the archive copy: keep the
                        # pooled file (path + filename from the query row).
                        self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                        self.pkg.orig_tar_gz = i[2] + i[3]
                self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Not part of this upload: look for the orig tarball in the pool.
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:

                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody.  So we need to choose the right one.

                x = ql[0]; # default to something sane in case we don't match any or have only one

                # Compare each candidate pooled copy's real md5/size against
                # what the .dsc declares; non-matching legacy copies are
                # recorded as untouchable (keyed by files id).
                old_file = i[0] + i[1]
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                    legacy_source_untouchable[i[3]] = ""

                # Re-read md5/size from the chosen copy 'x' for the final
                # declared-vs-actual checks below.
                old_file = x[0] + x[1]
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                # See install() in process-accepted...
                self.pkg.orig_tar_id = x[3]
                self.pkg.orig_tar_gz = old_file
                if suite_type == "legacy" or suite_type == "legacy-mixed":
                    self.pkg.orig_tar_location = "legacy"
                self.pkg.orig_tar_location = x[4]

                # Not there? Check the queue directories...
                in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                # See process_it() in 'dak process-unchecked' for explanation of this
                # in_unchecked check dropped by ajt 2007-08-28, how did that
                # ever make it into the archive till now?
                # ('and False' deliberately disables this early return.)
                if os.path.exists(in_unchecked) and False:
                    return (self.reject_message, in_unchecked)
                for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                    in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
                    if os.path.exists(in_otherdir):
                        in_otherdir_fh = utils.open_file(in_otherdir)
                        actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                        in_otherdir_fh.close()
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                        self.pkg.orig_tar_gz = in_otherdir

                self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                self.pkg.orig_tar_gz = -1  # sentinel: orig tarball is missing
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))

            # Final declared-vs-actual consistency checks for this entry.
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)
1098 def do_query(self, q):
1099 sys.stderr.write("query: \"%s\" ... " % (q))
1100 before = time.time()
1101 r = self.projectB.query(q)
1102 time_diff = time.time()-before
1103 sys.stderr.write("took %.3f seconds.\n" % (time_diff))