3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
29 ###############################################################################
# Pre-compiled regexes used throughout this module.
re_isanum = re.compile (r"^\d+$")            # a string that is entirely digits (bug numbers)
re_default_answer = re.compile(r"\[(.*)\]")  # extracts the [D]efault choice from a prompt string
re_fdnic = re.compile(r"\n\n")               # blank line separator (used to reformat changelogs)
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "1.0-3+b1"
36 ################################################################################
# Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Build the 'new' index of upload parts lacking override entries.

    changes/files are the parsed .changes stanzas; projectB is an open
    pg database connection.
    NOTE(review): several original lines are elided from this view
    (the 'new = {}' initialisation, the per-file 'f'/'pkg' bindings,
    'continue' statements, the query-result check and the 'warn'
    guard) -- indentation below is reconstructed; verify against the
    full source.
    """
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]
        if file_type == "dsc":
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]
    # Anything already present in a target suite's override table is not NEW.
    # NOTE(review): pkg/suite ids are interpolated straight into the SQL
    # string; they come from the archive database, but parameterised
    # queries would still be safer.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]
    # NOTE(review): these warnings are presumably guarded by 'if warn:'
    # in the elided lines -- confirm against the full source.
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
101 ################################################################################
    # NOTE(review): this is the interior of get_type(f) -- the 'def'
    # line and several lines (the 'dsc' assignment for source artefacts,
    # an 'else:', the 'type_id == -1' guard and the return) are elided
    # from this view.
    # Prefer the database type if the file entry already carries one.
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
    utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
119 ################################################################################
# check if section/priority values are valid
def check_valid(new):
    """Resolve and sanity-check the section/priority of each NEW entry.

    For every package in 'new', look up the section and priority ids via
    the database module and store them under the "section id" and
    "priority id" keys.  An id of -1 marks an invalid combination:
      - a 'debian-installer' section is only valid for udebs, and udebs
        must use a 'debian-installer' section;
      - the 'source' priority is only valid for .dsc entries, and .dsc
        entries must use the 'source' priority.
    Modifies 'new' in place; returns nothing.
    """
    for pkg in new.keys():
        section = new[pkg]["section"]
        priority = new[pkg]["priority"]
        file_type = new[pkg]["type"]
        new[pkg]["section id"] = database.get_section_id(section)
        # consistency: use the 'priority' local extracted above
        # (previously re-read new[pkg]["priority"])
        new[pkg]["priority id"] = database.get_priority_id(priority)
        # 'debian-installer' sections and udebs must go together
        di = section.find("debian-installer") != -1
        if (di and file_type != "udeb") or (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
139 ###############################################################################
141 # Convenience wrapper to carry around all the package information in
144 def __init__(self, **kwds):
145 self.__dict__.update(kwds)
147 def update(self, **kwds):
148 self.__dict__.update(kwds)
150 ###############################################################################
    def __init__(self, Cnf):
        """Set up per-run state: accept counters, the Pkg state holder,
        the mail-template substitution map and the projectB database
        connection.
        NOTE(review): at least one line is elided from this view --
        presumably 'self.Cnf = Cnf', which later methods rely on."""
        self.accept_count = 0
        self.accept_bytes = 0L
        # Empty containers for everything parsed out of an upload
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})
        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        # Open the archive database and initialise the caching layer
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
171 ###########################################################################
173 def init_vars (self):
174 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
175 exec "self.pkg.%s.clear();" % (i)
176 self.pkg.orig_tar_id = None
177 self.pkg.orig_tar_location = ""
178 self.pkg.orig_tar_gz = None
180 ###########################################################################
182 def update_vars (self):
183 dump_filename = self.pkg.changes_file[:-8]+".dak"
184 dump_file = utils.open_file(dump_filename)
185 p = cPickle.Unpickler(dump_file)
186 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
187 exec "self.pkg.%s.update(p.load());" % (i)
188 for i in [ "orig_tar_id", "orig_tar_location" ]:
189 exec "self.pkg.%s = p.load();" % (i)
192 ###########################################################################
    # This could just dump the dictionaries as is, but I'd like to
    # avoid this so there's some idea of what process-accepted &
    # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Serialise a whitelisted subset of self.pkg into
        <changes>.dak in dest_dir (read back later by update_vars).
        NOTE(review): this view elides several lines -- the try/except
        around chmod, the 'd_*' dict initialisations, the dsc copy
        loop body and the final pickle/close calls -- so the code
        below is a partial listing with reconstructed indentation."""
        # Bind locals changes/dsc/files/... from the self.pkg attributes
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i)
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # Group-writable so other dak processes can rewrite the dump
        # (the surrounding try:/except OSError is elided in this view)
        os.chmod(dump_filename, 0660)
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
            # A world-readable dump that we failed to chmod is an error
            if perms & stat.S_IROTH:
                utils.fubar("%s is world readable and chmod failed." % (dump_filename))
        p = cPickle.Pickler(dump_file, 1)
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
        # Only a whitelisted subset of each files[] entry is dumped
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
        # Each trimmed structure is pickled in this fixed order
        # (update_vars depends on it); the p.dump() call is elided here.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
261 ###########################################################################
    # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh self.Subst from the current self.pkg.changes.
        NOTE(review): the 'Subst = self.Subst' binding and at least one
        'else:' line are elided from this view; 'DictType' presumably
        comes from an elided 'from types import ...' -- confirm."""
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # (the 'else:' line -- binary/maintainer upload path -- is elided)
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
        # Bcc the package tracking system when one is configured
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
305 ###########################################################################
    def build_summaries(self):
        """Build the human-readable summary texts for an upload and
        return (summary, short_summary).
        NOTE(review): this view elides several lines -- the
        'byhand = 1'/'new = 1' flag assignments, an 'else:' for the
        normal-file path, the deb file-handle close and the guards
        around the trailing summary sections -- indentation below is
        reconstructed."""
        changes = self.pkg.changes
        files = self.pkg.files
        byhand = summary = new = ""
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}
        override_summary ="";
        file_keys = files.keys()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For new debs, include the package description in the summary
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
            # (the 'else:' line for the known-file path is elided)
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
        short_summary = summary
        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))
        summary += "Changes: " + f
        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
        summary += self.announce(short_summary, 0)
        return (summary, short_summary)
361 ###########################################################################
    def close_bugs (self, summary, action):
        """Append bug-closing info to summary; when action is set, mail
        a close message per bug and log it.  Returns the summary.
        NOTE(review): this view elides the early-exit guard, the
        'for bug in bugs:' loop header, the 'if action:' guards, the
        terminator of the stable-warning string literal, the 'else:'
        before the empty warning assignment and the return --
        indentation below is reconstructed."""
        changes = self.pkg.changes
        bugs = changes["closes"].keys()
        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        # Uploads that do not target stable get a caveat in the mail
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
396 ###########################################################################
    def announce (self, short_summary, action):
        """Send announcement mails to the per-suite announce lists and,
        if configured, hand off to close_bugs.  Returns summary text.
        NOTE(review): this view elides the 'Cnf'/'Subst'/'summary'/
        'lists_done' initialisations, the early 'return', 'continue'
        statements, the 'if action:' guards and the final return --
        indentation below is reconstructed."""
        changes = self.pkg.changes
        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
        Subst["__SHORT_SUMMARY__"] = short_summary
        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Skip suites with no list, and don't mail the same list twice
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            # Bcc the package tracking system for source uploads
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)
        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
430 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept an upload: move everything into Dir::Queue::Accepted,
        update the accept counters, send accept/announce mail, write the
        DebBugs version-tracking files and feed the queue builder.
        NOTE(review): this view elides the local 'Cnf'/'Subst'/'dsc'
        bindings and the 'debinfo.close()' call -- indentation below is
        reconstructed."""
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        self.Logger.log(["Accepting changes",changes_file])
        # Persist parsed state alongside the files for later stages
        self.dump_vars(Cnf["Dir::Queue::Accepted"])
        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1
        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)
        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):
                # Write to a dot-prefixed temp file and rename so readers
                # never see a half-written .versions file
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
500 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files with the auto-build queue for
        every target suite listed in Dinstall::QueueBuildSuites.
        NOTE(review): this view elides the local 'Cnf' binding, a
        'continue', the 'else:' lines and the 'ql = q.getresult()' /
        'if not ql:' guard before the fubar -- indentation below is
        reconstructed.  Filenames are interpolated straight into SQL
        strings; they originate from dak itself, but parameterised
        queries would be safer."""
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        file_keys = files.keys()
        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            # Security builds keep a per-suite subdirectory
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
        self.projectB.query("COMMIT WORK")
558 ###########################################################################
    def check_override (self):
        """Mail the maintainer when a deb's stated section/priority
        disagree with the override database.
        NOTE(review): this view elides the 'Subst' binding, the early
        'return', the 'summary' initialisation and the empty-summary /
        mail-sending guards -- indentation below is reconstructed."""
        changes = self.pkg.changes
        files = self.pkg.files
        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
        file_keys = files.keys()
        for file_entry in file_keys:
            # NEW debs have no override yet, so only check known ones
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                # "-" means the package declined to state a value
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
596 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the incoming file.  (terminator restored by review; the tail of
        the original docstring is elided from this view)

        NOTE(review): the 'try:' lines, the 'except OSError, e:'
        headers and several 'continue' statements are elided below --
        indentation is reconstructed.
        """
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            # O_EXCL so we atomically claim the destination name
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # File exists? Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                morgue_file = utils.find_next_free(morgue_file)
                except NoFreeFilenameError:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
                # Retry the claim now that the old file is out of the way
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
638 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally collect a manual
        rejection message via $EDITOR, move the files into the reject
        directory, write a <changes>.reason file and mail the uploader.
        NOTE(review): this view elides substantial scaffolding -- the
        edit/confirm loop bodies, the answer handling, the local
        'pkg'/'Subst'/'Cnf' bindings and the manual-vs-automatic
        'if/else' around the template substitution (both branches
        appear flattened below) -- indentation is reconstructed."""
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
            os.unlink(temp_filename)
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)
        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        # Automatic rejection path
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Build up the rejection email (manual path)
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)
        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)
        self.Logger.log(["rejected", pkg.changes_file])
713 ################################################################################
    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed.
    # (1) exact match => 1.0-3
    # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check the archive database for matching source.
        NOTE(review): 'suites = ["any"]' is a mutable default argument;
        the visible code never mutates it, but replacing it with a
        None-sentinel would be safer.  This view also elides the loop
        headers, the any/else branches, 'maps.reverse()', the 's'
        list initialisation and the return statements -- indentation
        below is reconstructed."""
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = self.Cnf.ValueList("SuiteMappings")[:]
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                     if x[0] == "map" or x[0] == "silent-map" ]
                if x[1] in s and x[0] not in s:
            que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)
        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]
        # (1) exact version match
        if source_version in ql:
        # (2) strip a "+bN" binary-only-NMU suffix and retry
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
761 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry for a package in a suite and
        record the override section/priority on files[file].
        NOTE(review): this view elides the 'file_type = "dsc"'
        assignment, an 'else:', the unknown-suite guard after
        get_suite_id and the final 'if result:'/return lines --
        indentation below is reconstructed."""
        files = self.pkg.files
        if binary_type == "": # must be source
        file_type = binary_type
        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()
        # Remember the section and priority so we can check them later if appropriate
        files[file]["override section"] = result[0][0]
        files[file]["override priority"] = result[0][1]
800 ################################################################################
802 def reject (self, str, prefix="Rejected: "):
804 # Unlike other rejects we add new lines first to avoid trailing
805 # new lines when this message is passed back up to a caller.
806 if self.reject_message:
807 self.reject_message += "\n"
808 self.reject_message += prefix + str
810 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version in query_result belonging to
        'suite' or any suite that enhances it.
        NOTE(review): the 'anyversion = None' initialisation, the
        'anyversion = v' assignment and the return are elided from
        this view."""
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            # case-insensitive suite comparison (note: .lower() IS
            # called here, unlike in cross_suite_version_check)
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
821 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        NOTE(review): this view elides several lines (the
        'suite = entry[1].lower()' binding, 'continue's, 'else:'
        branches and blank separators); indentation below is
        reconstructed.
        """
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # BUG(review): 'i.lower' is the *bound method object*, not a
            # call -- compare 'x.lower()' in get_anyversion.  As written
            # these lists hold method objects and the 'suite in ...'
            # membership tests below can never match them; this should
            # almost certainly be i.lower().
            must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    # A Distribution-Version mapping lets us propagate to
                    # the mapped suite instead of rejecting outright
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]
                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        # And - we really should complain to the dorks who configured dak
                        self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
886 ################################################################################
888 def check_binary_against_db(self, file):
889 self.reject_message = ""
890 files = self.pkg.files
892 # Ensure version is sane
893 q = self.projectB.query("""
894 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
896 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
897 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
898 % (files[file]["package"],
899 files[file]["architecture"]))
900 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
902 # Check for any existing copies of the file
903 q = self.projectB.query("""
904 SELECT b.id FROM binaries b, architecture a
905 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
906 AND a.id = b.architecture"""
907 % (files[file]["package"],
908 files[file]["version"],
909 files[file]["architecture"]))
911 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
913 return self.reject_message
915 ################################################################################
917 def check_source_against_db(self, file):
918 self.reject_message = ""
921 # Ensure version is sane
922 q = self.projectB.query("""
923 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
924 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
925 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
927 return self.reject_message
929 ################################################################################
932 # NB: this function can remove entries from the 'files' index [if
933 # the .orig.tar.gz is a duplicate of the one in the archive]; if
934 # you're iterating over 'files' and call this function as part of
935 # the loop, be sure to add a check to the top of the loop to
936 # ensure you haven't just tried to dereference the deleted entry.
    def check_dsc_against_db(self, file):
        """Verify every file listed in the .dsc against the archive and queues.

        For each entry of self.pkg.dsc_files:
          * if the file is part of this upload (in self.pkg.files), check it
            does not already exist in the pool (exact .orig.tar.gz
            duplicates are tolerated and reused);
          * otherwise, for an .orig.tar.gz, hunt for it in the pool and then
            in the queue directories, recording its location on self.pkg
            (orig_tar_id / orig_tar_gz / orig_tar_location);
          * finally compare md5sum and size against the .dsc's own figures.

        Side effects: may delete entries from self.pkg.files (duplicate
        .orig.tar.gz), and mutates self.pkg.* and
        legacy_source_untouchable.  Accumulates problems via self.reject().

        Returns a tuple (reject_message, orig_tar_gz_path_or_None).
        """
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            if files.has_key(dsc_file):
                # The file is part of this upload itself.
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                # NOTE(review): the loop header iterating the query result
                # (and its removal statement) appears to be missing from
                # this copy — 'i' is otherwise unbound here; confirm
                # against upstream.
                if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                    # "[dak] has not broken them. [dak] has fixed a
                    # brokenness. Your crappy hack exploited a bug in
                    # "(Come on! I thought it was always obvious that
                    # one just doesn't release different files with
                    # the same name and version.)"
                    #   -- ajk@ on d-devel@l.d.o

                    # Ignore exact matches for .orig.tar.gz
                    if dsc_file.endswith(".orig.tar.gz"):
                        # Byte-identical .orig.tar.gz already in the pool:
                        # drop it from the upload and reuse the pool copy.
                        if files.has_key(dsc_file) and \
                           int(files[dsc_file]["size"]) == int(i[0]) and \
                           files[dsc_file]["md5sum"] == i[1]:
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                            self.pkg.orig_tar_gz = i[2] + i[3]
                    # NOTE(review): upstream only rejects when no exact
                    # match was found; the guard seems lost in this copy.
                    self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Not in the upload: look for the .orig.tar.gz in the pool.
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                # NOTE(review): as above, the enclosing result-set loop
                # appears to be missing from this copy.
                if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody. So we need to choose the right one.
                    x = ql[0]; # default to something sane in case we don't match any or have only one
                    # Checksum each candidate to pick the one matching the
                    # .dsc; the others are marked untouchable.
                    old_file = i[0] + i[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                        legacy_source_untouchable[i[3]] = ""
                    # Re-checksum the chosen candidate 'x'.
                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                    # NOTE(review): an 'else:' branch appears to be missing
                    # before the next assignment in this copy.
                    self.pkg.orig_tar_location = x[4]
                # Not there? Check the queue directories...
                in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                # See process_it() in 'dak process-unchecked' for explanation of this
                # in_unchecked check dropped by ajt 2007-08-28, how did that
                # ever work?  ('and False' keeps the branch dead on purpose.)
                if os.path.exists(in_unchecked) and False:
                    return (self.reject_message, in_unchecked)
                for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                    in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
                    if os.path.exists(in_otherdir):
                        in_otherdir_fh = utils.open_file(in_otherdir)
                        actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                        in_otherdir_fh.close()
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                        self.pkg.orig_tar_gz = in_otherdir
                # Nowhere to be found: flag it.  orig_tar_gz = -1 is the
                # sentinel other dak code checks for "referenced but absent".
                self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                self.pkg.orig_tar_gz = -1
            # Non-orig file that isn't part of the upload at all.
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
            # Finally, verify checksum and size against the .dsc's figures.
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)
    def do_query(self, q):
        """Debug helper: run *q* against projectB, logging the query text
        and its wall-clock duration to stderr.
        """
        sys.stderr.write("query: \"%s\" ... " % (q))
        before = time.time()
        r = self.projectB.query(q)
        time_diff = time.time()-before
        sys.stderr.write("took %.3f seconds.\n" % (time_diff))
        # NOTE(review): 'r' is bound but no return is visible within this
        # view — presumably 'return r' follows just below; confirm.