4 # Queue utility functions for dak
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
30 ###############################################################################
32 re_isanum = re.compile (r"^\d+$")
33 re_default_answer = re.compile(r"\[(.*)\]")
34 re_fdnic = re.compile(r"\n\n")
35 re_bin_only_nmu = re.compile(r"\+b\d+$")
37 ################################################################################
39 # Determine what parts in a .changes are NEW
41 def determine_new(changes, files, projectB, warn=1):
44 # Build up a list of potentially new things
45 for file_entry in files.keys():
47 # Skip byhand elements
48 if f["type"] == "byhand":
51 priority = f["priority"]
52 section = f["section"]
53 file_type = get_type(f)
54 component = f["component"]
56 if file_type == "dsc":
58 if not new.has_key(pkg):
60 new[pkg]["priority"] = priority
61 new[pkg]["section"] = section
62 new[pkg]["type"] = file_type
63 new[pkg]["component"] = component
64 new[pkg]["files"] = []
66 old_type = new[pkg]["type"]
67 if old_type != file_type:
68 # source gets trumped by deb or udeb
70 new[pkg]["priority"] = priority
71 new[pkg]["section"] = section
72 new[pkg]["type"] = file_type
73 new[pkg]["component"] = component
74 new[pkg]["files"].append(file_entry)
75 if f.has_key("othercomponents"):
76 new[pkg]["othercomponents"] = f["othercomponents"]
78 for suite in changes["suite"].keys():
79 suite_id = database.get_suite_id(suite)
80 for pkg in new.keys():
81 component_id = database.get_component_id(new[pkg]["component"])
82 type_id = database.get_override_type_id(new[pkg]["type"])
83 q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
86 for file_entry in new[pkg]["files"]:
87 if files[file_entry].has_key("new"):
88 del files[file_entry]["new"]
92 if changes["suite"].has_key("stable"):
93 print "WARNING: overrides will be added for stable!"
94 if changes["suite"].has_key("oldstable"):
95 print "WARNING: overrides will be added for OLDstable!"
96 for pkg in new.keys():
97 if new[pkg].has_key("othercomponents"):
98 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
102 ################################################################################
106 if f.has_key("dbtype"):
107 file_type = f["dbtype"]
108 elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
111 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
113 # Validate the override type
114 type_id = database.get_override_type_id(file_type)
116 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
120 ################################################################################
122 # check if section/priority values are valid
124 def check_valid(new):
125 for pkg in new.keys():
126 section = new[pkg]["section"]
127 priority = new[pkg]["priority"]
128 file_type = new[pkg]["type"]
129 new[pkg]["section id"] = database.get_section_id(section)
130 new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
132 di = section.find("debian-installer") != -1
133 if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
134 new[pkg]["section id"] = -1
135 if (priority == "source" and file_type != "dsc") or \
136 (priority != "source" and file_type == "dsc"):
137 new[pkg]["priority id"] = -1
140 ###############################################################################
142 # Convenience wrapper to carry around all the package information in
145 def __init__(self, **kwds):
146 self.__dict__.update(kwds)
148 def update(self, **kwds):
149 self.__dict__.update(kwds)
151 ###############################################################################
155 def __init__(self, Cnf):
157 self.accept_count = 0
158 self.accept_bytes = 0L
159 self.reject_message = ""
160 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
161 legacy_source_untouchable = {})
163 # Initialize the substitution template mapping global
164 Subst = self.Subst = {}
165 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
166 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
167 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
168 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
170 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
171 database.init(Cnf, self.projectB)
173 ###########################################################################
175 def init_vars (self):
176 self.pkg.changes.clear()
178 self.pkg.files.clear()
179 self.pkg.dsc_files.clear()
180 self.pkg.legacy_source_untouchable.clear()
181 self.pkg.orig_tar_id = None
182 self.pkg.orig_tar_location = ""
183 self.pkg.orig_tar_gz = None
185 ###########################################################################
187 def update_vars (self):
188 dump_filename = self.pkg.changes_file[:-8]+".dak"
189 dump_file = utils.open_file(dump_filename)
190 p = cPickle.Unpickler(dump_file)
192 self.pkg.changes.update(p.load())
193 self.pkg.dsc.update(p.load())
194 self.pkg.files.update(p.load())
195 self.pkg.dsc_files.update(p.load())
196 self.pkg.legacy_source_untouchable.update(p.load())
198 self.pkg.orig_tar_id = p.load()
199 self.pkg.orig_tar_location = p.load()
203 ###########################################################################
205 # This could just dump the dictionaries as is, but I'd like to
206 # avoid this so there's some idea of what process-accepted &
207 # process-new use from process-unchecked
209 def dump_vars(self, dest_dir):
211 changes = self.pkg.changes
213 files = self.pkg.files
214 dsc_files = self.pkg.dsc_files
215 legacy_source_untouchable = self.pkg.legacy_source_untouchable
216 orig_tar_id = self.pkg.orig_tar_id
217 orig_tar_location = self.pkg.orig_tar_location
219 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
220 dump_file = utils.open_file(dump_filename, 'w')
222 os.chmod(dump_filename, 0664)
224 # chmod may fail when the dumpfile is not owned by the user
225 # invoking dak (like e.g. when NEW is processed by a member
227 if errno.errorcode[e.errno] == 'EPERM':
228 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
229 # security precaution, should never happen unless a weird
230 # umask is set anywhere
231 if perms & stat.S_IWOTH:
232 utils.fubar("%s is world writable and chmod failed." % \
234 # ignore the failed chmod otherwise as the file should
235 # already have the right privileges and is just, at worst,
236 # unreadable for world
240 p = cPickle.Pickler(dump_file, 1)
247 for file_entry in files.keys():
248 d_files[file_entry] = {}
249 for i in [ "package", "version", "architecture", "type", "size",
250 "md5sum", "sha1sum", "sha256sum", "component",
251 "location id", "source package", "source version",
252 "maintainer", "dbtype", "files id", "new",
253 "section", "priority", "othercomponents",
254 "pool name", "original component" ]:
255 if files[file_entry].has_key(i):
256 d_files[file_entry][i] = files[file_entry][i]
258 # Mandatory changes fields
259 for i in [ "distribution", "source", "architecture", "version",
260 "maintainer", "urgency", "fingerprint", "changedby822",
261 "changedby2047", "changedbyname", "maintainer822",
262 "maintainer2047", "maintainername", "maintaineremail",
263 "closes", "changes" ]:
264 d_changes[i] = changes[i]
265 # Optional changes fields
266 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
268 if changes.has_key(i):
269 d_changes[i] = changes[i]
271 for i in [ "source", "version", "maintainer", "fingerprint",
272 "uploaders", "bts changelog", "dm-upload-allowed" ]:
276 for file_entry in dsc_files.keys():
277 d_dsc_files[file_entry] = {}
278 # Mandatory dsc_files fields
279 for i in [ "size", "md5sum" ]:
280 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
281 # Optional dsc_files fields
282 for i in [ "files id" ]:
283 if dsc_files[file_entry].has_key(i):
284 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
286 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
287 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
291 ###########################################################################
293 # Set up the per-package template substitution mappings
295 def update_subst (self, reject_message = ""):
297 changes = self.pkg.changes
298 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
299 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
300 changes["architecture"] = { "Unknown" : "" }
301 # and maintainer2047 may not exist.
302 if not changes.has_key("maintainer2047"):
303 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
305 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
306 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
307 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
309 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
310 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
311 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
312 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
313 changes["maintainer2047"])
314 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
316 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
317 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
318 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
320 if "sponsoremail" in changes:
321 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
323 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
324 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
326 # Apply any global override of the Maintainer field
327 if self.Cnf.get("Dinstall::OverrideMaintainer"):
328 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
329 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
331 Subst["__REJECT_MESSAGE__"] = reject_message
332 Subst["__SOURCE__"] = changes.get("source", "Unknown")
333 Subst["__VERSION__"] = changes.get("version", "Unknown")
335 ###########################################################################
337 def build_summaries(self):
338 changes = self.pkg.changes
339 files = self.pkg.files
341 byhand = summary = new = ""
343 # changes["distribution"] may not exist in corner cases
344 # (e.g. unreadable changes files)
345 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
346 changes["distribution"] = {}
349 file_keys = files.keys()
351 for file_entry in file_keys:
352 if files[file_entry].has_key("byhand"):
354 summary += file_entry + " byhand\n"
355 elif files[file_entry].has_key("new"):
357 summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
358 if files[file_entry].has_key("othercomponents"):
359 summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
360 if files[file_entry]["type"] == "deb":
361 deb_fh = utils.open_file(file_entry)
362 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
365 files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
366 destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
367 summary += file_entry + "\n to " + destination + "\n"
368 if not files[file_entry].has_key("type"):
369 files[file_entry]["type"] = "unknown"
370 if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
371 # (queue/unchecked), there we have override entries already, use them
372 # (process-new), there we dont have override entries, use the newly generated ones.
373 override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
374 override_sect = files[file_entry].get("override section", files[file_entry]["section"])
375 override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
377 short_summary = summary
379 # This is for direport's benefit...
380 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
383 summary += "Changes: " + f
385 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
387 summary += self.announce(short_summary, 0)
389 return (summary, short_summary)
391 ###########################################################################
393 def close_bugs (self, summary, action):
394 changes = self.pkg.changes
398 bugs = changes["closes"].keys()
404 summary += "Closing bugs: "
406 summary += "%s " % (bug)
408 Subst["__BUG_NUMBER__"] = bug
409 if changes["distribution"].has_key("stable"):
410 Subst["__STABLE_WARNING__"] = """
411 Note that this package is not part of the released stable Debian
412 distribution. It may have dependencies on other unreleased software,
413 or other instabilities. Please take care if you wish to install it.
414 The update will eventually make its way into the next released Debian
417 Subst["__STABLE_WARNING__"] = ""
418 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
419 utils.send_mail (mail_message)
421 self.Logger.log(["closing bugs"]+bugs)
426 ###########################################################################
428 def announce (self, short_summary, action):
431 changes = self.pkg.changes
433 # Only do announcements for source uploads with a recent dpkg-dev installed
434 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
439 Subst["__SHORT_SUMMARY__"] = short_summary
441 for dist in changes["distribution"].keys():
442 announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
443 if announce_list == "" or lists_done.has_key(announce_list):
445 lists_done[announce_list] = 1
446 summary += "Announcing to %s\n" % (announce_list)
449 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
450 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
451 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
452 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
453 utils.send_mail (mail_message)
455 if Cnf.FindB("Dinstall::CloseBugs"):
456 summary = self.close_bugs(summary, action)
460 ###########################################################################
462 def accept (self, summary, short_summary):
465 files = self.pkg.files
466 changes = self.pkg.changes
467 changes_file = self.pkg.changes_file
471 self.Logger.log(["Accepting changes",changes_file])
473 self.dump_vars(Cnf["Dir::Queue::Accepted"])
475 # Move all the files into the accepted directory
476 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
477 file_keys = files.keys()
478 for file_entry in file_keys:
479 utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
480 self.accept_bytes += float(files[file_entry]["size"])
481 self.accept_count += 1
483 # Send accept mail, announce to lists, close bugs and check for
484 # override disparities
485 if not Cnf["Dinstall::Options::No-Mail"]:
486 Subst["__SUITE__"] = ""
487 Subst["__SUMMARY__"] = summary
488 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
489 utils.send_mail(mail_message)
490 self.announce(short_summary, 1)
493 ## Helper stuff for DebBugs Version Tracking
494 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
495 # ??? once queue/* is cleared on *.d.o and/or reprocessed
496 # the conditionalization on dsc["bts changelog"] should be
499 # Write out the version history from the changelog
500 if changes["architecture"].has_key("source") and \
501 dsc.has_key("bts changelog"):
503 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
504 version_history = os.fdopen(fd, 'w')
505 version_history.write(dsc["bts changelog"])
506 version_history.close()
507 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
508 changes_file[:-8]+".versions")
509 os.rename(temp_filename, filename)
510 os.chmod(filename, 0644)
512 # Write out the binary -> source mapping.
513 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
514 debinfo = os.fdopen(fd, 'w')
515 for file_entry in file_keys:
516 f = files[file_entry]
517 if f["type"] == "deb":
518 line = " ".join([f["package"], f["version"],
519 f["architecture"], f["source package"],
520 f["source version"]])
521 debinfo.write(line+"\n")
523 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
524 changes_file[:-8]+".debinfo")
525 os.rename(temp_filename, filename)
526 os.chmod(filename, 0644)
528 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
530 ###########################################################################
532 def queue_build (self, queue, path):
535 files = self.pkg.files
536 changes = self.pkg.changes
537 changes_file = self.pkg.changes_file
539 file_keys = files.keys()
541 ## Special support to enable clean auto-building of queued packages
542 queue_id = database.get_or_set_queue_id(queue)
544 self.projectB.query("BEGIN WORK")
545 for suite in changes["distribution"].keys():
546 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
548 suite_id = database.get_suite_id(suite)
549 dest_dir = Cnf["Dir::QueueBuild"]
550 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
551 dest_dir = os.path.join(dest_dir, suite)
552 for file_entry in file_keys:
553 src = os.path.join(path, file_entry)
554 dest = os.path.join(dest_dir, file_entry)
555 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
556 # Copy it since the original won't be readable by www-data
557 utils.copy(src, dest)
559 # Create a symlink to it
560 os.symlink(src, dest)
561 # Add it to the list of packages for later processing by apt-ftparchive
562 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
563 # If the .orig.tar.gz is in the pool, create a symlink to
564 # it (if one doesn't already exist)
565 if self.pkg.orig_tar_id:
566 # Determine the .orig.tar.gz file name
567 for dsc_file in self.pkg.dsc_files.keys():
568 if dsc_file.endswith(".orig.tar.gz"):
570 dest = os.path.join(dest_dir, filename)
571 # If it doesn't exist, create a symlink
572 if not os.path.exists(dest):
573 # Find the .orig.tar.gz in the pool
574 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
577 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
578 src = os.path.join(ql[0][0], ql[0][1])
579 os.symlink(src, dest)
580 # Add it to the list of packages for later processing by apt-ftparchive
581 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
582 # if it does, update things to ensure it's not removed prematurely
584 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
586 self.projectB.query("COMMIT WORK")
588 ###########################################################################
590 def check_override (self):
592 changes = self.pkg.changes
593 files = self.pkg.files
596 # Abandon the check if:
597 # a) it's a non-sourceful upload
598 # b) override disparity checks have been disabled
599 # c) we're not sending mail
600 if not changes["architecture"].has_key("source") or \
601 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
602 Cnf["Dinstall::Options::No-Mail"]:
606 file_keys = files.keys()
608 for file_entry in file_keys:
609 if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
610 section = files[file_entry]["section"]
611 override_section = files[file_entry]["override section"]
612 if section.lower() != override_section.lower() and section != "-":
613 summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
614 priority = files[file_entry]["priority"]
615 override_priority = files[file_entry]["override priority"]
616 if priority != override_priority and priority != "-":
617 summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
622 Subst["__SUMMARY__"] = summary
623 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
624 utils.send_mail(mail_message)
626 ###########################################################################
628 def force_reject (self, files):
629 """Forcefully move files from the current directory to the
630 reject directory. If any file already exists in the reject
631 directory it will be moved to the morgue to make way for
636 for file_entry in files:
637 # Skip any files which don't exist or which we don't have permission to copy.
638 if os.access(file_entry,os.R_OK) == 0:
640 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
642 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
644 # File exists? Let's try and move it to the morgue
645 if errno.errorcode[e.errno] == 'EEXIST':
646 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
648 morgue_file = utils.find_next_free(morgue_file)
649 except NoFreeFilenameError:
650 # Something's either gone badly Pete Tong, or
651 # someone is trying to exploit us.
652 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
654 utils.move(dest_file, morgue_file, perms=0660)
656 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
659 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
663 # If we got here, we own the destination file, so we can
664 # safely overwrite it.
665 utils.move(file_entry, dest_file, 1, perms=0660)
668 ###########################################################################
670 def do_reject (self, manual = 0, reject_message = ""):
671 # If we weren't given a manual rejection message, spawn an
672 # editor so the user can add one in...
673 if manual and not reject_message:
674 (fd, temp_filename) = utils.temp_filename()
675 editor = os.environ.get("EDITOR","vi")
678 os.system("%s %s" % (editor, temp_filename))
679 temp_fh = utils.open_file(temp_filename)
680 reject_message = "".join(temp_fh.readlines())
682 print "Reject message:"
683 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
684 prompt = "[R]eject, Edit, Abandon, Quit ?"
686 while prompt.find(answer) == -1:
687 answer = utils.our_raw_input(prompt)
688 m = re_default_answer.search(prompt)
691 answer = answer[:1].upper()
692 os.unlink(temp_filename)
704 reason_filename = pkg.changes_file[:-8] + ".reason"
705 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
707 # Move all the files into the reject directory
708 reject_files = pkg.files.keys() + [pkg.changes_file]
709 self.force_reject(reject_files)
711 # If we fail here someone is probably trying to exploit the race
712 # so let's just raise an exception ...
713 if os.path.exists(reason_filename):
714 os.unlink(reason_filename)
715 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
718 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
719 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
720 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
721 os.write(reason_fd, reject_message)
722 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
724 # Build up the rejection email
725 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
727 Subst["__REJECTOR_ADDRESS__"] = user_email_address
728 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
729 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
730 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
731 # Write the rejection email out as the <foo>.reason file
732 os.write(reason_fd, reject_mail_message)
736 # Send the rejection mail if appropriate
737 if not Cnf["Dinstall::Options::No-Mail"]:
738 utils.send_mail(reject_mail_message)
740 self.Logger.log(["rejected", pkg.changes_file])
743 ################################################################################
745 # Ensure that source exists somewhere in the archive for the binary
746 # upload being processed.
748 # (1) exact match => 1.0-3
749 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
751 def source_exists (self, package, source_version, suites = ["any"]):
755 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
758 # source must exist in suite X, or in some other suite that's
759 # mapped to X, recursively... silent-maps are counted too,
760 # unreleased-maps aren't.
761 maps = self.Cnf.ValueList("SuiteMappings")[:]
763 maps = [ m.split() for m in maps ]
764 maps = [ (x[1], x[2]) for x in maps
765 if x[0] == "map" or x[0] == "silent-map" ]
768 if x[1] in s and x[0] not in s:
771 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
772 q = self.projectB.query(que)
774 # Reduce the query results to a list of version numbers
775 ql = [ i[0] for i in q.getresult() ]
778 if source_version in ql:
782 orig_source_version = re_bin_only_nmu.sub('', source_version)
783 if orig_source_version in ql:
791 ################################################################################
793 def in_override_p (self, package, component, suite, binary_type, file):
794 files = self.pkg.files
796 if binary_type == "": # must be source
799 file_type = binary_type
801 # Override suite name; used for example with proposed-updates
802 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
803 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
805 # Avoid <undef> on unknown distributions
806 suite_id = database.get_suite_id(suite)
809 component_id = database.get_component_id(component)
810 type_id = database.get_override_type_id(file_type)
812 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
813 % (package, suite_id, component_id, type_id))
814 result = q.getresult()
815 # If checking for a source package fall back on the binary override type
816 if file_type == "dsc" and not result:
817 deb_type_id = database.get_override_type_id("deb")
818 udeb_type_id = database.get_override_type_id("udeb")
819 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
820 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
821 result = q.getresult()
823 # Remember the section and priority so we can check them later if appropriate
825 files[file]["override section"] = result[0][0]
826 files[file]["override priority"] = result[0][1]
830 ################################################################################
832 def reject (self, str, prefix="Rejected: "):
834 # Unlike other rejects we add new lines first to avoid trailing
835 # new lines when this message is passed back up to a caller.
836 if self.reject_message:
837 self.reject_message += "\n"
838 self.reject_message += prefix + str
840 ################################################################################
842 def get_anyversion(self, query_result, suite):
844 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
845 for (v, s) in query_result:
846 if s in [ x.lower() for x in anysuite ]:
847 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
851 ################################################################################
853 def cross_suite_version_check(self, query_result, file, new_version,
855 """Ensure versions are newer than existing packages in target
856 suites and that cross-suite version checking rules as
857 set out in the conf file are satisfied."""
859 # Check versions for each target suite
860 for target_suite in self.pkg.changes["distribution"].keys():
861 must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
862 must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
# NOTE(review): this is the tail of cross_suite_version_check(); its `def`
# line and the setup of must_be_newer_than / must_be_older_than /
# target_suite lie above this excerpt.  Comments only added here; the code
# lines are unchanged.
863 # Enforce "must be newer than target suite" even if conffile omits it
864 if target_suite not in must_be_newer_than:
865 must_be_newer_than.append(target_suite)
# Walk every (version, suite) row found in the database for this package.
866 for entry in query_result:
867 existent_version = entry[0]
# Sourceful uploads must strictly exceed every version in the
# "must be newer than" suites (VersionCompare < 1 means new <= existing).
869 if suite in must_be_newer_than and sourceful and \
870 apt_pkg.VersionCompare(new_version, existent_version) < 1:
871 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
# Conversely, the upload must be older than anything in the
# "must be older than" suites (VersionCompare > -1 means new >= existing).
872 if suite in must_be_older_than and \
873 apt_pkg.VersionCompare(new_version, existent_version) > -1:
874 ch = self.pkg.changes
# A Distribution-Version mapping lets us propagate the upload to another
# suite instead of rejecting outright.
876 if ch.get('distribution-version', {}).has_key(suite):
877 # we really use the other suite, ignoring the conflicting one ...
878 addsuite = ch["distribution-version"][suite]
880 add_version = self.get_anyversion(query_result, addsuite)
881 target_version = self.get_anyversion(query_result, target_suite)
# NOTE(review): an `if not add_version:` guard appears to be elided from
# this listing between here and the reject below — confirm against the
# original file.
884 # not add_version can only happen if we map to a suite
885 # that doesn't enhance the suite we're propup'ing from.
886 # so "propup-ver x a b c; map a d" is a problem only if
887 # d doesn't enhance a.
889 # i think we could always propagate in this case, rather
890 # than complaining. either way, this isn't a REJECT issue
892 # And - we really should complain to the dorks who configured dak
893 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
# Record the extra suite so later stages install there as well.
894 self.pkg.changes.setdefault("propdistribution", {})
895 self.pkg.changes["propdistribution"][addsuite] = 1
897 elif not target_version:
898 # not targets_version is true when the package is NEW
899 # we could just stick with the "...old version..." REJECT
901 self.reject("Won't propogate NEW packages.")
902 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
903 # propogation would be redundant. no need to reject though.
# Downgraded to a warning: the conflict resolves itself via the mapping.
904 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
# Propagate only when the new version beats the mapped suite's version
# AND the mapped suite is at least as new as the target suite.
906 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
907 apt_pkg.VersionCompare(add_version, target_version) >= 0:
909 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
910 self.pkg.changes.setdefault("propdistribution", {})
911 self.pkg.changes["propdistribution"][addsuite] = 1
# No Distribution-Version escape hatch: hard reject.
# NOTE(review): the `else:` introducing this branch appears elided here.
915 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
917 ################################################################################
def check_binary_against_db(self, file):
    """Check a binary upload against the archive database.

    Runs the cross-suite version checks for the package (rejecting via
    self.reject on version conflicts), then rejects if an identical
    package/version/architecture is already present in the archive.

    @param file: key into self.pkg.files naming the binary being checked
    @return: the accumulated self.reject_message ("" when everything is OK)
    """
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane: collect every (version, suite) pair this
    # package already has for its architecture (or 'all') and run the
    # cross-suite "must be newer/older than" checks over them.
    # NOTE: the FROM list must declare `architecture a` — the WHERE clause
    # joins on a.arch_string and a.id (the alias was missing before).
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                     architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file,
                                   files[file]["version"], sourceful=False)

    # Check for any existing copies of the file: an exact
    # package/version/architecture match already in the archive would be
    # an overwrite.  Only reject when the query actually returned rows
    # (previously the reject fired unconditionally).
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    if q.getresult():
        self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
947 ################################################################################
def check_source_against_db(self, file):
    """Check a source upload against the archive database.

    Runs the cross-suite version checks for the source named in the
    parsed .dsc, rejecting via self.reject on version conflicts.

    @param file: name of the upload file being checked (used in messages)
    @return: the accumulated self.reject_message ("" when everything is OK)
    """
    self.reject_message = ""
    # The parsed .dsc supplies the source name and version to check
    # (this binding was missing, leaving `dsc` undefined below).
    dsc = self.pkg.dsc

    # Ensure version is sane: gather every (version, suite) pair this
    # source already occupies and run the cross-suite checks.  This is a
    # source upload, so sourceful=True (cf. check_binary_against_db,
    # which passes sourceful=False).
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
                                   sourceful=True)

    return self.reject_message
962 ################################################################################
965 # NB: this function can remove entries from the 'files' index [if
966 # the .orig.tar.gz is a duplicate of the one in the archive]; if
967 # you're iterating over 'files' and call this function as part of
968 # the loop, be sure to add a check to the top of the loop to
969 # ensure you haven't just tried to dereference the deleted entry.
972 def check_dsc_against_db(self, file):
# Verify every file listed in the .dsc against the archive database and
# the queue directories, locating the .orig.tar.gz when it is not part of
# this upload.  Returns a 2-tuple (reject_message, path) where path is the
# orig tarball found in a queue directory, or None.
# NOTE(review): this listing appears to have elided several structural
# lines (result-loop headers, continue/else branches, the match
# assignment); comments below hedge where the control flow is not visible.
973 self.reject_message = ""
974 files = self.pkg.files
975 dsc_files = self.pkg.dsc_files
976 legacy_source_untouchable = self.pkg.legacy_source_untouchable
# Reset: set below to a pool/queue path, or -1 when the orig is missing.
977 self.pkg.orig_tar_gz = None
979 # Try and find all files mentioned in the .dsc. This has
980 # to work harder to cope with the multiple possible
981 # locations of an .orig.tar.gz.
982 # The ordering on the select is needed to pick the newest orig
983 # when it exists in multiple places.
984 for dsc_file in dsc_files.keys():
# Case 1: the file is part of this upload — take its checksum/size from
# the incoming entry and make sure it does not clash with the archive.
986 if files.has_key(dsc_file):
987 actual_md5 = files[dsc_file]["md5sum"]
988 actual_size = int(files[dsc_file]["size"])
989 found = "%s in incoming" % (dsc_file)
990 # Check the file does not already exist in the archive
# LIKE '%name%' over-matches; exact-basename filtering happens below.
991 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
993 # Strip out anything that isn't '%s' or '/%s$'
# NOTE(review): the `for i in ...:` loop over the query result (binding
# `i` used here) appears elided from this listing.
995 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
998 # "[dak] has not broken them. [dak] has fixed a
999 # brokenness. Your crappy hack exploited a bug in
1002 # "(Come on! I thought it was always obvious that
1003 # one just doesn't release different files with
1004 # the same name and version.)"
1005 # -- ajk@ on d-devel@l.d.o
1008 # Ignore exact matches for .orig.tar.gz
1010 if dsc_file.endswith(".orig.tar.gz"):
# An orig identical (size + md5) to the archived copy: keep the pool
# copy (i[2]+i[3] = location path + filename) and only warn.
1012 if files.has_key(dsc_file) and \
1013 int(files[dsc_file]["size"]) == int(i[0]) and \
1014 files[dsc_file]["md5sum"] == i[1]:
1015 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1017 self.pkg.orig_tar_gz = i[2] + i[3]
# Same name but different content than the archive: hard reject.
1021 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz referenced by the .dsc but not uploaded —
# search the pool for it.
1022 elif dsc_file.endswith(".orig.tar.gz"):
1024 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1026 # Strip out anything that isn't '%s' or '/%s$'
# NOTE(review): as above, the loop header binding `i` appears elided.
1028 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1032 # Unfortunately, we may get more than one match here if,
1033 # for example, the package was in potato but had an -sa
1034 # upload in woody. So we need to choose the right one.
1036 # default to something sane in case we don't match any or have only one
# Candidate check: compare the on-disk file against the .dsc's
# md5sum/size to pick the right pool copy.
1041 old_file = i[0] + i[1]
1042 old_file_fh = utils.open_file(old_file)
1043 actual_md5 = apt_pkg.md5sum(old_file_fh)
1045 actual_size = os.stat(old_file)[stat.ST_SIZE]
1046 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
# NOTE(review): the assignment remembering the match (x = i) and the
# legacy-suite condition guarding this line appear elided here.
1049 legacy_source_untouchable[i[3]] = ""
# Recompute checksum/size for the chosen match `x`.
1051 old_file = x[0] + x[1]
1052 old_file_fh = utils.open_file(old_file)
1053 actual_md5 = apt_pkg.md5sum(old_file_fh)
1055 actual_size = os.stat(old_file)[stat.ST_SIZE]
1058 # need this for updating dsc_files in install()
1059 dsc_files[dsc_file]["files id"] = x[3]
1060 # See install() in process-accepted...
1061 self.pkg.orig_tar_id = x[3]
1062 self.pkg.orig_tar_gz = old_file
# NOTE(review): the binding of `suite_type` (from the location's type,
# x[2]) appears elided from this listing.
1063 if suite_type == "legacy" or suite_type == "legacy-mixed":
1064 self.pkg.orig_tar_location = "legacy"
# Otherwise record the real location id so install() can reuse it.
1066 self.pkg.orig_tar_location = x[4]
1068 # Not there? Check the queue directories...
1070 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1071 # See process_it() in 'dak process-unchecked' for explanation of this
1072 # in_unchecked check dropped by ajt 2007-08-28, how did that
# `and False` makes this branch dead code — deliberately disabled per the
# comment above, kept for history.
1074 if os.path.exists(in_unchecked) and False:
1075 return (self.reject_message, in_unchecked)
# Try the other holding queues in turn; first hit wins.
1077 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1078 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1079 if os.path.exists(in_otherdir):
1080 in_otherdir_fh = utils.open_file(in_otherdir)
1081 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1082 in_otherdir_fh.close()
1083 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1085 self.pkg.orig_tar_gz = in_otherdir
# Orig tarball nowhere to be found: reject and mark with the -1 sentinel.
1088 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1089 self.pkg.orig_tar_gz = -1
# Case 3: a non-orig file referenced by the .dsc but absent from the
# upload — it must be in the queue; reject otherwise.
1092 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Finally, whatever copy we settled on must match the .dsc's checksum
# and size exactly.
1094 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1095 self.reject("md5sum for %s doesn't match %s." % (found, file))
1096 if actual_size != int(dsc_files[dsc_file]["size"]):
1097 self.reject("size for %s doesn't match %s." % (found, file))
1099 return (self.reject_message, None)
1101 def do_query(self, q):
# Debug wrapper around projectB.query(): logs the SQL and how long it
# took to stderr.
# NOTE(review): SOURCE is cut off here; the method presumably returns
# `r` on a line past this excerpt — confirm against the original file.
1102 sys.stderr.write("query: \"%s\" ... " % (q))
1103 before = time.time()
1104 r = self.projectB.query(q)
1105 time_diff = time.time()-before
1106 sys.stderr.write("took %.3f seconds.\n" % (time_diff))