3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
29 ###############################################################################
# Precompiled regular expressions used throughout this module.
re_isanum = re.compile (r"^\d+$")            # a string made up solely of digits
re_default_answer = re.compile(r"\[(.*)\]")  # the bracketed default answer in a prompt, e.g. "[R]eject"
re_fdnic = re.compile(r"\n\n")               # paragraph break (folded to "\n .\n" for direport)
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
36 ################################################################################
38 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Work out which parts of this upload are NEW (i.e. lack an
    override entry in the database for every target suite) and, when
    'warn' is set, print warnings about suspicious targets.

    NOTE(review): several statements are elided in this copy -- 'new',
    'f' and 'pkg' are read before any visible assignment (presumably
    'new = {}', 'f = files[file_entry]' and 'pkg = f["package"]'
    existed upstream) and the final 'return new' is not visible.
    Confirm against the canonical source before relying on this text.
    """
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]
        if file_type == "dsc":
        # First sighting of this package: record its metadata.
        if not new.has_key(pkg):
                new[pkg]["priority"] = priority
                new[pkg]["section"] = section
                new[pkg]["type"] = file_type
                new[pkg]["component"] = component
                new[pkg]["files"] = []
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                new[pkg]["priority"] = priority
                new[pkg]["section"] = section
                new[pkg]["type"] = file_type
                new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]
    # Strike the "new" marker from anything that already has an
    # override entry in one of the target suites.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
    # (body of the elided 'if warn:' block)
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
        if changes["suite"].has_key("oldstable"):
            print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
101 ################################################################################
    # NOTE(review): fragment of get_type(f) -- the enclosing 'def' line
    # and some branches are elided in this copy. The visible intent:
    # derive the override type for a file, preferring the database
    # type ("dbtype") over the raw .changes type, then validate it.
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        # NOTE(review): this fubar most likely belongs to an elided
        # 'else:' branch, not to the elif above -- confirm upstream.
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    # NOTE(review): the guard (presumably 'if type_id == -1:') is elided.
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
119 ################################################################################
121 # check if section/priority values are valid
def check_valid(new):
    """Validate the section/priority of every candidate NEW package.

    Looks up each package's section and priority in the database and
    stores the resulting ids under "section id" / "priority id";
    unknown or inconsistent values are marked with id -1 so callers
    can flag them.
    """
    for name in new.keys():
        entry = new[name]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections may only carry udeb/dsc entries,
        # and udebs may only live in debian-installer sections.
        di = section.find("debian-installer") != -1
        if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
            entry["section id"] = -1
        # The "source" priority is reserved for .dsc files, and .dsc
        # files must use it.
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            entry["priority id"] = -1
139 ###############################################################################
141 # Convenience wrapper to carry around all the package information in
144 def __init__(self, **kwds):
145 self.__dict__.update(kwds)
147 def update(self, **kwds):
148 self.__dict__.update(kwds)
150 ###############################################################################
    def __init__(self, Cnf):
        """Set up an Upload helper: zero the accept counters, create an
        empty Pkg state holder, seed the global substitution template
        mapping from the configuration and open the database connection.

        NOTE(review): methods below read self.Cnf, but the assignment
        ('self.Cnf = Cnf') is elided in this copy -- confirm upstream.
        """
        self.accept_count = 0
        self.accept_bytes = 0L
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})
        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        # Open the PostgreSQL connection and initialise the id caches.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
171 ###########################################################################
173 def init_vars (self):
174 self.pkg.changes.clear()
176 self.pkg.files.clear()
177 self.pkg.dsc_files.clear()
178 self.pkg.legacy_source_untouchable.clear()
179 self.pkg.orig_tar_id = None
180 self.pkg.orig_tar_location = ""
181 self.pkg.orig_tar_gz = None
183 ###########################################################################
    def update_vars (self):
        """Reload the pickled per-upload state from the ".dak" companion
        file written earlier by dump_vars()."""
        # The dump lives beside the .changes: strip the ".changes"
        # suffix (8 characters) and append ".dak".
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)
        # Load order must mirror the dump order used in dump_vars().
        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())
        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()
        # NOTE(review): the orig_tar_gz load and dump_file.close() are
        # elided in this copy -- confirm upstream.
201 ###########################################################################
203 # This could just dump the dictionaries as is, but I'd like to
204 # avoid this so there's some idea of what process-accepted &
205 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Pickle a trimmed copy of the per-upload state into
        <dest_dir>/<changes-name>.dak for later reuse by update_vars().

        Only a whitelisted subset of each dictionary is written so
        process-accepted / process-new read exactly the fields that
        process-unchecked produced.

        NOTE(review): this copy elides the initialisation of d_changes,
        d_dsc, d_files and d_dsc_files, the try/except around chmod,
        and the closing p.dump()/close() calls -- confirm upstream.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # Keep the dump group-readable only; a world-readable dump on a
        # foreign-owned file is treated as fatal below.
            os.chmod(dump_filename, 0660)
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                if perms & stat.S_IROTH:
                    utils.fubar("%s is world readable and chmod failed." % (dump_filename))
        p = cPickle.Pickler(dump_file, 1)
        # Copy only the whitelisted keys of each file entry.
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        # NOTE(review): this list literal is truncated in this copy.
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]
        # .dsc fields worth keeping (body elided in this copy).
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
        # Pickle everything in a fixed order (mirrored by update_vars).
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
279 ###########################################################################
281 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the per-package template substitution mapping
        (__SOURCE__, __VERSION__, __MAINTAINER_*__ etc.) from the
        current .changes data.

        NOTE(review): 'Subst' is read below without a visible binding;
        the upstream source presumably had 'Subst = self.Subst' here.
        """
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
            # NOTE(review): the following three lines belong to an
            # elided 'else:' branch -- confirm upstream.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
        # Bcc the per-source tracking address when one is configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
323 ###########################################################################
    def build_summaries(self):
        """Build the human-readable (summary, short_summary) pair that
        describes this upload: per-file destinations, NEW/byhand flags,
        override entries and the changelog text.

        NOTE(review): the 'byhand = 1' / 'new = 1' flag assignments and
        the 'else:' introducing the pool-destination branch appear to be
        elided in this copy -- confirm upstream.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        byhand = summary = new = ""
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}
        override_summary ="";
        file_keys = files.keys()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                # Normal case: file goes into the pool.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n to " + destination + "\n"
            if not files[file_entry].has_key("type"):
                files[file_entry]["type"] = "unknown"
            if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                # (queue/unchecked), there we have override entries already, use them
                # (process-new), there we dont have override entries, use the newly generated ones.
                override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
        short_summary = summary
        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))
            summary += "Changes: " + f
        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
        summary += self.announce(short_summary, 0)
        return (summary, short_summary)
379 ###########################################################################
    def close_bugs (self, summary, action):
        """Append the bug-closing report to 'summary' and, when 'action'
        is set, mail a closure notice for every bug this upload closes.

        NOTE(review): the 'for bug in bugs:' loop header and the 'else:'
        before the empty __STABLE_WARNING__ assignment are elided in
        this copy, and the triple-quoted warning string is left
        unterminated in the visible text -- confirm upstream.
        """
        changes = self.pkg.changes
        bugs = changes["closes"].keys()
            summary += "Closing bugs: "
                summary += "%s " % (bug)
                    Subst["__BUG_NUMBER__"] = bug
                    if changes["distribution"].has_key("stable"):
                        Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
                        Subst["__STABLE_WARNING__"] = ""
                    mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
                    utils.send_mail (mail_message)
            self.Logger.log(["closing bugs"]+bugs)
414 ###########################################################################
    def announce (self, short_summary, action):
        """Send (when 'action' is set) the upload announcement to each
        target distribution's announce list, then hand off to
        close_bugs() when bug closing is enabled.  Returns the summary
        text describing what was (or would be) announced.

        NOTE(review): the early 'return', the 'lists_done'/'summary'
        initialisation and the 'if action:' guard are elided in this
        copy -- confirm upstream.
        """
        changes = self.pkg.changes
        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
        Subst["__SHORT_SUMMARY__"] = short_summary
        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
            # Announce to each list at most once.
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)
        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
448 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept an upload: dump state, move the .changes and all its
        files into the accepted queue, send the accept mail / list
        announcements, write DebBugs version-tracking data and queue
        the package for auto-building.

        NOTE(review): local bindings for Cnf, Subst and dsc are not
        visible in this copy (presumably 'Cnf = self.Cnf' etc. were
        elided) -- confirm upstream.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        self.Logger.log(["Accepting changes",changes_file])
        # Persist state first so a crash mid-move is recoverable.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])
        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1
        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)
        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):
                # Write via a dot-prefixed temp file then rename, so the
                # BTS never sees a half-written .versions file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
518 ###########################################################################
    def queue_build (self, queue, path):
        """Register this upload's files with the queue_build table and
        make them available to the auto-builders, either by copying
        (security queues) or by symlinking from 'path'.

        NOTE(review): local bindings for Cnf and several statements
        (e.g. the 'continue' after the suite check, the 'filename ='
        assignment in the .orig.tar.gz scan, and a couple of 'else:'
        headers) are elided in this copy -- confirm upstream.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        file_keys = files.keys()
        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
        self.projectB.query("COMMIT WORK")
576 ###########################################################################
    def check_override (self):
        """Compare each binary's section/priority against its override
        entry and mail the maintainer about any disparities.

        NOTE(review): the 'summary' initialisation, the early 'return'
        and the 'if summary == "": return' guard are elided in this
        copy, and Cnf/Subst are read without visible local bindings --
        confirm upstream.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
        file_keys = files.keys()
        for file_entry in file_keys:
            # NEW files get their overrides created later, so only
            # already-known debs are compared here.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
614 ###########################################################################
    def force_reject (self, files):
        # NOTE(review): the docstring below is left unterminated in this
        # copy, and the try/except headers around both os.open calls are
        # elided -- confirm upstream.  The O_EXCL opens exist so we only
        # ever overwrite a reject file we ourselves created.
        """Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                    utils.move(dest_file, morgue_file, perms=0660)
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
656 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally let the operator edit a
        rejection message in $EDITOR, move all files to the reject
        directory, write a <name>.reason file and mail the rejection.

        NOTE(review): multiple statements are elided in this copy (the
        edit/confirm loop headers, the 'if not manual:'/'else:' split
        around the two template blocks, and the local Cnf/Subst/pkg
        bindings) -- confirm upstream.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)
        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # Automatic rejection: canned template, X-DAK marker headers.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)
        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)
        self.Logger.log(["rejected", pkg.changes_file])
731 ################################################################################
733 # Ensure that source exists somewhere in the archive for the binary
734 # upload being processed.
736 # (1) exact match => 1.0-3
737 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that the source for a binary upload exists in the
        archive, either (1) as an exact version match or (2) as the
        base version of a binary-only NMU (version with "+bN" stripped).

        NOTE(review): the suite loop header, the if/else around the two
        query forms and the return paths are elided in this copy --
        confirm upstream.  Also note the query strings are built by %
        interpolation, not parameterized.
        """
                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = self.Cnf.ValueList("SuiteMappings")[:]
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                     if x[0] == "map" or x[0] == "silent-map" ]
                if x[1] in s and x[0] not in s:
                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
            q = self.projectB.query(que)
            # Reduce the query results to a list of version numbers
            ql = [ i[0] for i in q.getresult() ]
            # (1) exact match
            if source_version in ql:
            # (2) binary-only NMU: strip the "+bN" suffix and retry
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:
779 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry for 'package' in 'suite' and, when
        found, stash the override section/priority on the file entry so
        check_override() can compare against them later.

        NOTE(review): the 'file_type = "dsc"' branch, the else: header,
        the early return on unknown suites and the final returns are
        elided in this copy -- confirm upstream.
        """
        files = self.pkg.files
        if binary_type == "": # must be source
            file_type = binary_type
        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()
        # Remember the section and priority so we can check them later if appropriate
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]
818 ################################################################################
820 def reject (self, str, prefix="Rejected: "):
822 # Unlike other rejects we add new lines first to avoid trailing
823 # new lines when this message is passed back up to a caller.
824 if self.reject_message:
825 self.reject_message += "\n"
826 self.reject_message += prefix + str
828 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version among (version, suite) rows whose
        suite is 'suite' or one of the suites that enhance it.

        NOTE(review): the 'anyversion = None' initialisation, the
        'anyversion = v' assignment and the final return are elided in
        this copy -- confirm upstream.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            # DB suite names are lower-case; normalise the config names.
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
839 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""
        # NOTE(review): several statements are elided in this copy
        # ('suite = entry[1].lower()', 'cansave = 0/1', the trailing
        # else: and 'if not cansave:' guards) -- confirm upstream.
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # NOTE(review): 'i.lower' is the bound method object, not
            # the lower-cased string -- almost certainly missing '()'.
            # As written these lists can never match the DB suite names
            # compared below; only the appended target_suite works.
            must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    # A distribution-version mapping may let us "prop up"
                    # the upload to another suite instead of rejecting.
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]
                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
904 ################################################################################
906 def check_binary_against_db(self, file):
907 self.reject_message = ""
908 files = self.pkg.files
910 # Ensure version is sane
911 q = self.projectB.query("""
912 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
914 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
915 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
916 % (files[file]["package"],
917 files[file]["architecture"]))
918 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
920 # Check for any existing copies of the file
921 q = self.projectB.query("""
922 SELECT b.id FROM binaries b, architecture a
923 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
924 AND a.id = b.architecture"""
925 % (files[file]["package"],
926 files[file]["version"],
927 files[file]["architecture"]))
929 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
931 return self.reject_message
933 ################################################################################
935 def check_source_against_db(self, file):
936 self.reject_message = ""
939 # Ensure version is sane
940 q = self.projectB.query("""
941 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
942 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
943 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
945 return self.reject_message
947 ################################################################################
950 # NB: this function can remove entries from the 'files' index [if
951 # the .orig.tar.gz is a duplicate of the one in the archive]; if
952 # you're iterating over 'files' and call this function as part of
953 # the loop, be sure to add a check to the top of the loop to
954 # ensure you haven't just tried to dereference the deleted entry.
957 def check_dsc_against_db(self, file):
958 self.reject_message = ""
959 files = self.pkg.files
960 dsc_files = self.pkg.dsc_files
961 legacy_source_untouchable = self.pkg.legacy_source_untouchable
962 self.pkg.orig_tar_gz = None
964 # Try and find all files mentioned in the .dsc. This has
965 # to work harder to cope with the multiple possible
966 # locations of an .orig.tar.gz.
967 # The ordering on the select is needed to pick the newest orig
968 # when it exists in multiple places.
969 for dsc_file in dsc_files.keys():
971 if files.has_key(dsc_file):
972 actual_md5 = files[dsc_file]["md5sum"]
973 actual_size = int(files[dsc_file]["size"])
974 found = "%s in incoming" % (dsc_file)
975 # Check the file does not already exist in the archive
976 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
978 # Strip out anything that isn't '%s' or '/%s$'
980 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
983 # "[dak] has not broken them. [dak] has fixed a
984 # brokenness. Your crappy hack exploited a bug in
987 # "(Come on! I thought it was always obvious that
988 # one just doesn't release different files with
989 # the same name and version.)"
990 # -- ajk@ on d-devel@l.d.o
993 # Ignore exact matches for .orig.tar.gz
995 if dsc_file.endswith(".orig.tar.gz"):
997 if files.has_key(dsc_file) and \
998 int(files[dsc_file]["size"]) == int(i[0]) and \
999 files[dsc_file]["md5sum"] == i[1]:
1000 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1002 self.pkg.orig_tar_gz = i[2] + i[3]
1006 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1007 elif dsc_file.endswith(".orig.tar.gz"):
1009 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1011 # Strip out anything that isn't '%s' or '/%s$'
1013 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1017 # Unfortunately, we may get more than one match here if,
1018 # for example, the package was in potato but had an -sa
1019 # upload in woody. So we need to choose the right one.
1021 x = ql[0]; # default to something sane in case we don't match any or have only one
1025 old_file = i[0] + i[1]
1026 old_file_fh = utils.open_file(old_file)
1027 actual_md5 = apt_pkg.md5sum(old_file_fh)
1029 actual_size = os.stat(old_file)[stat.ST_SIZE]
1030 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1033 legacy_source_untouchable[i[3]] = ""
1035 old_file = x[0] + x[1]
1036 old_file_fh = utils.open_file(old_file)
1037 actual_md5 = apt_pkg.md5sum(old_file_fh)
1039 actual_size = os.stat(old_file)[stat.ST_SIZE]
1042 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1043 # See install() in process-accepted...
1044 self.pkg.orig_tar_id = x[3]
1045 self.pkg.orig_tar_gz = old_file
1046 if suite_type == "legacy" or suite_type == "legacy-mixed":
1047 self.pkg.orig_tar_location = "legacy"
1049 self.pkg.orig_tar_location = x[4]
1051 # Not there? Check the queue directories...
1053 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1054 # See process_it() in 'dak process-unchecked' for explanation of this
1055 # in_unchecked check dropped by ajt 2007-08-28, how did that
1057 if os.path.exists(in_unchecked) and False:
1058 return (self.reject_message, in_unchecked)
1060 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1061 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1062 if os.path.exists(in_otherdir):
1063 in_otherdir_fh = utils.open_file(in_otherdir)
1064 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1065 in_otherdir_fh.close()
1066 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1068 self.pkg.orig_tar_gz = in_otherdir
1071 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1072 self.pkg.orig_tar_gz = -1
1075 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1077 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1078 self.reject("md5sum for %s doesn't match %s." % (found, file))
1079 if actual_size != int(dsc_files[dsc_file]["size"]):
1080 self.reject("size for %s doesn't match %s." % (found, file))
1082 return (self.reject_message, None)
1084 def do_query(self, q):
1085 sys.stderr.write("query: \"%s\" ... " % (q))
1086 before = time.time()
1087 r = self.projectB.query(q)
1088 time_diff = time.time()-before
1089 sys.stderr.write("took %.3f seconds.\n" % (time_diff))