3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
29 ###############################################################################
# Pre-compiled regexps used throughout this module.
re_isanum = re.compile (r"^\d+$")            # an entirely-numeric string (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")  # the bracketed default answer in a prompt string
re_fdnic = re.compile(r"\n\n")               # blank-line separator (re-folded for direport)
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
36 ################################################################################
38 # Determine what parts in a .changes are NEW
40 def determine_new(changes, files, projectB, warn=1):
43 # Build up a list of potentially new things
44 for file_entry in files.keys():
46 # Skip byhand elements
47 if f["type"] == "byhand":
50 priority = f["priority"]
51 section = f["section"]
52 file_type = get_type(f)
53 component = f["component"]
55 if file_type == "dsc":
57 if not new.has_key(pkg):
59 new[pkg]["priority"] = priority
60 new[pkg]["section"] = section
61 new[pkg]["type"] = file_type
62 new[pkg]["component"] = component
63 new[pkg]["files"] = []
65 old_type = new[pkg]["type"]
66 if old_type != file_type:
67 # source gets trumped by deb or udeb
69 new[pkg]["priority"] = priority
70 new[pkg]["section"] = section
71 new[pkg]["type"] = file_type
72 new[pkg]["component"] = component
73 new[pkg]["files"].append(file_entry)
74 if f.has_key("othercomponents"):
75 new[pkg]["othercomponents"] = f["othercomponents"]
77 for suite in changes["suite"].keys():
78 suite_id = database.get_suite_id(suite)
79 for pkg in new.keys():
80 component_id = database.get_component_id(new[pkg]["component"])
81 type_id = database.get_override_type_id(new[pkg]["type"])
82 q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
85 for file_entry in new[pkg]["files"]:
86 if files[file_entry].has_key("new"):
87 del files[file_entry]["new"]
91 if changes["suite"].has_key("stable"):
92 print "WARNING: overrides will be added for stable!"
93 if changes["suite"].has_key("oldstable"):
94 print "WARNING: overrides will be added for OLDstable!"
95 for pkg in new.keys():
96 if new[pkg].has_key("othercomponents"):
97 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
101 ################################################################################
def get_type(f):
    """Return the override type ("deb", "udeb" or "dsc") for file entry f.

    NOTE(review): the original "def get_type" header and the else/return
    control flow were dropped from this excerpt and have been restored —
    confirm against the full file.
    """
    # Determine the type from the database type if known, else from extension.
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
119 ################################################################################
121 # check if section/priority values are valid
# check if section/priority values are valid

def check_valid(new):
    """Resolve section/priority names to database ids for every NEW entry.

    Entries whose section or priority is inconsistent with their type
    (debian-installer vs udeb, "source" vs dsc) get the id forced to -1,
    which marks them as invalid for later processing.
    """
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]

        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(entry["priority"])

        # debian-installer sections may only hold udebs (or source);
        # conversely udebs must live in a debian-installer section.
        di = section.find("debian-installer") != -1
        bad_di = di and file_type not in ("udeb", "dsc")
        bad_udeb = (not di) and file_type == "udeb"
        if bad_di or bad_udeb:
            entry["section id"] = -1

        # Priority "source" is reserved for .dsc entries and vice versa.
        source_mismatch = priority == "source" and file_type != "dsc"
        dsc_mismatch = priority != "source" and file_type == "dsc"
        if source_mismatch or dsc_mismatch:
            entry["priority id"] = -1
139 ###############################################################################
141 # Convenience wrapper to carry around all the package information in
144 def __init__(self, **kwds):
145 self.__dict__.update(kwds)
147 def update(self, **kwds):
148 self.__dict__.update(kwds)
150 ###############################################################################
154 def __init__(self, Cnf):
156 self.accept_count = 0
157 self.accept_bytes = 0L
158 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
159 legacy_source_untouchable = {})
161 # Initialize the substitution template mapping global
162 Subst = self.Subst = {}
163 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
164 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
165 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
166 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
168 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
169 database.init(Cnf, self.projectB)
171 ###########################################################################
173 def init_vars (self):
174 self.pkg.changes.clear()
176 self.pkg.files.clear()
177 self.pkg.dsc_files.clear()
178 self.pkg.legacy_source_untouchable.clear()
179 self.pkg.orig_tar_id = None
180 self.pkg.orig_tar_location = ""
181 self.pkg.orig_tar_gz = None
183 ###########################################################################
185 def update_vars (self):
186 dump_filename = self.pkg.changes_file[:-8]+".dak"
187 dump_file = utils.open_file(dump_filename)
188 p = cPickle.Unpickler(dump_file)
190 self.pkg.changes.update(p.load())
191 self.pkg.dsc.update(p.load())
192 self.pkg.files.update(p.load())
193 self.pkg.dsc_files.update(p.load())
194 self.pkg.legacy_source_untouchable.update(p.load())
196 self.pkg.orig_tar_id = p.load()
197 self.pkg.orig_tar_location = p.load()
201 ###########################################################################
203 # This could just dump the dictionaries as is, but I'd like to
204 # avoid this so there's some idea of what process-accepted &
205 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Pickle the per-upload state into <changes-basename>.dak in dest_dir.

        Only whitelisted fields are written, so readers (process-accepted,
        process-new) see a stable schema rather than raw internal dicts.
        """
        # NOTE(review): this excerpt appears to be missing lines here —
        # a "dsc = self.pkg.dsc" binding and the "d_changes = {} / d_dsc = {} /
        # d_files = {} / d_dsc_files = {}" initialisations used below.
        # Confirm against the full file.
        changes = self.pkg.changes
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        # The dump lives next to the .changes file, group-writable so other
        # dak processes can pick it up.
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        os.chmod(dump_filename, 0664)

        # Protocol-1 pickle, matching what update_vars() reads back.
        p = cPickle.Pickler(dump_file, 1)

        # Copy only the whitelisted per-file fields.
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        # NOTE(review): the list literal below is truncated in this excerpt
        # (no closing "]") — the remainder of the list is missing.
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]
        # NOTE(review): the body of the loop below (copying dsc fields into
        # d_dsc) is missing from this excerpt.
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump each object in the fixed order update_vars() expects.
        # NOTE(review): the loop body ("p.dump(i)") and a final
        # "dump_file.close()" appear to be missing from this excerpt.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
272 ###########################################################################
274 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the per-package template substitution mapping from the
        current .changes data (maintainer addresses, source, version, ...)."""
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        # NOTE(review): a "Subst = self.Subst" binding appears to be missing
        # from this excerpt; "Subst" below otherwise looks unbound.
        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # NOTE(review): an "else:" line appears to have been dropped here —
        # the three assignments below are the non-source/maintainer branch.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the BTS version-tracking server on sourceful uploads.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
316 ###########################################################################
    def build_summaries(self):
        """Build the long and short human-readable summaries of this upload.

        Returns a (summary, short_summary) tuple; short_summary is the
        per-file listing without the Changes/override trailer sections.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        # NOTE(review): a "file_keys.sort()" and the byhand/new flag
        # assignments appear to be missing from this excerpt.
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                if files[file_entry]["type"] == "deb":
                    # Pull the package Description straight out of the .deb control data.
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
            # NOTE(review): a "deb_fh.close()" and an "else:" branch header
            # (for files that are neither byhand nor new) appear to be
            # missing before the pool-destination lines below.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        # NOTE(review): guard conditions around the following additions
        # appear to be missing from this excerpt.
        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
372 ###########################################################################
    def close_bugs (self, summary, action):
        """Mail closure messages for the bugs this upload closes.

        NOTE(review): many lines are missing from this excerpt — the
        Cnf/Subst bindings, the bug-list guard, the "for bug in bugs:" loop
        header, an "if action:" guard, the closing quotes of the
        stable-warning template string below, and the final return.
        Confirm against the full file before relying on this body.
        """
        changes = self.pkg.changes
        bugs = changes["closes"].keys()
        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
407 ###########################################################################
    def announce (self, short_summary, action):
        """Announce a source upload to each target suite's announce list.

        NOTE(review): several lines are missing from this excerpt — the
        Cnf/Subst bindings, "summary = ''" and "lists_done = {}"
        initialisations, the early-return/continue bodies for the two bare
        "if" headers below, an "if action:" guard around sending, and the
        final "return summary". Confirm against the full file.
        """
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Skip suites with no list configured and lists already mailed.
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            # Bcc the version-tracking server on sourceful uploads.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
441 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept an upload: move it to Dir::Queue::Accepted, mail/announce it,
        emit BTS version-tracking files and register it with the build queue.

        NOTE(review): local "Cnf"/"Subst"/"dsc" bindings appear to be missing
        from this excerpt — confirm against the full file.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        # Snapshot the parsed state alongside the queued files.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # NOTE(review): the rest of this original comment is missing here.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write via a dot-prefixed temp file, then rename into place
                # so readers never see a partial file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            # NOTE(review): a "debinfo.close()" appears to be missing from
            # this excerpt before the rename below.
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
511 ###########################################################################
    def queue_build (self, queue, path):
        """Register this upload's files with the named auto-build queue.

        queue -- queue name, e.g. "accepted"
        path  -- directory the upload's files currently live in

        NOTE(review): lines missing from this excerpt: a local "Cnf" binding,
        a "continue" for suites not in Dinstall::QueueBuildSuites, an "else:"
        between the copy and symlink branches, "filename = dsc_file",
        "ql = q.getresult()" with its empty-result guard, and the "else:"
        before the final UPDATE. Confirm against the full file.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # All queue_build bookkeeping happens inside one transaction.
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Security builds get a per-suite subdirectory.
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
569 ###########################################################################
    def check_override (self):
        """Mail the maintainer when the .changes disagrees with the override
        database on section/priority for any binary in the upload.

        NOTE(review): lines missing from this excerpt: the "return" body of
        the abandon check below, a "summary = ''" initialisation, an
        "if summary == '': return" guard, and Cnf/Subst local bindings before
        the mail is built. Confirm against the full file.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        file_keys = files.keys()

        for file_entry in file_keys:
            # Only binaries that already have an override entry are checked.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
607 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        NOTE(review): the original docstring was truncated in this excerpt
        and has been re-closed above; also missing are the Cnf binding, the
        try/except wrappers around both os.open() calls, the try: before
        find_next_free, and the "continue" statements on the skip/failure
        paths. Confirm against the full file.
        """
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            # O_EXCL claims the destination atomically so two dak processes
            # can't clobber each other's reject files.
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # File exists? Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                morgue_file = utils.find_next_free(morgue_file)
                except NoFreeFilenameError:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
                # Retry the claim now that the old file is out of the way.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
649 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject an upload: collect/compose the reason, move the files into
        the reject directory and mail the rejection to the uploader.

        NOTE(review): many lines are missing from this excerpt — the editor
        re-edit loop header, the answer-dispatch (Abandon/Quit handling),
        "temp_fh.close()", the "if not manual:"/"else:" branch headers around
        the two Subst set-ups, "os.close(reason_fd)", and local Cnf/Subst/pkg
        bindings. Confirm against the full file.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
                answer = answer[:1].upper()
            os.unlink(temp_filename)

        # The .reason file lives next to the rejected files.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        # Automated rejection: canned rejector address, raw message on disk.
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
724 ################################################################################
726 # Ensure that source exists somewhere in the archive for the binary
727 # upload being processed.
729 # (1) exact match => 1.0-3
730 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check whether matching source exists in the archive for a binary
        upload: (1) an exact version match, or (2) the version with a
        binary-only-NMU suffix ("+bN") stripped.

        NOTE(review): lines missing from this excerpt: the per-suite loop
        header, the remainder of the first "que" assignment (it ends in a
        line continuation), the suite-mapping expansion loop around the bare
        "if" below, and the success/failure returns. Confirm against the
        full file. Also note the mutable default argument (suites=["any"]) —
        safe only while callers never mutate it.
        """
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        if x[1] in s and x[0] not in s:
        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]

        # (1) exact version match
        if source_version in ql:
        # (2) binary-only NMU: strip the "+bN" suffix and retry
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
772 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry for a package and record its section
        and priority on the file entry for later disparity checking.

        NOTE(review): lines missing from this excerpt: 'file_type = "dsc"'
        for the source branch and its "else:", the unknown-suite handling
        after get_suite_id, the "if result != []:" guard before the
        assignments at the bottom, and the final return. Confirm against
        the full file.
        """
        files = self.pkg.files

        if binary_type == "": # must be source
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        files[file]["override section"] = result[0][0]
        files[file]["override priority"] = result[0][1]
811 ################################################################################
813 def reject (self, str, prefix="Rejected: "):
815 # Unlike other rejects we add new lines first to avoid trailing
816 # new lines when this message is passed back up to a caller.
817 if self.reject_message:
818 self.reject_message += "\n"
819 self.reject_message += prefix + str
821 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version found in `suite` or any suite that
        `suite` enhances (per Suite::*::VersionChecks::Enhances).

        NOTE(review): the "anyversion = None" initialisation, the
        "anyversion = v" assignment inside the innermost branch, and the
        final "return anyversion" are missing from this excerpt — confirm
        against the full file.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
832 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""
        # NOTE(review): BUG? — "i.lower" below stores the *bound method*
        # rather than the lowercased string; it almost certainly should be
        # "i.lower()". As written, configured MustBe{Newer,Older}Than
        # entries can never match a suite name.
        # NOTE(review): lines missing from this excerpt include the binding
        # of "suite" from each query_result entry and several "else:" /
        # guard lines — confirm against the full file.

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes

                    # A distribution-version mapping lets us propagate to a
                    # replacement suite instead of rejecting outright.
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        self.reject("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                    # NOTE(review): a final "else:" branch header appears to
                    # be missing before the fallthrough reject below.
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
897 ################################################################################
def check_binary_against_db(self, file):
    """Cross-check the binary package in 'file' against the database.

    Two checks are performed:
      1. every existing (version, suite) pair for this package name on its
         own architecture (or 'all') is fed to cross_suite_version_check()
         so version-ordering constraints between suites are enforced;
      2. if an identical package/version/architecture row already exists
         in 'binaries', the upload is rejected rather than overwriting it.

    Returns the accumulated self.reject_message ("" when everything is OK).
    """
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane.
    # NOTE: values are interpolated straight into the SQL (historic dak
    # style with the pg module); they come from the parsed .changes.
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                     architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

    # Check for any existing copies of the file.
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    # Only reject when the query actually found an existing copy; the
    # previous code called reject() unconditionally.
    if q.getresult():
        self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
926 ################################################################################
def check_source_against_db(self, file):
    """Cross-check the source package named in the .dsc against the database.

    Looks up every (version, suite) pair already recorded for this source
    package and delegates to cross_suite_version_check() to enforce the
    configured must-be-newer-than / must-be-older-than constraints.

    Returns the accumulated self.reject_message ("" when everything is OK).
    """
    self.reject_message = ""
    # Bind the parsed .dsc; the previous code used 'dsc' without binding
    # it first, which raised NameError on every call.
    dsc = self.pkg.dsc

    # Ensure version is sane.
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

    return self.reject_message
940 ################################################################################
943 # NB: this function can remove entries from the 'files' index [if
944 # the .orig.tar.gz is a duplicate of the one in the archive]; if
945 # you're iterating over 'files' and call this function as part of
946 # the loop, be sure to add a check to the top of the loop to
947 # ensure you haven't just tried to dereference the deleted entry.
950 def check_dsc_against_db(self, file):
# Cross-check every file listed in the .dsc against the archive.
# For each entry in self.pkg.dsc_files:
#   - if the file is part of this upload, query the 'files' table so an
#     archive copy with a different size/md5sum is not overwritten
#     (an exact .orig.tar.gz duplicate is dropped with a warning instead);
#   - otherwise, for an .orig.tar.gz not in the upload, search the pool
#     (choosing among multiple matches) and then the queue directories,
#     recording the location in self.pkg.orig_tar_gz / orig_tar_id /
#     orig_tar_location;
#   - finally compare the md5sum and size actually found with the .dsc.
# Returns a (reject_message, path-or-None) tuple; reject_message is ""
# when everything checked out.
# NOTE(review): several control-flow lines appear to be missing from this
# copy -- 'i', 'ql' and 'suite_type' are referenced without a visible
# binding, and some if/elif branches lack their bodies.  Verify against
# upstream dak before relying on the exact flow.  The leading numbers on
# each line are residual line numbers from the original file.
951 self.reject_message = ""
952 files = self.pkg.files
953 dsc_files = self.pkg.dsc_files
954 legacy_source_untouchable = self.pkg.legacy_source_untouchable
# Reset: filled in below once/if the .orig.tar.gz is located.
955 self.pkg.orig_tar_gz = None
957 # Try and find all files mentioned in the .dsc. This has
958 # to work harder to cope with the multiple possible
959 # locations of an .orig.tar.gz.
960 # The ordering on the select is needed to pick the newest orig
961 # when it exists in multiple places.
962 for dsc_file in dsc_files.keys():
# Case 1: the .dsc-listed file is part of this upload itself.
964 if files.has_key(dsc_file):
965 actual_md5 = files[dsc_file]["md5sum"]
966 actual_size = int(files[dsc_file]["size"])
967 found = "%s in incoming" % (dsc_file)
968 # Check the file does not already exist in the archive
969 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
971 # Strip out anything that isn't '%s' or '/%s$'
# NOTE(review): 'i' is presumably a row of the query result; the loop
# line over the result list is not visible in this copy.
973 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
976 # "[dak] has not broken them. [dak] has fixed a
977 # brokenness. Your crappy hack exploited a bug in
980 # "(Come on! I thought it was always obvious that
981 # one just doesn't release different files with
982 # the same name and version.)"
983 # -- ajk@ on d-devel@l.d.o
986 # Ignore exact matches for .orig.tar.gz
# A byte-identical .orig.tar.gz already in the archive is harmless:
# warn, remember the pooled copy, and carry on.
988 if dsc_file.endswith(".orig.tar.gz"):
990 if files.has_key(dsc_file) and \
991 int(files[dsc_file]["size"]) == int(i[0]) and \
992 files[dsc_file]["md5sum"] == i[1]:
993 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
# i[2] is the location path, i[3] the filename (per the SELECT above).
995 self.pkg.orig_tar_gz = i[2] + i[3]
# Same name/version but different content: hard reject.
999 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
# Case 2: an .orig.tar.gz referenced by the .dsc but not uploaded --
# look for it in the pool.
1000 elif dsc_file.endswith(".orig.tar.gz"):
1002 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1004 # Strip out anything that isn't '%s' or '/%s$'
1006 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1010 # Unfortunately, we may get more than one match here if,
1011 # for example, the package was in potato but had an -sa
1012 # upload in woody. So we need to choose the right one.
1014 x = ql[0]; # default to something sane in case we don't match any or have only one
# Checksum each candidate against the .dsc to pick the right row;
# matching rows that are skipped get marked untouchable below.
1018 old_file = i[0] + i[1]
1019 old_file_fh = utils.open_file(old_file)
1020 actual_md5 = apt_pkg.md5sum(old_file_fh)
1022 actual_size = os.stat(old_file)[stat.ST_SIZE]
1023 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1026 legacy_source_untouchable[i[3]] = ""
# Re-checksum the chosen candidate 'x' for the final comparison.
1028 old_file = x[0] + x[1]
1029 old_file_fh = utils.open_file(old_file)
1030 actual_md5 = apt_pkg.md5sum(old_file_fh)
1032 actual_size = os.stat(old_file)[stat.ST_SIZE]
1035 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1036 # See install() in process-accepted...
1037 self.pkg.orig_tar_id = x[3]
1038 self.pkg.orig_tar_gz = old_file
# NOTE(review): 'suite_type' has no visible binding here -- presumably
# derived from l.type (x[2]) in a line lost from this copy; confirm.
1039 if suite_type == "legacy" or suite_type == "legacy-mixed":
1040 self.pkg.orig_tar_location = "legacy"
1042 self.pkg.orig_tar_location = x[4]
1044 # Not there? Check the queue directories...
1046 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1047 # See process_it() in 'dak process-unchecked' for explanation of this
1048 # in_unchecked check dropped by ajt 2007-08-28, how did that
# The 'and False' makes this branch unreachable -- deliberately
# disabled (see the comment above), kept for reference.
1050 if os.path.exists(in_unchecked) and False:
1051 return (self.reject_message, in_unchecked)
1053 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1054 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1055 if os.path.exists(in_otherdir):
1056 in_otherdir_fh = utils.open_file(in_otherdir)
1057 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1058 in_otherdir_fh.close()
1059 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1061 self.pkg.orig_tar_gz = in_otherdir
1064 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
# -1 appears to be a sentinel for "referenced but nowhere to be
# found", distinct from None (never looked for) -- TODO confirm
# against the callers in process-unchecked.
1065 self.pkg.orig_tar_gz = -1
1068 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
# Final verification of whatever copy was settled on above.
1070 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1071 self.reject("md5sum for %s doesn't match %s." % (found, file))
1072 if actual_size != int(dsc_files[dsc_file]["size"]):
1073 self.reject("size for %s doesn't match %s." % (found, file))
1075 return (self.reject_message, None)
1077 def do_query(self, q):
1078 sys.stderr.write("query: \"%s\" ... " % (q))
1079 before = time.time()
1080 r = self.projectB.query(q)
1081 time_diff = time.time()-before
1082 sys.stderr.write("took %.3f seconds.\n" % (time_diff))