3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
29 ###############################################################################
# Precompiled regular expressions used throughout this module.
re_isanum = re.compile (r"^\d+$")            # a string that is entirely digits (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")  # the bracketed default choice in a prompt like "[R]eject"
re_fdnic = re.compile(r"\n\n")               # a blank line (used to reformat changelog text)
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "1.0-3+b1"
36 ################################################################################
38 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Build the index of potentially NEW packages from a .changes upload.

    For every file in 'files' a per-source-package record (priority,
    section, type, component, member files) is accumulated; entries that
    already have an override row in every target suite are then removed.

    NOTE(review): several statements visible in the upstream original
    appear to be missing from this excerpt (the binding of 'f' from
    files[file_entry], the 'continue' after the byhand test, the
    initialisation of 'new', the 'pkg' binding, the override-query
    result check and the final 'return new').  Names such as 'f', 'pkg'
    and 'new' are referenced here but never bound — confirm against the
    full file before relying on this text.
    """
    # Build up a list of potentially new things
    for file_entry in files.keys():
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]
        # presumably a .dsc keys 'new' by source package name — TODO confirm
        if file_type == "dsc":
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything with an existing override row in a target suite is not NEW.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            # NOTE(review): the check of q's result (and removal of the
            # package from 'new') appears to be missing from this excerpt.
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # NOTE(review): these warnings were presumably guarded by 'warn' —
    # the guard line is not visible in this excerpt; confirm upstream.
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
101 ################################################################################
105 if f.has_key("dbtype"):
106 file_type = f["dbtype"]
107 elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
110 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
112 # Validate the override type
113 type_id = database.get_override_type_id(file_type)
115 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
119 ################################################################################
121 # check if section/priority values are valid
def check_valid(new):
    """Resolve and validate section/priority for every NEW entry.

    Looks up the database ids for each entry's section and priority and
    stores them under "section id" / "priority id".  An id of -1 marks an
    invalid combination: debian-installer sections are only valid for
    udeb/dsc files, and the "source" priority pairs exclusively with dsc.
    """
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]

        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(priority)

        # debian-installer sections go with udeb/dsc only; udebs must
        # live in a debian-installer section.
        is_di_section = section.find("debian-installer") != -1
        bad_di = is_di_section and file_type not in ("udeb", "dsc")
        bad_udeb = (not is_di_section) and file_type == "udeb"
        if bad_di or bad_udeb:
            entry["section id"] = -1

        # "source" priority and the dsc type must occur together.
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
139 ###############################################################################
141 # Convenience wrapper to carry around all the package information in
    def __init__(self, **kwds):
        """Store every supplied keyword argument as an instance attribute."""
        self.__dict__.update(kwds)
147 def update(self, **kwds):
148 self.__dict__.update(kwds)
150 ###############################################################################
    def __init__(self, Cnf):
        """Initialise the upload handler from the dak configuration.

        Sets up accept counters, an empty Pkg container, the template
        substitution map and the database connection.

        NOTE(review): the upstream original also stores the configuration
        (self.Cnf = Cnf) and a Logger here; those lines are not visible in
        this excerpt although other methods read self.Cnf / self.Logger.
        """
        self.accept_count = 0
        self.accept_bytes = 0L
        # Empty per-upload state; populated by init_vars()/update_vars().
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # PostgreSQL connection shared by all queries in this class.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
171 ###########################################################################
    def init_vars (self):
        """Reset all per-upload state in self.pkg to empty/initial values.

        NOTE(review): the upstream original also clears self.pkg.dsc;
        that line is not visible in this excerpt.
        """
        self.pkg.changes.clear()
        self.pkg.files.clear()
        self.pkg.dsc_files.clear()
        self.pkg.legacy_source_untouchable.clear()
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None
183 ###########################################################################
    def update_vars (self):
        """Reload per-upload state from the pickled <changes>.dak file.

        The loads must occur in exactly the order dump_vars() wrote them:
        changes, dsc, files, dsc_files, legacy_source_untouchable, then
        orig_tar_id and orig_tar_location.

        NOTE(review): the dump file is never closed in this excerpt —
        the close presumably happens on a line not visible here.  Also
        note cPickle.load on these files trusts their contents.
        """
        # changes_file ends in ".changes" (8 chars); swap that for ".dak".
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)
        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())
        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()
201 ###########################################################################
203 # This could just dump the dictionaries as is, but I'd like to
204 # avoid this so there's some idea of what process-accepted &
205 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Pickle the per-upload state into <dest_dir>/<changes>.dak.

        Only a whitelisted subset of keys is written so that it stays
        obvious which fields process-accepted / process-new actually
        consume from process-unchecked.

        NOTE(review): the initialisation of the d_changes/d_dsc/d_files/
        d_dsc_files scratch dicts, the 'dsc' local, parts of the optional
        changes-field list, the dsc copy-loop body and the final
        p.dump(i) call appear to be missing from this excerpt.
        """
        changes = self.pkg.changes
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # group-writable so other dak processes can rewrite it
        os.chmod(dump_filename, 0664)

        # protocol 1 (binary) pickler
        p = cPickle.Pickler(dump_file, 1)

        # Copy only the whitelisted per-file keys.
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        # NOTE(review): this list literal is unterminated here — its
        # continuation line is missing from this excerpt.
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
            if changes.has_key(i):
                d_changes[i] = changes[i]
        # Whitelisted .dsc fields (copy body not visible in this excerpt).
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
        # Write everything out in the order update_vars() expects.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
275 ###########################################################################
277 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the per-package template substitution map (self.Subst).

        NOTE(review): the binding 'Subst = self.Subst' and the 'else:'
        between the two maintainer-assignment groups are not visible in
        this excerpt; the second group is presumably the non-sourceful
        fallback branch — confirm against the full file.
        """
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
            # NOTE(review): the following three lines are presumably the
            # body of a missing 'else:' branch.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package-tracking address for sourceful uploads.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
319 ###########################################################################
    def build_summaries(self):
        """Build the human-readable (summary, short_summary) pair for mails.

        Walks the upload's files, noting byhand and NEW entries, destination
        pool paths and any override information, then appends the changelog
        text reformatted for the d-i report.

        NOTE(review): several lines are not visible in this excerpt (the
        byhand/new flag assignments, an 'else:' before the pool-destination
        lines, the deb control-file handle close, and any guards around the
        trailing summary additions) — confirm against the full file.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For binaries include the package description in the summary.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                # NOTE(review): these three lines are presumably the body
                # of a missing 'else:' (the not-byhand, not-new case).
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
            if not files[file_entry].has_key("type"):
                files[file_entry]["type"] = "unknown"
            if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                # (queue/unchecked), there we have override entries already, use them
                # (process-new), there we dont have override entries, use the newly generated ones.
                override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
375 ###########################################################################
    def close_bugs (self, summary, action):
        """Note closing bugs in the summary and (when acting) mail closures.

        NOTE(review): the 'for bug in bugs:' loop header, the 'if action:'
        guard, the line terminating the stable-warning string literal, the
        'else:' before the empty-warning assignment and the final 'return
        summary' are not visible in this excerpt — the triple-quoted string
        below is therefore unterminated as shown.  Confirm against the
        full file.
        """
        changes = self.pkg.changes

        bugs = changes["closes"].keys()

        summary += "Closing bugs: "
        summary += "%s " % (bug)
        Subst["__BUG_NUMBER__"] = bug
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
410 ###########################################################################
    def announce (self, short_summary, action):
        """Send announcement mails to each target suite's Announce list.

        NOTE(review): the locals Cnf/Subst/lists_done/summary, the early
        'return ""', the 'continue' after the already-announced check,
        'if action:' guards and the final 'return summary' are not
        visible in this excerpt — confirm against the full file.
        """
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            # skip suites without a list, or lists already announced to
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
            # Bcc the package-tracking address for sourceful uploads.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
444 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept the upload: move its files to the accepted queue,
        send mail/announcements, and write DebBugs version-tracking data.

        NOTE(review): locals such as Cnf, Subst and dsc (self.pkg.dsc)
        and the close of the debinfo file handle are not visible in this
        excerpt — confirm against the full file.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        # Snapshot state for process-accepted / process-new.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # dot-prefixed temp file so half-written data is ignored
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                # atomic publish of the .versions file
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            # atomic publish of the .debinfo file
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
514 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files with the auto-build queue.

        For each queue-buildable target suite the files are symlinked (or
        copied, for security queues) into the queue-build directory and
        recorded in the queue_build table; the pool .orig.tar.gz is linked
        in as well when needed.

        NOTE(review): the 'Cnf' local, the 'continue' after the suite
        filter, 'else:' branches around the copy/symlink choice, the
        'filename' binding from dsc_file, the query-result ('ql') binding
        and check, and the 'else:' before the final UPDATE are not
        visible in this excerpt — confirm against the full file.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # all inserts/updates happen in one transaction
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
572 ###########################################################################
    def check_override (self):
        """Mail the maintainer when package section/priority disagree with
        the archive overrides.

        NOTE(review): the 'Cnf'/'Subst' locals, the early 'return' after
        the abandon check, the 'summary' initialisation and an
        empty-summary short-circuit are not visible in this excerpt —
        confirm against the full file.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        file_keys = files.keys()
        for file_entry in file_keys:
            # NEW debs have no override yet; only check known binaries.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
610 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file."""
        # NOTE(review): the 'continue' after the access check, the
        # try/except scaffolding around both os.open() calls and around
        # find_next_free(), and related 'continue's are not visible in
        # this excerpt — confirm against the full file.
        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            # O_EXCL: claim the destination atomically so concurrent
            # rejects can't clobber each other
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # File exists? Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                morgue_file = utils.find_next_free(morgue_file)
                except NoFreeFilenameError:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
                # retry the atomic claim now that the old file is gone
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
652 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the upload: move its files aside, write a .reason file
        and (optionally) mail the rejection to the uploader.

        NOTE(review): the 'answer' initialisation and the branching of
        the edit/abandon prompt loop, the 'pkg'/'Cnf'/'Subst' locals, an
        'if not manual:'/'else:' split around the two template blocks,
        the close of reason_fd and the return value are not visible in
        this excerpt — confirm against the full file.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
                answer = answer[:1].upper()
            os.unlink(temp_filename)

        # <changes>.reason lives alongside the rejected files.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        # automatic rejection template values
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
727 ################################################################################
729 # Ensure that source exists somewhere in the archive for the binary
730 # upload being processed.
732 # (1) exact match => 1.0-3
733 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check whether matching source exists in the archive for a
        binary upload: exact match (1.0-3), or binary-only NMU whose base
        version matches (1.0-3+b1 -> 1.0-3).

        NOTE(review): a mutable default argument ('suites = ["any"]') —
        harmless only if never mutated; the loop over 'suites', the 's'
        suite-set construction, the "any" query branch and the return
        statements are not visible in this excerpt — confirm against the
        full file.
        """
        # NOTE(review): this statement's continuation line is missing here.
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        if x[1] in s and x[0] not in s:
        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]

        # Try (1)
        if source_version in ql:

        # Try (2): strip a binary-only NMU suffix and retry
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
775 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry for a package and record its section
        and priority in the file map; falls back from the source (dsc)
        override type to deb/udeb when no dsc override exists.

        NOTE(review): the 'file_type = "dsc"' branch and its 'else:', the
        unknown-suite early return, and the result-presence check before
        indexing result[0] are not visible in this excerpt — confirm
        against the full file.
        """
        files = self.pkg.files

        if binary_type == "": # must be source
            # presumably the 'else:' body of a missing dsc/binary split
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        files[file]["override section"] = result[0][0]
        files[file]["override priority"] = result[0][1]
814 ################################################################################
816 def reject (self, str, prefix="Rejected: "):
818 # Unlike other rejects we add new lines first to avoid trailing
819 # new lines when this message is passed back up to a caller.
820 if self.reject_message:
821 self.reject_message += "\n"
822 self.reject_message += prefix + str
824 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version from query_result whose suite is in
        'suite' or one of the suites it Enhances (per the conf file).

        NOTE(review): the 'anyversion = None' initialisation, the
        'anyversion = v' assignment and the final return are not visible
        in this excerpt — confirm against the full file.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        # query_result rows are (version, suite_name) pairs
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
835 ################################################################################
837 def cross_suite_version_check(self, query_result, file, new_version):
838 """Ensure versions are newer than existing packages in target
839 suites and that cross-suite version checking rules as
840 set out in the conf file are satisfied."""
842 # Check versions for each target suite
843 for target_suite in self.pkg.changes["distribution"].keys():
844 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
845 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
846 # Enforce "must be newer than target suite" even if conffile omits it
847 if target_suite not in must_be_newer_than:
848 must_be_newer_than.append(target_suite)
849 for entry in query_result:
850 existent_version = entry[0]
852 if suite in must_be_newer_than and \
853 apt_pkg.VersionCompare(new_version, existent_version) < 1:
854 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
855 if suite in must_be_older_than and \
856 apt_pkg.VersionCompare(new_version, existent_version) > -1:
857 ch = self.pkg.changes
859 if ch.get('distribution-version', {}).has_key(suite):
860 # we really use the other suite, ignoring the conflicting one ...
861 addsuite = ch["distribution-version"][suite]
863 add_version = self.get_anyversion(query_result, addsuite)
864 target_version = self.get_anyversion(query_result, target_suite)
867 # not add_version can only happen if we map to a suite
868 # that doesn't enhance the suite we're propup'ing from.
869 # so "propup-ver x a b c; map a d" is a problem only if
870 # d doesn't enhance a.
872 # i think we could always propagate in this case, rather
873 # than complaining. either way, this isn't a REJECT issue
875 # And - we really should complain to the dorks who configured dak
876 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
877 self.pkg.changes.setdefault("propdistribution", {})
878 self.pkg.changes["propdistribution"][addsuite] = 1
880 elif not target_version:
881 # not targets_version is true when the package is NEW
882 # we could just stick with the "...old version..." REJECT
884 self.reject("Won't propogate NEW packages.")
885 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
886 # propogation would be redundant. no need to reject though.
887 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
889 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
890 apt_pkg.VersionCompare(add_version, target_version) >= 0:
892 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
893 self.pkg.changes.setdefault("propdistribution", {})
894 self.pkg.changes["propdistribution"][addsuite] = 1
898 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
900 ################################################################################
902 def check_binary_against_db(self, file):
903 self.reject_message = ""
904 files = self.pkg.files
906 # Ensure version is sane
907 q = self.projectB.query("""
908 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
910 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
911 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
912 % (files[file]["package"],
913 files[file]["architecture"]))
914 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
916 # Check for any existing copies of the file
917 q = self.projectB.query("""
918 SELECT b.id FROM binaries b, architecture a
919 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
920 AND a.id = b.architecture"""
921 % (files[file]["package"],
922 files[file]["version"],
923 files[file]["architecture"]))
925 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
927 return self.reject_message
929 ################################################################################
931 def check_source_against_db(self, file):
932 self.reject_message = ""
935 # Ensure version is sane
936 q = self.projectB.query("""
937 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
938 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
939 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
941 return self.reject_message
943 ################################################################################
946 # NB: this function can remove entries from the 'files' index [if
947 # the .orig.tar.gz is a duplicate of the one in the archive]; if
948 # you're iterating over 'files' and call this function as part of
949 # the loop, be sure to add a check to the top of the loop to
950 # ensure you haven't just tried to dereference the deleted entry.
    def check_dsc_against_db(self, file):
        """Check every file listed in a .dsc against the archive and queues.

        For each entry in self.pkg.dsc_files, locate the file either in the
        current upload (incoming), in the pool, or in one of the queue
        directories, and verify its md5sum and size against the .dsc.
        Records where the .orig.tar.gz lives via self.pkg.orig_tar_gz /
        orig_tar_id / orig_tar_location.  Returns a tuple
        (reject_message, path_or_None).

        NB: per the warning above this method, it can remove entries from
        self.pkg.files (a duplicate .orig.tar.gz already in the archive),
        so callers iterating 'files' must re-check keys each iteration.
        """
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        # Reset; filled in below once the orig tarball is located.
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            # Case 1: the file was shipped as part of this upload.
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                # (the LIKE above also matches longer filenames that merely
                # contain dsc_file as a substring).
                # NOTE(review): 'i' below is a row of the query result; the
                # loop header over the result set is not visible in this
                # excerpt -- confirm against the full file.
                    if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o
                    # Ignore exact matches for .orig.tar.gz
                    if dsc_file.endswith(".orig.tar.gz"):
                        if files.has_key(dsc_file) and \
                           int(files[dsc_file]["size"]) == int(i[0]) and \
                           files[dsc_file]["md5sum"] == i[1]:
                            # Same size and md5sum as the archived copy:
                            # harmless duplicate, warn and remember its
                            # pool location instead of rejecting.
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                            self.pkg.orig_tar_gz = i[2] + i[3]
                        # Any other clash with an archived file is fatal.
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            # Case 2: not uploaded this time and it's an .orig.tar.gz --
            # look for it in the pool, then in the queue directories.
            elif dsc_file.endswith(".orig.tar.gz"):
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                # Strip out anything that isn't '%s' or '/%s$'
                    if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.
                    x = ql[0]; # default to something sane in case we don't match any or have only one
                    # NOTE(review): when several pool copies match, the row
                    # whose md5sum/size agree with the .dsc is chosen and the
                    # others are marked legacy_source_untouchable; the
                    # surrounding loop/else headers are not visible in this
                    # excerpt -- confirm against the full file.
                            old_file = i[0] + i[1]
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                legacy_source_untouchable[i[3]] = ""
                    # Re-checksum the chosen pool copy 'x'.
                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                        self.pkg.orig_tar_location = x[4]
                    # Not there? Check the queue directories...
                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                    # See process_it() in 'dak process-unchecked' for explanation of this
                    # in_unchecked check dropped by ajt 2007-08-28, how did that
                    # ('and False' permanently disables this branch.)
                    if os.path.exists(in_unchecked) and False:
                        return (self.reject_message, in_unchecked)
                        # Fall back to the other queue directories.
                        for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
                            if os.path.exists(in_otherdir):
                                in_otherdir_fh = utils.open_file(in_otherdir)
                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                                in_otherdir_fh.close()
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                                self.pkg.orig_tar_gz = in_otherdir
                        # Nowhere to be found: -1 flags "looked but missing".
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                        self.pkg.orig_tar_gz = -1
                # Case 3: a non-orig file that isn't in this upload.
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
            # Whatever copy we settled on must match the .dsc exactly.
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)
1080 def do_query(self, q):
1081 sys.stderr.write("query: \"%s\" ... " % (q))
1082 before = time.time()
1083 r = self.projectB.query(q)
1084 time_diff = time.time()-before
1085 sys.stderr.write("took %.3f seconds.\n" % (time_diff))