3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Pre-compiled regular expressions shared by the queue-handling code.
re_isanum = re.compile (r"^\d+$")            # whole string is decimal digits
re_default_answer = re.compile(r"\[(.*)\]")  # default choice marked "[X]" in a prompt string
re_fdnic = re.compile(r"\n\n")               # paragraph break (blank line) in changelog text
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
35 ################################################################################
37 # Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
    """Work out which parts of the upload are NEW (no override entry).

    Builds a per-package dict of priority/section/type/component/files,
    then drops the "new" flag from files that already have an override
    row in every target suite.  With warn set, prints operator warnings.

    NOTE(review): several original lines are absent from this view
    (e.g. the initialisation of 'new' and the bindings of 'f', 'pkg'
    and 'type', plus some branch headers and the final return); the
    indentation below is a best-effort reconstruction -- confirm
    against the repository before relying on it.
    """
    # Build up a list of potentially new things
    for file in files.keys():
        # NOTE(review): 'f' is presumably bound as files[file] on an
        # omitted line -- confirm.
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        component = f["component"]
        # First sighting of this package: record its metadata.
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        # source gets trumped by deb or udeb
        new[pkg]["priority"] = priority
        new[pkg]["section"] = section
        new[pkg]["type"] = type
        new[pkg]["component"] = component
        new[pkg]["files"].append(file)
        # Remember when the package already lives in another component.
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Remove the NEW flag from anything with an existing override entry
    # in each of the target suites.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): SQL assembled with %s interpolation; package
            # names come from the upload -- trusted input is assumed.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            for file in new[pkg]["files"]:
                if files[file].has_key("new"):
                    del files[file]["new"]

    # Operator warnings (the guard line, presumably "if warn:", is
    # omitted from this view).
    if changes["suite"].has_key("stable"):
        print "WARNING: overrides will be added for stable!"
    if changes["suite"].has_key("oldstable"):
        print "WARNING: overrides will be added for OLDstable!"
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
100 ################################################################################
# NOTE(review): the enclosing "def" line for this fragment is not
# visible in this view; judging by the body it derives the override
# type for a single file entry 'f' -- confirm against the repository.
    if f.has_key("dbtype"):
        # Binary package: the database type field is authoritative.
        # (the assignment itself is omitted from this view)
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        # Source-related file, treated as the "dsc" override type.
        # (assignment and the final else header omitted from this view)
        fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (type))

    # Validate the override type
    type_id = database.get_override_type_id(type)
    # (the failure guard, presumably "if type_id == -1:", is omitted)
        fubar("invalid type (%s) for new. Say wha?" % (type))
118 ################################################################################
120 # check if section/priority values are valid
def check_valid(new):
    """Resolve section/priority ids for every NEW entry and invalidate
    (set to -1) combinations that break the archive rules:

      * only udebs may live in a debian-installer section, and
      * only source packages (.dsc) may carry priority "source".

    Mutates 'new' in place."""
    for pkg in new.keys():
        entry = new[pkg]
        pkg_section = entry["section"]
        pkg_priority = entry["priority"]
        pkg_type = entry["type"]
        entry["section id"] = database.get_section_id(pkg_section)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections and udeb packages must go together.
        in_di_section = "debian-installer" in pkg_section
        if in_di_section != (pkg_type == "udeb"):
            entry["section id"] = -1
        # Priority "source" and .dsc files must go together.
        if (pkg_priority == "source") != (pkg_type == "dsc"):
            entry["priority id"] = -1
137 ################################################################################
139 # We reject packages if the release team defined a transition for them
def check_transition(sourcepkg, cleanup=0):
    """Reject uploads of source packages participating in an ongoing
    release-team transition, as described by the YAML file named in
    Dinstall::Reject::ReleaseTransitions.

    NOTE(review): many original lines are omitted from this view (the
    early return, try/except around the YAML load, the bindings of
    'source', 'new_vers', 'ql' and 'to_dump'); the indentation below
    is a best-effort reconstruction -- confirm against the repository.
    """
    # Only check if there is a file defined (and existent) with checks. It's a little bit
    # specific to Debian, not much use for others, so return early there.
    # NOTE(review): BUG? the trailing "and" is not a legal line
    # continuation, and the intended logic is almost certainly
    # "not has_key(...) or not os.path.exists(...)": as written the
    # Cnf[...] lookup would run even when the key is absent.
    if not Cnf.has_key("Dinstall::Reject::ReleaseTransitions") and
        not os.path.exists("%s" % (Cnf["Dinstall::Reject::ReleaseTransitions"])):

    # Parse the yaml file
    sourcefile = file(Cnf["Dinstall::Reject::ReleaseTransitions"], 'r')
    # NOTE(review): 'load' is presumably PyYAML's load, imported on an
    # omitted line; yaml.load on untrusted input is unsafe -- safe_load
    # would be preferable if the file is not fully trusted.
    transitions = load(sourcefile)
    # (the except branch that wraps this warning is omitted from view)
    utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))

    # Now look through all defined transitions
    # NOTE(review): 'transition' here vs 'transitions' bound above --
    # naming mismatch; also entries are deleted from the dict while it
    # is being iterated, which modern Python rejects at runtime.
    for trans in transition:
        t = transition[trans]
        # We check if the transition is still valid
        # If not we remove the whole setting from the dictionary and later dump it,
        # so we don't process it again.
        q = Upload.projectB.query("""
        SELECT s.version FROM source s, suite su, src_associations sa
        AND su.suite_name='testing'
        if ql and apt_pkg.VersionCompare(new_vers, ql[0][0]) == 1:
            # This is still valid, the current version in database is older than
            # the new version we wait for

            # Check if the source we look at is affected by this.
            if sourcepkg in t['packages']:
                # The source is affected, lets reject it.
                reject("""%s: part of the %s transition.
Your package is part of a testing transition to get %s migrated.
Transition description: %s
This transition will finish when %s, version %s, reaches testing.
This transition is managed by the Release Team and %s
is the Release-Team member responsible for it.
Please contact them or debian-release@lists.debian.org if you
need further assistance.
% (sourcepkg, trans, source, t["reason"], source, new_vers, t["rm"]))

        # We either have the wanted or a newer version in testing, or the package got
        # removed completely. In that case we don't need to keep the transition blocker
        del transition[trans]

    # Write the (possibly pruned) transition table back out.
    if cleanup and to_dump:
        destfile = file(Cnf["Dinstall::Reject::ReleaseTransitions"], 'w')
        # ('dump' is presumably yaml.dump, imported on an omitted line)
        dump(transition, destfile)
204 ###############################################################################
206 # Convenience wrapper to carry around all the package information in
209 def __init__(self, **kwds):
210 self.__dict__.update(kwds)
212 def update(self, **kwds):
213 self.__dict__.update(kwds)
215 ###############################################################################
    def __init__(self, Cnf):
        """Set up an upload handler: accept counters, the per-upload
        package container, the template substitution map and the
        projectB database connection.

        NOTE(review): some original lines are omitted from this view
        (e.g. storing Cnf on self and further __XXX__ Subst entries);
        confirm against the repository.
        """
        self.accept_count = 0
        self.accept_bytes = 0L
        # Container for everything known about the current upload.
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # Open the database connection and prime the lookup cache layer.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
236 ###########################################################################
238 def init_vars (self):
239 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
240 exec "self.pkg.%s.clear();" % (i)
241 self.pkg.orig_tar_id = None
242 self.pkg.orig_tar_location = ""
243 self.pkg.orig_tar_gz = None
245 ###########################################################################
247 def update_vars (self):
248 dump_filename = self.pkg.changes_file[:-8]+".dak"
249 dump_file = utils.open_file(dump_filename)
250 p = cPickle.Unpickler(dump_file)
251 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
252 exec "self.pkg.%s.update(p.load());" % (i)
253 for i in [ "orig_tar_id", "orig_tar_location" ]:
254 exec "self.pkg.%s = p.load();" % (i)
257 ###########################################################################
259 # This could just dump the dictionaries as is, but I'd like to
260 # avoid this so there's some idea of what process-accepted &
261 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Serialise the per-upload state to "<changes>.dak" in dest_dir.

        Only a whitelisted subset of each dictionary is written, so it
        stays obvious what process-accepted/process-new consume.

        NOTE(review): several original lines are omitted from this view
        (the try/except around chmod, the d_* dict initialisations, the
        d_dsc copy-loop body and the final p.dump call); indentation
        below is a best-effort reconstruction -- confirm against VCS.
        """
        # Bind local aliases (changes, dsc, files, ...) to pkg attributes.
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i)
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # Tighten permissions; group access kept for the other dak tools.
        # (the enclosing try/except OSError is omitted from this view)
        os.chmod(dump_filename, 0660)
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
            # A world-readable dump we cannot chmod is a hard error.
            if perms & stat.S_IROTH:
                utils.fubar("%s is world readable and chmod failed." % (dump_filename))

        p = cPickle.Pickler(dump_file, 1)
        # Filtered copies of the big dictionaries (initialisation bodies
        # omitted from this view).
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
        for file in files.keys():
            # Per-file whitelist of fields worth persisting.
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file].has_key(i):
                    d_files[file][i] = files[file][i]

        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        # dsc fields (the copy-loop body is omitted from this view)
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
        for file in dsc_files.keys():
            d_dsc_files[file] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file][i] = dsc_files[file][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file].has_key(i):
                    d_dsc_files[file][i] = dsc_files[file][i]

        # Pickle everything in a fixed order (the p.dump call itself is
        # omitted from this view).
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
325 ###########################################################################
327 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the template substitution map with per-upload values
        (maintainer addresses, architecture, version, ...).

        NOTE(review): the local binding of Subst (presumably
        "Subst = self.Subst") and the else-branch header of the
        maintainer selection are omitted from this view; 'DictType'
        comes from the 'types' module, imported outside this view.
        """
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # (the else-branch header is omitted from this view)
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        # Bcc the package tracking system for source uploads.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
365 ###########################################################################
    def build_summaries(self):
        """Build the human-readable summaries of this upload (per-file
        destinations, NEW/byhand markers, override entries) and return
        the pair (summary, short_summary).

        NOTE(review): some original lines are omitted from this view
        (byhand/new flag bookkeeping, the else-branch header for normal
        files, conditional guards around the trailing additions);
        indentation below is a best-effort reconstruction.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        # (a sort of file_keys is presumably done on an omitted line)
        for file in file_keys:
            if files[file].has_key("byhand"):
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    # Pull the long description straight from the .deb control.
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
            # (the else-branch header for known files is omitted)
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"
                if not files[file].has_key("type"):
                    files[file]["type"] = "unknown"
                if files[file]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file].get("override priority", files[file]["priority"])
                    override_sect = files[file].get("override section", files[file]["section"])
                    override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        # (the guards around these additions are omitted from this view)
        summary += "Changes: " + f
        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
421 ###########################################################################
    def close_bugs (self, summary, action):
        """Close the bug numbers listed in the .changes "Closes" field,
        mailing each bug; returns the updated summary text.

        NOTE(review): many original lines are omitted from this view
        (early-exit guards, the per-bug loop header, the action guard
        and the closing quotes of the stable-warning template); the
        indentation below is a best-effort reconstruction.
        """
        changes = self.pkg.changes
        bugs = changes["closes"].keys()
        # (sorting / early return when no bugs are omitted from view)
        summary += "Closing bugs: "
        # (per-bug loop header omitted from this view)
        summary += "%s " % (bug)
        # (the "if action:" guard is omitted from this view)
        Subst["__BUG_NUMBER__"] = bug
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
            utils.send_mail (mail_message)
        self.Logger.log(["closing bugs"]+bugs)
456 ###########################################################################
    def announce (self, short_summary, action):
        """Announce a source upload to the per-suite announce lists and
        optionally close bugs; returns the text added to the summary.

        NOTE(review): several lines are omitted from this view (local
        Cnf/Subst bindings, lists_done/summary initialisation, the
        early return, loop-skip continues and the action guard).
        """
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            list = Cnf.Find("Suite::%s::Announce" % (dist))
            # Skip suites without a list; never mail the same list twice.
            if list == "" or lists_done.has_key(list):
            summary += "Announcing to %s\n" % (list)
            # (the "if action:" guard is omitted from this view)
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
            # Bcc the package tracking system for source uploads.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)
490 ###########################################################################
    def accept (self, summary, short_summary):
        """Accept the upload: move its files into the accepted queue,
        send mail/announcements, write the BTS version-tracking files
        and feed the auto-build queue.

        NOTE(review): several lines are omitted from this view (local
        Cnf/Subst/dsc bindings, the f = files[file] binding in the
        debinfo loop, part of one comment); reconstruction below.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # (remainder of this comment omitted; presumably "dropped")

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                # ('f' is presumably bound as files[file] on an omitted line)
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
560 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files with the named build queue:
        symlink (or copy, for security queues) each file into
        Dir::QueueBuild and record it in the queue_build table, for the
        suites listed in Dinstall::QueueBuildSuites.

        NOTE(review): several lines are omitted from this view (local
        Cnf binding, an early return, else-branch headers, the filename
        and ql bindings); indentation is a best-effort reconstruction.
        """
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                # (the else-branch header is omitted from this view)
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): SQL built with %s interpolation; 'dest'
                # derives from upload filenames -- trusted input assumed.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        # (the filename assignment is omitted from view)
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    # (the ql binding and emptiness guard are omitted)
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                # (the else-branch header is omitted from this view)
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
618 ###########################################################################
    def check_override (self):
        """Mail the maintainer when the section/priority claimed by the
        upload's binaries disagrees with the override table.

        NOTE(review): several lines are omitted from this view (local
        Cnf/Subst bindings, summary initialisation, the early returns);
        indentation below is a best-effort reconstruction.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:

        file_keys = files.keys()

        for file in file_keys:
            # NEW files have no override data to compare against yet.
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"]
                override_section = files[file]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
                priority = files[file]["priority"]
                override_priority = files[file]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)

        # (the empty-summary early return is omitted from this view)
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
656 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        NOTE(review): the per-file loop header, the try/except
        scaffolding around os.open and several branch headers are
        omitted from this view; O_EXCL is used so an attacker cannot
        pre-create (and thereby own) the destination file.
        """
        # Skip any files which don't exist or which we don't have permission to copy.
        if os.access(file,os.R_OK) == 0:
        dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
        # (this open is inside an omitted try/except OSError block)
        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        # File exists? Let's try and move it to the morgue
        if errno.errorcode[e.errno] == 'EEXIST':
            morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
            morgue_file = utils.find_next_free(morgue_file)
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
            utils.move(dest_file, morgue_file, perms=0660)
        # Retry now that the old reject file is (hopefully) out of the way.
        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))

        # If we got here, we own the destination file, so we can
        # safely overwrite it.
        utils.move(file, dest_file, 1, perms=0660)
698 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally gather a manual reason
        via $EDITOR, move the files to the reject queue, write the
        "<foo>.reason" file and send the rejection mail.

        NOTE(review): substantial parts are omitted from this view
        (edit-loop exits, local pkg/Cnf/Subst bindings, the branch that
        chooses automatic vs manual mail, fd close); indentation below
        is a best-effort reconstruction -- confirm against VCS.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            # (the surrounding edit loop header is omitted from view)
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            # (temp_fh.close() is presumably on an omitted line)
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            # (the 'answer' initialisation is omitted from this view)
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
                # (default-answer handling is omitted from this view)
                answer = answer[:1].upper()
            os.unlink(temp_filename)
            # (handling of the E/A/Q answers is omitted from this view)

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        # Automatic rejection branch (its guard header is omitted).
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        # (os.close(reason_fd) is presumably on an omitted line)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
773 ################################################################################
775 # Ensure that source exists somewhere in the archive for the binary
776 # upload being processed.
778 # (1) exact match => 1.0-3
779 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that matching source exists in the archive for a
        binary upload: either an exact version match, or the version
        with a binary-only-NMU suffix ("+bN") stripped.

        NOTE(review): loop headers, the suite-map fixpoint loop and the
        return bookkeeping are omitted from this view.  Also note the
        mutable default argument 'suites=["any"]' -- harmless only
        while the list is never mutated.
        """
        # (the per-suite loop header is omitted; "any" searches all
        # suites.  SQL is built with %s interpolation from the package
        # name -- trusted input is assumed.)
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # Transitive expansion of the suite set via the suite maps
        # (the fixpoint loop header is omitted from this view).
        if x[1] in s and x[0] not in s:
        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]

        # (1) exact match
        if source_version in ql:
        # (2) binary-only NMU: strip the "+bN" suffix and re-check
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
821 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up the override entry for 'package' in 'suite'; when one
        is found, record its section/priority on files[file] so later
        disparity checks can use them.

        NOTE(review): the type selection branches, the component
        normalisation, the unknown-suite early return, the result guard
        and the final return are omitted from this view.
        """
        files = self.pkg.files

        if binary_type == "": # must be source
        # (the else branch assigning type = binary_type is omitted)

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        # (the suite_id == -1 early return is omitted from this view)
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(type)

        # NOTE(review): SQL built with %s interpolation from the
        # package name -- trusted input is assumed.
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        # (the "if result:" guard is omitted from this view)
        files[file]["override section"] = result[0][0]
        files[file]["override priority"] = result[0][1]
860 ################################################################################
862 def reject (self, str, prefix="Rejected: "):
864 # Unlike other rejects we add new lines first to avoid trailing
865 # new lines when this message is passed back up to a caller.
866 if self.reject_message:
867 self.reject_message += "\n"
868 self.reject_message += prefix + str
870 ################################################################################
    def get_anyversion(self, query_result, suite):
        """Return the highest version among query_result rows whose
        suite is 'suite' or one of the suites it Enhances (per the
        VersionChecks configuration).

        NOTE(review): the initialisation of 'anyversion', the
        assignment inside the loop and the final return are omitted
        from this view.
        """
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
881 ################################################################################
    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        NOTE(review): several lines are omitted from this view (the
        binding of 'suite' from each result row, 'cansave' bookkeeping
        and various branch headers); indentation is a best-effort
        reconstruction -- confirm against the repository.
        """
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # NOTE(review): BUG? 'i.lower' stores the *bound method*,
            # not the lowercased string -- almost certainly meant
            # 'i.lower()'.  As written, membership tests against these
            # lists can only ever match the verbatim target_suite
            # appended just below.
            must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                # ('suite' is presumably bound from entry[1] on an
                # omitted line)
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    # ('cansave' initialisation is presumably omitted)
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        # (the "if not add_version:" guard is omitted)
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # Propagation is possible: warn and mark it.
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1

                    # (the final "if not cansave:" guard is omitted)
                    self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
946 ################################################################################
def check_binary_against_db(self, file):
    """Check the binary `file` (a key of self.pkg.files) against the
    database: run the cross-suite version checks and reject if an
    identical package/version/architecture already exists in the
    archive.  Returns the accumulated reject message (empty string if
    everything is fine).
    """
    self.reject_message = ""
    files = self.pkg.files

    # Ensure version is sane
    # NOTE(review): values are interpolated straight into the SQL; they
    # come from the parsed upload, so they should be validated/escaped
    # upstream -- flagging rather than changing the pg query style here.
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                    architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]))
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    # Only reject when the query actually found an existing copy --
    # as previously written the reject fired unconditionally.
    if q.getresult():
        self.reject("%s: can not overwrite existing copy already in the archive." % (file))

    return self.reject_message
975 ################################################################################
def check_source_against_db(self, file):
    """Check the source package in `file` against the database: run the
    cross-suite version checks for the versions of this source already
    in each suite.  Returns the accumulated reject message (empty
    string if everything is fine).
    """
    self.reject_message = ""
    # BUG FIX: `dsc` was referenced below but never bound, which raised
    # a NameError on every call.
    dsc = self.pkg.dsc

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

    return self.reject_message
989 ################################################################################
992 # NB: this function can remove entries from the 'files' index [if
993 # the .orig.tar.gz is a duplicate of the one in the archive]; if
994 # you're iterating over 'files' and call this function as part of
995 # the loop, be sure to add a check to the top of the loop to
996 # ensure you haven't just tried to dereference the deleted entry.
def check_dsc_against_db(self, file):
    """Validate every file listed in the .dsc against the archive and
    the queue directories.

    Files also present in the upload must not already exist in the
    archive (except an .orig.tar.gz identical to the archive copy,
    which is dropped from the upload); a missing .orig.tar.gz is
    searched for in the pool and then in the queue directories.  The
    md5sum and size of whichever copy is found must match the .dsc.

    Returns a (reject_message, orig_tar_gz_path_or_None) tuple.

    NB: this function can remove entries from the 'files' index [if
    the .orig.tar.gz is a duplicate of the one in the archive]; if
    you're iterating over 'files' and call this function as part of
    the loop, be sure to add a check to the top of the loop to
    ensure you haven't just tried to dereference the deleted entry.
    """
    self.reject_message = ""
    files = self.pkg.files
    dsc_files = self.pkg.dsc_files
    legacy_source_untouchable = self.pkg.legacy_source_untouchable
    self.pkg.orig_tar_gz = None

    # Try and find all files mentioned in the .dsc.  This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    # The ordering on the select is needed to pick the newest orig
    # when it exists in multiple places.
    for dsc_file in dsc_files.keys():
        found = None
        if dsc_file in files:
            actual_md5 = files[dsc_file]["md5sum"]
            actual_size = int(files[dsc_file]["size"])
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
            # Strip out anything that isn't '%s' or '/%s$'.  BUG FIX:
            # previously entries were remove()d from the list while
            # iterating over it, which skips elements; build a filtered
            # list instead.
            ql = [ i for i in q.getresult()
                   if i[3] == dsc_file or i[3][-(len(dsc_file)+1):] == '/'+dsc_file ]

            # "[dak] has not broken them.  [dak] has fixed a
            # brokenness.  Your crappy hack exploited a bug in
            # the old dinstall.
            #
            # "(Come on!  I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #                        -- ajk@ on d-devel@l.d.o

            if ql:
                # Ignore exact matches for .orig.tar.gz
                match = 0
                if dsc_file.endswith(".orig.tar.gz"):
                    for i in ql:
                        if dsc_file in files and \
                           int(files[dsc_file]["size"]) == int(i[0]) and \
                           files[dsc_file]["md5sum"] == i[1]:
                            # The orig in the upload is identical to the
                            # archive copy: drop it and use the pool copy.
                            self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                            del files[dsc_file]
                            self.pkg.orig_tar_gz = i[2] + i[3]
                            match = 1

                if not match:
                    self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
        elif dsc_file.endswith(".orig.tar.gz"):
            # Not in the upload: look for the orig in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
            # Strip out anything that isn't '%s' or '/%s$' (filtered
            # copy; see above).
            ql = [ i for i in q.getresult()
                   if i[1] == dsc_file or i[1][-(len(dsc_file)+1):] == '/'+dsc_file ]

            if ql:
                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody.  So we need to choose the right one.

                x = ql[0]; # default to something sane in case we don't match any or have only one

                if len(ql) > 1:
                    for i in ql:
                        old_file = i[0] + i[1]
                        old_file_fh = utils.open_file(old_file)
                        actual_md5 = apt_pkg.md5sum(old_file_fh)
                        old_file_fh.close()
                        actual_size = os.stat(old_file)[stat.ST_SIZE]
                        if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                            x = i
                        else:
                            legacy_source_untouchable[i[3]] = ""

                old_file = x[0] + x[1]
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                old_file_fh.close()
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                found = old_file
                suite_type = x[2]
                dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                # See install() in process-accepted...
                self.pkg.orig_tar_id = x[3]
                self.pkg.orig_tar_gz = old_file
                if suite_type == "legacy" or suite_type == "legacy-mixed":
                    self.pkg.orig_tar_location = "legacy"
                else:
                    self.pkg.orig_tar_location = x[4]
            else:
                # Not there? Check the queue directories...

                in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"], dsc_file)
                # See process_it() in 'dak process-unchecked' for explanation of this
                # in_unchecked check dropped by ajt 2007-08-28, how did that
                # ever work?  (`and False` deliberately disables it.)
                if os.path.exists(in_unchecked) and False:
                    return (self.reject_message, in_unchecked)
                else:
                    for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                        in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)], dsc_file)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir

                            self.pkg.orig_tar_gz = in_otherdir

                if not found:
                    self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                    self.pkg.orig_tar_gz = -1
                    continue
        else:
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
            continue

        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file))
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file))

    return (self.reject_message, None)
def do_query(self, q):
    """Debug wrapper around self.projectB.query(): logs the query text
    and its wall-clock duration to stderr.
    """
    # Log the query *before* running it, so a hung query is visible.
    sys.stderr.write("query: \"%s\" ... " % (q))
    before = time.time()
    r = self.projectB.query(q)
    # Wall-clock duration of the query.
    time_diff = time.time()-before
    sys.stderr.write("took %.3f seconds.\n" % (time_diff))
    # NOTE(review): `r` is unused in the visible lines -- presumably the
    # function returns it just below this chunk; verify against the full file.