3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Matches a string consisting only of digits (used to validate bug numbers).
re_isanum = re.compile (r"^\d+$")
# Pulls the bracketed default answer out of a prompt like "[R]eject, Skip".
re_default_answer = re.compile(r"\[(.*)\]")
# Matches blank-line separators; used to rewrite changelog text as " ." lines.
re_fdnic = re.compile(r"\n\n")
# Matches a binary-only NMU version suffix, e.g. the "+b1" in "1.0-3+b1".
re_bin_only_nmu = re.compile(r"\+b\d+$")
35 ###############################################################################
37 # Convenience wrapper to carry around all the package information in
    def __init__(self, **kwds):
        # Pkg is a plain attribute bag for per-upload state: store every
        # keyword argument as an attribute of the new instance.
        self.__dict__.update(kwds)
43 def update(self, **kwds):
44 self.__dict__.update(kwds)
46 ###############################################################################
    def __init__(self, Cnf):
        """Per-run upload-processing state: byte counter, the shared Pkg
        container, the mail-template substitution map and the projectB
        database connection."""
        # NOTE(review): two lines are elided from this view just before
        # accept_bytes (self.Cnf and self.accept_count are read elsewhere in
        # this class, so they are presumably initialised here) — confirm
        # against the full file.
        self.accept_bytes = 0L
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # Open the PostgreSQL connection used by all the *_against_db checks.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
67 ###########################################################################
        # NOTE(review): the enclosing `def` header is elided from this view
        # (upstream this is init_vars); these statements reset per-package
        # state between uploads.
        # The dicts are clear()ed in place so references held elsewhere
        # (e.g. a cached self.pkg.changes) stay valid.
        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
            exec "self.pkg.%s.clear();" % (i)
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None
76 ###########################################################################
78 def update_vars (self):
79 dump_filename = self.pkg.changes_file[:-8]+".dak"
80 dump_file = utils.open_file(dump_filename)
81 p = cPickle.Unpickler(dump_file)
82 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
83 exec "self.pkg.%s.update(p.load());" % (i)
84 for i in [ "orig_tar_id", "orig_tar_location" ]:
85 exec "self.pkg.%s = p.load();" % (i)
88 ###########################################################################
90 # This could just dump the dictionaries as is, but I'd like to
91 # avoid this so there's some idea of what process-accepted &
92 # process-new use from process-unchecked
    def dump_vars(self, dest_dir):
        """Pickle a whitelisted subset of per-package state into
        <changes-stem>.dak in dest_dir, for later dak stages to reload via
        update_vars().  Only whitelisted keys are written so later stages
        can't smuggle extra state through."""
        # Bind self.pkg.<name> to a same-named local for each key below.
        for i in [ "changes", "dsc", "files", "dsc_files",
                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
            exec "%s = self.pkg.%s;" % (i,i)
        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # NOTE(review): a `try:` line is elided here in this view.
        os.chmod(dump_filename, 0660)
        # NOTE(review): the matching except clause (presumably OSError bound
        # to `e`) is elided here; `e` below comes from it.
        if errno.errorcode[e.errno] == 'EPERM':
            perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
            # Refuse to continue if the dump ended up world-readable and we
            # could not tighten it.
            if perms & stat.S_IROTH:
                utils.fubar("%s is world readable and chmod failed." % (dump_filename))

        p = cPickle.Pickler(dump_file, 1)
        # Build filtered copies (d_*) holding only whitelisted keys.
        # NOTE(review): the loop body initialising each d_* dict is elided.
        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
        for file in files.keys():
            # NOTE(review): `d_files[file] = {}` appears to be elided here.
            # files fields worth preserving (copied only when present):
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file].has_key(i):
                    d_files[file][i] = files[file][i]

        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        # dsc fields to preserve.
        # NOTE(review): the loop body copying these keys into d_dsc is elided.
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
        for file in dsc_files.keys():
            d_dsc_files[file] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file][i] = dsc_files[file][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file].has_key(i):
                    d_dsc_files[file][i] = dsc_files[file][i]

        # NOTE(review): the loop body (presumably p.dump(i)) and the final
        # dump_file.close() are elided at the end of this view.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
156 ###########################################################################
158 # Set up the per-package template substitution mappings
    def update_subst (self, reject_message = ""):
        """Refresh the mail-template substitution map with per-package
        values (architecture, maintainer, source, version, ...)."""
        # NOTE(review): a line binding `Subst = self.Subst` appears to be
        # elided from this view; Subst is used unqualified below.
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        # NOTE(review): the `else:` introducing the branch below is elided.
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
        # Bcc the version-tracking server on sourceful uploads.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
        # NOTE(review): DictType comes from the types module; its import is
        # not visible in this view — presumably `from types import *` upstream.
196 ###########################################################################
    def build_summaries(self):
        """Build the long and short textual summaries of this upload
        (file destinations, NEW/byhand markers, override entries and the
        changelog text).  Returns (summary, short_summary)."""
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        # NOTE(review): upstream sorts file_keys here; the line is elided.
        for file in file_keys:
            if files[file].has_key("byhand"):
                # NOTE(review): the `byhand = 1` flag set is elided here.
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                # NOTE(review): the `new = 1` flag set is elided here.
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                if files[file]["type"] == "deb":
                    deb_fh = utils.open_file(file)
                    # Pull the long Description out of the .deb control data.
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    # NOTE(review): deb_fh.close() appears to be elided here.
            # NOTE(review): the `else:` (neither byhand nor new) is elided;
            # the lines below belong to that branch.
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n to " + destination + "\n"
                if not files[file].has_key("type"):
                    files[file]["type"] = "unknown"
                if files[file]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file].get("override priority", files[file]["priority"])
                    override_sect = files[file].get("override section", files[file]["section"])
                    override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        # NOTE(review): a guard (upstream: `if byhand or new:`) is elided
        # before this append.
        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
252 ###########################################################################
    def close_bugs (self, summary, action):
        """For each bug in the changes Closes field, append it to summary
        and (when action is set) mail a bug-closing message; stable uploads
        get an extra warning paragraph.  Returns the updated summary."""
        changes = self.pkg.changes
        # NOTE(review): bindings for Subst and Cnf (used unqualified below)
        # are elided from this view.

        bugs = changes["closes"].keys()

        # NOTE(review): the no-bugs early return and bugs.sort() are elided.
        summary += "Closing bugs: "
        # NOTE(review): the `for bug in bugs:` header is elided here.
        summary += "%s " % (bug)
        # NOTE(review): an `if action:` guard is elided here.
        Subst["__BUG_NUMBER__"] = bug
        if changes["distribution"].has_key("stable"):
            # NOTE(review): this template string is truncated in this view —
            # its closing triple-quote line and the `else:` are elided.
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = ""
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
        utils.send_mail (mail_message)
        # NOTE(review): an `if action:` guard is elided here.
        self.Logger.log(["closing bugs"]+bugs)
        # NOTE(review): the trailing `return summary` is elided from this view.
287 ###########################################################################
    def announce (self, short_summary, action):
        """Send per-distribution announcement mails (sourceful, changes
        format >= 1.6 uploads only) and optionally trigger bug closing.
        Returns the accumulated summary text."""
        # NOTE(review): bindings for Subst and Cnf (used unqualified below)
        # are elided from this view.
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            # NOTE(review): the early return (upstream: return "") is elided.

        # NOTE(review): initialisation of lists_done and summary is elided.
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            # `list` shadows the builtin; kept as-is (documentation-only edit).
            list = Cnf.Find("Suite::%s::Announce" % (dist))
            if list == "" or lists_done.has_key(list):
                # NOTE(review): `continue` and the lists_done bookkeeping are
                # elided here.
            summary += "Announcing to %s\n" % (list)

            # NOTE(review): an `if action:` guard is elided here.
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
            # Bcc the version-tracking server on sourceful uploads.
            if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
            utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        # NOTE(review): the final `return summary` is elided from this view.
321 ###########################################################################
    def accept (self, summary, short_summary):
        """Move this upload into the accepted queue: move the files, send
        the accepted mail, announce, write BTS version-tracking metadata
        and feed the auto-build queue."""
        # NOTE(review): lines binding the locals Cnf, Subst and dsc (all
        # used unqualified below) are elided from this view.
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        self.Logger.log(["Accepting changes",changes_file])

        # Persist state so process-accepted / process-new can reload it.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                # Atomically publish via rename of the dot-prefixed temp file.
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                # NOTE(review): a binding `f = files[file]` is elided here;
                # `f` below comes from it.
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            # NOTE(review): debinfo.close() appears to be elided here.
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
391 ###########################################################################
    def queue_build (self, queue, path):
        """Register the upload's files with auto-build queue `queue`: copy
        or symlink them under Dir::QueueBuild and record them in the
        queue_build table (including any pool .orig.tar.gz)."""
        # NOTE(review): the binding `Cnf = self.Cnf` (used throughout) is
        # elided from this view.
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file

        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                # NOTE(review): `continue` is elided here.
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                # NOTE(review): the `else:` is elided here.
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): values are string-interpolated into SQL; the
                # filename derives from the upload — worth parameterising.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        # NOTE(review): `filename = dsc_file` is elided here.
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    # NOTE(review): `ql = q.getresult()` and its emptiness
                    # check are elided before this internal-error bailout.
                    utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                # NOTE(review): the `else:` is elided here.
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
449 ###########################################################################
    def check_override (self):
        """Mail the maintainer when a .deb's declared section/priority
        disagree with the override database (sourceful, mail-enabled
        uploads only)."""
        changes = self.pkg.changes
        files = self.pkg.files
        # NOTE(review): bindings for the locals Subst and Cnf (used
        # unqualified below) are elided from this view.

        # Abandon the check if:
        # a) it's a non-sourceful upload
        # b) override disparity checks have been disabled
        # c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            # NOTE(review): the `return` ending this guard is elided here.

        # NOTE(review): `summary = ""` appears to be elided here.
        file_keys = files.keys()
        for file in file_keys:
            # Only settled (non-NEW) binaries have override data to compare.
            if not files[file].has_key("new") and files[file]["type"] == "deb":
                section = files[file]["section"]
                override_section = files[file]["override section"]
                # "-" means the package declined to state a section.
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
                priority = files[file]["priority"]
                override_priority = files[file]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)

        # NOTE(review): the early return for an empty summary is elided here.
        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
487 ###########################################################################
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file."""
        # NOTE(review): the original docstring terminator, a `Cnf = self.Cnf`
        # binding and the `for file in files:` loop header are elided from
        # this view; the statements below are that loop's body.
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                # NOTE(review): `continue` is elided here.
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            # NOTE(review): a `try:` wrapping this O_EXCL open is elided.
            dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            # NOTE(review): the matching except clause (OSError bound to `e`)
            # is elided here.
            # File exists? Let's try and move it to the morgue
            if errno.errorcode[e.errno] == 'EEXIST':
                morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                # NOTE(review): a `try:` is elided before this call.
                morgue_file = utils.find_next_free(morgue_file)
                except utils.tried_too_hard_exc:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                    # NOTE(review): a `return` is elided here.
                utils.move(dest_file, morgue_file, perms=0660)
                # NOTE(review): a second try/except around this re-open is
                # partially elided; the warn below belongs to its except.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            # NOTE(review): os.close(dest_fd) appears to be elided at the end.
529 ###########################################################################
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: optionally collect an editor-written
        reason, move everything into the reject queue, write the
        <stem>.reason file and mail the rejection."""
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            # NOTE(review): the edit/confirm loop header is elided from this
            # view; the statements below are re-run until the user accepts.
            os.system("%s %s" % (editor, temp_filename))
            temp_fh = utils.open_file(temp_filename)
            reject_message = "".join(temp_fh.readlines())
            # NOTE(review): temp_fh.close() appears to be elided here.
            print "Reject message:"
            print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
            prompt = "[R]eject, Edit, Abandon, Quit ?"
            # NOTE(review): the initialisation of `answer` is elided here.
            while prompt.find(answer) == -1:
                answer = utils.our_raw_input(prompt)
                m = re_default_answer.search(prompt)
                # NOTE(review): the default-answer fallback (use the
                # bracketed letter when the user just hits return) is elided.
                answer = answer[:1].upper()
            os.unlink(temp_filename)
            # NOTE(review): roughly ten lines are elided here — handling of
            # the Abandon/Quit answers and the bindings of Cnf, Subst and
            # pkg, all of which are used unqualified below.

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        # NOTE(review): an `if not manual:` introducing this automatic
        # branch is elided here.
        Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
        Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # NOTE(review): the `else:` (manual rejection) is elided here; the
        # lines below belong to that branch.
        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

        Subst["__REJECTOR_ADDRESS__"] = user_email_address
        Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)
        # NOTE(review): os.close(reason_fd) appears to be elided here.

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        # NOTE(review): the final return is elided from this view.
604 ################################################################################
606 # Ensure that source exists somewhere in the archive for the binary
607 # upload being processed.
609 # (1) exact match => 1.0-3
610 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
    def source_exists (self, package, source_version, suites = ["any"]):
        """Check that the source for a binary upload exists in one of
        `suites`: (1) exact version match, or (2) as the base version of a
        binary-only NMU (the +bN suffix stripped)."""
        # NOTE(review): `suites` is a mutable default argument; it is only
        # read here, but a None sentinel would be safer.
        # NOTE(review): the result-flag initialisation and the
        # `for suite in suites:` / `if suite == "any":` scaffolding are
        # elided from this view.
        que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
              (package)
        # NOTE(review): the `else:` branch header is elided here.
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = self.Cnf.ValueList("SuiteMappings")[:]
        # NOTE(review): a maps.reverse() appears to be elided here.
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                   if x[0] == "map" or x[0] == "silent-map" ]
        # NOTE(review): the seed list (upstream: s = [suite]) and the
        # `for x in maps:` header are elided here.
        if x[1] in s and x[0] not in s:
            # NOTE(review): the append extending `s` is elided here.

        que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
        q = self.projectB.query(que)

        # Reduce the query results to a list of version numbers
        ql = [ i[0] for i in q.getresult() ]

        # (1) exact match
        if source_version in ql:
            # NOTE(review): `continue` is elided here.

        # (2) bin-only NMU: strip the +bN suffix and retry
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
            # NOTE(review): `continue`, the not-found fallthrough and the
            # final return are all elided from this view.
652 ################################################################################
    def in_override_p (self, package, component, suite, binary_type, file):
        """Look up `package` in the override table for suite/component; on
        a hit, record the override section/priority into files[file]."""
        files = self.pkg.files

        if binary_type == "": # must be source
            # NOTE(review): the branch bodies setting the local `type`
            # ("dsc" here, binary_type in the elided else:) are missing from
            # this view; note that `type` also shadows the builtin.

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        # NOTE(review): the early return for an unknown suite_id is elided.
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(type)

        # NOTE(review): `package` is string-interpolated into SQL; worth
        # parameterising even though upstream validates package names.
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        # NOTE(review): a guard for an empty result is elided here.
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]
        # NOTE(review): the final return is elided from this view.
691 ################################################################################
693 def reject (self, str, prefix="Rejected: "):
695 # Unlike other rejects we add new lines first to avoid trailing
696 # new lines when this message is passed back up to a caller.
697 if self.reject_message:
698 self.reject_message += "\n"
699 self.reject_message += prefix + str
701 ################################################################################
703 def get_anyversion(self, query_result, suite):
705 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
706 for (v, s) in query_result:
707 if s in [ x.lower() for x in anysuite ]:
708 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
712 ################################################################################
714 def cross_suite_version_check(self, query_result, file, new_version):
715 """Ensure versions are newer than existing packages in target
716 suites and that cross-suite version checking rules as
717 set out in the conf file are satisfied."""
719 # Check versions for each target suite
720 for target_suite in self.pkg.changes["distribution"].keys():
721 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
722 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
723 # Enforce "must be newer than target suite" even if conffile omits it
724 if target_suite not in must_be_newer_than:
725 must_be_newer_than.append(target_suite)
726 for entry in query_result:
727 existent_version = entry[0]
729 if suite in must_be_newer_than and \
730 apt_pkg.VersionCompare(new_version, existent_version) < 1:
731 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
732 if suite in must_be_older_than and \
733 apt_pkg.VersionCompare(new_version, existent_version) > -1:
734 ch = self.pkg.changes
736 if ch.get('distribution-version', {}).has_key(suite):
737 # we really use the other suite, ignoring the conflicting one ...
738 addsuite = ch["distribution-version"][suite]
740 add_version = self.get_anyversion(query_result, addsuite)
741 target_version = self.get_anyversion(query_result, target_suite)
744 # not add_version can only happen if we map to a suite
745 # that doesn't enhance the suite we're propup'ing from.
746 # so "propup-ver x a b c; map a d" is a problem only if
747 # d doesn't enhance a.
749 # i think we could always propagate in this case, rather
750 # than complaining. either way, this isn't a REJECT issue
752 # And - we really should complain to the dorks who configured dak
753 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
754 self.pkg.changes.setdefault("propdistribution", {})
755 self.pkg.changes["propdistribution"][addsuite] = 1
757 elif not target_version:
758 # not targets_version is true when the package is NEW
759 # we could just stick with the "...old version..." REJECT
761 self.reject("Won't propogate NEW packages.")
762 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
763 # propogation would be redundant. no need to reject though.
764 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
766 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
767 apt_pkg.VersionCompare(add_version, target_version) >= 0:
769 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
770 self.pkg.changes.setdefault("propdistribution", {})
771 self.pkg.changes["propdistribution"][addsuite] = 1
775 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
777 ################################################################################
    def check_binary_against_db(self, file):
        """Run cross-suite version checks for one binary and refuse to
        overwrite an identical package/version/arch already in the archive.
        Returns the accumulated reject message (empty string when clean)."""
        self.reject_message = ""
        files = self.pkg.files

        # Ensure version is sane
        # NOTE(review): one continuation line of this SQL (naming the
        # architecture table aliased as `a`) is elided from this view.
        q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                                % (files[file]["package"],
                                   files[file]["architecture"]))
        self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

        # Check for any existing copies of the file
        q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
AND a.id = b.architecture"""
                                % (files[file]["package"],
                                   files[file]["version"],
                                   files[file]["architecture"]))
        # NOTE(review): a guard on the query result (reject only when a row
        # exists) is elided before this call.
        self.reject("%s: can not overwrite existing copy already in the archive." % (file))

        return self.reject_message
806 ################################################################################
808 def check_source_against_db(self, file):
809 self.reject_message = ""
812 # Ensure version is sane
813 q = self.projectB.query("""
814 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
815 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
816 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
818 return self.reject_message
820 ################################################################################
823 # NB: this function can remove entries from the 'files' index [if
824 # the .orig.tar.gz is a duplicate of the one in the archive]; if
825 # you're iterating over 'files' and call this function as part of
826 # the loop, be sure to add a check to the top of the loop to
827 # ensure you haven't just tried to dereference the deleted entry.
    def check_dsc_against_db(self, file):
        """Verify every file listed in the .dsc against the archive and the
        queue directories, locating any .orig.tar.gz and recording its pool
        id/location on self.pkg.  Returns (reject_message, None).

        NB: can remove entries from self.pkg.files when the .orig.tar.gz
        duplicates the archive copy — see the header comment above."""
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            # NOTE(review): a `found = None` initialiser is elided here.
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                # NOTE(review): the result fetch and the filter-loop header
                # over a copy of the result list are elided here.
                # Strip out anything that isn't '%s' or '/%s$'
                if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                    # NOTE(review): removal of the non-matching row is elided.

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # the old dak.
                #
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                # -- ajk@ on d-devel@l.d.o

                # NOTE(review): an any-matches guard and a match flag are
                # elided around the block below.
                # Ignore exact matches for .orig.tar.gz
                if dsc_file.endswith(".orig.tar.gz"):
                    # NOTE(review): the loop over the matches is elided here.
                    if files.has_key(dsc_file) and \
                       int(files[dsc_file]["size"]) == int(i[0]) and \
                       files[dsc_file]["md5sum"] == i[1]:
                        self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                        # NOTE(review): the deletion of files[dsc_file] is
                        # elided here — this is the entry removal the header
                        # comment warns about.
                        self.pkg.orig_tar_gz = i[2] + i[3]

                # NOTE(review): a not-matched guard is elided before this.
                self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Not in incoming: look for the orig tarball in the pool.
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                # NOTE(review): the result fetch (`ql`) and its filter-loop
                # header are elided here.
                # Strip out anything that isn't '%s' or '/%s$'
                if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                    # NOTE(review): row removal and the following any-matches
                    # guard are elided here.

                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody. So we need to choose the right one.
                x = ql[0]; # default to something sane in case we don't match any or have only one

                # NOTE(review): a multi-match loop header is elided before
                # the md5/size comparison below.
                old_file = i[0] + i[1]
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                # NOTE(review): old_file_fh.close() appears to be elided.
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                    # NOTE(review): selecting this row (x = i) is elided;
                    # non-matching rows fall into the elided else: which
                    # records them as untouchable:
                    legacy_source_untouchable[i[3]] = ""

                old_file = x[0] + x[1]
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                # NOTE(review): old_file_fh.close() appears to be elided.
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                # NOTE(review): bindings for `found` and `suite_type`
                # (read below) are elided here.
                dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                # See install() in process-accepted...
                self.pkg.orig_tar_id = x[3]
                self.pkg.orig_tar_gz = old_file
                if suite_type == "legacy" or suite_type == "legacy-mixed":
                    self.pkg.orig_tar_location = "legacy"
                # NOTE(review): the `else:` is elided here.
                    self.pkg.orig_tar_location = x[4]

                # NOTE(review): the not-found-in-pool `else:` branch header
                # is elided; the lines below belong to that branch.
                # Not there? Check the queue directories...
                in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                # See process_it() in 'dak process-unchecked' for explanation of this
                # in_unchecked check dropped by ajt 2007-08-28, how did that
                # ever work?
                if os.path.exists(in_unchecked) and False:
                    return (self.reject_message, in_unchecked)
                # NOTE(review): the `else:` is elided here.
                for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                    in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                    if os.path.exists(in_otherdir):
                        in_otherdir_fh = utils.open_file(in_otherdir)
                        actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                        in_otherdir_fh.close()
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                        # NOTE(review): `found = in_otherdir` is elided here.
                        self.pkg.orig_tar_gz = in_otherdir

                # NOTE(review): a not-found guard is elided before this.
                self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                self.pkg.orig_tar_gz = -1
                # NOTE(review): `continue` is elided here.
            # NOTE(review): the final `else:` branch header is elided here.
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
                # NOTE(review): `continue` is elided here.
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)
957 def do_query(self, q):
958 sys.stderr.write("query: \"%s\" ... " % (q))
960 r = self.projectB.query(q)
961 time_diff = time.time()-before
962 sys.stderr.write("took %.3f seconds.\n" % (time_diff))