3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Precompiled regular expressions used throughout this module.
re_isanum = re.compile(r"^\d+$")             # a string made up solely of digits
re_default_answer = re.compile(r"\[(.*)\]")  # the bracketed default in a "[R]eject, ..." prompt
re_fdnic = re.compile(r"\n\n")               # blank-line separator (reformatted for direport)
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
35 ###############################################################################
37 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Store every keyword argument as an attribute on this instance."""
    self.__dict__.update(kwds)
def update(self, **kwds):
    """Set (or overwrite) attributes from the given keyword arguments."""
    self.__dict__.update(kwds)
46 ###############################################################################
def __init__(self, Cnf):
    """Set up per-run upload state: accept counters, an empty package-info
    holder, the global template substitution map and the projectB
    database connection.
    """
    # NOTE(review): this listing appears to be missing lines here (e.g.
    # "self.Cnf = Cnf" / "self.accept_count = 0"); self.Cnf is read by
    # other methods of this class -- confirm against the original file.
    self.accept_bytes = 0L
    # Empty per-package containers; filled in by update_vars() / callers.
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {})

    # Initialize the substitution template mapping global
    Subst = self.Subst = {}
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
    Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

    # Connect to projectB (PostgreSQL via PyGreSQL) and prime the
    # database helper's caches.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
    database.init(Cnf, self.projectB)
67 ###########################################################################
70 for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
71 exec "self.pkg.%s.clear();" % (i)
72 self.pkg.orig_tar_id = None
73 self.pkg.orig_tar_location = ""
74 self.pkg.orig_tar_gz = None
76 ###########################################################################
def update_vars (self):
    """Restore the pickled package metadata written out by dump_vars().

    Reads "<changes file base>.dak" and refreshes self.pkg's dictionaries
    (changes, dsc, files, dsc_files, legacy_source_untouchable) and then
    the orig_tar_id / orig_tar_location scalars.  The load order must
    stay in lock-step with the Pickler calls in dump_vars().
    """
    dump_filename = self.pkg.changes_file[:-8]+".dak"
    dump_file = utils.open_file(dump_filename)
    p = cPickle.Unpickler(dump_file)
    # getattr/setattr instead of exec'ing built-up statement strings:
    # identical effect, no dynamic code execution.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load())
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load())
    # Don't leak the file handle.
    dump_file.close()
88 ###########################################################################
90 # This could just dump the dictionaries as is, but I'd like to
91 # avoid this so there's some idea of what process-accepted &
92 # process-new use from process-unchecked
94 def dump_vars(self, dest_dir):
95 for i in [ "changes", "dsc", "files", "dsc_files",
96 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
97 exec "%s = self.pkg.%s;" % (i,i)
98 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
99 dump_file = utils.open_file(dump_filename, 'w')
101 os.chmod(dump_filename, 0660)
103 if errno.errorcode[e.errno] == 'EPERM':
104 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
105 if perms & stat.S_IROTH:
106 utils.fubar("%s is world readable and chmod failed." % (dump_filename))
110 p = cPickle.Pickler(dump_file, 1)
111 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
114 for file in files.keys():
116 for i in [ "package", "version", "architecture", "type", "size",
117 "md5sum", "component", "location id", "source package",
118 "source version", "maintainer", "dbtype", "files id",
119 "new", "section", "priority", "othercomponents",
120 "pool name", "original component" ]:
121 if files[file].has_key(i):
122 d_files[file][i] = files[file][i]
124 # Mandatory changes fields
125 for i in [ "distribution", "source", "architecture", "version",
126 "maintainer", "urgency", "fingerprint", "changedby822",
127 "changedby2047", "changedbyname", "maintainer822",
128 "maintainer2047", "maintainername", "maintaineremail",
129 "closes", "changes" ]:
130 d_changes[i] = changes[i]
131 # Optional changes fields
132 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
133 if changes.has_key(i):
134 d_changes[i] = changes[i]
136 for i in [ "source", "version", "maintainer", "fingerprint",
137 "uploaders", "bts changelog", "dm-upload-allowed" ]:
141 for file in dsc_files.keys():
142 d_dsc_files[file] = {}
143 # Mandatory dsc_files fields
144 for i in [ "size", "md5sum" ]:
145 d_dsc_files[file][i] = dsc_files[file][i]
146 # Optional dsc_files fields
147 for i in [ "files id" ]:
148 if dsc_files[file].has_key(i):
149 d_dsc_files[file][i] = dsc_files[file][i]
151 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
152 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
156 ###########################################################################
158 # Set up the per-package template substitution mappings
160 def update_subst (self, reject_message = ""):
162 changes = self.pkg.changes
163 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
164 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
165 changes["architecture"] = { "Unknown" : "" }
166 # and maintainer2047 may not exist.
167 if not changes.has_key("maintainer2047"):
168 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
170 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
171 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
172 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
174 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
175 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
176 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
177 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
178 changes["maintainer2047"])
179 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
181 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
182 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
183 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
184 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
185 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
187 # Apply any global override of the Maintainer field
188 if self.Cnf.get("Dinstall::OverrideMaintainer"):
189 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
190 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
192 Subst["__REJECT_MESSAGE__"] = reject_message
193 Subst["__SOURCE__"] = changes.get("source", "Unknown")
194 Subst["__VERSION__"] = changes.get("version", "Unknown")
196 ###########################################################################
198 def build_summaries(self):
199 changes = self.pkg.changes
200 files = self.pkg.files
202 byhand = summary = new = ""
204 # changes["distribution"] may not exist in corner cases
205 # (e.g. unreadable changes files)
206 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
207 changes["distribution"] = {}
209 override_summary ="";
210 file_keys = files.keys()
212 for file in file_keys:
213 if files[file].has_key("byhand"):
215 summary += file + " byhand\n"
216 elif files[file].has_key("new"):
218 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
219 if files[file].has_key("othercomponents"):
220 summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
221 if files[file]["type"] == "deb":
222 deb_fh = utils.open_file(file)
223 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
226 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
227 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
228 summary += file + "\n to " + destination + "\n"
229 if not files[file].has_key("type"):
230 files[file]["type"] = "unknown"
231 if files[file]["type"] in ["deb", "udeb", "dsc"]:
232 # (queue/unchecked), there we have override entries already, use them
233 # (process-new), there we dont have override entries, use the newly generated ones.
234 override_prio = files[file].get("override priority", files[file]["priority"])
235 override_sect = files[file].get("override section", files[file]["section"])
236 override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
238 short_summary = summary
240 # This is for direport's benefit...
241 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
244 summary += "Changes: " + f
246 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
248 summary += self.announce(short_summary, 0)
250 return (summary, short_summary)
252 ###########################################################################
254 def close_bugs (self, summary, action):
255 changes = self.pkg.changes
259 bugs = changes["closes"].keys()
265 summary += "Closing bugs: "
267 summary += "%s " % (bug)
269 Subst["__BUG_NUMBER__"] = bug
270 if changes["distribution"].has_key("stable"):
271 Subst["__STABLE_WARNING__"] = """
272 Note that this package is not part of the released stable Debian
273 distribution. It may have dependencies on other unreleased software,
274 or other instabilities. Please take care if you wish to install it.
275 The update will eventually make its way into the next released Debian
278 Subst["__STABLE_WARNING__"] = ""
279 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
280 utils.send_mail (mail_message)
282 self.Logger.log(["closing bugs"]+bugs)
287 ###########################################################################
289 def announce (self, short_summary, action):
292 changes = self.pkg.changes
294 # Only do announcements for source uploads with a recent dpkg-dev installed
295 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
300 Subst["__SHORT_SUMMARY__"] = short_summary
302 for dist in changes["distribution"].keys():
303 list = Cnf.Find("Suite::%s::Announce" % (dist))
304 if list == "" or lists_done.has_key(list):
307 summary += "Announcing to %s\n" % (list)
310 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
311 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
312 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
313 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
314 utils.send_mail (mail_message)
316 if Cnf.FindB("Dinstall::CloseBugs"):
317 summary = self.close_bugs(summary, action)
321 ###########################################################################
323 def accept (self, summary, short_summary):
326 files = self.pkg.files
327 changes = self.pkg.changes
328 changes_file = self.pkg.changes_file
332 self.Logger.log(["Accepting changes",changes_file])
334 self.dump_vars(Cnf["Dir::Queue::Accepted"])
336 # Move all the files into the accepted directory
337 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
338 file_keys = files.keys()
339 for file in file_keys:
340 utils.move(file, Cnf["Dir::Queue::Accepted"])
341 self.accept_bytes += float(files[file]["size"])
342 self.accept_count += 1
344 # Send accept mail, announce to lists, close bugs and check for
345 # override disparities
346 if not Cnf["Dinstall::Options::No-Mail"]:
347 Subst["__SUITE__"] = ""
348 Subst["__SUMMARY__"] = summary
349 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
350 utils.send_mail(mail_message)
351 self.announce(short_summary, 1)
354 ## Helper stuff for DebBugs Version Tracking
355 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
356 # ??? once queue/* is cleared on *.d.o and/or reprocessed
357 # the conditionalization on dsc["bts changelog"] should be
360 # Write out the version history from the changelog
361 if changes["architecture"].has_key("source") and \
362 dsc.has_key("bts changelog"):
364 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
365 dotprefix=1, perms=0644)
366 version_history = utils.open_file(temp_filename, 'w')
367 version_history.write(dsc["bts changelog"])
368 version_history.close()
369 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
370 changes_file[:-8]+".versions")
371 os.rename(temp_filename, filename)
373 # Write out the binary -> source mapping.
374 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
375 dotprefix=1, perms=0644)
376 debinfo = utils.open_file(temp_filename, 'w')
377 for file in file_keys:
379 if f["type"] == "deb":
380 line = " ".join([f["package"], f["version"],
381 f["architecture"], f["source package"],
382 f["source version"]])
383 debinfo.write(line+"\n")
385 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
386 changes_file[:-8]+".debinfo")
387 os.rename(temp_filename, filename)
389 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
391 ###########################################################################
393 def queue_build (self, queue, path):
396 files = self.pkg.files
397 changes = self.pkg.changes
398 changes_file = self.pkg.changes_file
400 file_keys = files.keys()
402 ## Special support to enable clean auto-building of queued packages
403 queue_id = database.get_or_set_queue_id(queue)
405 self.projectB.query("BEGIN WORK")
406 for suite in changes["distribution"].keys():
407 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
409 suite_id = database.get_suite_id(suite)
410 dest_dir = Cnf["Dir::QueueBuild"]
411 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
412 dest_dir = os.path.join(dest_dir, suite)
413 for file in file_keys:
414 src = os.path.join(path, file)
415 dest = os.path.join(dest_dir, file)
416 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
417 # Copy it since the original won't be readable by www-data
418 utils.copy(src, dest)
420 # Create a symlink to it
421 os.symlink(src, dest)
422 # Add it to the list of packages for later processing by apt-ftparchive
423 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
424 # If the .orig.tar.gz is in the pool, create a symlink to
425 # it (if one doesn't already exist)
426 if self.pkg.orig_tar_id:
427 # Determine the .orig.tar.gz file name
428 for dsc_file in self.pkg.dsc_files.keys():
429 if dsc_file.endswith(".orig.tar.gz"):
431 dest = os.path.join(dest_dir, filename)
432 # If it doesn't exist, create a symlink
433 if not os.path.exists(dest):
434 # Find the .orig.tar.gz in the pool
435 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
438 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
439 src = os.path.join(ql[0][0], ql[0][1])
440 os.symlink(src, dest)
441 # Add it to the list of packages for later processing by apt-ftparchive
442 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
443 # if it does, update things to ensure it's not removed prematurely
445 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
447 self.projectB.query("COMMIT WORK")
449 ###########################################################################
451 def check_override (self):
453 changes = self.pkg.changes
454 files = self.pkg.files
457 # Abandon the check if:
458 # a) it's a non-sourceful upload
459 # b) override disparity checks have been disabled
460 # c) we're not sending mail
461 if not changes["architecture"].has_key("source") or \
462 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
463 Cnf["Dinstall::Options::No-Mail"]:
467 file_keys = files.keys()
469 for file in file_keys:
470 if not files[file].has_key("new") and files[file]["type"] == "deb":
471 section = files[file]["section"]
472 override_section = files[file]["override section"]
473 if section.lower() != override_section.lower() and section != "-":
474 # Ignore this; it's a common mistake and not worth whining about
475 if section.lower() == "non-us/main" and override_section.lower() == "non-us":
477 summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
478 priority = files[file]["priority"]
479 override_priority = files[file]["override priority"]
480 if priority != override_priority and priority != "-":
481 summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
486 Subst["__SUMMARY__"] = summary
487 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
488 utils.send_mail(mail_message)
490 ###########################################################################
492 def force_reject (self, files):
493 """Forcefully move files from the current directory to the
494 reject directory. If any file already exists in the reject
495 directory it will be moved to the morgue to make way for
501 # Skip any files which don't exist or which we don't have permission to copy.
502 if os.access(file,os.R_OK) == 0:
504 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
506 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
508 # File exists? Let's try and move it to the morgue
509 if errno.errorcode[e.errno] == 'EEXIST':
510 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
512 morgue_file = utils.find_next_free(morgue_file)
513 except utils.tried_too_hard_exc:
514 # Something's either gone badly Pete Tong, or
515 # someone is trying to exploit us.
516 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
518 utils.move(dest_file, morgue_file, perms=0660)
520 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
523 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
527 # If we got here, we own the destination file, so we can
528 # safely overwrite it.
529 utils.move(file, dest_file, 1, perms=0660)
532 ###########################################################################
534 def do_reject (self, manual = 0, reject_message = ""):
535 # If we weren't given a manual rejection message, spawn an
536 # editor so the user can add one in...
537 if manual and not reject_message:
538 temp_filename = utils.temp_filename()
539 editor = os.environ.get("EDITOR","vi")
542 os.system("%s %s" % (editor, temp_filename))
543 temp_fh = utils.open_file(temp_filename)
544 reject_message = "".join(temp_fh.readlines())
546 print "Reject message:"
547 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
548 prompt = "[R]eject, Edit, Abandon, Quit ?"
550 while prompt.find(answer) == -1:
551 answer = utils.our_raw_input(prompt)
552 m = re_default_answer.search(prompt)
555 answer = answer[:1].upper()
556 os.unlink(temp_filename)
568 reason_filename = pkg.changes_file[:-8] + ".reason"
569 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
571 # Move all the files into the reject directory
572 reject_files = pkg.files.keys() + [pkg.changes_file]
573 self.force_reject(reject_files)
575 # If we fail here someone is probably trying to exploit the race
576 # so let's just raise an exception ...
577 if os.path.exists(reason_filename):
578 os.unlink(reason_filename)
579 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
582 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
583 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
584 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
585 os.write(reason_fd, reject_message)
586 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
588 # Build up the rejection email
589 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
591 Subst["__REJECTOR_ADDRESS__"] = user_email_address
592 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
593 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
594 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
595 # Write the rejection email out as the <foo>.reason file
596 os.write(reason_fd, reject_mail_message)
600 # Send the rejection mail if appropriate
601 if not Cnf["Dinstall::Options::No-Mail"]:
602 utils.send_mail(reject_mail_message)
604 self.Logger.log(["rejected", pkg.changes_file])
607 ################################################################################
609 # Ensure that source exists somewhere in the archive for the binary
610 # upload being processed.
612 # (1) exact match => 1.0-3
613 # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
615 def source_exists (self, package, source_version, suites = ["any"]):
619 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
622 # source must exist in suite X, or in some other suite that's
623 # mapped to X, recursively... silent-maps are counted too,
624 # unreleased-maps aren't.
625 maps = self.Cnf.ValueList("SuiteMappings")[:]
627 maps = [ m.split() for m in maps ]
628 maps = [ (x[1], x[2]) for x in maps
629 if x[0] == "map" or x[0] == "silent-map" ]
632 if x[1] in s and x[0] not in s:
635 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
636 q = self.projectB.query(que)
638 # Reduce the query results to a list of version numbers
639 ql = [ i[0] for i in q.getresult() ]
642 if source_version in ql:
646 orig_source_version = re_bin_only_nmu.sub('', source_version)
647 if orig_source_version in ql:
655 ################################################################################
657 def in_override_p (self, package, component, suite, binary_type, file):
658 files = self.pkg.files
660 if binary_type == "": # must be source
665 # Override suite name; used for example with proposed-updates
666 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
667 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
669 # Avoid <undef> on unknown distributions
670 suite_id = database.get_suite_id(suite)
673 component_id = database.get_component_id(component)
674 type_id = database.get_override_type_id(type)
676 # FIXME: nasty non-US speficic hack
677 if component.lower().startswith("non-us/"):
678 component = component[7:]
680 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
681 % (package, suite_id, component_id, type_id))
682 result = q.getresult()
683 # If checking for a source package fall back on the binary override type
684 if type == "dsc" and not result:
685 deb_type_id = database.get_override_type_id("deb")
686 udeb_type_id = database.get_override_type_id("udeb")
687 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
688 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
689 result = q.getresult()
691 # Remember the section and priority so we can check them later if appropriate
693 files[file]["override section"] = result[0][0]
694 files[file]["override priority"] = result[0][1]
698 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append a rejection line to self.reject_message.

    Each line is prefixed (default "Rejected: ").  Empty strings are
    ignored so callers can pass through possibly-empty messages without
    creating blank prefix-only lines.
    """
    if str:
        # Unlike other rejects we add new lines first to avoid trailing
        # new lines when this message is passed back up to a caller.
        if self.reject_message:
            self.reject_message += "\n"
        self.reject_message += prefix + str
708 ################################################################################
710 def get_anyversion(self, query_result, suite):
712 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
713 for (v, s) in query_result:
714 if s in [ x.lower() for x in anysuite ]:
715 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
719 ################################################################################
721 def cross_suite_version_check(self, query_result, file, new_version):
722 """Ensure versions are newer than existing packages in target
723 suites and that cross-suite version checking rules as
724 set out in the conf file are satisfied."""
726 # Check versions for each target suite
727 for target_suite in self.pkg.changes["distribution"].keys():
728 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
729 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
730 # Enforce "must be newer than target suite" even if conffile omits it
731 if target_suite not in must_be_newer_than:
732 must_be_newer_than.append(target_suite)
733 for entry in query_result:
734 existent_version = entry[0]
736 if suite in must_be_newer_than and \
737 apt_pkg.VersionCompare(new_version, existent_version) < 1:
738 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
739 if suite in must_be_older_than and \
740 apt_pkg.VersionCompare(new_version, existent_version) > -1:
741 ch = self.pkg.changes
743 if ch.get('distribution-version', {}).has_key(suite):
744 # we really use the other suite, ignoring the conflicting one ...
745 addsuite = ch["distribution-version"][suite]
747 add_version = self.get_anyversion(query_result, addsuite)
748 target_version = self.get_anyversion(query_result, target_suite)
751 # not add_version can only happen if we map to a suite
752 # that doesn't enhance the suite we're propup'ing from.
753 # so "propup-ver x a b c; map a d" is a problem only if
754 # d doesn't enhance a.
756 # i think we could always propagate in this case, rather
757 # than complaining. either way, this isn't a REJECT issue
759 # And - we really should complain to the dorks who configured dak
760 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
761 self.pkg.changes.setdefault("propdistribution", {})
762 self.pkg.changes["propdistribution"][addsuite] = 1
764 elif not target_version:
765 # not targets_version is true when the package is NEW
766 # we could just stick with the "...old version..." REJECT
768 self.reject("Won't propogate NEW packages.")
769 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
770 # propogation would be redundant. no need to reject though.
771 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
773 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
774 apt_pkg.VersionCompare(add_version, target_version) >= 0:
776 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
777 self.pkg.changes.setdefault("propdistribution", {})
778 self.pkg.changes["propdistribution"][addsuite] = 1
782 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
784 ################################################################################
786 def check_binary_against_db(self, file):
787 self.reject_message = ""
788 files = self.pkg.files
790 # Ensure version is sane
791 q = self.projectB.query("""
792 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
794 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
795 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
796 % (files[file]["package"],
797 files[file]["architecture"]))
798 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
800 # Check for any existing copies of the file
801 q = self.projectB.query("""
802 SELECT b.id FROM binaries b, architecture a
803 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
804 AND a.id = b.architecture"""
805 % (files[file]["package"],
806 files[file]["version"],
807 files[file]["architecture"]))
809 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
811 return self.reject_message
813 ################################################################################
def check_source_against_db(self, file):
    """Cross-suite version sanity check for a source upload.

    Looks up every archived version of this source package and hands the
    result to cross_suite_version_check(); problems accumulate in (and
    are returned as) self.reject_message.
    """
    self.reject_message = ""
    # Bind the parsed .dsc fields; the query and version check read them.
    # (The visible listing referenced 'dsc' without binding it -- NameError.)
    dsc = self.pkg.dsc

    # Ensure version is sane
    # NOTE(review): the source name is interpolated straight into the SQL;
    # acceptable for trusted .dsc data but parameterized queries would be safer.
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

    return self.reject_message
827 ################################################################################
830 # NB: this function can remove entries from the 'files' index [if
831 # the .orig.tar.gz is a duplicate of the one in the archive]; if
832 # you're iterating over 'files' and call this function as part of
833 # the loop, be sure to add a check to the top of the loop to
834 # ensure you haven't just tried to derefernece the deleted entry.
837 def check_dsc_against_db(self, file):
838 self.reject_message = ""
839 files = self.pkg.files
840 dsc_files = self.pkg.dsc_files
841 legacy_source_untouchable = self.pkg.legacy_source_untouchable
842 self.pkg.orig_tar_gz = None
844 # Try and find all files mentioned in the .dsc. This has
845 # to work harder to cope with the multiple possible
846 # locations of an .orig.tar.gz.
847 # The ordering on the select is needed to pick the newest orig
848 # when it exists in multiple places.
849 for dsc_file in dsc_files.keys():
851 if files.has_key(dsc_file):
852 actual_md5 = files[dsc_file]["md5sum"]
853 actual_size = int(files[dsc_file]["size"])
854 found = "%s in incoming" % (dsc_file)
855 # Check the file does not already exist in the archive
856 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
858 # Strip out anything that isn't '%s' or '/%s$'
860 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
863 # "[dak] has not broken them. [dak] has fixed a
864 # brokenness. Your crappy hack exploited a bug in
867 # "(Come on! I thought it was always obvious that
868 # one just doesn't release different files with
869 # the same name and version.)"
870 # -- ajk@ on d-devel@l.d.o
873 # Ignore exact matches for .orig.tar.gz
875 if dsc_file.endswith(".orig.tar.gz"):
877 if files.has_key(dsc_file) and \
878 int(files[dsc_file]["size"]) == int(i[0]) and \
879 files[dsc_file]["md5sum"] == i[1]:
880 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
882 self.pkg.orig_tar_gz = i[2] + i[3]
886 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
887 elif dsc_file.endswith(".orig.tar.gz"):
889 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
891 # Strip out anything that isn't '%s' or '/%s$'
893 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
897 # Unfortunately, we may get more than one match here if,
898 # for example, the package was in potato but had an -sa
899 # upload in woody. So we need to choose the right one.
901 x = ql[0]; # default to something sane in case we don't match any or have only one
905 old_file = i[0] + i[1]
906 old_file_fh = utils.open_file(old_file)
907 actual_md5 = apt_pkg.md5sum(old_file_fh)
909 actual_size = os.stat(old_file)[stat.ST_SIZE]
910 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
913 legacy_source_untouchable[i[3]] = ""
915 old_file = x[0] + x[1]
916 old_file_fh = utils.open_file(old_file)
917 actual_md5 = apt_pkg.md5sum(old_file_fh)
919 actual_size = os.stat(old_file)[stat.ST_SIZE]
922 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
923 # See install() in process-accepted...
924 self.pkg.orig_tar_id = x[3]
925 self.pkg.orig_tar_gz = old_file
926 if suite_type == "legacy" or suite_type == "legacy-mixed":
927 self.pkg.orig_tar_location = "legacy"
929 self.pkg.orig_tar_location = x[4]
931 # Not there? Check the queue directories...
933 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
934 # See process_it() in 'dak process-unchecked' for explanation of this
935 # in_unchecked check dropped by ajt 2007-08-28, how did that
937 if os.path.exists(in_unchecked) and False:
938 return (self.reject_message, in_unchecked)
940 for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
941 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
942 if os.path.exists(in_otherdir):
943 in_otherdir_fh = utils.open_file(in_otherdir)
944 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
945 in_otherdir_fh.close()
946 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
948 self.pkg.orig_tar_gz = in_otherdir
951 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
952 self.pkg.orig_tar_gz = -1
955 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
957 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
958 self.reject("md5sum for %s doesn't match %s." % (found, file))
959 if actual_size != int(dsc_files[dsc_file]["size"]):
960 self.reject("size for %s doesn't match %s." % (found, file))
962 return (self.reject_message, None)
def do_query(self, q):
    """Run a query against projectB, logging the SQL and its wall-clock
    timing to stderr, and return the query result.
    """
    sys.stderr.write("query: \"%s\" ... " % (q))
    # Record the start time before issuing the query (the visible listing
    # used 'before' without binding it) and hand the result back to the caller.
    before = time.time()
    r = self.projectB.query(q)
    time_diff = time.time()-before
    sys.stderr.write("took %.3f seconds.\n" % (time_diff))
    return r