]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
f4e4b9087b8923357b3a03aa459ec29027bb7231
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 # Queue utility functions for dak
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
6
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
21 ###############################################################################
22
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
27
28 from types import *
29
30 ###############################################################################
31
# Pre-compiled regexps used throughout this module.
re_isanum = re.compile (r"^\d+$")            # purely-numeric string (e.g. a bug number)
re_default_answer = re.compile(r"\[(.*)\]")  # default choice marked "[X]" in a prompt string
re_fdnic = re.compile(r"\n\n")               # paragraph break, rewritten for debian changelogs
re_bin_only_nmu = re.compile(r"\+b\d+$")     # binary-only NMU version suffix, e.g. "+b1"
36
37 ################################################################################
38
39 # Determine what parts in a .changes are NEW
40
def determine_new(changes, files, projectB, warn=1):
    """Determine which packages in an upload are NEW, i.e. have no
    override entry in any of the target suites.

    changes  -- parsed .changes mapping; its "suite" keys name the target suites
    files    -- mapping of filename -> per-file attribute dict
    projectB -- open pg database connection used for the override lookups
    warn     -- when true, print warnings about (old)stable uploads and
                packages already present in another component

    Returns a mapping of package name -> dict with keys "priority",
    "section", "type", "component", "files" (list of filenames) and
    optionally "othercomponents".  Side effect: the "new" marker is
    removed from entries of *files* that turn out to have an override.
    """
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always use the pseudo-priority "source" in the
        # override table, whatever the .dsc entry claims.
        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop any package that already has an override in one of the target
    # suites, and clear its files' "new" markers.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        # .keys() snapshots the names (Python 2 returns a list), so
        # deleting from new inside the loop is safe.
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): pkg is interpolated into the SQL unescaped;
            # presumably safe because package names are validated
            # upstream -- confirm before reusing this pattern.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            # NOTE(review): the oldstable warning only fires when stable is
            # ALSO targeted -- looks like an indentation slip; confirm
            # intent before changing.
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
101
102 ################################################################################
103
def get_type(f):
    """Return the override type for the file entry *f*.

    f -- per-file attribute dict from a parsed .changes; "dbtype" (for
         binaries) or a source-ish "type" determines the result.

    Returns "dsc" for source files, otherwise the value of f["dbtype"].
    Dies via utils.fubar() if the type is unknown or not a valid
    override type in the database.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: file_type is unassigned on this branch, so the old
        # "% (file_type)" raised a NameError instead of printing the
        # intended diagnostic; report the offending f["type"] instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
119
120 ################################################################################
121
122 # check if section/priority values are valid
123
def check_valid(new):
    """Resolve section/priority names to database ids for every entry
    in *new*, flagging nonsensical combinations with an id of -1.

    A udeb must live in a debian-installer section (and only udebs or
    sources may), and the "source" priority is reserved for .dsc files.
    """
    for entry in new.values():
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(priority)
        # Sanity check the section against the file type.
        in_debian_installer = section.find("debian-installer") != -1
        if in_debian_installer:
            if file_type not in ("udeb", "dsc"):
                entry["section id"] = -1
        elif file_type == "udeb":
            entry["section id"] = -1
        # "source" priority and "dsc" type must go together (XOR is bad).
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
138
139
140 ###############################################################################
141
142 # Convenience wrapper to carry around all the package information in
143
class Pkg:
    """Trivial attribute bag carrying all per-upload package state.

    Arbitrary keyword arguments become instance attributes; update()
    adds or overwrites attributes the same way.
    """
    def __init__(self, **attributes):
        for name, value in attributes.items():
            setattr(self, name, value)

    def update(self, **attributes):
        for name, value in attributes.items():
            setattr(self, name, value)
150
151 ###############################################################################
152
153 class Upload:
154
155     def __init__(self, Cnf):
156         self.Cnf = Cnf
157         self.accept_count = 0
158         self.accept_bytes = 0L
159         self.reject_message = ""
160         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
161                        legacy_source_untouchable = {})
162
163         # Initialize the substitution template mapping global
164         Subst = self.Subst = {}
165         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
166         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
167         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
168         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
169
170         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
171         database.init(Cnf, self.projectB)
172
173     ###########################################################################
174
175     def init_vars (self):
176         self.pkg.changes.clear()
177         self.pkg.dsc.clear()
178         self.pkg.files.clear()
179         self.pkg.dsc_files.clear()
180         self.pkg.legacy_source_untouchable.clear()
181         self.pkg.orig_tar_id = None
182         self.pkg.orig_tar_location = ""
183         self.pkg.orig_tar_gz = None
184
185     ###########################################################################
186
187     def update_vars (self):
188         dump_filename = self.pkg.changes_file[:-8]+".dak"
189         dump_file = utils.open_file(dump_filename)
190         p = cPickle.Unpickler(dump_file)
191
192         self.pkg.changes.update(p.load())
193         self.pkg.dsc.update(p.load())
194         self.pkg.files.update(p.load())
195         self.pkg.dsc_files.update(p.load())
196         self.pkg.legacy_source_untouchable.update(p.load())
197
198         self.pkg.orig_tar_id = p.load()
199         self.pkg.orig_tar_location = p.load()
200
201         dump_file.close()
202
203     ###########################################################################
204
205     # This could just dump the dictionaries as is, but I'd like to
206     # avoid this so there's some idea of what process-accepted &
207     # process-new use from process-unchecked
208
    def dump_vars(self, dest_dir):
        """Pickle the per-upload state into <dest_dir>/<changes>.dak.

        Only a whitelisted subset of the changes/files/dsc dictionaries
        is written so later stages (process-accepted, process-new) have
        a well-defined view of what process-unchecked produced; see the
        field lists below.  update_vars() is the inverse.
        """

        changes = self.pkg.changes
        dsc = self.pkg.dsc
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        try:
            os.chmod(dump_filename, 0664)
        except OSError, e:
            # chmod may fail when the dumpfile is not owned by the user
            # invoking dak (like e.g. when NEW is processed by a member
            # of ftpteam)
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                # security precaution, should never happen unless a weird
                # umask is set anywhere
                if perms & stat.S_IWOTH:
                    utils.fubar("%s is world writable and chmod failed." % \
                        (dump_filename,))
                # ignore the failed chmod otherwise as the file should
                # already have the right privileges and is just, at worst,
                # unreadable for world
            else:
                raise

        # Protocol 1 (binary) pickler; update_vars() loads in this order.
        p = cPickle.Pickler(dump_file, 1)
        d_changes = {}
        d_dsc = {}
        d_files = {}
        d_dsc_files = {}

        ## files: copy only the known per-file attributes (all optional)
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        ## changes
        # Mandatory changes fields -- a KeyError here means the .changes
        # was not fully parsed upstream.
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
                   "sponsoremail" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        ## dsc: all fields optional (binary-only uploads have no .dsc)
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i]
        ## dsc_files
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump order is the contract with update_vars() -- keep in sync.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i)
        dump_file.close()
290
291     ###########################################################################
292
293     # Set up the per-package template substitution mappings
294
    def update_subst (self, reject_message = ""):
        """Refresh the per-package entries of the template substitution
        map (self.Subst) from the current changes data.

        reject_message -- text placed in __REJECT_MESSAGE__

        Also repairs partially-parsed changes data (missing/stringly
        "architecture", missing "maintainer2047") so mail templates can
        always be expanded, e.g. for reject mails after a crash.
        """
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        # Sponsored uploads get the sponsor CC'ed on everything.
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package tracking system, if one is configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
334
335     ###########################################################################
336
    def build_summaries(self):
        """Build the human-readable summaries of the current upload.

        Returns (summary, short_summary): short_summary is the per-file
        listing (byhand/new markers, pool destinations); summary
        additionally carries the changelog excerpt (for byhand/new
        uploads), the override entries and the announce text.

        Side effects: fills in files[*]["pool name"] and defaults a
        missing files[*]["type"] to "unknown"; repairs a missing or
        stringly changes["distribution"].
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For NEW debs, include the package description from the
                # control file in the summary.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
390
391     ###########################################################################
392
393     def close_bugs (self, summary, action):
394         changes = self.pkg.changes
395         Subst = self.Subst
396         Cnf = self.Cnf
397
398         bugs = changes["closes"].keys()
399
400         if not bugs:
401             return summary
402
403         bugs.sort()
404         summary += "Closing bugs: "
405         for bug in bugs:
406             summary += "%s " % (bug)
407             if action:
408                 Subst["__BUG_NUMBER__"] = bug
409                 if changes["distribution"].has_key("stable"):
410                     Subst["__STABLE_WARNING__"] = """
411 Note that this package is not part of the released stable Debian
412 distribution.  It may have dependencies on other unreleased software,
413 or other instabilities.  Please take care if you wish to install it.
414 The update will eventually make its way into the next released Debian
415 distribution."""
416                 else:
417                     Subst["__STABLE_WARNING__"] = ""
418                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
419                     utils.send_mail (mail_message)
420         if action:
421             self.Logger.log(["closing bugs"]+bugs)
422         summary += "\n"
423
424         return summary
425
426     ###########################################################################
427
428     def announce (self, short_summary, action):
429         Subst = self.Subst
430         Cnf = self.Cnf
431         changes = self.pkg.changes
432
433         # Only do announcements for source uploads with a recent dpkg-dev installed
434         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
435             return ""
436
437         lists_done = {}
438         summary = ""
439         Subst["__SHORT_SUMMARY__"] = short_summary
440
441         for dist in changes["distribution"].keys():
442             announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
443             if announce_list == "" or lists_done.has_key(announce_list):
444                 continue
445             lists_done[announce_list] = 1
446             summary += "Announcing to %s\n" % (announce_list)
447
448             if action:
449                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
450                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
451                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
452                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
453                 utils.send_mail (mail_message)
454
455         if Cnf.FindB("Dinstall::CloseBugs"):
456             summary = self.close_bugs(summary, action)
457
458         return summary
459
460     ###########################################################################
461
462     def accept (self, summary, short_summary):
463         Cnf = self.Cnf
464         Subst = self.Subst
465         files = self.pkg.files
466         changes = self.pkg.changes
467         changes_file = self.pkg.changes_file
468         dsc = self.pkg.dsc
469
470         print "Accepting."
471         self.Logger.log(["Accepting changes",changes_file])
472
473         self.dump_vars(Cnf["Dir::Queue::Accepted"])
474
475         # Move all the files into the accepted directory
476         utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
477         file_keys = files.keys()
478         for file_entry in file_keys:
479             utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
480             self.accept_bytes += float(files[file_entry]["size"])
481         self.accept_count += 1
482
483         # Send accept mail, announce to lists, close bugs and check for
484         # override disparities
485         if not Cnf["Dinstall::Options::No-Mail"]:
486             Subst["__SUITE__"] = ""
487             Subst["__SUMMARY__"] = summary
488             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
489             utils.send_mail(mail_message)
490             self.announce(short_summary, 1)
491
492
493         ## Helper stuff for DebBugs Version Tracking
494         if Cnf.Find("Dir::Queue::BTSVersionTrack"):
495             # ??? once queue/* is cleared on *.d.o and/or reprocessed
496             # the conditionalization on dsc["bts changelog"] should be
497             # dropped.
498
499             # Write out the version history from the changelog
500             if changes["architecture"].has_key("source") and \
501                dsc.has_key("bts changelog"):
502
503                 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
504                 version_history = os.fdopen(temp_filename, 'w')
505                 version_history.write(dsc["bts changelog"])
506                 version_history.close()
507                 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
508                                       changes_file[:-8]+".versions")
509                 os.rename(temp_filename, filename)
510                 os.chmod(filename, "0644")
511
512             # Write out the binary -> source mapping.
513             (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
514             debinfo = os.fdopen(temp_filename, 'w')
515             for file_entry in file_keys:
516                 f = files[file_entry]
517                 if f["type"] == "deb":
518                     line = " ".join([f["package"], f["version"],
519                                      f["architecture"], f["source package"],
520                                      f["source version"]])
521                     debinfo.write(line+"\n")
522             debinfo.close()
523             filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
524                                   changes_file[:-8]+".debinfo")
525             os.rename(temp_filename, filename)
526             os.chmod(filename, "0644")
527
528         self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
529
530     ###########################################################################
531
    def queue_build (self, queue, path):
        """Make the upload's files available for clean auto-building.

        queue -- queue name, e.g. "accepted" (resolved/created via
                 database.get_or_set_queue_id)
        path  -- directory the files currently live in

        For every target suite listed in Dinstall::QueueBuildSuites the
        files are symlinked (or copied for security queues) into
        Dir::QueueBuild and registered in the queue_build table inside a
        single transaction.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            # Security queues keep a per-suite subdirectory.
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): dest is interpolated into the SQL unescaped;
                # presumably safe for archive-controlled paths -- confirm.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): filename stays unbound if no dsc_files entry
                # ends in .orig.tar.gz while orig_tar_id is set -- that
                # would raise NameError below; confirm invariant upstream.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
587
588     ###########################################################################
589
590     def check_override (self):
591         Subst = self.Subst
592         changes = self.pkg.changes
593         files = self.pkg.files
594         Cnf = self.Cnf
595
596         # Abandon the check if:
597         #  a) it's a non-sourceful upload
598         #  b) override disparity checks have been disabled
599         #  c) we're not sending mail
600         if not changes["architecture"].has_key("source") or \
601            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
602            Cnf["Dinstall::Options::No-Mail"]:
603             return
604
605         summary = ""
606         file_keys = files.keys()
607         file_keys.sort()
608         for file_entry in file_keys:
609             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
610                 section = files[file_entry]["section"]
611                 override_section = files[file_entry]["override section"]
612                 if section.lower() != override_section.lower() and section != "-":
613                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
614                 priority = files[file_entry]["priority"]
615                 override_priority = files[file_entry]["override priority"]
616                 if priority != override_priority and priority != "-":
617                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
618
619         if summary == "":
620             return
621
622         Subst["__SUMMARY__"] = summary
623         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
624         utils.send_mail(mail_message)
625
626     ###########################################################################
627
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           files -- iterable of filenames (relative to the current
                    directory) to move into Dir::Queue::Reject

           The O_EXCL open is used to claim each destination name
           atomically, so a racing process cannot slip a file in
           between the morgue move and our own move."""

        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            try:
                # Claim the destination name exclusively.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry the exclusive claim now the old file is gone.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
667
668     ###########################################################################
669
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload.

        Moves the upload's files into the reject directory, writes a
        <changes-basename>.reason file there, and (unless
        Dinstall::Options::No-Mail is set) sends the rejection mail.

        @param manual: non-zero for an operator-driven rejection; if no
                       reject_message is given, $EDITOR is spawned so the
                       operator can write one interactively.
        @param reject_message: the rejection text to use.

        @return: 1 if the operator abandoned the rejection, 0 on
                 completion; exits the process entirely if the operator
                 answers 'Q'uit.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Re-edit until the operator stops answering 'E'dit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until the (first letter of the) answer is
                # one of the choices; an empty answer takes the bracketed
                # default extracted from the prompt ('R').
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <foo>.changes -> <foo>.reason in the reject directory.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL: creation must not follow a pre-existing (possibly
        # attacker-placed) file.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: the .reason file holds just the raw
            # reject message; the mail is built from the template.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
742
743     ################################################################################
744
745     # Ensure that source exists somewhere in the archive for the binary
746     # upload being processed.
747     #
748     # (1) exact match                      => 1.0-3
749     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
750
751     def source_exists (self, package, source_version, suites = ["any"]):
752         okay = 1
753         for suite in suites:
754             if suite == "any":
755                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
756                     (package)
757             else:
758                 # source must exist in suite X, or in some other suite that's
759                 # mapped to X, recursively... silent-maps are counted too,
760                 # unreleased-maps aren't.
761                 maps = self.Cnf.ValueList("SuiteMappings")[:]
762                 maps.reverse()
763                 maps = [ m.split() for m in maps ]
764                 maps = [ (x[1], x[2]) for x in maps
765                                 if x[0] == "map" or x[0] == "silent-map" ]
766                 s = [suite]
767                 for x in maps:
768                     if x[1] in s and x[0] not in s:
769                         s.append(x[0])
770
771                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
772             q = self.projectB.query(que)
773
774             # Reduce the query results to a list of version numbers
775             ql = [ i[0] for i in q.getresult() ]
776
777             # Try (1)
778             if source_version in ql:
779                 continue
780
781             # Try (2)
782             orig_source_version = re_bin_only_nmu.sub('', source_version)
783             if orig_source_version in ql:
784                 continue
785
786             # No source found...
787             okay = 0
788             break
789         return okay
790
791     ################################################################################
792
793     def in_override_p (self, package, component, suite, binary_type, file):
794         files = self.pkg.files
795
796         if binary_type == "": # must be source
797             file_type = "dsc"
798         else:
799             file_type = binary_type
800
801         # Override suite name; used for example with proposed-updates
802         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
803             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
804
805         # Avoid <undef> on unknown distributions
806         suite_id = database.get_suite_id(suite)
807         if suite_id == -1:
808             return None
809         component_id = database.get_component_id(component)
810         type_id = database.get_override_type_id(file_type)
811
812         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
813                            % (package, suite_id, component_id, type_id))
814         result = q.getresult()
815         # If checking for a source package fall back on the binary override type
816         if file_type == "dsc" and not result:
817             deb_type_id = database.get_override_type_id("deb")
818             udeb_type_id = database.get_override_type_id("udeb")
819             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
820                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
821             result = q.getresult()
822
823         # Remember the section and priority so we can check them later if appropriate
824         if result:
825             files[file]["override section"] = result[0][0]
826             files[file]["override priority"] = result[0][1]
827
828         return result
829
830     ################################################################################
831
832     def reject (self, str, prefix="Rejected: "):
833         if str:
834             # Unlike other rejects we add new lines first to avoid trailing
835             # new lines when this message is passed back up to a caller.
836             if self.reject_message:
837                 self.reject_message += "\n"
838             self.reject_message += prefix + str
839
840     ################################################################################
841
842     def get_anyversion(self, query_result, suite):
843         anyversion=None
844         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
845         for (v, s) in query_result:
846             if s in [ x.lower() for x in anysuite ]:
847                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
848                     anyversion=v
849         return anyversion
850
851     ################################################################################
852
    def cross_suite_version_check(self, query_result, file, new_version,
            sourceful=False):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @param query_result: (version, suite_name) rows of existing
                             packages from the database.
        @param file: filename being checked; used in reject messages only.
        @param new_version: version of the upload being checked.
        @param sourceful: True for a sourceful upload; the
                          must-be-newer-than check only fires then.

        Problems are accumulated via self.reject(); nothing is returned.
        """

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                suite = entry[1]
                # VersionCompare < 1 means new_version <= existent_version.
                if suite in must_be_newer_than and sourceful and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                # VersionCompare > -1 means new_version >= existent_version,
                # i.e. the upload is NOT older than what the "newer" suite has.
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    cansave = 0
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    # No propagation route rescued the conflict -> real reject.
                    if not cansave:
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
916
917     ################################################################################
918
919     def check_binary_against_db(self, file):
920         self.reject_message = ""
921         files = self.pkg.files
922
923         # Ensure version is sane
924         q = self.projectB.query("""
925 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
926                                      architecture a
927  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
928    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
929                                 % (files[file]["package"],
930                                    files[file]["architecture"]))
931         self.cross_suite_version_check(q.getresult(), file,
932             files[file]["version"], sourceful=False)
933
934         # Check for any existing copies of the file
935         q = self.projectB.query("""
936 SELECT b.id FROM binaries b, architecture a
937  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
938    AND a.id = b.architecture"""
939                                 % (files[file]["package"],
940                                    files[file]["version"],
941                                    files[file]["architecture"]))
942         if q.getresult():
943             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
944
945         return self.reject_message
946
947     ################################################################################
948
949     def check_source_against_db(self, file):
950         self.reject_message = ""
951         dsc = self.pkg.dsc
952
953         # Ensure version is sane
954         q = self.projectB.query("""
955 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
956  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
957         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
958             sourceful=True)
959
960         return self.reject_message
961
962     ################################################################################
963
964     # **WARNING**
965     # NB: this function can remove entries from the 'files' index [if
966     # the .orig.tar.gz is a duplicate of the one in the archive]; if
967     # you're iterating over 'files' and call this function as part of
968     # the loop, be sure to add a check to the top of the loop to
969     # ensure you haven't just tried to dereference the deleted entry.
970     # **WARNING**
971
972     def check_dsc_against_db(self, file):
973         self.reject_message = ""
974         files = self.pkg.files
975         dsc_files = self.pkg.dsc_files
976         legacy_source_untouchable = self.pkg.legacy_source_untouchable
977         self.pkg.orig_tar_gz = None
978
979         # Try and find all files mentioned in the .dsc.  This has
980         # to work harder to cope with the multiple possible
981         # locations of an .orig.tar.gz.
982         # The ordering on the select is needed to pick the newest orig
983         # when it exists in multiple places.
984         for dsc_file in dsc_files.keys():
985             found = None
986             if files.has_key(dsc_file):
987                 actual_md5 = files[dsc_file]["md5sum"]
988                 actual_size = int(files[dsc_file]["size"])
989                 found = "%s in incoming" % (dsc_file)
990                 # Check the file does not already exist in the archive
991                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
992                 ql = q.getresult()
993                 # Strip out anything that isn't '%s' or '/%s$'
994                 for i in ql:
995                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
996                         ql.remove(i)
997
998                 # "[dak] has not broken them.  [dak] has fixed a
999                 # brokenness.  Your crappy hack exploited a bug in
1000                 # the old dinstall.
1001                 #
1002                 # "(Come on!  I thought it was always obvious that
1003                 # one just doesn't release different files with
1004                 # the same name and version.)"
1005                 #                        -- ajk@ on d-devel@l.d.o
1006
1007                 if ql:
1008                     # Ignore exact matches for .orig.tar.gz
1009                     match = 0
1010                     if dsc_file.endswith(".orig.tar.gz"):
1011                         for i in ql:
1012                             if files.has_key(dsc_file) and \
1013                                int(files[dsc_file]["size"]) == int(i[0]) and \
1014                                files[dsc_file]["md5sum"] == i[1]:
1015                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1016                                 del files[dsc_file]
1017                                 self.pkg.orig_tar_gz = i[2] + i[3]
1018                                 match = 1
1019
1020                     if not match:
1021                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1022             elif dsc_file.endswith(".orig.tar.gz"):
1023                 # Check in the pool
1024                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1025                 ql = q.getresult()
1026                 # Strip out anything that isn't '%s' or '/%s$'
1027                 for i in ql:
1028                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1029                         ql.remove(i)
1030
1031                 if ql:
1032                     # Unfortunately, we may get more than one match here if,
1033                     # for example, the package was in potato but had an -sa
1034                     # upload in woody.  So we need to choose the right one.
1035
1036                     x = ql[0]; # default to something sane in case we don't match any or have only one
1037
1038                     if len(ql) > 1:
1039                         for i in ql:
1040                             old_file = i[0] + i[1]
1041                             old_file_fh = utils.open_file(old_file)
1042                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1043                             old_file_fh.close()
1044                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1045                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1046                                 x = i
1047                             else:
1048                                 legacy_source_untouchable[i[3]] = ""
1049
1050                     old_file = x[0] + x[1]
1051                     old_file_fh = utils.open_file(old_file)
1052                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1053                     old_file_fh.close()
1054                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1055                     found = old_file
1056                     suite_type = x[2]
1057                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1058                     # See install() in process-accepted...
1059                     self.pkg.orig_tar_id = x[3]
1060                     self.pkg.orig_tar_gz = old_file
1061                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1062                         self.pkg.orig_tar_location = "legacy"
1063                     else:
1064                         self.pkg.orig_tar_location = x[4]
1065                 else:
1066                     # Not there? Check the queue directories...
1067
1068                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1069                     # See process_it() in 'dak process-unchecked' for explanation of this
1070                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1071                     # ever make sense?
1072                     if os.path.exists(in_unchecked) and False:
1073                         return (self.reject_message, in_unchecked)
1074                     else:
1075                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1076                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1077                             if os.path.exists(in_otherdir):
1078                                 in_otherdir_fh = utils.open_file(in_otherdir)
1079                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1080                                 in_otherdir_fh.close()
1081                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1082                                 found = in_otherdir
1083                                 self.pkg.orig_tar_gz = in_otherdir
1084
1085                     if not found:
1086                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1087                         self.pkg.orig_tar_gz = -1
1088                         continue
1089             else:
1090                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1091                 continue
1092             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1093                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1094             if actual_size != int(dsc_files[dsc_file]["size"]):
1095                 self.reject("size for %s doesn't match %s." % (found, file))
1096
1097         return (self.reject_message, None)
1098
1099     def do_query(self, q):
1100         sys.stderr.write("query: \"%s\" ... " % (q))
1101         before = time.time()
1102         r = self.projectB.query(q)
1103         time_diff = time.time()-before
1104         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1105         return r