]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
6dc0cd9e96c1cb48985cdcc672e8135efd3e6eaa
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
30 re_isanum = re.compile (r"^\d+$")
31 re_default_answer = re.compile(r"\[(.*)\]")
32 re_fdnic = re.compile(r"\n\n")
33 re_bin_only_nmu = re.compile(r"\+b\d+$")
34 ###############################################################################
35
36 # Convenience wrapper to carry around all the package information in
37
class Pkg:
    """Lightweight attribute bag carrying all per-upload package state.

    Any keyword arguments given at construction (or later via update())
    simply become instance attributes.
    """

    def __init__(self, **fields):
        self.__dict__.update(fields)

    def update(self, **fields):
        self.__dict__.update(fields)
44
45 ###############################################################################
46
class nmu_p:
    # Read in the group maintainer override file
    def __init__ (self, Cnf):
        """Load the group-maintainer override list.

        If Dinstall::GroupOverrideFilename is configured, each
        non-comment, non-empty line of that file (relative to
        Dir::Override) is stored lower-cased as a key of
        self.group_maint; packages maintained by these addresses are
        never treated as NMUs.
        """
        self.group_maint = {}
        self.Cnf = Cnf
        if Cnf.get("Dinstall::GroupOverrideFilename"):
            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
            file = utils.open_file(filename)
            for line in file.readlines():
                # Strip comments, normalise case and whitespace before storing.
                line = utils.re_comments.sub('', line).lower().strip()
                if line != "":
                    self.group_maint[line] = 1
            file.close()

    def is_an_nmu (self, pkg):
        """Return 1 if the upload looks like an NMU, 0 otherwise.

        pkg must provide .changes and .dsc dictionaries as built by
        process-unchecked.  The upload is NOT an NMU when it was made
        by the .dsc Maintainer, by one of the Uploaders, or by a
        group-maintained address from the override file.
        """
        Cnf = self.Cnf
        changes = pkg.changes
        dsc = pkg.dsc

        # Normalise the .dsc Maintainer (falling back to our own address)
        # so names can be compared case-insensitively below.
        i = utils.fix_maintainer (dsc.get("maintainer",
                                          Cnf["Dinstall::MyEmailAddress"]).lower())
        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == changes["maintainername"].lower() and \
           (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
            return 0

        # An upload by anyone listed in Uploaders is not an NMU either.
        if dsc.has_key("uploaders"):
            uploaders = dsc["uploaders"].lower().split(",")
            uploadernames = {}
            for i in uploaders:
                (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
                uploadernames[name] = ""
            if uploadernames.has_key(changes["changedbyname"].lower()):
                return 0

        # Some group maintained packages (e.g. Debian QA) are never NMU's
        if self.group_maint.has_key(changes["maintaineremail"].lower()):
            return 0

        return 1
88
89 ###############################################################################
90
91 class Upload:
92
    def __init__(self, Cnf):
        """Set up an Upload: configuration, NMU detector, empty package
        state, the template substitution defaults and the projectB
        database connection.
        """
        self.Cnf = Cnf
        # Read in the group-maint override file
        self.nmu = nmu_p(Cnf)
        # Running totals across accepted uploads (see accept()).
        self.accept_count = 0
        self.accept_bytes = 0L
        # Per-upload state; the dictionaries are cleared/refilled by
        # init_vars()/update_vars() rather than replaced.
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
111
112     ###########################################################################
113
114     def init_vars (self):
115         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
116             exec "self.pkg.%s.clear();" % (i)
117         self.pkg.orig_tar_id = None
118         self.pkg.orig_tar_location = ""
119         self.pkg.orig_tar_gz = None
120
121     ###########################################################################
122
123     def update_vars (self):
124         dump_filename = self.pkg.changes_file[:-8]+".dak"
125         dump_file = utils.open_file(dump_filename)
126         p = cPickle.Unpickler(dump_file)
127         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
128             exec "self.pkg.%s.update(p.load());" % (i)
129         for i in [ "orig_tar_id", "orig_tar_location" ]:
130             exec "self.pkg.%s = p.load();" % (i)
131         dump_file.close()
132
133     ###########################################################################
134
135     # This could just dump the dictionaries as is, but I'd like to
136     # avoid this so there's some idea of what process-accepted &
137     # process-new use from process-unchecked
138
139     def dump_vars(self, dest_dir):
140         for i in [ "changes", "dsc", "files", "dsc_files",
141                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
142             exec "%s = self.pkg.%s;" % (i,i)
143         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
144         dump_file = utils.open_file(dump_filename, 'w')
145         try:
146             os.chmod(dump_filename, 0660)
147         except OSError, e:
148             if errno.errorcode[e.errno] == 'EPERM':
149                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
150                 if perms & stat.S_IROTH:
151                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
152             else:
153                 raise
154
155         p = cPickle.Pickler(dump_file, 1)
156         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
157             exec "%s = {}" % i
158         ## files
159         for file in files.keys():
160             d_files[file] = {}
161             for i in [ "package", "version", "architecture", "type", "size",
162                        "md5sum", "component", "location id", "source package",
163                        "source version", "maintainer", "dbtype", "files id",
164                        "new", "section", "priority", "othercomponents",
165                        "pool name", "original component" ]:
166                 if files[file].has_key(i):
167                     d_files[file][i] = files[file][i]
168         ## changes
169         # Mandatory changes fields
170         for i in [ "distribution", "source", "architecture", "version",
171                    "maintainer", "urgency", "fingerprint", "changedby822",
172                    "changedby2047", "changedbyname", "maintainer822",
173                    "maintainer2047", "maintainername", "maintaineremail",
174                    "closes", "changes" ]:
175             d_changes[i] = changes[i]
176         # Optional changes fields
177         for i in [ "changed-by", "filecontents", "format", "process-new note", "distribution-version" ]:
178             if changes.has_key(i):
179                 d_changes[i] = changes[i]
180         ## dsc
181         for i in [ "source", "version", "maintainer", "fingerprint",
182                    "uploaders", "bts changelog" ]:
183             if dsc.has_key(i):
184                 d_dsc[i] = dsc[i]
185         ## dsc_files
186         for file in dsc_files.keys():
187             d_dsc_files[file] = {}
188             # Mandatory dsc_files fields
189             for i in [ "size", "md5sum" ]:
190                 d_dsc_files[file][i] = dsc_files[file][i]
191             # Optional dsc_files fields
192             for i in [ "files id" ]:
193                 if dsc_files[file].has_key(i):
194                     d_dsc_files[file][i] = dsc_files[file][i]
195
196         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
197                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
198             p.dump(i)
199         dump_file.close()
200
201     ###########################################################################
202
203     # Set up the per-package template substitution mappings
204
205     def update_subst (self, reject_message = ""):
206         Subst = self.Subst
207         changes = self.pkg.changes
208         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
209         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
210             changes["architecture"] = { "Unknown" : "" }
211         # and maintainer2047 may not exist.
212         if not changes.has_key("maintainer2047"):
213             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
214
215         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
216         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
217         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
218
219         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
220         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
221             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
222             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
223                                                      changes["maintainer2047"])
224             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
225         else:
226             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
227             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
228             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
229         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
230             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
231
232         # Apply any global override of the Maintainer field
233         if self.Cnf.get("Dinstall::OverrideMaintainer"):
234             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
235             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
236
237         Subst["__REJECT_MESSAGE__"] = reject_message
238         Subst["__SOURCE__"] = changes.get("source", "Unknown")
239         Subst["__VERSION__"] = changes.get("version", "Unknown")
240
241     ###########################################################################
242
    def build_summaries(self):
        """Build human-readable summaries of the upload.

        Returns (summary, short_summary): one line per file showing its
        fate (byhand / new / pool destination), plus — in the long form —
        the changelog text for byhand/new uploads and any announcement
        text from announce().
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        # Sort for a stable, reproducible summary ordering.
        file_keys = files.keys()
        file_keys.sort()
        for file in file_keys:
            if files[file].has_key("byhand"):
                byhand = 1
                summary += file + " byhand\n"
            elif files[file].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
                if files[file].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
                # For NEW .debs, include the package Description from the
                # control file to help the reviewer.
                if files[file]["type"] == "deb":
                    deb_fh = utils.open_file(file)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                summary += file + "\n  to " + destination + "\n"

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        # announce(..., 0) only builds text; it never sends mail.
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
285
286     ###########################################################################
287
288     def close_bugs (self, summary, action):
289         changes = self.pkg.changes
290         Subst = self.Subst
291         Cnf = self.Cnf
292
293         bugs = changes["closes"].keys()
294
295         if not bugs:
296             return summary
297
298         bugs.sort()
299         if not self.nmu.is_an_nmu(self.pkg):
300             if changes["distribution"].has_key("experimental"):
301                 # tag bugs as fixed-in-experimental for uploads to experimental
302                 summary += "Setting bugs to severity fixed: "
303                 control_message = ""
304                 for bug in bugs:
305                     summary += "%s " % (bug)
306                     control_message += "tag %s + fixed-in-experimental\n" % (bug)
307                 if action and control_message != "":
308                     Subst["__CONTROL_MESSAGE__"] = control_message
309                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
310                     utils.send_mail (mail_message)
311                 if action:
312                     self.Logger.log(["setting bugs to fixed"]+bugs)
313
314
315             else:
316                 summary += "Closing bugs: "
317                 for bug in bugs:
318                     summary += "%s " % (bug)
319                     if action:
320                         Subst["__BUG_NUMBER__"] = bug
321                         if changes["distribution"].has_key("stable"):
322                             Subst["__STABLE_WARNING__"] = """
323 Note that this package is not part of the released stable Debian
324 distribution.  It may have dependencies on other unreleased software,
325 or other instabilities.  Please take care if you wish to install it.
326 The update will eventually make its way into the next released Debian
327 distribution."""
328                         else:
329                             Subst["__STABLE_WARNING__"] = ""
330                             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
331                             utils.send_mail (mail_message)
332                 if action:
333                     self.Logger.log(["closing bugs"]+bugs)
334
335         else:                     # NMU
336             summary += "Setting bugs to severity fixed: "
337             control_message = ""
338             for bug in bugs:
339                 summary += "%s " % (bug)
340                 control_message += "tag %s + fixed\n" % (bug)
341             if action and control_message != "":
342                 Subst["__CONTROL_MESSAGE__"] = control_message
343                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
344                 utils.send_mail (mail_message)
345             if action:
346                 self.Logger.log(["setting bugs to fixed"]+bugs)
347         summary += "\n"
348         return summary
349
350     ###########################################################################
351
352     def announce (self, short_summary, action):
353         Subst = self.Subst
354         Cnf = self.Cnf
355         changes = self.pkg.changes
356
357         # Only do announcements for source uploads with a recent dpkg-dev installed
358         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
359             return ""
360
361         lists_done = {}
362         summary = ""
363         Subst["__SHORT_SUMMARY__"] = short_summary
364
365         for dist in changes["distribution"].keys():
366             list = Cnf.Find("Suite::%s::Announce" % (dist))
367             if list == "" or lists_done.has_key(list):
368                 continue
369             lists_done[list] = 1
370             summary += "Announcing to %s\n" % (list)
371
372             if action:
373                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
374                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
375                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
376                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
377                 utils.send_mail (mail_message)
378
379         if Cnf.FindB("Dinstall::CloseBugs"):
380             summary = self.close_bugs(summary, action)
381
382         return summary
383
384     ###########################################################################
385
    def accept (self, summary, short_summary):
        """Accept an upload: move its files to the accepted queue, send
        the accepted mail / announcements, write DebBugs version-tracking
        files and hand the package to the auto-build queue.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Persist state first so process-accepted can pick it up.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file, then rename into
                # place so readers never see a partial file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
453
454     ###########################################################################
455
    def queue_build (self, queue, path):
        """Register the upload's files with the auto-build queue.

        For each distribution suite listed in Dinstall::QueueBuildSuites,
        the files under 'path' are symlinked (or copied, for security
        queues) into Dir::QueueBuild and recorded in the queue_build
        table inside a single transaction.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): filenames are interpolated straight into the
                # SQL; assumed safe because they come from the archive
                # itself, not untrusted input — confirm.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_files entry ends in .orig.tar.gz,
                # 'filename' stays unbound and the next line raises
                # NameError — presumably orig_tar_id guarantees one exists.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
511
512     ###########################################################################
513
514     def check_override (self):
515         Subst = self.Subst
516         changes = self.pkg.changes
517         files = self.pkg.files
518         Cnf = self.Cnf
519
520         # Abandon the check if:
521         #  a) it's a non-sourceful upload
522         #  b) override disparity checks have been disabled
523         #  c) we're not sending mail
524         if not changes["architecture"].has_key("source") or \
525            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
526            Cnf["Dinstall::Options::No-Mail"]:
527             return
528
529         summary = ""
530         file_keys = files.keys()
531         file_keys.sort()
532         for file in file_keys:
533             if not files[file].has_key("new") and files[file]["type"] == "deb":
534                 section = files[file]["section"]
535                 override_section = files[file]["override section"]
536                 if section.lower() != override_section.lower() and section != "-":
537                     # Ignore this; it's a common mistake and not worth whining about
538                     if section.lower() == "non-us/main" and override_section.lower() == "non-us":
539                         continue
540                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
541                 priority = files[file]["priority"]
542                 override_priority = files[file]["override priority"]
543                 if priority != override_priority and priority != "-":
544                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
545
546         if summary == "":
547             return
548
549         Subst["__SUMMARY__"] = summary
550         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
551         utils.send_mail(mail_message)
552
553     ###########################################################################
554
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file."""

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            try:
                # O_EXCL claims the destination atomically so a racing
                # process can't slip a file in underneath us.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry the claim once now that the old file is gone.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            os.close(dest_fd)
594
595     ###########################################################################
596
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload.

        When 'manual' is set and no message was given, an editor is
        spawned so the operator can write one (with options to re-edit,
        abandon or quit).  The files are then moved to the reject
        directory, a <name>.reason file is written and a rejection mail
        is sent unless mail is disabled.  Returns 1 if the rejection was
        abandoned, 0 otherwise.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop until the operator accepts, abandons or quits.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        # Empty input selects the bracketed default (R).
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <changes base name>.reason in the reject directory.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
669
670     ################################################################################
671
672     # Ensure that source exists somewhere in the archive for the binary
673     # upload being processed.
674     #
675     # (1) exact match                      => 1.0-3
676     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
677
678     def source_exists (self, package, source_version, suites = ["any"]):
679         okay = 1
680         for suite in suites:
681             if suite == "any":
682                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
683                     (package)
684             else:
685                 # source must exist in suite X, or in some other suite that's
686                 # mapped to X, recursively... silent-maps are counted too,
687                 # unreleased-maps aren't.
688                 maps = self.Cnf.ValueList("SuiteMappings")[:]
689                 maps.reverse()
690                 maps = [ m.split() for m in maps ]
691                 maps = [ (x[1], x[2]) for x in maps
692                                 if x[0] == "map" or x[0] == "silent-map" ]
693                 s = [suite]
694                 for x in maps:
695                         if x[1] in s and x[0] not in s:
696                                 s.append(x[0])
697
698                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
699             q = self.projectB.query(que)
700
701             # Reduce the query results to a list of version numbers
702             ql = [ i[0] for i in q.getresult() ]
703
704             # Try (1)
705             if source_version in ql:
706                 continue
707
708             # Try (2)
709             orig_source_version = re_bin_only_nmu.sub('', source_version)
710             if orig_source_version in ql:
711                 continue
712
713             # No source found...
714             okay = 0
715             break
716         return okay
717
718     ################################################################################
719     
720     def in_override_p (self, package, component, suite, binary_type, file):
721         files = self.pkg.files
722
723         if binary_type == "": # must be source
724             type = "dsc"
725         else:
726             type = binary_type
727
728         # Override suite name; used for example with proposed-updates
729         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
730             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
731
732         # Avoid <undef> on unknown distributions
733         suite_id = database.get_suite_id(suite)
734         if suite_id == -1:
735             return None
736         component_id = database.get_component_id(component)
737         type_id = database.get_override_type_id(type)
738
739         # FIXME: nasty non-US speficic hack
740         if component.lower().startswith("non-us/"):
741             component = component[7:]
742
743         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
744                            % (package, suite_id, component_id, type_id))
745         result = q.getresult()
746         # If checking for a source package fall back on the binary override type
747         if type == "dsc" and not result:
748             deb_type_id = database.get_override_type_id("deb")
749             udeb_type_id = database.get_override_type_id("udeb")
750             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
751                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
752             result = q.getresult()
753
754         # Remember the section and priority so we can check them later if appropriate
755         if result:
756             files[file]["override section"] = result[0][0]
757             files[file]["override priority"] = result[0][1]
758
759         return result
760
761     ################################################################################
762
763     def reject (self, str, prefix="Rejected: "):
764         if str:
765             # Unlike other rejects we add new lines first to avoid trailing
766             # new lines when this message is passed back up to a caller.
767             if self.reject_message:
768                 self.reject_message += "\n"
769             self.reject_message += prefix + str
770
771     ################################################################################
772
773     def get_anyversion(self, query_result, suite):
774         anyversion=None
775         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
776         for (v, s) in query_result:
777             if s in [ x.lower() for x in anysuite ]:
778                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
779                     anyversion=v
780         return anyversion
781
782     ################################################################################
783
784     def cross_suite_version_check(self, query_result, file, new_version):
785         """Ensure versions are newer than existing packages in target
786         suites and that cross-suite version checking rules as
787         set out in the conf file are satisfied."""
788
789         # Check versions for each target suite
790         for target_suite in self.pkg.changes["distribution"].keys():
791             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
792             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
793             # Enforce "must be newer than target suite" even if conffile omits it
794             if target_suite not in must_be_newer_than:
795                 must_be_newer_than.append(target_suite)
796             for entry in query_result:
797                 existent_version = entry[0]
798                 suite = entry[1]
799                 if suite in must_be_newer_than and \
800                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
801                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
802                 if suite in must_be_older_than and \
803                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
804                     ch = self.pkg.changes
805                     cansave = 0
806                     if ch.get('distribution-version', {}).has_key(suite):
807                         # we really use the other suite, ignoring the conflicting one ...
808                         addsuite = ch["distribution-version"][suite]
809                     
810                         add_version = self.get_anyversion(query_result, addsuite)
811                         target_version = self.get_anyversion(query_result, target_suite)
812                     
813                         if not add_version:
814                             # not add_version can only happen if we map to a suite
815                             # that doesn't enhance the suite we're propup'ing from.
816                             # so "propup-ver x a b c; map a d" is a problem only if
817                             # d doesn't enhance a.
818                             #
819                             # i think we could always propagate in this case, rather
820                             # than complaining. either way, this isn't a REJECT issue
821                             #
822                             # And - we really should complain to the dorks who configured dak
823                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
824                             self.pkg.changes.setdefault("propdistribution", {})
825                             self.pkg.changes["propdistribution"][addsuite] = 1
826                             cansave = 1
827                         elif not target_version:
828                             # not targets_version is true when the package is NEW
829                             # we could just stick with the "...old version..." REJECT
830                             # for this, I think.
831                             self.reject("Won't propogate NEW packages.")
832                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
833                             # propogation would be redundant. no need to reject though.
834                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
835                             cansave = 1
836                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
837                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
838                             # propogate!!
839                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
840                             self.pkg.changes.setdefault("propdistribution", {})
841                             self.pkg.changes["propdistribution"][addsuite] = 1
842                             cansave = 1
843                 
844                     if not cansave:
845                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
846
847     ################################################################################
848
849     def check_binary_against_db(self, file):
850         self.reject_message = ""
851         files = self.pkg.files
852
853         # Ensure version is sane
854         q = self.projectB.query("""
855 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
856                                      architecture a
857  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
858    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
859                                 % (files[file]["package"],
860                                    files[file]["architecture"]))
861         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
862
863         # Check for any existing copies of the file
864         q = self.projectB.query("""
865 SELECT b.id FROM binaries b, architecture a
866  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
867    AND a.id = b.architecture"""
868                                 % (files[file]["package"],
869                                    files[file]["version"],
870                                    files[file]["architecture"]))
871         if q.getresult():
872             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
873
874         return self.reject_message
875
876     ################################################################################
877
878     def check_source_against_db(self, file):
879         self.reject_message = ""
880         dsc = self.pkg.dsc
881
882         # Ensure version is sane
883         q = self.projectB.query("""
884 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
885  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
886         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
887
888         return self.reject_message
889
890     ################################################################################
891
892     # **WARNING**
893     # NB: this function can remove entries from the 'files' index [if
894     # the .orig.tar.gz is a duplicate of the one in the archive]; if
895     # you're iterating over 'files' and call this function as part of
896     # the loop, be sure to add a check to the top of the loop to
# ensure you haven't just tried to dereference the deleted entry.
898     # **WARNING**
899
900     def check_dsc_against_db(self, file):
901         self.reject_message = ""
902         files = self.pkg.files
903         dsc_files = self.pkg.dsc_files
904         legacy_source_untouchable = self.pkg.legacy_source_untouchable
905         self.pkg.orig_tar_gz = None
906
907         # Try and find all files mentioned in the .dsc.  This has
908         # to work harder to cope with the multiple possible
909         # locations of an .orig.tar.gz.
910         for dsc_file in dsc_files.keys():
911             found = None
912             if files.has_key(dsc_file):
913                 actual_md5 = files[dsc_file]["md5sum"]
914                 actual_size = int(files[dsc_file]["size"])
915                 found = "%s in incoming" % (dsc_file)
916                 # Check the file does not already exist in the archive
917                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
918                 ql = q.getresult()
919                 # Strip out anything that isn't '%s' or '/%s$'
920                 for i in ql:
921                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
922                         ql.remove(i)
923
924                 # "[dak] has not broken them.  [dak] has fixed a
925                 # brokenness.  Your crappy hack exploited a bug in
926                 # the old dinstall.
927                 #
928                 # "(Come on!  I thought it was always obvious that
929                 # one just doesn't release different files with
930                 # the same name and version.)"
931                 #                        -- ajk@ on d-devel@l.d.o
932
933                 if ql:
934                     # Ignore exact matches for .orig.tar.gz
935                     match = 0
936                     if dsc_file.endswith(".orig.tar.gz"):
937                         for i in ql:
938                             if files.has_key(dsc_file) and \
939                                int(files[dsc_file]["size"]) == int(i[0]) and \
940                                files[dsc_file]["md5sum"] == i[1]:
941                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
942                                 del files[dsc_file]
943                                 self.pkg.orig_tar_gz = i[2] + i[3]
944                                 match = 1
945
946                     if not match:
947                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
948             elif dsc_file.endswith(".orig.tar.gz"):
949                 # Check in the pool
950                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
951                 ql = q.getresult()
952                 # Strip out anything that isn't '%s' or '/%s$'
953                 for i in ql:
954                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
955                         ql.remove(i)
956
957                 if ql:
958                     # Unfortunately, we may get more than one match here if,
959                     # for example, the package was in potato but had an -sa
960                     # upload in woody.  So we need to choose the right one.
961
962                     x = ql[0]; # default to something sane in case we don't match any or have only one
963
964                     if len(ql) > 1:
965                         for i in ql:
966                             old_file = i[0] + i[1]
967                             old_file_fh = utils.open_file(old_file)
968                             actual_md5 = apt_pkg.md5sum(old_file_fh)
969                             old_file_fh.close()
970                             actual_size = os.stat(old_file)[stat.ST_SIZE]
971                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
972                                 x = i
973                             else:
974                                 legacy_source_untouchable[i[3]] = ""
975
976                     old_file = x[0] + x[1]
977                     old_file_fh = utils.open_file(old_file)
978                     actual_md5 = apt_pkg.md5sum(old_file_fh)
979                     old_file_fh.close()
980                     actual_size = os.stat(old_file)[stat.ST_SIZE]
981                     found = old_file
982                     suite_type = x[2]
983                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
984                     # See install() in process-accepted...
985                     self.pkg.orig_tar_id = x[3]
986                     self.pkg.orig_tar_gz = old_file
987                     if suite_type == "legacy" or suite_type == "legacy-mixed":
988                         self.pkg.orig_tar_location = "legacy"
989                     else:
990                         self.pkg.orig_tar_location = x[4]
991                 else:
992                     # Not there? Check the queue directories...
993
994                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
995                     # See process_it() in 'dak process-unchecked' for explanation of this
996                     if os.path.exists(in_unchecked):
997                         return (self.reject_message, in_unchecked)
998                     else:
999                         for dir in [ "Accepted", "New", "Byhand" ]:
1000                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1001                             if os.path.exists(in_otherdir):
1002                                 in_otherdir_fh = utils.open_file(in_otherdir)
1003                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1004                                 in_otherdir_fh.close()
1005                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1006                                 found = in_otherdir
1007                                 self.pkg.orig_tar_gz = in_otherdir
1008
1009                     if not found:
1010                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1011                         self.pkg.orig_tar_gz = -1
1012                         continue
1013             else:
1014                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1015                 continue
1016             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1017                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1018             if actual_size != int(dsc_files[dsc_file]["size"]):
1019                 self.reject("size for %s doesn't match %s." % (found, file))
1020
1021         return (self.reject_message, None)
1022
1023     def do_query(self, q):
1024         sys.stderr.write("query: \"%s\" ... " % (q))
1025         before = time.time()
1026         r = self.projectB.query(q)
1027         time_diff = time.time()-before
1028         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1029         return r