]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
e8d569bc7fa6f1898ae5f25fc6ac32dd89f9cbbc
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
# Matches a string consisting solely of digits (i.e. a plain number).
re_isanum = re.compile (r"^\d+$")
# Extracts the bracketed default choice from a prompt, e.g. "R" from "[R]eject ...".
re_default_answer = re.compile(r"\[(.*)\]")
# Two consecutive newlines, i.e. a blank line separating paragraphs.
re_fdnic = re.compile(r"\n\n")
# Version suffix of a binary-only NMU, e.g. "+b1" at the end of a version.
re_bin_only_nmu = re.compile(r"\+b\d+$")
34
35 ###############################################################################
36
37 # Convenience wrapper to carry around all the package information in
38
class Pkg:
    """Convenience container carrying all the package information around;
    arbitrary keyword arguments become instance attributes."""

    def __init__(self, **kwds):
        # Expose every keyword argument as an attribute of this instance.
        for key, value in kwds.items():
            setattr(self, key, value)

    def update(self, **kwds):
        # Merge further keyword arguments into the attribute namespace.
        for key, value in kwds.items():
            setattr(self, key, value)
46 ###############################################################################
47
class nmu_p:
    """Decide whether an upload is an NMU (non-maintainer upload).

    On construction the optional group-maintainer override file
    (Dinstall::GroupOverrideFilename under Dir::Override) is read;
    packages maintained by an address listed there are never NMUs.
    """

    def __init__ (self, Cnf):
        self.group_maint = {}
        self.Cnf = Cnf
        if Cnf.get("Dinstall::GroupOverrideFilename"):
            filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
            override_file = utils.open_file(filename)
            for line in override_file.readlines():
                entry = utils.re_comments.sub('', line).lower().strip()
                if entry:
                    self.group_maint[entry] = 1
            override_file.close()

    def is_an_nmu (self, pkg):
        """Return 1 if 'pkg' looks like an NMU, 0 otherwise.

        Not an NMU when the changer is the maintainer, is listed in
        Uploaders, or the maintainer address is group-maintained.
        """
        Cnf = self.Cnf
        changes = pkg.changes
        dsc = pkg.dsc

        (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = utils.fix_maintainer (
            dsc.get("maintainer", Cnf["Dinstall::MyEmailAddress"]).lower())
        # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
        if dsc_name == changes["maintainername"].lower():
            if changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name:
                return 0

        # An upload by anyone listed in Uploaders isn't an NMU either.
        if "uploaders" in dsc:
            uploadernames = {}
            for uploader in dsc["uploaders"].lower().split(","):
                (rfc822, rfc2047, name, email) = utils.fix_maintainer (uploader.strip())
                uploadernames[name] = ""
            if changes["changedbyname"].lower() in uploadernames:
                return 0

        # Some group maintained packages (e.g. Debian QA) are never NMU's
        if changes["maintaineremail"].lower() in self.group_maint:
            return 0

        return 1
89
90 ###############################################################################
91
92 class Upload:
93
94     def __init__(self, Cnf):
95         self.Cnf = Cnf
96         # Read in the group-maint override file
97         self.nmu = nmu_p(Cnf)
98         self.accept_count = 0
99         self.accept_bytes = 0L
100         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
101                        legacy_source_untouchable = {})
102
103         # Initialize the substitution template mapping global
104         Subst = self.Subst = {}
105         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
106         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
107         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
108         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
109
110         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
111         database.init(Cnf, self.projectB)
112
113     ###########################################################################
114
115     def init_vars (self):
116         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
117             exec "self.pkg.%s.clear();" % (i)
118         self.pkg.orig_tar_id = None
119         self.pkg.orig_tar_location = ""
120         self.pkg.orig_tar_gz = None
121
122     ###########################################################################
123
124     def update_vars (self):
125         dump_filename = self.pkg.changes_file[:-8]+".dak"
126         dump_file = utils.open_file(dump_filename)
127         p = cPickle.Unpickler(dump_file)
128         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
129             exec "self.pkg.%s.update(p.load());" % (i)
130         for i in [ "orig_tar_id", "orig_tar_location" ]:
131             exec "self.pkg.%s = p.load();" % (i)
132         dump_file.close()
133
134     ###########################################################################
135
136     # This could just dump the dictionaries as is, but I'd like to
137     # avoid this so there's some idea of what process-accepted &
138     # process-new use from process-unchecked
139
140     def dump_vars(self, dest_dir):
141         for i in [ "changes", "dsc", "files", "dsc_files",
142                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143             exec "%s = self.pkg.%s;" % (i,i)
144         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
145         dump_file = utils.open_file(dump_filename, 'w')
146         try:
147             os.chmod(dump_filename, 0660)
148         except OSError, e:
149             if errno.errorcode[e.errno] == 'EPERM':
150                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
151                 if perms & stat.S_IROTH:
152                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
153             else:
154                 raise
155
156         p = cPickle.Pickler(dump_file, 1)
157         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
158             exec "%s = {}" % i
159         ## files
160         for file in files.keys():
161             d_files[file] = {}
162             for i in [ "package", "version", "architecture", "type", "size",
163                        "md5sum", "component", "location id", "source package",
164                        "source version", "maintainer", "dbtype", "files id",
165                        "new", "section", "priority", "othercomponents",
166                        "pool name", "original component" ]:
167                 if files[file].has_key(i):
168                     d_files[file][i] = files[file][i]
169         ## changes
170         # Mandatory changes fields
171         for i in [ "distribution", "source", "architecture", "version",
172                    "maintainer", "urgency", "fingerprint", "changedby822",
173                    "changedby2047", "changedbyname", "maintainer822",
174                    "maintainer2047", "maintainername", "maintaineremail",
175                    "closes", "changes" ]:
176             d_changes[i] = changes[i]
177         # Optional changes fields
178         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
179             if changes.has_key(i):
180                 d_changes[i] = changes[i]
181         ## dsc
182         for i in [ "source", "version", "maintainer", "fingerprint",
183                    "uploaders", "bts changelog" ]:
184             if dsc.has_key(i):
185                 d_dsc[i] = dsc[i]
186         ## dsc_files
187         for file in dsc_files.keys():
188             d_dsc_files[file] = {}
189             # Mandatory dsc_files fields
190             for i in [ "size", "md5sum" ]:
191                 d_dsc_files[file][i] = dsc_files[file][i]
192             # Optional dsc_files fields
193             for i in [ "files id" ]:
194                 if dsc_files[file].has_key(i):
195                     d_dsc_files[file][i] = dsc_files[file][i]
196
197         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
198                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
199             p.dump(i)
200         dump_file.close()
201
202     ###########################################################################
203
204     # Set up the per-package template substitution mappings
205
206     def update_subst (self, reject_message = ""):
207         Subst = self.Subst
208         changes = self.pkg.changes
209         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
210         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
211             changes["architecture"] = { "Unknown" : "" }
212         # and maintainer2047 may not exist.
213         if not changes.has_key("maintainer2047"):
214             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
215
216         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
217         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
218         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
219
220         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
221         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
222             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
223             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
224                                                      changes["maintainer2047"])
225             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
226         else:
227             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
228             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
229             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
230         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
231             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
232
233         # Apply any global override of the Maintainer field
234         if self.Cnf.get("Dinstall::OverrideMaintainer"):
235             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
236             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
237
238         Subst["__REJECT_MESSAGE__"] = reject_message
239         Subst["__SOURCE__"] = changes.get("source", "Unknown")
240         Subst["__VERSION__"] = changes.get("version", "Unknown")
241
242     ###########################################################################
243
244     def build_summaries(self):
245         changes = self.pkg.changes
246         files = self.pkg.files
247
248         byhand = summary = new = ""
249
250         # changes["distribution"] may not exist in corner cases
251         # (e.g. unreadable changes files)
252         if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
253             changes["distribution"] = {}
254
255         file_keys = files.keys()
256         file_keys.sort()
257         for file in file_keys:
258             if files[file].has_key("byhand"):
259                 byhand = 1
260                 summary += file + " byhand\n"
261             elif files[file].has_key("new"):
262                 new = 1
263                 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
264                 if files[file].has_key("othercomponents"):
265                     summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
266                 if files[file]["type"] == "deb":
267                     deb_fh = utils.open_file(file)
268                     summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
269                     deb_fh.close()
270             else:
271                 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
272                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
273                 summary += file + "\n  to " + destination + "\n"
274
275         short_summary = summary
276
277         # This is for direport's benefit...
278         f = re_fdnic.sub("\n .\n", changes.get("changes",""))
279
280         if byhand or new:
281             summary += "Changes: " + f
282
283         summary += self.announce(short_summary, 0)
284
285         return (summary, short_summary)
286
287     ###########################################################################
288
289     def close_bugs (self, summary, action):
290         changes = self.pkg.changes
291         Subst = self.Subst
292         Cnf = self.Cnf
293
294         bugs = changes["closes"].keys()
295
296         if not bugs:
297             return summary
298
299         bugs.sort()
300         if not self.nmu.is_an_nmu(self.pkg):
301             if changes["distribution"].has_key("experimental"):
302                 # tag bugs as fixed-in-experimental for uploads to experimental
303                 summary += "Setting bugs to severity fixed: "
304                 control_message = ""
305                 for bug in bugs:
306                     summary += "%s " % (bug)
307                     control_message += "tag %s + fixed-in-experimental\n" % (bug)
308                 if action and control_message != "":
309                     Subst["__CONTROL_MESSAGE__"] = control_message
310                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
311                     utils.send_mail (mail_message)
312                 if action:
313                     self.Logger.log(["setting bugs to fixed"]+bugs)
314
315
316             else:
317                 summary += "Closing bugs: "
318                 for bug in bugs:
319                     summary += "%s " % (bug)
320                     if action:
321                         Subst["__BUG_NUMBER__"] = bug
322                         if changes["distribution"].has_key("stable"):
323                             Subst["__STABLE_WARNING__"] = """
324 Note that this package is not part of the released stable Debian
325 distribution.  It may have dependencies on other unreleased software,
326 or other instabilities.  Please take care if you wish to install it.
327 The update will eventually make its way into the next released Debian
328 distribution."""
329                         else:
330                             Subst["__STABLE_WARNING__"] = ""
331                             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
332                             utils.send_mail (mail_message)
333                 if action:
334                     self.Logger.log(["closing bugs"]+bugs)
335
336         else:                     # NMU
337             summary += "Setting bugs to severity fixed: "
338             control_message = ""
339             for bug in bugs:
340                 summary += "%s " % (bug)
341                 control_message += "tag %s + fixed\n" % (bug)
342             if action and control_message != "":
343                 Subst["__CONTROL_MESSAGE__"] = control_message
344                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
345                 utils.send_mail (mail_message)
346             if action:
347                 self.Logger.log(["setting bugs to fixed"]+bugs)
348         summary += "\n"
349         return summary
350
351     ###########################################################################
352
353     def announce (self, short_summary, action):
354         Subst = self.Subst
355         Cnf = self.Cnf
356         changes = self.pkg.changes
357
358         # Only do announcements for source uploads with a recent dpkg-dev installed
359         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
360             return ""
361
362         lists_done = {}
363         summary = ""
364         Subst["__SHORT_SUMMARY__"] = short_summary
365
366         for dist in changes["distribution"].keys():
367             list = Cnf.Find("Suite::%s::Announce" % (dist))
368             if list == "" or lists_done.has_key(list):
369                 continue
370             lists_done[list] = 1
371             summary += "Announcing to %s\n" % (list)
372
373             if action:
374                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
375                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
376                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
377                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
378                 utils.send_mail (mail_message)
379
380         if Cnf.FindB("Dinstall::CloseBugs"):
381             summary = self.close_bugs(summary, action)
382
383         return summary
384
385     ###########################################################################
386
    def accept (self, summary, short_summary):
        """Accept an upload: persist its state, move all files into the
        accepted queue, send the accept mail / announcements, write the
        DebBugs version-tracking helper files and register the upload
        for the auto-builders.

        'summary' goes into the accept mail; 'short_summary' is passed
        on to announce().
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Persist the per-package state next to the moved .changes so
        # process-accepted / process-new can reload it via update_vars().
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file in file_keys:
            utils.move(file, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write a dot-prefixed temp file first and rename it into
                # place so readers never see a partially written file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file in file_keys:
                f = files[file]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
454
455     ###########################################################################
456
    def queue_build (self, queue, path):
        """Expose the upload's files to the auto-builders for 'queue'
        (e.g. "accepted").

        For every distribution in the .changes that appears in
        Dinstall::QueueBuildSuites, each file under 'path' is made
        available in Dir::QueueBuild -- copied when
        Dinstall::SecurityQueueBuild is set, symlinked otherwise -- and
        recorded in the queue_build table for apt-ftparchive.  A pool
        .orig.tar.gz is symlinked in too when needed.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # All the inserts/updates below happen in a single transaction.
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file in file_keys:
                src = os.path.join(path, file)
                dest = os.path.join(dest_dir, file)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): filenames are interpolated straight into the
                # SQL string; they originate from the archive's own queue, but
                # parameterized queries would be safer -- worth confirming.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_files entry ends in .orig.tar.gz,
                # 'filename' below would be unbound -- presumably a non-None
                # orig_tar_id guarantees one exists; confirm against callers.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
512
513     ###########################################################################
514
515     def check_override (self):
516         Subst = self.Subst
517         changes = self.pkg.changes
518         files = self.pkg.files
519         Cnf = self.Cnf
520
521         # Abandon the check if:
522         #  a) it's a non-sourceful upload
523         #  b) override disparity checks have been disabled
524         #  c) we're not sending mail
525         if not changes["architecture"].has_key("source") or \
526            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
527            Cnf["Dinstall::Options::No-Mail"]:
528             return
529
530         summary = ""
531         file_keys = files.keys()
532         file_keys.sort()
533         for file in file_keys:
534             if not files[file].has_key("new") and files[file]["type"] == "deb":
535                 section = files[file]["section"]
536                 override_section = files[file]["override section"]
537                 if section.lower() != override_section.lower() and section != "-":
538                     # Ignore this; it's a common mistake and not worth whining about
539                     if section.lower() == "non-us/main" and override_section.lower() == "non-us":
540                         continue
541                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
542                 priority = files[file]["priority"]
543                 override_priority = files[file]["override priority"]
544                 if priority != override_priority and priority != "-":
545                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
546
547         if summary == "":
548             return
549
550         Subst["__SUMMARY__"] = summary
551         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
552         utils.send_mail(mail_message)
553
554     ###########################################################################
555
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           'files' is an iterable of file names relative to the current
           directory; entries that don't exist or aren't readable are
           silently skipped.
        """

        Cnf = self.Cnf

        for file in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
            try:
                # O_EXCL: atomically claim the destination name so a racing
                # process can't slip a file in underneath us.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except utils.tried_too_hard_exc:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        # Retry the claim now that the old file is out of the way.
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file, dest_file, 1, perms=0660)
            os.close(dest_fd)
595
596     ###########################################################################
597
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files into the reject
        directory, write a <changes base name>.reason file there and
        (unless mail is disabled) send the rejection mail.

        When 'manual' is true and no 'reject_message' was supplied, an
        editor is spawned so the operator can compose one.  Returns 1
        if the operator abandoned the rejection, 0 on completion; may
        sys.exit(0) if the operator chose Quit.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until a letter from the prompt is given;
                # empty input picks the bracketed default ('R').
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: canned headers, raw message as reason.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
670
671     ################################################################################
672
673     # Ensure that source exists somewhere in the archive for the binary
674     # upload being processed.
675     #
676     # (1) exact match                      => 1.0-3
677     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
678
679     def source_exists (self, package, source_version, suites = ["any"]):
680         okay = 1
681         for suite in suites:
682             if suite == "any":
683                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
684                     (package)
685             else:
686                 # source must exist in suite X, or in some other suite that's
687                 # mapped to X, recursively... silent-maps are counted too,
688                 # unreleased-maps aren't.
689                 maps = self.Cnf.ValueList("SuiteMappings")[:]
690                 maps.reverse()
691                 maps = [ m.split() for m in maps ]
692                 maps = [ (x[1], x[2]) for x in maps
693                                 if x[0] == "map" or x[0] == "silent-map" ]
694                 s = [suite]
695                 for x in maps:
696                         if x[1] in s and x[0] not in s:
697                                 s.append(x[0])
698
699                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
700             q = self.projectB.query(que)
701
702             # Reduce the query results to a list of version numbers
703             ql = [ i[0] for i in q.getresult() ]
704
705             # Try (1)
706             if source_version in ql:
707                 continue
708
709             # Try (2)
710             orig_source_version = re_bin_only_nmu.sub('', source_version)
711             if orig_source_version in ql:
712                 continue
713
714             # No source found...
715             okay = 0
716             break
717         return okay
718
719     ################################################################################
720     
721     def in_override_p (self, package, component, suite, binary_type, file):
722         files = self.pkg.files
723
724         if binary_type == "": # must be source
725             type = "dsc"
726         else:
727             type = binary_type
728
729         # Override suite name; used for example with proposed-updates
730         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
731             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
732
733         # Avoid <undef> on unknown distributions
734         suite_id = database.get_suite_id(suite)
735         if suite_id == -1:
736             return None
737         component_id = database.get_component_id(component)
738         type_id = database.get_override_type_id(type)
739
740         # FIXME: nasty non-US speficic hack
741         if component.lower().startswith("non-us/"):
742             component = component[7:]
743
744         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
745                            % (package, suite_id, component_id, type_id))
746         result = q.getresult()
747         # If checking for a source package fall back on the binary override type
748         if type == "dsc" and not result:
749             deb_type_id = database.get_override_type_id("deb")
750             udeb_type_id = database.get_override_type_id("udeb")
751             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
752                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
753             result = q.getresult()
754
755         # Remember the section and priority so we can check them later if appropriate
756         if result:
757             files[file]["override section"] = result[0][0]
758             files[file]["override priority"] = result[0][1]
759
760         return result
761
762     ################################################################################
763
764     def reject (self, str, prefix="Rejected: "):
765         if str:
766             # Unlike other rejects we add new lines first to avoid trailing
767             # new lines when this message is passed back up to a caller.
768             if self.reject_message:
769                 self.reject_message += "\n"
770             self.reject_message += prefix + str
771
772     ################################################################################
773
774     def get_anyversion(self, query_result, suite):
775         anyversion=None
776         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
777         for (v, s) in query_result:
778             if s in [ x.lower() for x in anysuite ]:
779                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
780                     anyversion=v
781         return anyversion
782
783     ################################################################################
784
785     def cross_suite_version_check(self, query_result, file, new_version):
786         """Ensure versions are newer than existing packages in target
787         suites and that cross-suite version checking rules as
788         set out in the conf file are satisfied."""
789
790         # Check versions for each target suite
791         for target_suite in self.pkg.changes["distribution"].keys():
792             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
793             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
794             # Enforce "must be newer than target suite" even if conffile omits it
795             if target_suite not in must_be_newer_than:
796                 must_be_newer_than.append(target_suite)
797             for entry in query_result:
798                 existent_version = entry[0]
799                 suite = entry[1]
800                 if suite in must_be_newer_than and \
801                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
802                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
803                 if suite in must_be_older_than and \
804                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
805                     ch = self.pkg.changes
806                     cansave = 0
807                     if ch.get('distribution-version', {}).has_key(suite):
808                         # we really use the other suite, ignoring the conflicting one ...
809                         addsuite = ch["distribution-version"][suite]
810                     
811                         add_version = self.get_anyversion(query_result, addsuite)
812                         target_version = self.get_anyversion(query_result, target_suite)
813                     
814                         if not add_version:
815                             # not add_version can only happen if we map to a suite
816                             # that doesn't enhance the suite we're propup'ing from.
817                             # so "propup-ver x a b c; map a d" is a problem only if
818                             # d doesn't enhance a.
819                             #
820                             # i think we could always propagate in this case, rather
821                             # than complaining. either way, this isn't a REJECT issue
822                             #
823                             # And - we really should complain to the dorks who configured dak
824                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
825                             self.pkg.changes.setdefault("propdistribution", {})
826                             self.pkg.changes["propdistribution"][addsuite] = 1
827                             cansave = 1
828                         elif not target_version:
829                             # not targets_version is true when the package is NEW
830                             # we could just stick with the "...old version..." REJECT
831                             # for this, I think.
832                             self.reject("Won't propogate NEW packages.")
833                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
834                             # propogation would be redundant. no need to reject though.
835                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
836                             cansave = 1
837                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
838                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
839                             # propogate!!
840                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
841                             self.pkg.changes.setdefault("propdistribution", {})
842                             self.pkg.changes["propdistribution"][addsuite] = 1
843                             cansave = 1
844                 
845                     if not cansave:
846                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
847
848     ################################################################################
849
850     def check_binary_against_db(self, file):
851         self.reject_message = ""
852         files = self.pkg.files
853
854         # Ensure version is sane
855         q = self.projectB.query("""
856 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
857                                      architecture a
858  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
859    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
860                                 % (files[file]["package"],
861                                    files[file]["architecture"]))
862         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
863
864         # Check for any existing copies of the file
865         q = self.projectB.query("""
866 SELECT b.id FROM binaries b, architecture a
867  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
868    AND a.id = b.architecture"""
869                                 % (files[file]["package"],
870                                    files[file]["version"],
871                                    files[file]["architecture"]))
872         if q.getresult():
873             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
874
875         return self.reject_message
876
877     ################################################################################
878
879     def check_source_against_db(self, file):
880         self.reject_message = ""
881         dsc = self.pkg.dsc
882
883         # Ensure version is sane
884         q = self.projectB.query("""
885 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
886  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
887         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
888
889         return self.reject_message
890
891     ################################################################################
892
    # **WARNING**
    # NB: this function can remove entries from the 'files' index [if
    # the .orig.tar.gz is a duplicate of the one in the archive]; if
    # you're iterating over 'files' and call this function as part of
    # the loop, be sure to add a check to the top of the loop to
    # ensure you haven't just tried to dereference the deleted entry.
    # **WARNING**
900
901     def check_dsc_against_db(self, file):
902         self.reject_message = ""
903         files = self.pkg.files
904         dsc_files = self.pkg.dsc_files
905         legacy_source_untouchable = self.pkg.legacy_source_untouchable
906         self.pkg.orig_tar_gz = None
907
908         # Try and find all files mentioned in the .dsc.  This has
909         # to work harder to cope with the multiple possible
910         # locations of an .orig.tar.gz.
911         for dsc_file in dsc_files.keys():
912             found = None
913             if files.has_key(dsc_file):
914                 actual_md5 = files[dsc_file]["md5sum"]
915                 actual_size = int(files[dsc_file]["size"])
916                 found = "%s in incoming" % (dsc_file)
917                 # Check the file does not already exist in the archive
918                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
919                 ql = q.getresult()
920                 # Strip out anything that isn't '%s' or '/%s$'
921                 for i in ql:
922                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
923                         ql.remove(i)
924
925                 # "[dak] has not broken them.  [dak] has fixed a
926                 # brokenness.  Your crappy hack exploited a bug in
927                 # the old dinstall.
928                 #
929                 # "(Come on!  I thought it was always obvious that
930                 # one just doesn't release different files with
931                 # the same name and version.)"
932                 #                        -- ajk@ on d-devel@l.d.o
933
934                 if ql:
935                     # Ignore exact matches for .orig.tar.gz
936                     match = 0
937                     if dsc_file.endswith(".orig.tar.gz"):
938                         for i in ql:
939                             if files.has_key(dsc_file) and \
940                                int(files[dsc_file]["size"]) == int(i[0]) and \
941                                files[dsc_file]["md5sum"] == i[1]:
942                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
943                                 del files[dsc_file]
944                                 self.pkg.orig_tar_gz = i[2] + i[3]
945                                 match = 1
946
947                     if not match:
948                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
949             elif dsc_file.endswith(".orig.tar.gz"):
950                 # Check in the pool
951                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
952                 ql = q.getresult()
953                 # Strip out anything that isn't '%s' or '/%s$'
954                 for i in ql:
955                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
956                         ql.remove(i)
957
958                 if ql:
959                     # Unfortunately, we may get more than one match here if,
960                     # for example, the package was in potato but had an -sa
961                     # upload in woody.  So we need to choose the right one.
962
963                     x = ql[0]; # default to something sane in case we don't match any or have only one
964
965                     if len(ql) > 1:
966                         for i in ql:
967                             old_file = i[0] + i[1]
968                             old_file_fh = utils.open_file(old_file)
969                             actual_md5 = apt_pkg.md5sum(old_file_fh)
970                             old_file_fh.close()
971                             actual_size = os.stat(old_file)[stat.ST_SIZE]
972                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
973                                 x = i
974                             else:
975                                 legacy_source_untouchable[i[3]] = ""
976
977                     old_file = x[0] + x[1]
978                     old_file_fh = utils.open_file(old_file)
979                     actual_md5 = apt_pkg.md5sum(old_file_fh)
980                     old_file_fh.close()
981                     actual_size = os.stat(old_file)[stat.ST_SIZE]
982                     found = old_file
983                     suite_type = x[2]
984                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
985                     # See install() in process-accepted...
986                     self.pkg.orig_tar_id = x[3]
987                     self.pkg.orig_tar_gz = old_file
988                     if suite_type == "legacy" or suite_type == "legacy-mixed":
989                         self.pkg.orig_tar_location = "legacy"
990                     else:
991                         self.pkg.orig_tar_location = x[4]
992                 else:
993                     # Not there? Check the queue directories...
994
995                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
996                     # See process_it() in 'dak process-unchecked' for explanation of this
997                     if os.path.exists(in_unchecked):
998                         return (self.reject_message, in_unchecked)
999                     else:
1000                         for dir in [ "Accepted", "New", "Byhand" ]:
1001                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1002                             if os.path.exists(in_otherdir):
1003                                 in_otherdir_fh = utils.open_file(in_otherdir)
1004                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1005                                 in_otherdir_fh.close()
1006                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1007                                 found = in_otherdir
1008                                 self.pkg.orig_tar_gz = in_otherdir
1009
1010                     if not found:
1011                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1012                         self.pkg.orig_tar_gz = -1
1013                         continue
1014             else:
1015                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1016                 continue
1017             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1018                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1019             if actual_size != int(dsc_files[dsc_file]["size"]):
1020                 self.reject("size for %s doesn't match %s." % (found, file))
1021
1022         return (self.reject_message, None)
1023
1024     def do_query(self, q):
1025         sys.stderr.write("query: \"%s\" ... " % (q))
1026         before = time.time()
1027         r = self.projectB.query(q)
1028         time_diff = time.time()-before
1029         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1030         return r