1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25
26 from types import *
27
28 ###############################################################################
29
30 re_isanum = re.compile (r"^\d+$")
31 re_default_answer = re.compile(r"\[(.*)\]")
32 re_fdnic = re.compile(r"\n\n")
33 re_bin_only_nmu = re.compile(r"\+b\d+$")
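# Illustrative examples of how these are used below (not executed):
# re_default_answer pulls the default out of a prompt such as
# "[R]eject, Edit, Abandon, Quit ?" (group(1) gives "R"), re_fdnic rewrites the
# blank lines separating changelog paragraphs as " ." continuation lines, and
# re_bin_only_nmu matches binNMU suffixes such as the "+b1" in "1.0-3+b1".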
34
35 ###############################################################################
36
37 # Convenience wrapper to carry around all the package information in one place
38
39 class Pkg:
40     def __init__(self, **kwds):
41         self.__dict__.update(kwds)
42
43     def update(self, **kwds):
44         self.__dict__.update(kwds)
45
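# A minimal usage sketch (illustrative only, mirroring how Upload uses it below):
#   pkg = Pkg(changes={}, dsc={}, files={})
#   pkg.update(orig_tar_id=None, orig_tar_location="")
#   pkg.changes["source"] = "hello"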
46 ###############################################################################
47
48 class nmu_p:
49     # Read in the group maintainer override file
50     def __init__ (self, Cnf):
51         self.group_maint = {}
52         self.Cnf = Cnf
53         if Cnf.get("Dinstall::GroupOverrideFilename"):
54             filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
55             file = utils.open_file(filename)
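            # The override file is expected to contain one maintainer address
            # per line; '#' comments are stripped and matching is done
            # case-insensitively.  Illustrative content:
            #   packages@qa.debian.org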
56             for line in file.readlines():
57                 line = utils.re_comments.sub('', line).lower().strip()
58                 if line != "":
59                     self.group_maint[line] = 1
60             file.close()
61
62     def is_an_nmu (self, pkg):
63         Cnf = self.Cnf
64         changes = pkg.changes
65         dsc = pkg.dsc
66
67         i = utils.fix_maintainer (dsc.get("maintainer",
68                                           Cnf["Dinstall::MyEmailAddress"]).lower())
69         (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
70         # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
71         if dsc_name == changes["maintainername"].lower() and \
72            (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
73             return 0
74
75         if dsc.has_key("uploaders"):
76             uploaders = dsc["uploaders"].lower().split(",")
77             uploadernames = {}
78             for i in uploaders:
79                 (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
80                 uploadernames[name] = ""
81             if uploadernames.has_key(changes["changedbyname"].lower()):
82                 return 0
83
84         # Some group-maintained packages (e.g. Debian QA) are never NMUs
85         if self.group_maint.has_key(changes["maintaineremail"].lower()):
86             return 0
87
88         return 1
89
90 ###############################################################################
91
92 class Upload:
93
94     def __init__(self, Cnf):
95         self.Cnf = Cnf
96         # Read in the group-maint override file
97         self.nmu = nmu_p(Cnf)
98         self.accept_count = 0
99         self.accept_bytes = 0L
100         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
101                        legacy_source_untouchable = {})
102
103         # Initialize the substitution template mapping global
104         Subst = self.Subst = {}
105         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
106         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
107         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
108         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
109
110         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
111         database.init(Cnf, self.projectB)
112
113     ###########################################################################
114
115     def init_vars (self):
116         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
117             exec "self.pkg.%s.clear();" % (i)
118         self.pkg.orig_tar_id = None
119         self.pkg.orig_tar_location = ""
120         self.pkg.orig_tar_gz = None
121
122     ###########################################################################
123
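    # Read back the pickled ".dak" companion file written by dump_vars()
    # below; e.g. "foo_1.0-1_i386.changes" pairs with "foo_1.0-1_i386.dak"
    # (illustrative name).  The load order here must mirror the dump order
    # used in dump_vars().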
124     def update_vars (self):
125         dump_filename = self.pkg.changes_file[:-8]+".dak"
126         dump_file = utils.open_file(dump_filename)
127         p = cPickle.Unpickler(dump_file)
128         for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
129             exec "self.pkg.%s.update(p.load());" % (i)
130         for i in [ "orig_tar_id", "orig_tar_location" ]:
131             exec "self.pkg.%s = p.load();" % (i)
132         dump_file.close()
133
134     ###########################################################################
135
136     # This could just dump the dictionaries as-is, but I'd like to
137     # avoid that so there's some idea of what process-accepted &
138     # process-new actually use from process-unchecked.
139
140     def dump_vars(self, dest_dir):
141         for i in [ "changes", "dsc", "files", "dsc_files",
142                    "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
143             exec "%s = self.pkg.%s;" % (i,i)
144         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
145         dump_file = utils.open_file(dump_filename, 'w')
146         try:
147             os.chmod(dump_filename, 0660)
148         except OSError, e:
149             if errno.errorcode[e.errno] == 'EPERM':
150                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
151                 if perms & stat.S_IROTH:
152                     utils.fubar("%s is world readable and chmod failed." % (dump_filename))
153             else:
154                 raise
155
156         p = cPickle.Pickler(dump_file, 1)
157         for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
158             exec "%s = {}" % i
159         ## files
160         for file in files.keys():
161             d_files[file] = {}
162             for i in [ "package", "version", "architecture", "type", "size",
163                        "md5sum", "component", "location id", "source package",
164                        "source version", "maintainer", "dbtype", "files id",
165                        "new", "section", "priority", "othercomponents",
166                        "pool name", "original component" ]:
167                 if files[file].has_key(i):
168                     d_files[file][i] = files[file][i]
169         ## changes
170         # Mandatory changes fields
171         for i in [ "distribution", "source", "architecture", "version",
172                    "maintainer", "urgency", "fingerprint", "changedby822",
173                    "changedby2047", "changedbyname", "maintainer822",
174                    "maintainer2047", "maintainername", "maintaineremail",
175                    "closes", "changes" ]:
176             d_changes[i] = changes[i]
177         # Optional changes fields
178         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
179             if changes.has_key(i):
180                 d_changes[i] = changes[i]
181         ## dsc
182         for i in [ "source", "version", "maintainer", "fingerprint",
183                    "uploaders", "bts changelog" ]:
184             if dsc.has_key(i):
185                 d_dsc[i] = dsc[i]
186         ## dsc_files
187         for file in dsc_files.keys():
188             d_dsc_files[file] = {}
189             # Mandatory dsc_files fields
190             for i in [ "size", "md5sum" ]:
191                 d_dsc_files[file][i] = dsc_files[file][i]
192             # Optional dsc_files fields
193             for i in [ "files id" ]:
194                 if dsc_files[file].has_key(i):
195                     d_dsc_files[file][i] = dsc_files[file][i]
196
197         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
198                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
199             p.dump(i)
200         dump_file.close()
201
202     ###########################################################################
203
204     # Set up the per-package template substitution mappings
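    # The mapping is consumed by utils.TemplateSubst(), which replaces each
    # "__KEY__" token in a mail template; e.g. a template line
    # "To: __MAINTAINER_TO__" becomes "To: Joe Maintainer <joe@example.org>"
    # (illustrative address) for a sourceful upload.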
205
206     def update_subst (self, reject_message = ""):
207         Subst = self.Subst
208         changes = self.pkg.changes
209         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
210         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
211             changes["architecture"] = { "Unknown" : "" }
212         # and maintainer2047 may not exist.
213         if not changes.has_key("maintainer2047"):
214             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
215
216         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
217         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
218         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
219
220         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
221         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
222             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
223             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
224                                                      changes["maintainer2047"])
225             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
226         else:
227             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
228             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
229             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
230         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
231             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
232
233         # Apply any global override of the Maintainer field
234         if self.Cnf.get("Dinstall::OverrideMaintainer"):
235             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
236             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
237
238         Subst["__REJECT_MESSAGE__"] = reject_message
239         Subst["__SOURCE__"] = changes.get("source", "Unknown")
240         Subst["__VERSION__"] = changes.get("version", "Unknown")
241
242     ###########################################################################
243
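    # The summary built here lists one entry per file, along the lines of
    # (illustrative values):
    #   hello_2.10-2_amd64.deb
    #     to pool/main/h/hello/hello_2.10-2_amd64.deb
    # plus "(new)" / "byhand" markers and the override entries collected below.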
244     def build_summaries(self):
245         changes = self.pkg.changes
246         files = self.pkg.files
247
248         byhand = summary = new = ""
249
250         # changes["distribution"] may not exist in corner cases
251         # (e.g. unreadable changes files)
252         if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
253             changes["distribution"] = {}
254
255         override_summary = ""
256         file_keys = files.keys()
257         file_keys.sort()
258         for file in file_keys:
259             if files[file].has_key("byhand"):
260                 byhand = 1
261                 summary += file + " byhand\n"
262             elif files[file].has_key("new"):
263                 new = 1
264                 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
265                 if files[file].has_key("othercomponents"):
266                     summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
267                 if files[file]["type"] == "deb":
268                     deb_fh = utils.open_file(file)
269                     summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
270                     deb_fh.close()
271             else:
272                 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
273                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
274                 summary += file + "\n  to " + destination + "\n"
275                 if not files[file].has_key("type"):
276                     files[file]["type"] = "unknown"
277                 if files[file]["type"] in ["deb", "udeb", "dsc"]:
278                     # In queue/unchecked we already have override entries, so use them;
279                     # in process-new we don't have any yet, so use the newly generated ones.
280                     override_prio = files[file].get("override priority", files[file]["priority"])
281                     override_sect = files[file].get("override section", files[file]["section"])
282                     override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
283
284         short_summary = summary
285
286         # This is for direport's benefit...
287         f = re_fdnic.sub("\n .\n", changes.get("changes",""))
288
289         if byhand or new:
290             summary += "Changes: " + f
291
292         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
293
294         summary += self.announce(short_summary, 0)
295
296         return (summary, short_summary)
297
298     ###########################################################################
299
300     def close_bugs (self, summary, action):
301         changes = self.pkg.changes
302         Subst = self.Subst
303         Cnf = self.Cnf
304
305         bugs = changes["closes"].keys()
306
307         if not bugs:
308             return summary
309
310         bugs.sort()
311         if not self.nmu.is_an_nmu(self.pkg):
312             if changes["distribution"].has_key("experimental"):
313                 # tag bugs as fixed-in-experimental for uploads to experimental
314                 summary += "Setting bugs to severity fixed: "
315                 control_message = ""
316                 for bug in bugs:
317                     summary += "%s " % (bug)
318                     control_message += "tag %s + fixed-in-experimental\n" % (bug)
319                 if action and control_message != "":
320                     Subst["__CONTROL_MESSAGE__"] = control_message
321                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
322                     utils.send_mail (mail_message)
323                 if action:
324                     self.Logger.log(["setting bugs to fixed"]+bugs)
325
326
327             else:
328                 summary += "Closing bugs: "
329                 for bug in bugs:
330                     summary += "%s " % (bug)
331                     if action:
332                         Subst["__BUG_NUMBER__"] = bug
333                         if changes["distribution"].has_key("stable"):
334                             Subst["__STABLE_WARNING__"] = """
335 Note that this package is not part of the released stable Debian
336 distribution.  It may have dependencies on other unreleased software,
337 or other instabilities.  Please take care if you wish to install it.
338 The update will eventually make its way into the next released Debian
339 distribution."""
340                         else:
341                             Subst["__STABLE_WARNING__"] = ""
342                         mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
343                         utils.send_mail (mail_message)
344                 if action:
345                     self.Logger.log(["closing bugs"]+bugs)
346
347         else:                     # NMU
348             summary += "Setting bugs to severity fixed: "
349             control_message = ""
350             for bug in bugs:
351                 summary += "%s " % (bug)
352                 control_message += "tag %s + fixed\n" % (bug)
353             if action and control_message != "":
354                 Subst["__CONTROL_MESSAGE__"] = control_message
355                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
356                 utils.send_mail (mail_message)
357             if action:
358                 self.Logger.log(["setting bugs to fixed"]+bugs)
359         summary += "\n"
360         return summary
361
362     ###########################################################################
363
364     def announce (self, short_summary, action):
365         Subst = self.Subst
366         Cnf = self.Cnf
367         changes = self.pkg.changes
368
369         # Only do announcements for source uploads with a recent dpkg-dev installed
370         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
371             return ""
372
373         lists_done = {}
374         summary = ""
375         Subst["__SHORT_SUMMARY__"] = short_summary
376
377         for dist in changes["distribution"].keys():
378             list = Cnf.Find("Suite::%s::Announce" % (dist))
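            # The announce address comes from the per-suite configuration;
            # a dak.conf stanza might read (illustrative):
            #   Suite { unstable { Announce "debian-devel-changes@lists.debian.org"; }; };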
379             if list == "" or lists_done.has_key(list):
380                 continue
381             lists_done[list] = 1
382             summary += "Announcing to %s\n" % (list)
383
384             if action:
385                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
386                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
387                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
388                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
389                 utils.send_mail (mail_message)
390
391         if Cnf.FindB("Dinstall::CloseBugs"):
392             summary = self.close_bugs(summary, action)
393
394         return summary
395
396     ###########################################################################
397
398     def accept (self, summary, short_summary):
399         Cnf = self.Cnf
400         Subst = self.Subst
401         files = self.pkg.files
402         changes = self.pkg.changes
403         changes_file = self.pkg.changes_file
404         dsc = self.pkg.dsc
405
406         print "Accepting."
407         self.Logger.log(["Accepting changes",changes_file])
408
409         self.dump_vars(Cnf["Dir::Queue::Accepted"])
410
411         # Move all the files into the accepted directory
412         utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
413         file_keys = files.keys()
414         for file in file_keys:
415             utils.move(file, Cnf["Dir::Queue::Accepted"])
416             self.accept_bytes += float(files[file]["size"])
417         self.accept_count += 1
418
419         # Send accept mail, announce to lists, close bugs and check for
420         # override disparities
421         if not Cnf["Dinstall::Options::No-Mail"]:
422             Subst["__SUITE__"] = ""
423             Subst["__SUMMARY__"] = summary
424             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
425             utils.send_mail(mail_message)
426             self.announce(short_summary, 1)
427
428
429         ## Helper stuff for DebBugs Version Tracking
430         if Cnf.Find("Dir::Queue::BTSVersionTrack"):
431             # ??? once queue/* is cleared on *.d.o and/or reprocessed
432             # the conditionalization on dsc["bts changelog"] should be
433             # dropped.
434
435             # Write out the version history from the changelog
436             if changes["architecture"].has_key("source") and \
437                dsc.has_key("bts changelog"):
438
439                 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
440                                                     dotprefix=1, perms=0644)
441                 version_history = utils.open_file(temp_filename, 'w')
442                 version_history.write(dsc["bts changelog"])
443                 version_history.close()
444                 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
445                                       changes_file[:-8]+".versions")
446                 os.rename(temp_filename, filename)
447
448             # Write out the binary -> source mapping.
449             temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
450                                                 dotprefix=1, perms=0644)
451             debinfo = utils.open_file(temp_filename, 'w')
452             for file in file_keys:
453                 f = files[file]
454                 if f["type"] == "deb":
455                     line = " ".join([f["package"], f["version"],
456                                      f["architecture"], f["source package"],
457                                      f["source version"]])
458                     debinfo.write(line+"\n")
459             debinfo.close()
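            # Each .debinfo line therefore reads, with illustrative values,
            # "hello 2.10-2 amd64 hello 2.10-2": binary package, binary
            # version, architecture, source package, source version.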
460             filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
461                                   changes_file[:-8]+".debinfo")
462             os.rename(temp_filename, filename)
463
464         self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
465
466     ###########################################################################
467
468     def queue_build (self, queue, path):
469         Cnf = self.Cnf
470         Subst = self.Subst
471         files = self.pkg.files
472         changes = self.pkg.changes
473         changes_file = self.pkg.changes_file
474         dsc = self.pkg.dsc
475         file_keys = files.keys()
476
477         ## Special support to enable clean auto-building of queued packages
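        # The queue_build rows written below are assumed to look roughly like
        # (illustrative values):
        #   suite | queue | filename                              | in_queue
        #       5 |     1 | /srv/queue/buildd/foo_1.0-1_amd64.deb | t
        # apt-ftparchive later processes the files referenced from Dir::QueueBuild.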
478         queue_id = database.get_or_set_queue_id(queue)
479
480         self.projectB.query("BEGIN WORK")
481         for suite in changes["distribution"].keys():
482             if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
483                 continue
484             suite_id = database.get_suite_id(suite)
485             dest_dir = Cnf["Dir::QueueBuild"]
486             if Cnf.FindB("Dinstall::SecurityQueueBuild"):
487                 dest_dir = os.path.join(dest_dir, suite)
488             for file in file_keys:
489                 src = os.path.join(path, file)
490                 dest = os.path.join(dest_dir, file)
491                 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
492                     # Copy it since the original won't be readable by www-data
493                     utils.copy(src, dest)
494                 else:
495                     # Create a symlink to it
496                     os.symlink(src, dest)
497                 # Add it to the list of packages for later processing by apt-ftparchive
498                 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
499             # If the .orig.tar.gz is in the pool, create a symlink to
500             # it (if one doesn't already exist)
501             if self.pkg.orig_tar_id:
502                 # Determine the .orig.tar.gz file name
503                 for dsc_file in self.pkg.dsc_files.keys():
504                     if dsc_file.endswith(".orig.tar.gz"):
505                         filename = dsc_file
506                 dest = os.path.join(dest_dir, filename)
507                 # If it doesn't exist, create a symlink
508                 if not os.path.exists(dest):
509                     # Find the .orig.tar.gz in the pool
510                     q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
511                     ql = q.getresult()
512                     if not ql:
513                         utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
514                     src = os.path.join(ql[0][0], ql[0][1])
515                     os.symlink(src, dest)
516                     # Add it to the list of packages for later processing by apt-ftparchive
517                     self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
518                 # if it does, update things to ensure it's not removed prematurely
519                 else:
520                     self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
521
522         self.projectB.query("COMMIT WORK")
523
524     ###########################################################################
525
526     def check_override (self):
527         Subst = self.Subst
528         changes = self.pkg.changes
529         files = self.pkg.files
530         Cnf = self.Cnf
531
532         # Abandon the check if:
533         #  a) it's a non-sourceful upload
534         #  b) override disparity checks have been disabled
535         #  c) we're not sending mail
536         if not changes["architecture"].has_key("source") or \
537            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
538            Cnf["Dinstall::Options::No-Mail"]:
539             return
540
541         summary = ""
542         file_keys = files.keys()
543         file_keys.sort()
544         for file in file_keys:
545             if not files[file].has_key("new") and files[file]["type"] == "deb":
546                 section = files[file]["section"]
547                 override_section = files[file]["override section"]
548                 if section.lower() != override_section.lower() and section != "-":
549                     # Ignore this; it's a common mistake and not worth whining about
550                     if section.lower() == "non-us/main" and override_section.lower() == "non-us":
551                         continue
552                     summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
553                 priority = files[file]["priority"]
554                 override_priority = files[file]["override priority"]
555                 if priority != override_priority and priority != "-":
556                     summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
557
558         if summary == "":
559             return
560
561         Subst["__SUMMARY__"] = summary
562         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
563         utils.send_mail(mail_message)
564
565     ###########################################################################
566
567     def force_reject (self, files):
568         """Forcefully move files from the current directory to the
569            reject directory.  If any file already exists in the reject
570            directory it will be moved to the morgue to make way for
571            the new file."""
572
573         Cnf = self.Cnf
574
575         for file in files:
576             # Skip any files which don't exist or which we don't have permission to copy.
577             if os.access(file,os.R_OK) == 0:
578                 continue
579             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
580             try:
581                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
582             except OSError, e:
583                 # File exists?  Let's try and move it to the morgue
584                 if errno.errorcode[e.errno] == 'EEXIST':
585                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
586                     try:
587                         morgue_file = utils.find_next_free(morgue_file)
588                     except utils.tried_too_hard_exc:
589                         # Something's either gone badly Pete Tong, or
590                         # someone is trying to exploit us.
591                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
592                         return
593                     utils.move(dest_file, morgue_file, perms=0660)
594                     try:
595                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
596                     except OSError, e:
597                         # Likewise
598                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
599                         return
600                 else:
601                     raise
602             # If we got here, we own the destination file, so we can
603             # safely overwrite it.
604             utils.move(file, dest_file, 1, perms=0660)
605             os.close(dest_fd)
606
607     ###########################################################################
608
609     def do_reject (self, manual = 0, reject_message = ""):
610         # If we weren't given a manual rejection message, spawn an
611         # editor so the user can add one in...
612         if manual and not reject_message:
613             temp_filename = utils.temp_filename()
614             editor = os.environ.get("EDITOR","vi")
615             answer = 'E'
616             while answer == 'E':
617                 os.system("%s %s" % (editor, temp_filename))
618                 temp_fh = utils.open_file(temp_filename)
619                 reject_message = "".join(temp_fh.readlines())
620                 temp_fh.close()
621                 print "Reject message:"
622                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
623                 prompt = "[R]eject, Edit, Abandon, Quit ?"
624                 answer = "XXX"
625                 while prompt.find(answer) == -1:
626                     answer = utils.our_raw_input(prompt)
627                     m = re_default_answer.search(prompt)
628                     if answer == "":
629                         answer = m.group(1)
630                     answer = answer[:1].upper()
631             os.unlink(temp_filename)
632             if answer == 'A':
633                 return 1
634             elif answer == 'Q':
635                 sys.exit(0)
636
637         print "Rejecting.\n"
638
639         Cnf = self.Cnf
640         Subst = self.Subst
641         pkg = self.pkg
642
643         reason_filename = pkg.changes_file[:-8] + ".reason"
644         reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
645
646         # Move all the files into the reject directory
647         reject_files = pkg.files.keys() + [pkg.changes_file]
648         self.force_reject(reject_files)
649
650         # If we fail here someone is probably trying to exploit the race
651         # so let's just raise an exception ...
652         if os.path.exists(reason_filename):
653             os.unlink(reason_filename)
654         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
655
656         if not manual:
657             Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
658             Subst["__MANUAL_REJECT_MESSAGE__"] = ""
659             Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
660             os.write(reason_fd, reject_message)
661             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
662         else:
663             # Build up the rejection email
664             user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
665
666             Subst["__REJECTOR_ADDRESS__"] = user_email_address
667             Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
668             Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
669             reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
670             # Write the rejection email out as the <foo>.reason file
671             os.write(reason_fd, reject_mail_message)
672
673         os.close(reason_fd)
674
675         # Send the rejection mail if appropriate
676         if not Cnf["Dinstall::Options::No-Mail"]:
677             utils.send_mail(reject_mail_message)
678
679         self.Logger.log(["rejected", pkg.changes_file])
680         return 0
681
682     ################################################################################
683
684     # Ensure that source exists somewhere in the archive for the binary
685     # upload being processed.
686     #
687     # (1) exact match                      => 1.0-3
688     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
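    # For example, a binary-only upload versioned 1.0-3+b1 is accepted when
    # source 1.0-3 exists in one of the requested suites, because
    # re_bin_only_nmu strips the "+b1" suffix before the second lookup.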
689
690     def source_exists (self, package, source_version, suites = ["any"]):
691         okay = 1
692         for suite in suites:
693             if suite == "any":
694                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
695                     (package)
696             else:
697                 # source must exist in suite X, or in some other suite that's
698                 # mapped to X, recursively... silent-maps are counted too,
699                 # unreleased-maps aren't.
700                 maps = self.Cnf.ValueList("SuiteMappings")[:]
701                 maps.reverse()
702                 maps = [ m.split() for m in maps ]
703                 maps = [ (x[1], x[2]) for x in maps
704                                 if x[0] == "map" or x[0] == "silent-map" ]
705                 s = [suite]
706                 for x in maps:
707                     if x[1] in s and x[0] not in s:
708                         s.append(x[0])
709
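                # Worked example (hypothetical mapping): with SuiteMappings
                # containing "map testing-proposed-updates testing", a check
                # against "testing" also accepts source that only exists in
                # testing-proposed-updates, because the loop above appends
                # every suite that maps onto one already in s.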
710                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
711             q = self.projectB.query(que)
712
713             # Reduce the query results to a list of version numbers
714             ql = [ i[0] for i in q.getresult() ]
715
716             # Try (1)
717             if source_version in ql:
718                 continue
719
720             # Try (2)
721             orig_source_version = re_bin_only_nmu.sub('', source_version)
722             if orig_source_version in ql:
723                 continue
724
725             # No source found...
726             okay = 0
727             break
728         return okay
729
730     ################################################################################
731     
732     def in_override_p (self, package, component, suite, binary_type, file):
733         files = self.pkg.files
734
735         if binary_type == "": # must be source
736             type = "dsc"
737         else:
738             type = binary_type
739
740         # Override suite name; used for example with proposed-updates
741         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
742             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
743
744         # Avoid <undef> on unknown distributions
745         suite_id = database.get_suite_id(suite)
746         if suite_id == -1:
747             return None
748         component_id = database.get_component_id(component)
749         type_id = database.get_override_type_id(type)
750
751         # FIXME: nasty non-US specific hack
752         if component.lower().startswith("non-us/"):
753             component = component[7:]
754
755         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
756                            % (package, suite_id, component_id, type_id))
757         result = q.getresult()
758         # If checking for a source package fall back on the binary override type
759         if type == "dsc" and not result:
760             deb_type_id = database.get_override_type_id("deb")
761             udeb_type_id = database.get_override_type_id("udeb")
762             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
763                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
764             result = q.getresult()
765
766         # Remember the section and priority so we can check them later if appropriate
767         if result:
768             files[file]["override section"] = result[0][0]
769             files[file]["override priority"] = result[0][1]
770
771         return result
772
773     ################################################################################
774
775     def reject (self, str, prefix="Rejected: "):
776         if str:
777             # Unlike other rejects we add new lines first to avoid trailing
778             # new lines when this message is passed back up to a caller.
779             if self.reject_message:
780                 self.reject_message += "\n"
781             self.reject_message += prefix + str
782
783     ################################################################################
784
785     def get_anyversion(self, query_result, suite):
786         anyversion=None
787         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
788         for (v, s) in query_result:
789             if s in [ x.lower() for x in anysuite ]:
790                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
791                     anyversion=v
792         return anyversion
793
794     ################################################################################
795
796     def cross_suite_version_check(self, query_result, file, new_version):
797         """Ensure versions are newer than existing packages in target
798         suites and that cross-suite version checking rules as
799         set out in the conf file are satisfied."""
800
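        # Illustrative example (hypothetical configuration): if
        # Suite::unstable::VersionChecks::MustBeNewerThan lists "stable", an
        # upload of foo 1.0-1 to unstable is rejected while stable already
        # carries foo 1.0-2.  A MustBeOlderThan conflict can instead be
        # resolved by propagating the upload to the suite recorded in
        # changes["distribution-version"], as handled below.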
801         # Check versions for each target suite
802         for target_suite in self.pkg.changes["distribution"].keys():
803             must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
804             must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
805             # Enforce "must be newer than target suite" even if conffile omits it
806             if target_suite not in must_be_newer_than:
807                 must_be_newer_than.append(target_suite)
808             for entry in query_result:
809                 existent_version = entry[0]
810                 suite = entry[1]
811                 if suite in must_be_newer_than and \
812                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
813                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
814                 if suite in must_be_older_than and \
815                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
816                     ch = self.pkg.changes
817                     cansave = 0
818                     if ch.get('distribution-version', {}).has_key(suite):
819                         # we really use the other suite, ignoring the conflicting one ...
820                         addsuite = ch["distribution-version"][suite]
821                     
822                         add_version = self.get_anyversion(query_result, addsuite)
823                         target_version = self.get_anyversion(query_result, target_suite)
824                     
825                         if not add_version:
826                             # not add_version can only happen if we map to a suite
827                             # that doesn't enhance the suite we're propup'ing from.
828                             # so "propup-ver x a b c; map a d" is a problem only if
829                             # d doesn't enhance a.
830                             #
831                             # i think we could always propagate in this case, rather
832                             # than complaining. either way, this isn't a REJECT issue
833                             #
834                             # And - we really should complain to the dorks who configured dak
835                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
836                             self.pkg.changes.setdefault("propdistribution", {})
837                             self.pkg.changes["propdistribution"][addsuite] = 1
838                             cansave = 1
839                         elif not target_version:
840                             # not target_version is true when the package is NEW
841                             # we could just stick with the "...old version..." REJECT
842                             # for this, I think.
843                             self.reject("Won't propagate NEW packages.")
844                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
845                             # propagation would be redundant. no need to reject though.
846                             self.reject("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
847                             cansave = 1
848                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
849                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
850                             # propagate!!
851                             self.reject("Propagating upload to %s" % (addsuite), "Warning: ")
852                             self.pkg.changes.setdefault("propdistribution", {})
853                             self.pkg.changes["propdistribution"][addsuite] = 1
854                             cansave = 1
855                 
856                     if not cansave:
857                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
858
859     ################################################################################
860
861     def check_binary_against_db(self, file):
862         self.reject_message = ""
863         files = self.pkg.files
864
865         # Ensure version is sane
866         q = self.projectB.query("""
867 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
868                                      architecture a
869  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
870    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
871                                 % (files[file]["package"],
872                                    files[file]["architecture"]))
873         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
874
875         # Check for any existing copies of the file
876         q = self.projectB.query("""
877 SELECT b.id FROM binaries b, architecture a
878  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
879    AND a.id = b.architecture"""
880                                 % (files[file]["package"],
881                                    files[file]["version"],
882                                    files[file]["architecture"]))
883         if q.getresult():
884             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
885
886         return self.reject_message
887
888     ################################################################################
889
890     def check_source_against_db(self, file):
891         self.reject_message = ""
892         dsc = self.pkg.dsc
893
894         # Ensure version is sane
895         q = self.projectB.query("""
896 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
897  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
898         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
899
900         return self.reject_message
901
902     ################################################################################
903
904     # **WARNING**
905     # NB: this function can remove entries from the 'files' index [if
906     # the .orig.tar.gz is a duplicate of the one in the archive]; if
907     # you're iterating over 'files' and call this function as part of
908     # the loop, be sure to add a check to the top of the loop to
909     # ensure you haven't just tried to dereference the deleted entry.
910     # **WARNING**
911
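    # Returns a (reject_message, orig_tar_gz) tuple; the second element is the
    # path of a .orig.tar.gz found waiting in queue/unchecked (see process_it()
    # in 'dak process-unchecked'), or None otherwise.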
912     def check_dsc_against_db(self, file):
913         self.reject_message = ""
914         files = self.pkg.files
915         dsc_files = self.pkg.dsc_files
916         legacy_source_untouchable = self.pkg.legacy_source_untouchable
917         self.pkg.orig_tar_gz = None
918
919         # Try and find all files mentioned in the .dsc.  This has
920         # to work harder to cope with the multiple possible
921         # locations of an .orig.tar.gz.
922         # The ordering on the select is needed to pick the newest orig
923         # when it exists in multiple places.
924         for dsc_file in dsc_files.keys():
925             found = None
926             if files.has_key(dsc_file):
927                 actual_md5 = files[dsc_file]["md5sum"]
928                 actual_size = int(files[dsc_file]["size"])
929                 found = "%s in incoming" % (dsc_file)
930                 # Check the file does not already exist in the archive
931                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
932                 ql = q.getresult()
933                 # Strip out anything that isn't '%s' or '/%s$'
934                 for i in ql[:]: # iterate over a copy; ql is modified in the loop
935                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
936                         ql.remove(i)
937
938                 # "[dak] has not broken them.  [dak] has fixed a
939                 # brokenness.  Your crappy hack exploited a bug in
940                 # the old dinstall."
941                 #
942                 # "(Come on!  I thought it was always obvious that
943                 # one just doesn't release different files with
944                 # the same name and version.)"
945                 #                        -- ajk@ on d-devel@l.d.o
946
947                 if ql:
948                     # Ignore exact matches for .orig.tar.gz
949                     match = 0
950                     if dsc_file.endswith(".orig.tar.gz"):
951                         for i in ql:
952                             if files.has_key(dsc_file) and \
953                                int(files[dsc_file]["size"]) == int(i[0]) and \
954                                files[dsc_file]["md5sum"] == i[1]:
955                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
956                                 del files[dsc_file]
957                                 self.pkg.orig_tar_gz = i[2] + i[3]
958                                 match = 1
959
960                     if not match:
961                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
962             elif dsc_file.endswith(".orig.tar.gz"):
963                 # Check in the pool
964                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
965                 ql = q.getresult()
966                 # Strip out anything that isn't '%s' or '/%s$'
967                 for i in ql[:]: # iterate over a copy; ql is modified in the loop
968                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
969                         ql.remove(i)
970
971                 if ql:
972                     # Unfortunately, we may get more than one match here if,
973                     # for example, the package was in potato but had an -sa
974                     # upload in woody.  So we need to choose the right one.
975
976                     x = ql[0]; # default to something sane in case we don't match any or have only one
977
978                     if len(ql) > 1:
979                         for i in ql:
980                             old_file = i[0] + i[1]
981                             old_file_fh = utils.open_file(old_file)
982                             actual_md5 = apt_pkg.md5sum(old_file_fh)
983                             old_file_fh.close()
984                             actual_size = os.stat(old_file)[stat.ST_SIZE]
985                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
986                                 x = i
987                             else:
988                                 legacy_source_untouchable[i[3]] = ""
989
990                     old_file = x[0] + x[1]
991                     old_file_fh = utils.open_file(old_file)
992                     actual_md5 = apt_pkg.md5sum(old_file_fh)
993                     old_file_fh.close()
994                     actual_size = os.stat(old_file)[stat.ST_SIZE]
995                     found = old_file
996                     suite_type = x[2]
997                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
998                     # See install() in process-accepted...
999                     self.pkg.orig_tar_id = x[3]
1000                     self.pkg.orig_tar_gz = old_file
1001                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1002                         self.pkg.orig_tar_location = "legacy"
1003                     else:
1004                         self.pkg.orig_tar_location = x[4]
1005                 else:
1006                     # Not there? Check the queue directories...
1007
1008                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1009                     # See process_it() in 'dak process-unchecked' for explanation of this
1010                     if os.path.exists(in_unchecked):
1011                         return (self.reject_message, in_unchecked)
1012                     else:
1013                         for dir in [ "Accepted", "New", "Byhand" ]:
1014                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1015                             if os.path.exists(in_otherdir):
1016                                 in_otherdir_fh = utils.open_file(in_otherdir)
1017                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1018                                 in_otherdir_fh.close()
1019                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1020                                 found = in_otherdir
1021                                 self.pkg.orig_tar_gz = in_otherdir
1022
1023                     if not found:
1024                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1025                         self.pkg.orig_tar_gz = -1
1026                         continue
1027             else:
1028                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1029                 continue
1030             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1031                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1032             if actual_size != int(dsc_files[dsc_file]["size"]):
1033                 self.reject("size for %s doesn't match %s." % (found, file))
1034
1035         return (self.reject_message, None)
1036
1037     def do_query(self, q):
1038         sys.stderr.write("query: \"%s\" ... " % (q))
1039         before = time.time()
1040         r = self.projectB.query(q)
1041         time_diff = time.time()-before
1042         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1043         return r