]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Merge commit 'mhy/checksums'
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
26
27 from types import *
28
29 ###############################################################################
30
# Matches a string consisting entirely of decimal digits (numeric answers).
re_isanum = re.compile (r"^\d+$")
# Extracts the bracketed default answer from a prompt, e.g. "[R]eject, ...".
re_default_answer = re.compile(r"\[(.*)\]")
# Matches blank-line separators in changelog text (used for bug-report formatting).
re_fdnic = re.compile(r"\n\n")
# Matches a binary-only NMU version suffix, e.g. the "+b1" in "1.2-3+b1".
re_bin_only_nmu = re.compile(r"\+b\d+$")
35
36 ################################################################################
37
38 # Determine what parts in a .changes are NEW
39
def determine_new(changes, files, projectB, warn=1):
    """Determine what parts of a .changes upload are NEW, i.e. lack an
    override entry in at least one of the target suites.

    changes  -- parsed .changes mapping; must contain a "suite" dict
    files    -- per-file metadata mapping from the upload
    projectB -- open pg database connection
    warn     -- if true, print warnings for (old)stable uploads and for
                packages already present in another component

    Returns a dict mapping package name -> summary info (priority,
    section, type, component, list of file names, optionally
    "othercomponents") for every package still considered NEW.
    As a side effect, removes the "new" marker from files whose
    package turns out to have an override entry.
    """
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop any package that already has an override entry in some
    # target suite; it is not NEW after all.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        # .keys() yields a snapshot list (Python 2), so deleting from
        # new inside the loop is safe.
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): pkg is interpolated directly into the SQL
            # string; it originates from the uploaded .changes file, so
            # quoting/escaping should be confirmed.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
100
101 ################################################################################
102
def get_type(f):
    """Determine the override type ("dsc", "deb", "udeb", ...) of the
    file described by the mapping f.

    Prefers an explicit "dbtype" entry; otherwise any source-related
    file extension ("orig.tar.gz", "diff.gz", "dsc", ...) maps to
    "dsc".  Aborts via utils.fubar() on an unknown type or on a type
    that has no override-type entry in the database.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: file_type was referenced here before assignment
        # (UnboundLocalError); report the offending raw f["type"] instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
118
119 ################################################################################
120
121 # check if section/priority values are valid
122
def check_valid(new):
    """Resolve section/priority ids for each candidate NEW package and
    mark invalid combinations by setting the id to -1.

    Invalid combinations are: a non-udeb/non-source file in a
    debian-installer section, a udeb outside debian-installer, and a
    priority/type mismatch ("source" priority is for .dsc files only).
    """
    for pkg, entry in new.items():
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(priority)
        # Sanity check: debian-installer sections may only hold udebs
        # (or source); udebs may only live in debian-installer sections.
        in_di = "debian-installer" in section
        bad_section = (in_di and file_type not in ("udeb", "dsc")) or \
                      (file_type == "udeb" and not in_di)
        if bad_section:
            entry["section id"] = -1
        # "source" priority and .dsc type must occur together (XOR check).
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
137
138
139 ###############################################################################
140
141 # Convenience wrapper to carry around all the package information in
142
class Pkg:
    """Simple attribute bag used to carry around all of the per-upload
    package information (changes, dsc, files, ...) in one object."""

    def __init__(self, **kwds):
        # Delegate to update() so construction and later refreshes
        # behave identically.
        self.update(**kwds)

    def update(self, **kwds):
        """Set each keyword argument as an attribute on this object."""
        for name, value in kwds.items():
            setattr(self, name, value)
149
150 ###############################################################################
151
152 class Upload:
153
    def __init__(self, Cnf):
        """Create an Upload helper bound to the given configuration.

        Initialises the accept counters, the per-upload package-info
        container (self.pkg), the global template substitution mapping
        (self.Subst) and the projectB database connection.
        """
        self.Cnf = Cnf
        # Running totals across accepted uploads.
        self.accept_count = 0
        self.accept_bytes = 0L
        # Per-upload state; cleared between uploads via init_vars().
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        # Open the projectB database connection and prime the database
        # module's caches with it.
        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)
170
171     ###########################################################################
172
173     def init_vars (self):
174         self.pkg.changes.clear()
175         self.pkg.dsc.clear()
176         self.pkg.files.clear()
177         self.pkg.dsc_files.clear()
178         self.pkg.legacy_source_untouchable.clear()
179         self.pkg.orig_tar_id = None
180         self.pkg.orig_tar_location = ""
181         self.pkg.orig_tar_gz = None
182
183     ###########################################################################
184
    def update_vars (self):
        """Reload per-upload state from the ".dak" pickle written next
        to the .changes file by dump_vars().

        The load order must exactly match the dump order used in
        dump_vars(): changes, dsc, files, dsc_files,
        legacy_source_untouchable, orig_tar_id, orig_tar_location.
        """
        # The .dak file shares the .changes file's basename
        # (".changes" is 8 characters long).
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)

        # Update the existing dicts in place rather than rebinding.
        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())

        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()

        dump_file.close()
200
201     ###########################################################################
202
203     # This could just dump the dictionaries as is, but I'd like to
204     # avoid this so there's some idea of what process-accepted &
205     # process-new use from process-unchecked
206
    def dump_vars(self, dest_dir):
        """Serialise per-upload state to "<changes basename>.dak" in
        dest_dir as a sequence of pickles.

        Only a whitelisted subset of the changes/dsc/files/dsc_files
        keys is written, so it stays clear which fields
        process-accepted and process-new consume from
        process-unchecked.  The dump order must exactly match the load
        order in update_vars().
        """

        changes = self.pkg.changes
        dsc = self.pkg.dsc
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        # Keep the .dak file group-only; a chmod failure is tolerated
        # unless the file ended up world readable.
        try:
            os.chmod(dump_filename, 0660)
        except OSError, e:
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                if perms & stat.S_IROTH:
                    utils.fubar("%s is world readable and chmod failed." % (dump_filename))
            else:
                raise

        # Protocol 1 binary pickle stream.
        p = cPickle.Pickler(dump_file, 1)
        d_changes = {}
        d_dsc = {}
        d_files = {}
        d_dsc_files = {}

        ## files
        # Copy only the known per-file keys that downstream tools use.
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        ## changes
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
                   "sponsoremail" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        ## dsc
        # All dsc fields are optional here.
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i]
        ## dsc_files
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump order is a contract with update_vars(); do not reorder.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i)
        dump_file.close()
279
280     ###########################################################################
281
282     # Set up the per-package template substitution mappings
283
    def update_subst (self, reject_message = ""):
        """Refresh the per-package entries of the template substitution
        mapping (self.Subst) from the current .changes data.

        reject_message -- text for __REJECT_MESSAGE__ (empty outside of
                          rejection handling)
        """
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            # Mail both the uploader and the maintainer.
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        # Sponsored uploads CC the sponsor as well.
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package-tracking server for source uploads, if configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
323
324     ###########################################################################
325
    def build_summaries(self):
        """Build the human-readable summary of this upload.

        Returns a (summary, short_summary) tuple: short_summary lists
        each file with its disposition (byhand / new / pool
        destination); summary extends that with the changelog text
        (for byhand/new uploads), the override entries and the
        would-be announcement targets.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        # Sort the file names for deterministic summary output.
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For new debs, include the package Description from
                # the control file.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Normal file: show where in the pool it will end up.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # action=0: only report what would be announced, send nothing.
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
379
380     ###########################################################################
381
382     def close_bugs (self, summary, action):
383         changes = self.pkg.changes
384         Subst = self.Subst
385         Cnf = self.Cnf
386
387         bugs = changes["closes"].keys()
388
389         if not bugs:
390             return summary
391
392         bugs.sort()
393         summary += "Closing bugs: "
394         for bug in bugs:
395             summary += "%s " % (bug)
396             if action:
397                 Subst["__BUG_NUMBER__"] = bug
398                 if changes["distribution"].has_key("stable"):
399                     Subst["__STABLE_WARNING__"] = """
400 Note that this package is not part of the released stable Debian
401 distribution.  It may have dependencies on other unreleased software,
402 or other instabilities.  Please take care if you wish to install it.
403 The update will eventually make its way into the next released Debian
404 distribution."""
405                 else:
406                     Subst["__STABLE_WARNING__"] = ""
407                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
408                     utils.send_mail (mail_message)
409         if action:
410             self.Logger.log(["closing bugs"]+bugs)
411         summary += "\n"
412
413         return summary
414
415     ###########################################################################
416
    def announce (self, short_summary, action):
        """Build (and, if action is true, send) the per-distribution
        announcement mails for this upload.

        short_summary -- per-file summary text for __SHORT_SUMMARY__
        action        -- if false, only report what would be announced

        Returns a human-readable summary of the announcements; also
        closes bugs via close_bugs() when Dinstall::CloseBugs is set.
        """
        Subst = self.Subst
        Cnf = self.Cnf
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            return ""

        # Track lists already announced to, so a package uploaded to
        # several suites sharing a list is announced only once.
        lists_done = {}
        summary = ""
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package-tracking server, if configured.
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        return summary
448
449     ###########################################################################
450
    def accept (self, summary, short_summary):
        """Accept an upload: dump its state, move all files to the
        accepted queue, send the accept mail / announcements, write the
        DebBugs version-tracking files and hand the upload to
        queue_build().

        summary / short_summary -- text from build_summaries()
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Persist state so process-accepted can pick it up later.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file first, then rename
                # into place so readers never see a partial file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
518
519     ###########################################################################
520
    def queue_build (self, queue, path):
        """Make the upload's files available for clean auto-building of
        queued packages.

        queue -- queue name (e.g. "accepted"), resolved to a queue id
        path  -- directory currently holding the upload's files

        For each target suite listed in Dinstall::QueueBuildSuites,
        copies (security archive) or symlinks each file into the queue
        build area and records it in the queue_build table, plus a
        symlink for the pooled .orig.tar.gz if needed.  All database
        work happens inside one transaction.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): dest is interpolated straight into the
                # SQL string; confirm quoting is safe for these paths.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                # NOTE(review): if no dsc file ends in ".orig.tar.gz",
                # filename is unbound here (NameError) -- presumably
                # orig_tar_id being set implies one exists; confirm.
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
576
577     ###########################################################################
578
    def check_override (self):
        """Mail the maintainer about any disparity between the
        section/priority declared in the package and the archive
        override entries.

        No-op for non-sourceful uploads, when the disparity check is
        disabled, or when mail sending is disabled.
        """
        Subst = self.Subst
        changes = self.pkg.changes
        files = self.pkg.files
        Cnf = self.Cnf

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            return

        summary = ""
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            # Only check non-NEW debs; NEW packages have no override yet,
            # and "-" means "no opinion" from the package side.
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        # Nothing to report -> no mail.
        if summary == "":
            return

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)
614
615     ###########################################################################
616
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           files -- iterable of file names relative to the current
                    directory
           """

        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            # O_EXCL makes the open fail if the destination already
            # exists, so we atomically claim the destination name.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry the exclusive create now that the old file
                    # has been moved out of the way.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
656
657     ###########################################################################
658
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload.

        If 'manual' is set and no 'reject_message' is supplied, an editor
        ($EDITOR, defaulting to vi) is spawned so the operator can compose
        one interactively.  The upload's files are moved to the reject
        directory via force_reject(), a <package>.reason file is written
        there and, unless Dinstall::Options::No-Mail is set, a rejection
        mail is sent.

        Returns 1 if the operator abandoned the rejection, 0 on success;
        calls sys.exit(0) when the operator answers Quit.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Re-open the editor for as long as the operator answers 'E'dit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Prompt until a valid single letter is given; an empty
                # answer takes the default ([R]) parsed out of the prompt
                # by re_default_answer.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <foo>.changes -> <foo>.reason inside the reject directory
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: mail comes from dak's own address and
            # carries the rejection-tracking headers.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
731
732     ################################################################################
733
734     # Ensure that source exists somewhere in the archive for the binary
735     # upload being processed.
736     #
737     # (1) exact match                      => 1.0-3
738     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
739
740     def source_exists (self, package, source_version, suites = ["any"]):
741         okay = 1
742         for suite in suites:
743             if suite == "any":
744                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
745                     (package)
746             else:
747                 # source must exist in suite X, or in some other suite that's
748                 # mapped to X, recursively... silent-maps are counted too,
749                 # unreleased-maps aren't.
750                 maps = self.Cnf.ValueList("SuiteMappings")[:]
751                 maps.reverse()
752                 maps = [ m.split() for m in maps ]
753                 maps = [ (x[1], x[2]) for x in maps
754                                 if x[0] == "map" or x[0] == "silent-map" ]
755                 s = [suite]
756                 for x in maps:
757                     if x[1] in s and x[0] not in s:
758                         s.append(x[0])
759
760                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
761             q = self.projectB.query(que)
762
763             # Reduce the query results to a list of version numbers
764             ql = [ i[0] for i in q.getresult() ]
765
766             # Try (1)
767             if source_version in ql:
768                 continue
769
770             # Try (2)
771             orig_source_version = re_bin_only_nmu.sub('', source_version)
772             if orig_source_version in ql:
773                 continue
774
775             # No source found...
776             okay = 0
777             break
778         return okay
779
780     ################################################################################
781
782     def in_override_p (self, package, component, suite, binary_type, file):
783         files = self.pkg.files
784
785         if binary_type == "": # must be source
786             file_type = "dsc"
787         else:
788             file_type = binary_type
789
790         # Override suite name; used for example with proposed-updates
791         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
792             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
793
794         # Avoid <undef> on unknown distributions
795         suite_id = database.get_suite_id(suite)
796         if suite_id == -1:
797             return None
798         component_id = database.get_component_id(component)
799         type_id = database.get_override_type_id(file_type)
800
801         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
802                            % (package, suite_id, component_id, type_id))
803         result = q.getresult()
804         # If checking for a source package fall back on the binary override type
805         if file_type == "dsc" and not result:
806             deb_type_id = database.get_override_type_id("deb")
807             udeb_type_id = database.get_override_type_id("udeb")
808             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
809                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
810             result = q.getresult()
811
812         # Remember the section and priority so we can check them later if appropriate
813         if result:
814             files[file]["override section"] = result[0][0]
815             files[file]["override priority"] = result[0][1]
816
817         return result
818
819     ################################################################################
820
821     def reject (self, str, prefix="Rejected: "):
822         if str:
823             # Unlike other rejects we add new lines first to avoid trailing
824             # new lines when this message is passed back up to a caller.
825             if self.reject_message:
826                 self.reject_message += "\n"
827             self.reject_message += prefix + str
828
829     ################################################################################
830
831     def get_anyversion(self, query_result, suite):
832         anyversion=None
833         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
834         for (v, s) in query_result:
835             if s in [ x.lower() for x in anysuite ]:
836                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
837                     anyversion=v
838         return anyversion
839
840     ################################################################################
841
842     def cross_suite_version_check(self, query_result, file, new_version):
843         """Ensure versions are newer than existing packages in target
844         suites and that cross-suite version checking rules as
845         set out in the conf file are satisfied."""
846
847         # Check versions for each target suite
848         for target_suite in self.pkg.changes["distribution"].keys():
849             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
850             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
851             # Enforce "must be newer than target suite" even if conffile omits it
852             if target_suite not in must_be_newer_than:
853                 must_be_newer_than.append(target_suite)
854             for entry in query_result:
855                 existent_version = entry[0]
856                 suite = entry[1]
857                 if suite in must_be_newer_than and \
858                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
859                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
860                 if suite in must_be_older_than and \
861                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
862                     ch = self.pkg.changes
863                     cansave = 0
864                     if ch.get('distribution-version', {}).has_key(suite):
865                     # we really use the other suite, ignoring the conflicting one ...
866                         addsuite = ch["distribution-version"][suite]
867
868                         add_version = self.get_anyversion(query_result, addsuite)
869                         target_version = self.get_anyversion(query_result, target_suite)
870
871                         if not add_version:
872                             # not add_version can only happen if we map to a suite
873                             # that doesn't enhance the suite we're propup'ing from.
874                             # so "propup-ver x a b c; map a d" is a problem only if
875                             # d doesn't enhance a.
876                             #
877                             # i think we could always propagate in this case, rather
878                             # than complaining. either way, this isn't a REJECT issue
879                             #
880                             # And - we really should complain to the dorks who configured dak
881                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
882                             self.pkg.changes.setdefault("propdistribution", {})
883                             self.pkg.changes["propdistribution"][addsuite] = 1
884                             cansave = 1
885                         elif not target_version:
886                             # not targets_version is true when the package is NEW
887                             # we could just stick with the "...old version..." REJECT
888                             # for this, I think.
889                             self.reject("Won't propogate NEW packages.")
890                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
891                             # propogation would be redundant. no need to reject though.
892                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
893                             cansave = 1
894                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
895                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
896                             # propogate!!
897                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
898                             self.pkg.changes.setdefault("propdistribution", {})
899                             self.pkg.changes["propdistribution"][addsuite] = 1
900                             cansave = 1
901
902                     if not cansave:
903                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
904
905     ################################################################################
906
907     def check_binary_against_db(self, file):
908         self.reject_message = ""
909         files = self.pkg.files
910
911         # Ensure version is sane
912         q = self.projectB.query("""
913 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
914                                      architecture a
915  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
916    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
917                                 % (files[file]["package"],
918                                    files[file]["architecture"]))
919         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
920
921         # Check for any existing copies of the file
922         q = self.projectB.query("""
923 SELECT b.id FROM binaries b, architecture a
924  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
925    AND a.id = b.architecture"""
926                                 % (files[file]["package"],
927                                    files[file]["version"],
928                                    files[file]["architecture"]))
929         if q.getresult():
930             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
931
932         return self.reject_message
933
934     ################################################################################
935
936     def check_source_against_db(self, file):
937         self.reject_message = ""
938         dsc = self.pkg.dsc
939
940         # Ensure version is sane
941         q = self.projectB.query("""
942 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
943  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
944         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
945
946         return self.reject_message
947
948     ################################################################################
949
950     # **WARNING**
951     # NB: this function can remove entries from the 'files' index [if
952     # the .orig.tar.gz is a duplicate of the one in the archive]; if
953     # you're iterating over 'files' and call this function as part of
954     # the loop, be sure to add a check to the top of the loop to
955     # ensure you haven't just tried to dereference the deleted entry.
956     # **WARNING**
957
958     def check_dsc_against_db(self, file):
959         self.reject_message = ""
960         files = self.pkg.files
961         dsc_files = self.pkg.dsc_files
962         legacy_source_untouchable = self.pkg.legacy_source_untouchable
963         self.pkg.orig_tar_gz = None
964
965         # Try and find all files mentioned in the .dsc.  This has
966         # to work harder to cope with the multiple possible
967         # locations of an .orig.tar.gz.
968         # The ordering on the select is needed to pick the newest orig
969         # when it exists in multiple places.
970         for dsc_file in dsc_files.keys():
971             found = None
972             if files.has_key(dsc_file):
973                 actual_md5 = files[dsc_file]["md5sum"]
974                 actual_size = int(files[dsc_file]["size"])
975                 found = "%s in incoming" % (dsc_file)
976                 # Check the file does not already exist in the archive
977                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
978                 ql = q.getresult()
979                 # Strip out anything that isn't '%s' or '/%s$'
980                 for i in ql:
981                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
982                         ql.remove(i)
983
984                 # "[dak] has not broken them.  [dak] has fixed a
985                 # brokenness.  Your crappy hack exploited a bug in
986                 # the old dinstall.
987                 #
988                 # "(Come on!  I thought it was always obvious that
989                 # one just doesn't release different files with
990                 # the same name and version.)"
991                 #                        -- ajk@ on d-devel@l.d.o
992
993                 if ql:
994                     # Ignore exact matches for .orig.tar.gz
995                     match = 0
996                     if dsc_file.endswith(".orig.tar.gz"):
997                         for i in ql:
998                             if files.has_key(dsc_file) and \
999                                int(files[dsc_file]["size"]) == int(i[0]) and \
1000                                files[dsc_file]["md5sum"] == i[1]:
1001                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1002                                 del files[dsc_file]
1003                                 self.pkg.orig_tar_gz = i[2] + i[3]
1004                                 match = 1
1005
1006                     if not match:
1007                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1008             elif dsc_file.endswith(".orig.tar.gz"):
1009                 # Check in the pool
1010                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1011                 ql = q.getresult()
1012                 # Strip out anything that isn't '%s' or '/%s$'
1013                 for i in ql:
1014                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1015                         ql.remove(i)
1016
1017                 if ql:
1018                     # Unfortunately, we may get more than one match here if,
1019                     # for example, the package was in potato but had an -sa
1020                     # upload in woody.  So we need to choose the right one.
1021
1022                     x = ql[0]; # default to something sane in case we don't match any or have only one
1023
1024                     if len(ql) > 1:
1025                         for i in ql:
1026                             old_file = i[0] + i[1]
1027                             old_file_fh = utils.open_file(old_file)
1028                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1029                             old_file_fh.close()
1030                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1031                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1032                                 x = i
1033                             else:
1034                                 legacy_source_untouchable[i[3]] = ""
1035
1036                     old_file = x[0] + x[1]
1037                     old_file_fh = utils.open_file(old_file)
1038                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1039                     old_file_fh.close()
1040                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1041                     found = old_file
1042                     suite_type = x[2]
1043                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1044                     # See install() in process-accepted...
1045                     self.pkg.orig_tar_id = x[3]
1046                     self.pkg.orig_tar_gz = old_file
1047                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1048                         self.pkg.orig_tar_location = "legacy"
1049                     else:
1050                         self.pkg.orig_tar_location = x[4]
1051                 else:
1052                     # Not there? Check the queue directories...
1053
1054                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1055                     # See process_it() in 'dak process-unchecked' for explanation of this
1056                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1057                     # ever make sense?
1058                     if os.path.exists(in_unchecked) and False:
1059                         return (self.reject_message, in_unchecked)
1060                     else:
1061                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1062                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1063                             if os.path.exists(in_otherdir):
1064                                 in_otherdir_fh = utils.open_file(in_otherdir)
1065                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1066                                 in_otherdir_fh.close()
1067                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1068                                 found = in_otherdir
1069                                 self.pkg.orig_tar_gz = in_otherdir
1070
1071                     if not found:
1072                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1073                         self.pkg.orig_tar_gz = -1
1074                         continue
1075             else:
1076                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1077                 continue
1078             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1079                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1080             if actual_size != int(dsc_files[dsc_file]["size"]):
1081                 self.reject("size for %s doesn't match %s." % (found, file))
1082
1083         return (self.reject_message, None)
1084
1085     def do_query(self, q):
1086         sys.stderr.write("query: \"%s\" ... " % (q))
1087         before = time.time()
1088         r = self.projectB.query(q)
1089         time_diff = time.time()-before
1090         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1091         return r