#!/usr/bin/env python

# Queue utility functions for dak
# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import cPickle, errno, os, pg, re, stat, sys, time
import apt_inst, apt_pkg
import utils, database
from dak_exceptions import *

from types import *

###############################################################################

re_isanum = re.compile (r"^\d+$")
re_default_answer = re.compile(r"\[(.*)\]")
re_fdnic = re.compile(r"\n\n")
re_bin_only_nmu = re.compile(r"\+b\d+$")

################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, projectB, warn=1):
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new

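# Illustrative sketch (not part of the original module): how a caller such as
# 'dak process-new' might drive determine_new().  `upload` stands in for an
# Upload instance whose .pkg has already been filled by 'dak process-unchecked';
# the helper itself is hypothetical and never called from dak.
def _example_determine_new(upload, projectB):
    new = determine_new(upload.pkg.changes, upload.pkg.files, projectB, warn=0)
    # Report the proposed override data for every genuinely NEW package.
    for pkg in new.keys():
        print "%s: %s/%s (%s)" % (pkg, new[pkg]["section"],
                                  new[pkg]["priority"], new[pkg]["type"])
    return new
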
################################################################################

def get_type(f):
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

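# Illustrative sketch (not original code, and assuming database.init() has
# already been called): get_type() collapses all source-related pieces onto the
# "dsc" override type, while binaries keep the "dbtype" set on them earlier.
def _example_get_type():
    source_piece = { "type": "orig.tar.gz" }           # no dbtype -> source
    binary_piece = { "type": "deb", "dbtype": "deb" }  # dbtype wins
    return get_type(source_piece), get_type(binary_piece)   # ("dsc", "deb")
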
################################################################################

# check if section/priority values are valid

def check_valid(new):
    for pkg in new.keys():
        section = new[pkg]["section"]
        priority = new[pkg]["priority"]
        file_type = new[pkg]["type"]
        new[pkg]["section id"] = database.get_section_id(section)
        new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
        # Sanity checks
        di = section.find("debian-installer") != -1
        if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

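# Illustrative sketch (not original code, DB assumed initialised): the sanity
# rules above force the id to -1 when a udeb sits outside a debian-installer
# section or when the "source" priority is attached to anything but a .dsc,
# so process-new can flag the entry for manual attention.
def _example_check_valid():
    new = { "foo": { "section": "utils", "priority": "source", "type": "deb" } }
    check_valid(new)
    return new["foo"]["priority id"]   # -1: "source" priority on a non-.dsc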

###############################################################################

# Convenience wrapper to carry around all the package information

class Pkg:
    def __init__(self, **kwds):
        self.__dict__.update(kwds)

    def update(self, **kwds):
        self.__dict__.update(kwds)

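# Illustrative sketch (not original code): Pkg is just a bag of keyword
# attributes, which is exactly how Upload.__init__() below uses it.
def _example_pkg():
    pkg = Pkg(changes={}, dsc={}, files={}, dsc_files={})
    pkg.update(orig_tar_id=None, orig_tar_gz=None)
    return pkg.changes, pkg.orig_tar_id
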
###############################################################################

class Upload:

    def __init__(self, Cnf):
        self.Cnf = Cnf
        self.accept_count = 0
        self.accept_bytes = 0L
        self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                       legacy_source_untouchable = {})

        # Initialize the substitution template mapping global
        Subst = self.Subst = {}
        Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
        Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
        Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
        Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]

        self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
        database.init(Cnf, self.projectB)

    ###########################################################################

    def init_vars (self):
        self.pkg.changes.clear()
        self.pkg.dsc.clear()
        self.pkg.files.clear()
        self.pkg.dsc_files.clear()
        self.pkg.legacy_source_untouchable.clear()
        self.pkg.orig_tar_id = None
        self.pkg.orig_tar_location = ""
        self.pkg.orig_tar_gz = None

    ###########################################################################

    def update_vars (self):
        dump_filename = self.pkg.changes_file[:-8]+".dak"
        dump_file = utils.open_file(dump_filename)
        p = cPickle.Unpickler(dump_file)

        self.pkg.changes.update(p.load())
        self.pkg.dsc.update(p.load())
        self.pkg.files.update(p.load())
        self.pkg.dsc_files.update(p.load())
        self.pkg.legacy_source_untouchable.update(p.load())

        self.pkg.orig_tar_id = p.load()
        self.pkg.orig_tar_location = p.load()

        dump_file.close()

    ###########################################################################

    # This could just dump the dictionaries as is, but I'd like to
    # avoid this so there's some idea of what process-accepted &
    # process-new use from process-unchecked

    def dump_vars(self, dest_dir):

        changes = self.pkg.changes
        dsc = self.pkg.dsc
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        try:
            os.chmod(dump_filename, 0660)
        except OSError, e:
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                if perms & stat.S_IROTH:
                    utils.fubar("%s is world readable and chmod failed." % (dump_filename))
            else:
                raise

        p = cPickle.Pickler(dump_file, 1)
        d_changes = {}
        d_dsc = {}
        d_files = {}
        d_dsc_files = {}

        ## files
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "component", "location id", "source package",
                       "source version", "maintainer", "dbtype", "files id",
                       "new", "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        ## changes
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
                   "sponsoremail" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        ## dsc
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i]
        ## dsc_files
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i)
        dump_file.close()

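    # Illustrative sketch (not original code): dump_vars() and update_vars()
    # form a round trip through the <changes>.dak pickle, roughly
    #
    #     u = Upload(Cnf)
    #     u.pkg.changes_file = "foo_1.0-1_i386.changes"   # hypothetical upload
    #     u.dump_vars(Cnf["Dir::Queue::Accepted"])  # written by process-unchecked
    #     u.init_vars()
    #     u.update_vars()                 # read back by process-accepted/-new
    #
    # Only the whitelisted keys above survive the trip; anything else has to be
    # recomputed by the consumer.
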
    ###########################################################################

    # Set up the per-package template substitution mappings

    def update_subst (self, reject_message = ""):
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s" % changes["sponsoremail"]

        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")

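    # Illustrative sketch (not original code): the mapping built above is what
    # utils.TemplateSubst() expands later on, e.g. in accept() below:
    #
    #     self.update_subst()
    #     mail_message = utils.TemplateSubst(self.Subst,
    #         Cnf["Dir::Templates"] + "/process-unchecked.accepted")
    #     utils.send_mail(mail_message)
    #
    # so every __FOO__ placeholder used by a template must be filled in here.
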
    ###########################################################################

    def build_summaries(self):
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary = ""
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # queue/unchecked already has override entries, so use them;
                    # process-new doesn't, so use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)

    ###########################################################################

    def close_bugs (self, summary, action):
        changes = self.pkg.changes
        Subst = self.Subst
        Cnf = self.Cnf

        bugs = changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                Subst["__BUG_NUMBER__"] = bug
                if changes["distribution"].has_key("stable"):
                    Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
                utils.send_mail (mail_message)
        if action:
            self.Logger.log(["closing bugs"]+bugs)
        summary += "\n"

        return summary

    ###########################################################################

    def announce (self, short_summary, action):
        Subst = self.Subst
        Cnf = self.Cnf
        changes = self.pkg.changes

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""
        Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in changes["distribution"].keys():
            announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
                    Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
                utils.send_mail (mail_message)

        if Cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        return summary

    ###########################################################################

    def accept (self, summary, short_summary):
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])

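    # Illustrative note (not original code): each line of the *.debinfo file
    # written above maps a binary back to its source in the form
    #
    #     <package> <version> <architecture> <source package> <source version>
    #
    # e.g. "foo 1.0-1 i386 foo 1.0-1", while the *.versions file simply carries
    # the changelog-derived version history taken from the .dsc ("bts changelog").
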
    ###########################################################################

    def queue_build (self, queue, path):
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")

    ###########################################################################

    def check_override (self):
        Subst = self.Subst
        changes = self.pkg.changes
        files = self.pkg.files
        Cnf = self.Cnf

        # Abandon the check if:
        #  a) it's a non-sourceful upload
        #  b) override disparity checks have been disabled
        #  c) we're not sending mail
        if not changes["architecture"].has_key("source") or \
           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           Cnf["Dinstall::Options::No-Mail"]:
            return

        summary = ""
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
                section = files[file_entry]["section"]
                override_section = files[file_entry]["override section"]
                if section.lower() != override_section.lower() and section != "-":
                    summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
                priority = files[file_entry]["priority"]
                override_priority = files[file_entry]["override priority"]
                if priority != override_priority and priority != "-":
                    summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)

        if summary == "":
            return

        Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
        utils.send_mail(mail_message)

    ###########################################################################

    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file."""

        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            try:
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)

    ###########################################################################

    def do_reject (self, manual = 0, reject_message = ""):
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0

    ################################################################################

    # Ensure that source exists somewhere in the archive for the binary
    # upload being processed.
    #
    # (1) exact match                      => 1.0-3
    # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1

    def source_exists (self, package, source_version, suites = ["any"]):
        okay = 1
        for suite in suites:
            if suite == "any":
                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
                    (package)
            else:
                # source must exist in suite X, or in some other suite that's
                # mapped to X, recursively... silent-maps are counted too,
                # unreleased-maps aren't.
                maps = self.Cnf.ValueList("SuiteMappings")[:]
                maps.reverse()
                maps = [ m.split() for m in maps ]
                maps = [ (x[1], x[2]) for x in maps
                                if x[0] == "map" or x[0] == "silent-map" ]
                s = [suite]
                for x in maps:
                    if x[1] in s and x[0] not in s:
                        s.append(x[0])

                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
            q = self.projectB.query(que)

            # Reduce the query results to a list of version numbers
            ql = [ i[0] for i in q.getresult() ]

            # Try (1)
            if source_version in ql:
                continue

            # Try (2)
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:
                continue

            # No source found...
            okay = 0
            break
        return okay

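    # Illustrative note (not original code): the "Bin-only NMU" case above works
    # because re_bin_only_nmu strips the +bN suffix, e.g.
    #
    #     re_bin_only_nmu.sub('', "1.0-3+b1")    ->  "1.0-3"
    #     re_bin_only_nmu.sub('', "1.0-3.1+b2")  ->  "1.0-3.1"
    #
    # so a binary rebuilt as 1.0-3+b1 still matches source version 1.0-3.
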
    ################################################################################

    def in_override_p (self, package, component, suite, binary_type, file):
        files = self.pkg.files

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]

        # Avoid <undef> on unknown distributions
        suite_id = database.get_suite_id(suite)
        if suite_id == -1:
            return None
        component_id = database.get_component_id(component)
        type_id = database.get_override_type_id(file_type)

        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                           % (package, suite_id, component_id, type_id))
        result = q.getresult()
        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and not result:
            deb_type_id = database.get_override_type_id("deb")
            udeb_type_id = database.get_override_type_id("udeb")
            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                               % (package, suite_id, component_id, deb_type_id, udeb_type_id))
            result = q.getresult()

        # Remember the section and priority so we can check them later if appropriate
        if result:
            files[file]["override section"] = result[0][0]
            files[file]["override priority"] = result[0][1]

        return result

    ################################################################################

    def reject (self, str, prefix="Rejected: "):
        if str:
            # Unlike other rejects we add new lines first to avoid trailing
            # new lines when this message is passed back up to a caller.
            if self.reject_message:
                self.reject_message += "\n"
            self.reject_message += prefix + str

    ################################################################################

    def get_anyversion(self, query_result, suite):
        anyversion=None
        anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (v, s) in query_result:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion=v
        return anyversion

    ################################################################################

    def cross_suite_version_check(self, query_result, file, new_version):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied."""

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                suite = entry[1]
                if suite in must_be_newer_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    cansave = 0
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.reject("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.reject("Propagating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

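    # Illustrative note (not original code): with a hypothetical conf fragment
    # such as
    #
    #     Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
    #
    # an upload targeted at unstable is rejected above whenever stable, testing
    # or unstable itself already carries a version that is not strictly older
    # (apt_pkg.VersionCompare(new, existing) < 1), while the MustBeOlderThan
    # list drives the proposed-updates style propagation handling.
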
    ################################################################################

    def check_binary_against_db(self, file):
        self.reject_message = ""
        files = self.pkg.files

        # Ensure version is sane
        q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
                                     architecture a
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                                % (files[file]["package"],
                                   files[file]["architecture"]))
        self.cross_suite_version_check(q.getresult(), file, files[file]["version"])

        # Check for any existing copies of the file
        q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                                % (files[file]["package"],
                                   files[file]["version"],
                                   files[file]["architecture"]))
        if q.getresult():
            self.reject("%s: can not overwrite existing copy already in the archive." % (file))

        return self.reject_message

    ################################################################################

    def check_source_against_db(self, file):
        self.reject_message = ""
        dsc = self.pkg.dsc

        # Ensure version is sane
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

        return self.reject_message

    ################################################################################

    # **WARNING**
    # NB: this function can remove entries from the 'files' index [if
    # the .orig.tar.gz is a duplicate of the one in the archive]; if
    # you're iterating over 'files' and call this function as part of
    # the loop, be sure to add a check to the top of the loop to
    # ensure you haven't just tried to dereference the deleted entry.
    # **WARNING**
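    # Illustrative sketch (not original code) of the caller-side pattern the
    # warning asks for:
    #
    #     for f in files.keys():
    #         if not files.has_key(f):    # entry may just have been deleted
    #             continue                #  by check_dsc_against_db()
    #         ...
    #
    # i.e. re-check membership at the top of every iteration over 'files'.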

    def check_dsc_against_db(self, file):
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_file in dsc_files.keys():
            found = None
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql:
                    if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                        ql.remove(i)

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if ql:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if dsc_file.endswith(".orig.tar.gz"):
                        for i in ql:
                            if files.has_key(dsc_file) and \
                               int(files[dsc_file]["size"]) == int(i[0]) and \
                               files[dsc_file]["md5sum"] == i[1]:
                                self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                                del files[dsc_file]
                                self.pkg.orig_tar_gz = i[2] + i[3]
                                match = 1

                    if not match:
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Check in the pool
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql:
                    if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                        ql.remove(i)

                if ql:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    x = ql[0]; # default to something sane in case we don't match any or have only one

                    if len(ql) > 1:
                        for i in ql:
                            old_file = i[0] + i[1]
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                x = i
                            else:
                                legacy_source_untouchable[i[3]] = ""

                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x[2]
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                    else:
                        self.pkg.orig_tar_location = x[4]
                else:
                    # Not there? Check the queue directories...

                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                    # See process_it() in 'dak process-unchecked' for explanation of this
                    # in_unchecked check dropped by ajt 2007-08-28, how did that
                    # ever make sense?
                    if os.path.exists(in_unchecked) and False:
                        return (self.reject_message, in_unchecked)
                    else:
                        for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
                            if os.path.exists(in_otherdir):
                                in_otherdir_fh = utils.open_file(in_otherdir)
                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                                in_otherdir_fh.close()
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                                found = in_otherdir
                                self.pkg.orig_tar_gz = in_otherdir

                    if not found:
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                        self.pkg.orig_tar_gz = -1
                        continue
            else:
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
                continue
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)

    def do_query(self, q):
        sys.stderr.write("query: \"%s\" ... " % (q))
        before = time.time()
        r = self.projectB.query(q)
        time_diff = time.time()-before
        sys.stderr.write("took %.3f seconds.\n" % (time_diff))
        return r