]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Merge commit 'ftpmaster/master' into regexes
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 # Queue utility functions for dak
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
6
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
21 ###############################################################################
22
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
27 from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
28
29 from types import *
30
31 ###############################################################################
32
# Determine what parts in a .changes are NEW

def determine_new(changes, files, projectB, warn=1):
    """Determine what parts of an upload lack an override entry ("NEW").

    changes  -- parsed .changes dictionary (must contain a "suite" dict)
    files    -- dictionary of per-file entries from the .changes
    projectB -- open pg database connection
    warn     -- if true, print warnings about stable targets and packages
                already present in other components

    Returns a dictionary keyed by package name; each value records the
    priority, section, type, component and the list of file entries that
    belong to that package.  Packages which already have an override entry
    in a targeted suite are removed again before returning.
    """
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry priority "source" in the override table
        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop anything which already has an override entry in a targeted suite.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): pkg is interpolated straight into the SQL string;
            # assumed safe only because package names are validated upstream
            # of this function -- confirm.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                # Safe while iterating in Python 2: .keys() above returned a
                # list copy, not a live view.
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            # NOTE(review): the oldstable warning only fires when stable is
            # also targeted -- possibly intended as a sibling check; confirm.
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
95
96 ################################################################################
97
def get_type(f):
    """Determine the override type ("dsc", "deb", "udeb", ...) for a file entry.

    f -- per-file dictionary from the .changes; "dbtype" wins when present,
         otherwise source artefacts (tarballs, diffs, .dsc) map to "dsc".

    Aborts via utils.fubar() on an unrecognised type or one unknown to the
    override_type table.  Returns the validated type string.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: this branch previously referenced file_type before it was
        # ever assigned, raising UnboundLocalError instead of reporting the
        # offending type.  Use the raw f["type"] value instead.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
113
114 ################################################################################
115
# check if section/priority values are valid

def check_valid(new):
    """Resolve section/priority ids for each NEW entry, marking invalid
    combinations with an id of -1.

    Two sanity rules are enforced: debian-installer sections may only hold
    udebs (or the source .dsc) and udebs may only live in d-i sections;
    priority "source" belongs to .dsc entries and to nothing else.
    """
    for pkg in new.keys():
        entry = new[pkg]
        entry["section id"] = database.get_section_id(entry["section"])
        entry["priority id"] = database.get_priority_id(entry["priority"])

        # d-i section <-> udeb/dsc mismatch invalidates the section
        in_di_section = entry["section"].find("debian-installer") != -1
        bad_di_content = in_di_section and entry["type"] not in ("udeb", "dsc")
        stray_udeb = (not in_di_section) and entry["type"] == "udeb"
        if bad_di_content or stray_udeb:
            entry["section id"] = -1

        # priority "source" iff the entry is a .dsc (XOR => invalid)
        if (entry["priority"] == "source") != (entry["type"] == "dsc"):
            entry["priority id"] = -1
132
133
134 ###############################################################################
135
# Convenience wrapper to carry around all the package information in

class Pkg:
    """Plain attribute bag carrying all per-upload package state.

    Any keyword arguments given to the constructor or to update() become
    instance attributes.
    """
    def __init__(self, **kwds):
        for name, value in kwds.items():
            setattr(self, name, value)

    def update(self, **kwds):
        for name, value in kwds.items():
            setattr(self, name, value)
144
145 ###############################################################################
146
147 class Upload:
148
149     def __init__(self, Cnf):
150         self.Cnf = Cnf
151         self.accept_count = 0
152         self.accept_bytes = 0L
153         self.reject_message = ""
154         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
155                        legacy_source_untouchable = {})
156
157         # Initialize the substitution template mapping global
158         Subst = self.Subst = {}
159         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
160         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
161         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
162         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
163
164         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
165         database.init(Cnf, self.projectB)
166
167     ###########################################################################
168
169     def init_vars (self):
170         self.pkg.changes.clear()
171         self.pkg.dsc.clear()
172         self.pkg.files.clear()
173         self.pkg.dsc_files.clear()
174         self.pkg.legacy_source_untouchable.clear()
175         self.pkg.orig_tar_id = None
176         self.pkg.orig_tar_location = ""
177         self.pkg.orig_tar_gz = None
178
179     ###########################################################################
180
181     def update_vars (self):
182         dump_filename = self.pkg.changes_file[:-8]+".dak"
183         dump_file = utils.open_file(dump_filename)
184         p = cPickle.Unpickler(dump_file)
185
186         self.pkg.changes.update(p.load())
187         self.pkg.dsc.update(p.load())
188         self.pkg.files.update(p.load())
189         self.pkg.dsc_files.update(p.load())
190         self.pkg.legacy_source_untouchable.update(p.load())
191
192         self.pkg.orig_tar_id = p.load()
193         self.pkg.orig_tar_location = p.load()
194
195         dump_file.close()
196
197     ###########################################################################
198
199     # This could just dump the dictionaries as is, but I'd like to
200     # avoid this so there's some idea of what process-accepted &
201     # process-new use from process-unchecked
202
203     def dump_vars(self, dest_dir):
204
205         changes = self.pkg.changes
206         dsc = self.pkg.dsc
207         files = self.pkg.files
208         dsc_files = self.pkg.dsc_files
209         legacy_source_untouchable = self.pkg.legacy_source_untouchable
210         orig_tar_id = self.pkg.orig_tar_id
211         orig_tar_location = self.pkg.orig_tar_location
212
213         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
214         dump_file = utils.open_file(dump_filename, 'w')
215         try:
216             os.chmod(dump_filename, 0664)
217         except OSError, e:
218             # chmod may fail when the dumpfile is not owned by the user
219             # invoking dak (like e.g. when NEW is processed by a member
220             # of ftpteam)
221             if errno.errorcode[e.errno] == 'EPERM':
222                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
223                 # security precaution, should never happen unless a weird
224                 # umask is set anywhere
225                 if perms & stat.S_IWOTH:
226                     utils.fubar("%s is world writable and chmod failed." % \
227                         (dump_filename,))
228                 # ignore the failed chmod otherwise as the file should
229                 # already have the right privileges and is just, at worst,
230                 # unreadable for world
231             else:
232                 raise
233
234         p = cPickle.Pickler(dump_file, 1)
235         d_changes = {}
236         d_dsc = {}
237         d_files = {}
238         d_dsc_files = {}
239
240         ## files
241         for file_entry in files.keys():
242             d_files[file_entry] = {}
243             for i in [ "package", "version", "architecture", "type", "size",
244                        "md5sum", "sha1sum", "sha256sum", "component",
245                        "location id", "source package", "source version",
246                        "maintainer", "dbtype", "files id", "new",
247                        "section", "priority", "othercomponents",
248                        "pool name", "original component" ]:
249                 if files[file_entry].has_key(i):
250                     d_files[file_entry][i] = files[file_entry][i]
251         ## changes
252         # Mandatory changes fields
253         for i in [ "distribution", "source", "architecture", "version",
254                    "maintainer", "urgency", "fingerprint", "changedby822",
255                    "changedby2047", "changedbyname", "maintainer822",
256                    "maintainer2047", "maintainername", "maintaineremail",
257                    "closes", "changes" ]:
258             d_changes[i] = changes[i]
259         # Optional changes fields
260         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
261                    "sponsoremail" ]:
262             if changes.has_key(i):
263                 d_changes[i] = changes[i]
264         ## dsc
265         for i in [ "source", "version", "maintainer", "fingerprint",
266                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
267             if dsc.has_key(i):
268                 d_dsc[i] = dsc[i]
269         ## dsc_files
270         for file_entry in dsc_files.keys():
271             d_dsc_files[file_entry] = {}
272             # Mandatory dsc_files fields
273             for i in [ "size", "md5sum" ]:
274                 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
275             # Optional dsc_files fields
276             for i in [ "files id" ]:
277                 if dsc_files[file_entry].has_key(i):
278                     d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
279
280         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
281                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
282             p.dump(i)
283         dump_file.close()
284
285     ###########################################################################
286
287     # Set up the per-package template substitution mappings
288
289     def update_subst (self, reject_message = ""):
290         Subst = self.Subst
291         changes = self.pkg.changes
292         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
293         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
294             changes["architecture"] = { "Unknown" : "" }
295         # and maintainer2047 may not exist.
296         if not changes.has_key("maintainer2047"):
297             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
298
299         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
300         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
301         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
302
303         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
304         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
305             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
306             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
307                                                      changes["maintainer2047"])
308             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
309         else:
310             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
311             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
312             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
313
314         if "sponsoremail" in changes:
315             Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
316
317         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
318             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
319
320         # Apply any global override of the Maintainer field
321         if self.Cnf.get("Dinstall::OverrideMaintainer"):
322             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
323             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
324
325         Subst["__REJECT_MESSAGE__"] = reject_message
326         Subst["__SOURCE__"] = changes.get("source", "Unknown")
327         Subst["__VERSION__"] = changes.get("version", "Unknown")
328
329     ###########################################################################
330
    def build_summaries(self):
        """Build human-readable summaries of the current upload.

        Returns a tuple (summary, short_summary): short_summary lists each
        file and its fate (byhand / NEW / pool destination); summary
        additionally carries the changelog text (for byhand/NEW uploads),
        the override entries and any announcement text.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        # byhand/new start as "" (falsy) and flip to 1 when such a file is seen
        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary =""
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For NEW .debs, quote the Description from the control file
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Known package: report where in the pool it will end up
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # announce() with action=0 only reports what would be announced
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
384
385     ###########################################################################
386
387     def close_bugs (self, summary, action):
388         changes = self.pkg.changes
389         Subst = self.Subst
390         Cnf = self.Cnf
391
392         bugs = changes["closes"].keys()
393
394         if not bugs:
395             return summary
396
397         bugs.sort()
398         summary += "Closing bugs: "
399         for bug in bugs:
400             summary += "%s " % (bug)
401             if action:
402                 Subst["__BUG_NUMBER__"] = bug
403                 if changes["distribution"].has_key("stable"):
404                     Subst["__STABLE_WARNING__"] = """
405 Note that this package is not part of the released stable Debian
406 distribution.  It may have dependencies on other unreleased software,
407 or other instabilities.  Please take care if you wish to install it.
408 The update will eventually make its way into the next released Debian
409 distribution."""
410                 else:
411                     Subst["__STABLE_WARNING__"] = ""
412                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
413                     utils.send_mail (mail_message)
414         if action:
415             self.Logger.log(["closing bugs"]+bugs)
416         summary += "\n"
417
418         return summary
419
420     ###########################################################################
421
422     def announce (self, short_summary, action):
423         Subst = self.Subst
424         Cnf = self.Cnf
425         changes = self.pkg.changes
426
427         # Only do announcements for source uploads with a recent dpkg-dev installed
428         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
429             return ""
430
431         lists_done = {}
432         summary = ""
433         Subst["__SHORT_SUMMARY__"] = short_summary
434
435         for dist in changes["distribution"].keys():
436             announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
437             if announce_list == "" or lists_done.has_key(announce_list):
438                 continue
439             lists_done[announce_list] = 1
440             summary += "Announcing to %s\n" % (announce_list)
441
442             if action:
443                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
444                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
445                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
446                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
447                 utils.send_mail (mail_message)
448
449         if Cnf.FindB("Dinstall::CloseBugs"):
450             summary = self.close_bugs(summary, action)
451
452         return summary
453
454     ###########################################################################
455
    def accept (self, summary, short_summary):
        """Accept the upload: move everything into Dir::Queue::Accepted,
        send the accepted mail and announcements, write the DebBugs
        version-tracking files and register the upload for queue builds.

        summary       -- full textual summary (from build_summaries)
        short_summary -- short summary used for the announcement mail
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Preserve the .dak metadata alongside the accepted upload
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file, then rename into place
                # so readers never see a partial .versions file
                (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
523
524     ###########################################################################
525
    def queue_build (self, queue, path):
        """Make the upload available to the auto-builders for a queue.

        queue -- queue name (e.g. "accepted"), resolved to a queue id
        path  -- directory where the upload's files currently live

        For every distribution listed in Dinstall::QueueBuildSuites the
        files are copied (security setup) or symlinked into Dir::QueueBuild
        and registered in the queue_build table, all inside one DB
        transaction, so apt-ftparchive can process them later.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): dest is interpolated into the SQL string;
                # assumed safe because it is built from config paths plus
                # already-validated filenames -- confirm.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_file ends in .orig.tar.gz, `filename`
                # stays unbound and the next line raises NameError; presumably
                # orig_tar_id being set guarantees one exists -- confirm.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
581
582     ###########################################################################
583
584     def check_override (self):
585         Subst = self.Subst
586         changes = self.pkg.changes
587         files = self.pkg.files
588         Cnf = self.Cnf
589
590         # Abandon the check if:
591         #  a) it's a non-sourceful upload
592         #  b) override disparity checks have been disabled
593         #  c) we're not sending mail
594         if not changes["architecture"].has_key("source") or \
595            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
596            Cnf["Dinstall::Options::No-Mail"]:
597             return
598
599         summary = ""
600         file_keys = files.keys()
601         file_keys.sort()
602         for file_entry in file_keys:
603             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
604                 section = files[file_entry]["section"]
605                 override_section = files[file_entry]["override section"]
606                 if section.lower() != override_section.lower() and section != "-":
607                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
608                 priority = files[file_entry]["priority"]
609                 override_priority = files[file_entry]["override priority"]
610                 if priority != override_priority and priority != "-":
611                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
612
613         if summary == "":
614             return
615
616         Subst["__SUMMARY__"] = summary
617         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
618         utils.send_mail(mail_message)
619
620     ###########################################################################
621
622     def force_reject (self, files):
623         """Forcefully move files from the current directory to the
624            reject directory.  If any file already exists in the reject
625            directory it will be moved to the morgue to make way for
626            the new file."""
627
628         Cnf = self.Cnf
629
630         for file_entry in files:
631             # Skip any files which don't exist or which we don't have permission to copy.
632             if os.access(file_entry,os.R_OK) == 0:
633                 continue
634             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
635             try:
636                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
637             except OSError, e:
638                 # File exists?  Let's try and move it to the morgue
639                 if errno.errorcode[e.errno] == 'EEXIST':
640                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
641                     try:
642                         morgue_file = utils.find_next_free(morgue_file)
643                     except NoFreeFilenameError:
644                         # Something's either gone badly Pete Tong, or
645                         # someone is trying to exploit us.
646                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
647                         return
648                     utils.move(dest_file, morgue_file, perms=0660)
649                     try:
650                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
651                     except OSError, e:
652                         # Likewise
653                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
654                         return
655                 else:
656                     raise
657             # If we got here, we own the destination file, so we can
658             # safely overwrite it.
659             utils.move(file_entry, dest_file, 1, perms=0660)
660             os.close(dest_fd)
661
662     ###########################################################################
663
    def do_reject (self, manual = 0, reject_message = ""):
        """
        Reject the current upload: move its files into the reject
        directory, write a <changes-basename>.reason file there and
        (unless mail is disabled) send a rejection mail.

        @param manual: non-zero when a human drives the rejection; if no
            reject_message was supplied, $EDITOR is spawned to compose one.
        @param reject_message: the rejection reason text.
        @return: 1 if the operator abandoned the rejection, 0 on success.
            Exits the whole process if the operator chooses Quit.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                # "XXX" is never a substring of the prompt, so the inner
                # prompt loop is guaranteed to run at least once.
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    # An empty answer selects the prompt's default ([R]eject).
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <foo>.changes -> <foo>.reason inside the reject directory.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL guarantees we created the file ourselves (raises OSError
        # if someone recreated it between the unlink and here).
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: the reason file holds the bare message
            # and the mail is sent from dak's own address.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
736
737     ################################################################################
738
739     # Ensure that source exists somewhere in the archive for the binary
740     # upload being processed.
741     #
742     # (1) exact match                      => 1.0-3
743     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
744
745     def source_exists (self, package, source_version, suites = ["any"]):
746         okay = 1
747         for suite in suites:
748             if suite == "any":
749                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
750                     (package)
751             else:
752                 # source must exist in suite X, or in some other suite that's
753                 # mapped to X, recursively... silent-maps are counted too,
754                 # unreleased-maps aren't.
755                 maps = self.Cnf.ValueList("SuiteMappings")[:]
756                 maps.reverse()
757                 maps = [ m.split() for m in maps ]
758                 maps = [ (x[1], x[2]) for x in maps
759                                 if x[0] == "map" or x[0] == "silent-map" ]
760                 s = [suite]
761                 for x in maps:
762                     if x[1] in s and x[0] not in s:
763                         s.append(x[0])
764
765                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
766             q = self.projectB.query(que)
767
768             # Reduce the query results to a list of version numbers
769             ql = [ i[0] for i in q.getresult() ]
770
771             # Try (1)
772             if source_version in ql:
773                 continue
774
775             # Try (2)
776             orig_source_version = re_bin_only_nmu.sub('', source_version)
777             if orig_source_version in ql:
778                 continue
779
780             # No source found...
781             okay = 0
782             break
783         return okay
784
785     ################################################################################
786
787     def in_override_p (self, package, component, suite, binary_type, file):
788         files = self.pkg.files
789
790         if binary_type == "": # must be source
791             file_type = "dsc"
792         else:
793             file_type = binary_type
794
795         # Override suite name; used for example with proposed-updates
796         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
797             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
798
799         # Avoid <undef> on unknown distributions
800         suite_id = database.get_suite_id(suite)
801         if suite_id == -1:
802             return None
803         component_id = database.get_component_id(component)
804         type_id = database.get_override_type_id(file_type)
805
806         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
807                            % (package, suite_id, component_id, type_id))
808         result = q.getresult()
809         # If checking for a source package fall back on the binary override type
810         if file_type == "dsc" and not result:
811             deb_type_id = database.get_override_type_id("deb")
812             udeb_type_id = database.get_override_type_id("udeb")
813             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
814                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
815             result = q.getresult()
816
817         # Remember the section and priority so we can check them later if appropriate
818         if result:
819             files[file]["override section"] = result[0][0]
820             files[file]["override priority"] = result[0][1]
821
822         return result
823
824     ################################################################################
825
826     def reject (self, str, prefix="Rejected: "):
827         if str:
828             # Unlike other rejects we add new lines first to avoid trailing
829             # new lines when this message is passed back up to a caller.
830             if self.reject_message:
831                 self.reject_message += "\n"
832             self.reject_message += prefix + str
833
834     ################################################################################
835
836     def get_anyversion(self, query_result, suite):
837         anyversion=None
838         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
839         for (v, s) in query_result:
840             if s in [ x.lower() for x in anysuite ]:
841                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
842                     anyversion=v
843         return anyversion
844
845     ################################################################################
846
    def cross_suite_version_check(self, query_result, file, new_version,
            sourceful=False):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        query_result is a sequence of (existing_version, suite_name)
        rows for the package; new_version is the incoming upload's
        version; file is only used in reject messages.  Violations are
        accumulated via self.reject(); nothing is returned.
        """

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                suite = entry[1]
                # Sourceful uploads must strictly increase the version
                # over anything in a must-be-newer-than suite.
                if suite in must_be_newer_than and sourceful and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                # Conflict with a must-be-older-than suite; see if a
                # Distribution-Version mapping lets us propagate the
                # upload elsewhere instead of rejecting outright.
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    cansave = 0
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
911     ################################################################################
912
913     def check_binary_against_db(self, file):
914         self.reject_message = ""
915         files = self.pkg.files
916
917         # Ensure version is sane
918         q = self.projectB.query("""
919 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
920                                      architecture a
921  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
922    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
923                                 % (files[file]["package"],
924                                    files[file]["architecture"]))
925         self.cross_suite_version_check(q.getresult(), file,
926             files[file]["version"], sourceful=False)
927
928         # Check for any existing copies of the file
929         q = self.projectB.query("""
930 SELECT b.id FROM binaries b, architecture a
931  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
932    AND a.id = b.architecture"""
933                                 % (files[file]["package"],
934                                    files[file]["version"],
935                                    files[file]["architecture"]))
936         if q.getresult():
937             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
938
939         return self.reject_message
940
941     ################################################################################
942
943     def check_source_against_db(self, file):
944         self.reject_message = ""
945         dsc = self.pkg.dsc
946
947         # Ensure version is sane
948         q = self.projectB.query("""
949 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
950  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
951         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
952             sourceful=True)
953
954         return self.reject_message
955
956     ################################################################################
957
958     # **WARNING**
959     # NB: this function can remove entries from the 'files' index [if
960     # the .orig.tar.gz is a duplicate of the one in the archive]; if
961     # you're iterating over 'files' and call this function as part of
962     # the loop, be sure to add a check to the top of the loop to
963     # ensure you haven't just tried to dereference the deleted entry.
964     # **WARNING**
965
966     def check_dsc_against_db(self, file):
967         self.reject_message = ""
968         files = self.pkg.files
969         dsc_files = self.pkg.dsc_files
970         legacy_source_untouchable = self.pkg.legacy_source_untouchable
971         self.pkg.orig_tar_gz = None
972
973         # Try and find all files mentioned in the .dsc.  This has
974         # to work harder to cope with the multiple possible
975         # locations of an .orig.tar.gz.
976         # The ordering on the select is needed to pick the newest orig
977         # when it exists in multiple places.
978         for dsc_file in dsc_files.keys():
979             found = None
980             if files.has_key(dsc_file):
981                 actual_md5 = files[dsc_file]["md5sum"]
982                 actual_size = int(files[dsc_file]["size"])
983                 found = "%s in incoming" % (dsc_file)
984                 # Check the file does not already exist in the archive
985                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
986                 ql = q.getresult()
987                 # Strip out anything that isn't '%s' or '/%s$'
988                 for i in ql:
989                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
990                         ql.remove(i)
991
992                 # "[dak] has not broken them.  [dak] has fixed a
993                 # brokenness.  Your crappy hack exploited a bug in
994                 # the old dinstall.
995                 #
996                 # "(Come on!  I thought it was always obvious that
997                 # one just doesn't release different files with
998                 # the same name and version.)"
999                 #                        -- ajk@ on d-devel@l.d.o
1000
1001                 if ql:
1002                     # Ignore exact matches for .orig.tar.gz
1003                     match = 0
1004                     if dsc_file.endswith(".orig.tar.gz"):
1005                         for i in ql:
1006                             if files.has_key(dsc_file) and \
1007                                int(files[dsc_file]["size"]) == int(i[0]) and \
1008                                files[dsc_file]["md5sum"] == i[1]:
1009                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1010                                 del files[dsc_file]
1011                                 self.pkg.orig_tar_gz = i[2] + i[3]
1012                                 match = 1
1013
1014                     if not match:
1015                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1016             elif dsc_file.endswith(".orig.tar.gz"):
1017                 # Check in the pool
1018                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1019                 ql = q.getresult()
1020                 # Strip out anything that isn't '%s' or '/%s$'
1021                 for i in ql:
1022                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1023                         ql.remove(i)
1024
1025                 if ql:
1026                     # Unfortunately, we may get more than one match here if,
1027                     # for example, the package was in potato but had an -sa
1028                     # upload in woody.  So we need to choose the right one.
1029
1030                     # default to something sane in case we don't match any or have only one
1031                     x = ql[0]
1032
1033                     if len(ql) > 1:
1034                         for i in ql:
1035                             old_file = i[0] + i[1]
1036                             old_file_fh = utils.open_file(old_file)
1037                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1038                             old_file_fh.close()
1039                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1040                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1041                                 x = i
1042                             else:
1043                                 legacy_source_untouchable[i[3]] = ""
1044
1045                     old_file = x[0] + x[1]
1046                     old_file_fh = utils.open_file(old_file)
1047                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1048                     old_file_fh.close()
1049                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1050                     found = old_file
1051                     suite_type = x[2]
1052                     # need this for updating dsc_files in install()
1053                     dsc_files[dsc_file]["files id"] = x[3]
1054                     # See install() in process-accepted...
1055                     self.pkg.orig_tar_id = x[3]
1056                     self.pkg.orig_tar_gz = old_file
1057                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1058                         self.pkg.orig_tar_location = "legacy"
1059                     else:
1060                         self.pkg.orig_tar_location = x[4]
1061                 else:
1062                     # Not there? Check the queue directories...
1063
1064                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1065                     # See process_it() in 'dak process-unchecked' for explanation of this
1066                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1067                     # ever make sense?
1068                     if os.path.exists(in_unchecked) and False:
1069                         return (self.reject_message, in_unchecked)
1070                     else:
1071                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1072                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1073                             if os.path.exists(in_otherdir):
1074                                 in_otherdir_fh = utils.open_file(in_otherdir)
1075                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1076                                 in_otherdir_fh.close()
1077                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1078                                 found = in_otherdir
1079                                 self.pkg.orig_tar_gz = in_otherdir
1080
1081                     if not found:
1082                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1083                         self.pkg.orig_tar_gz = -1
1084                         continue
1085             else:
1086                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1087                 continue
1088             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1089                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1090             if actual_size != int(dsc_files[dsc_file]["size"]):
1091                 self.reject("size for %s doesn't match %s." % (found, file))
1092
1093         return (self.reject_message, None)
1094
1095     def do_query(self, q):
1096         sys.stderr.write("query: \"%s\" ... " % (q))
1097         before = time.time()
1098         r = self.projectB.query(q)
1099         time_diff = time.time()-before
1100         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1101         return r