]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
Move regexes into a module so we can keep track
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 # Queue utility functions for dak
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
6
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
21 ###############################################################################
22
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
27 from regexes import re_isanum, re_default_answer, re_fdnic, re_bin_only_nmu
28
29 from types import *
30
31 ###############################################################################
32
33 # Determine what parts in a .changes are NEW
34
def determine_new(changes, files, projectB, warn=1):
    """Determine what parts of a .changes upload are NEW, i.e. have no
    override entry in any of the upload's target suites.

    @changes:  parsed .changes mapping (must have a "suite" dict)
    @files:    per-file information mapping from the .changes
    @projectB: open pg database connection
    @warn:     if true, print warnings for (old)stable uploads and for
               packages already present in another component

    Returns a dict keyed by package name with the priority/section/type/
    component chosen for the override and the list of contributing files.
    Side effect: drops the "new" marker from files that turn out to have
    an override already.
    """
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Remove anything for which an override already exists in one of the
    # target suites.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            # NOTE(review): pkg is interpolated into the SQL unescaped —
            # presumably package names are validated upstream; confirm.
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                # Safe while looping: .keys() returned a snapshot list.
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            # NOTE(review): the oldstable warning only fires when stable is
            # also targeted — looks like an indentation bug; confirm.
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
95
96 ################################################################################
97
def get_type(f):
    """Return the override type ("deb", "udeb" or "dsc") for file entry f.

    Aborts via utils.fubar() if the type is unrecognised or missing from
    the override_type table.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: the original formatted 'file_type' here before it was
        # ever assigned, raising NameError instead of the intended message.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
113
114 ################################################################################
115
116 # check if section/priority values are valid
117
def check_valid(new):
    """Resolve section/priority ids for each candidate NEW package and
    mark invalid combinations by setting the id to -1."""
    for pkg in new.keys():
        entry = new[pkg]
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(priority)
        # Sanity checks: only udebs (and source) belong in a
        # debian-installer section, and nothing else may be a udeb.
        in_di = section.find("debian-installer") != -1
        if (in_di and file_type not in ("udeb", "dsc")) or (not in_di and file_type == "udeb"):
            entry["section id"] = -1
        # Priority "source" and type "dsc" must go hand in hand.
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
132
133
134 ###############################################################################
135
136 # Convenience wrapper to carry around all the package information in
137
class Pkg:
    """Attribute bag used to carry all per-upload package information.

    Every keyword argument becomes an instance attribute.
    """
    def __init__(self, **kwds):
        for name, value in kwds.items():
            setattr(self, name, value)

    def update(self, **kwds):
        """Add or overwrite attributes from keyword arguments."""
        for name, value in kwds.items():
            setattr(self, name, value)
144
145 ###############################################################################
146
147 class Upload:
148
149     def __init__(self, Cnf):
150         self.Cnf = Cnf
151         self.accept_count = 0
152         self.accept_bytes = 0L
153         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
154                        legacy_source_untouchable = {})
155
156         # Initialize the substitution template mapping global
157         Subst = self.Subst = {}
158         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
159         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
160         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
161         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
162
163         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
164         database.init(Cnf, self.projectB)
165
166     ###########################################################################
167
168     def init_vars (self):
169         self.pkg.changes.clear()
170         self.pkg.dsc.clear()
171         self.pkg.files.clear()
172         self.pkg.dsc_files.clear()
173         self.pkg.legacy_source_untouchable.clear()
174         self.pkg.orig_tar_id = None
175         self.pkg.orig_tar_location = ""
176         self.pkg.orig_tar_gz = None
177
178     ###########################################################################
179
180     def update_vars (self):
181         dump_filename = self.pkg.changes_file[:-8]+".dak"
182         dump_file = utils.open_file(dump_filename)
183         p = cPickle.Unpickler(dump_file)
184
185         self.pkg.changes.update(p.load())
186         self.pkg.dsc.update(p.load())
187         self.pkg.files.update(p.load())
188         self.pkg.dsc_files.update(p.load())
189         self.pkg.legacy_source_untouchable.update(p.load())
190
191         self.pkg.orig_tar_id = p.load()
192         self.pkg.orig_tar_location = p.load()
193
194         dump_file.close()
195
196     ###########################################################################
197
198     # This could just dump the dictionaries as is, but I'd like to
199     # avoid this so there's some idea of what process-accepted &
200     # process-new use from process-unchecked
201
    def dump_vars(self, dest_dir):
        """Serialise the upload's state to <changesfile>.dak in dest_dir.

        Only a whitelisted subset of each dictionary is written, so that
        process-accepted/process-new only see the fields they are meant
        to use from process-unchecked.  Read back by update_vars().

        @dest_dir: directory the .dak file is written into
        """

        changes = self.pkg.changes
        dsc = self.pkg.dsc
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        orig_tar_id = self.pkg.orig_tar_id
        orig_tar_location = self.pkg.orig_tar_location

        dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
        dump_file = utils.open_file(dump_filename, 'w')
        try:
            os.chmod(dump_filename, 0664)
        except OSError, e:
            # chmod may fail when the dumpfile is not owned by the user
            # invoking dak (like e.g. when NEW is processed by a member
            # of ftpteam)
            if errno.errorcode[e.errno] == 'EPERM':
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                # security precaution, should never happen unless a weird
                # umask is set anywhere
                if perms & stat.S_IWOTH:
                    utils.fubar("%s is world writable and chmod failed." % \
                        (dump_filename,))
                # ignore the failed chmod otherwise as the file should
                # already have the right privileges and is just, at worst,
                # unreadable for world
            else:
                raise

        # Protocol 1 pickler; the filtered copies below are what actually
        # get written.
        p = cPickle.Pickler(dump_file, 1)
        d_changes = {}
        d_dsc = {}
        d_files = {}
        d_dsc_files = {}

        ## files
        for file_entry in files.keys():
            d_files[file_entry] = {}
            for i in [ "package", "version", "architecture", "type", "size",
                       "md5sum", "sha1sum", "sha256sum", "component",
                       "location id", "source package", "source version",
                       "maintainer", "dbtype", "files id", "new",
                       "section", "priority", "othercomponents",
                       "pool name", "original component" ]:
                if files[file_entry].has_key(i):
                    d_files[file_entry][i] = files[file_entry][i]
        ## changes
        # Mandatory changes fields
        for i in [ "distribution", "source", "architecture", "version",
                   "maintainer", "urgency", "fingerprint", "changedby822",
                   "changedby2047", "changedbyname", "maintainer822",
                   "maintainer2047", "maintainername", "maintaineremail",
                   "closes", "changes" ]:
            d_changes[i] = changes[i]
        # Optional changes fields
        for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
                   "sponsoremail" ]:
            if changes.has_key(i):
                d_changes[i] = changes[i]
        ## dsc
        for i in [ "source", "version", "maintainer", "fingerprint",
                   "uploaders", "bts changelog", "dm-upload-allowed" ]:
            if dsc.has_key(i):
                d_dsc[i] = dsc[i]
        ## dsc_files
        for file_entry in dsc_files.keys():
            d_dsc_files[file_entry] = {}
            # Mandatory dsc_files fields
            for i in [ "size", "md5sum" ]:
                d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
            # Optional dsc_files fields
            for i in [ "files id" ]:
                if dsc_files[file_entry].has_key(i):
                    d_dsc_files[file_entry][i] = dsc_files[file_entry][i]

        # Dump order must match the load order in update_vars().
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
            p.dump(i)
        dump_file.close()
283
284     ###########################################################################
285
286     # Set up the per-package template substitution mappings
287
    def update_subst (self, reject_message = ""):
        """Refresh the per-package template substitution mappings in
        self.Subst from the current .changes data.

        @reject_message: text placed in __REJECT_MESSAGE__ (used by
        rejection mails; empty otherwise)
        """
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        # Sponsored upload: copy the sponsor on the mail as well.
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        # Bcc the package tracking system when one is configured.
        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
327
328     ###########################################################################
329
    def build_summaries(self):
        """Build human-readable summaries of the upload for use in mails.

        Returns a (summary, short_summary) tuple; short_summary lacks the
        changelog excerpt, override entries and announcement notes.
        Side effects: fills in files[...]["pool name"]/["type"] for normal
        files and reads the control file of NEW .debs for the description.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # Include the package description for NEW binaries.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Known file: report where it will end up in the pool.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # action=0: only collect the announcement summary, send nothing.
        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
383
384     ###########################################################################
385
386     def close_bugs (self, summary, action):
387         changes = self.pkg.changes
388         Subst = self.Subst
389         Cnf = self.Cnf
390
391         bugs = changes["closes"].keys()
392
393         if not bugs:
394             return summary
395
396         bugs.sort()
397         summary += "Closing bugs: "
398         for bug in bugs:
399             summary += "%s " % (bug)
400             if action:
401                 Subst["__BUG_NUMBER__"] = bug
402                 if changes["distribution"].has_key("stable"):
403                     Subst["__STABLE_WARNING__"] = """
404 Note that this package is not part of the released stable Debian
405 distribution.  It may have dependencies on other unreleased software,
406 or other instabilities.  Please take care if you wish to install it.
407 The update will eventually make its way into the next released Debian
408 distribution."""
409                 else:
410                     Subst["__STABLE_WARNING__"] = ""
411                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
412                     utils.send_mail (mail_message)
413         if action:
414             self.Logger.log(["closing bugs"]+bugs)
415         summary += "\n"
416
417         return summary
418
419     ###########################################################################
420
421     def announce (self, short_summary, action):
422         Subst = self.Subst
423         Cnf = self.Cnf
424         changes = self.pkg.changes
425
426         # Only do announcements for source uploads with a recent dpkg-dev installed
427         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
428             return ""
429
430         lists_done = {}
431         summary = ""
432         Subst["__SHORT_SUMMARY__"] = short_summary
433
434         for dist in changes["distribution"].keys():
435             announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
436             if announce_list == "" or lists_done.has_key(announce_list):
437                 continue
438             lists_done[announce_list] = 1
439             summary += "Announcing to %s\n" % (announce_list)
440
441             if action:
442                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
443                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
444                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
445                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
446                 utils.send_mail (mail_message)
447
448         if Cnf.FindB("Dinstall::CloseBugs"):
449             summary = self.close_bugs(summary, action)
450
451         return summary
452
453     ###########################################################################
454
455     def accept (self, summary, short_summary):
456         Cnf = self.Cnf
457         Subst = self.Subst
458         files = self.pkg.files
459         changes = self.pkg.changes
460         changes_file = self.pkg.changes_file
461         dsc = self.pkg.dsc
462
463         print "Accepting."
464         self.Logger.log(["Accepting changes",changes_file])
465
466         self.dump_vars(Cnf["Dir::Queue::Accepted"])
467
468         # Move all the files into the accepted directory
469         utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
470         file_keys = files.keys()
471         for file_entry in file_keys:
472             utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
473             self.accept_bytes += float(files[file_entry]["size"])
474         self.accept_count += 1
475
476         # Send accept mail, announce to lists, close bugs and check for
477         # override disparities
478         if not Cnf["Dinstall::Options::No-Mail"]:
479             Subst["__SUITE__"] = ""
480             Subst["__SUMMARY__"] = summary
481             mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
482             utils.send_mail(mail_message)
483             self.announce(short_summary, 1)
484
485
486         ## Helper stuff for DebBugs Version Tracking
487         if Cnf.Find("Dir::Queue::BTSVersionTrack"):
488             # ??? once queue/* is cleared on *.d.o and/or reprocessed
489             # the conditionalization on dsc["bts changelog"] should be
490             # dropped.
491
492             # Write out the version history from the changelog
493             if changes["architecture"].has_key("source") and \
494                dsc.has_key("bts changelog"):
495
496                 (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
497                 version_history = os.fdopen(temp_filename, 'w')
498                 version_history.write(dsc["bts changelog"])
499                 version_history.close()
500                 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
501                                       changes_file[:-8]+".versions")
502                 os.rename(temp_filename, filename)
503                 os.chmod(filename, "0644")
504
505             # Write out the binary -> source mapping.
506             (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
507             debinfo = os.fdopen(temp_filename, 'w')
508             for file_entry in file_keys:
509                 f = files[file_entry]
510                 if f["type"] == "deb":
511                     line = " ".join([f["package"], f["version"],
512                                      f["architecture"], f["source package"],
513                                      f["source version"]])
514                     debinfo.write(line+"\n")
515             debinfo.close()
516             filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
517                                   changes_file[:-8]+".debinfo")
518             os.rename(temp_filename, filename)
519             os.chmod(filename, "0644")
520
521         self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
522
523     ###########################################################################
524
    def queue_build (self, queue, path):
        """Make the upload available to the auto-builders by copying or
        symlinking its files into the queue-build area and registering
        them in the queue_build table.

        @queue: queue name (e.g. "accepted"); its id is looked up or
                created in the database
        @path:  directory the upload's files currently live in
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        # All DB changes for all suites go in one transaction.
        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): assumes dsc_files contains a .orig.tar.gz
                # entry whenever orig_tar_id is set; otherwise 'filename'
                # below would be unbound — confirm.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
580
581     ###########################################################################
582
583     def check_override (self):
584         Subst = self.Subst
585         changes = self.pkg.changes
586         files = self.pkg.files
587         Cnf = self.Cnf
588
589         # Abandon the check if:
590         #  a) it's a non-sourceful upload
591         #  b) override disparity checks have been disabled
592         #  c) we're not sending mail
593         if not changes["architecture"].has_key("source") or \
594            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
595            Cnf["Dinstall::Options::No-Mail"]:
596             return
597
598         summary = ""
599         file_keys = files.keys()
600         file_keys.sort()
601         for file_entry in file_keys:
602             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
603                 section = files[file_entry]["section"]
604                 override_section = files[file_entry]["override section"]
605                 if section.lower() != override_section.lower() and section != "-":
606                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
607                 priority = files[file_entry]["priority"]
608                 override_priority = files[file_entry]["override priority"]
609                 if priority != override_priority and priority != "-":
610                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
611
612         if summary == "":
613             return
614
615         Subst["__SUMMARY__"] = summary
616         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
617         utils.send_mail(mail_message)
618
619     ###########################################################################
620
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           @files: iterable of file names (relative to the current
           directory) to move into the reject directory."""

        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            try:
                # O_EXCL: atomically claim the destination name so a racing
                # process can't slip a file in underneath us.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry the claim now that the old file is out of the way.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
660
661     ###########################################################################
662
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files into the reject
        directory, write a <package>.reason file there and (unless
        disabled in the config) send a rejection mail.

        If 'manual' is set and no 'reject_message' was given, spawn
        $EDITOR (default vi) so the operator can compose one.  Returns
        1 if the operator abandoned the rejection, 0 on success; calls
        sys.exit(0) if the operator chose Quit.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop back into the editor for as long as the operator
            # answers [E]dit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until a valid choice is made; an empty
                # answer picks the default letter that re_default_answer
                # extracts from the prompt ("R").
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # <changes basename minus ".changes">.reason inside the reject dir
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL after the unlink guarantees we created the file ourselves.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: sender is the archive address and the
            # mail carries marker headers for filtering.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
735
736     ################################################################################
737
738     # Ensure that source exists somewhere in the archive for the binary
739     # upload being processed.
740     #
741     # (1) exact match                      => 1.0-3
742     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
743
744     def source_exists (self, package, source_version, suites = ["any"]):
745         okay = 1
746         for suite in suites:
747             if suite == "any":
748                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
749                     (package)
750             else:
751                 # source must exist in suite X, or in some other suite that's
752                 # mapped to X, recursively... silent-maps are counted too,
753                 # unreleased-maps aren't.
754                 maps = self.Cnf.ValueList("SuiteMappings")[:]
755                 maps.reverse()
756                 maps = [ m.split() for m in maps ]
757                 maps = [ (x[1], x[2]) for x in maps
758                                 if x[0] == "map" or x[0] == "silent-map" ]
759                 s = [suite]
760                 for x in maps:
761                     if x[1] in s and x[0] not in s:
762                         s.append(x[0])
763
764                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
765             q = self.projectB.query(que)
766
767             # Reduce the query results to a list of version numbers
768             ql = [ i[0] for i in q.getresult() ]
769
770             # Try (1)
771             if source_version in ql:
772                 continue
773
774             # Try (2)
775             orig_source_version = re_bin_only_nmu.sub('', source_version)
776             if orig_source_version in ql:
777                 continue
778
779             # No source found...
780             okay = 0
781             break
782         return okay
783
784     ################################################################################
785
786     def in_override_p (self, package, component, suite, binary_type, file):
787         files = self.pkg.files
788
789         if binary_type == "": # must be source
790             file_type = "dsc"
791         else:
792             file_type = binary_type
793
794         # Override suite name; used for example with proposed-updates
795         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
796             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
797
798         # Avoid <undef> on unknown distributions
799         suite_id = database.get_suite_id(suite)
800         if suite_id == -1:
801             return None
802         component_id = database.get_component_id(component)
803         type_id = database.get_override_type_id(file_type)
804
805         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
806                            % (package, suite_id, component_id, type_id))
807         result = q.getresult()
808         # If checking for a source package fall back on the binary override type
809         if file_type == "dsc" and not result:
810             deb_type_id = database.get_override_type_id("deb")
811             udeb_type_id = database.get_override_type_id("udeb")
812             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
813                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
814             result = q.getresult()
815
816         # Remember the section and priority so we can check them later if appropriate
817         if result:
818             files[file]["override section"] = result[0][0]
819             files[file]["override priority"] = result[0][1]
820
821         return result
822
823     ################################################################################
824
825     def reject (self, str, prefix="Rejected: "):
826         if str:
827             # Unlike other rejects we add new lines first to avoid trailing
828             # new lines when this message is passed back up to a caller.
829             if self.reject_message:
830                 self.reject_message += "\n"
831             self.reject_message += prefix + str
832
833     ################################################################################
834
835     def get_anyversion(self, query_result, suite):
836         anyversion=None
837         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
838         for (v, s) in query_result:
839             if s in [ x.lower() for x in anysuite ]:
840                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
841                     anyversion=v
842         return anyversion
843
844     ################################################################################
845
    def cross_suite_version_check(self, query_result, file, new_version,
            sourceful=False):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        'query_result' is a sequence of (version, suite_name) rows for
        the package being checked.  Rejections/warnings are accumulated
        via self.reject(); uploads that should be propagated to another
        suite are recorded in self.pkg.changes["propdistribution"].
        """

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                suite = entry[1]
                # Sourceful uploads must strictly increase the version in
                # every "must be newer than" suite.
                if suite in must_be_newer_than and sourceful and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                # Conversely, the new version may not be >= what a "must be
                # older than" suite already carries - unless a configured
                # distribution-version mapping lets us propagate instead.
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    cansave = 0
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
909
910     ################################################################################
911
912     def check_binary_against_db(self, file):
913         self.reject_message = ""
914         files = self.pkg.files
915
916         # Ensure version is sane
917         q = self.projectB.query("""
918 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
919                                      architecture a
920  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
921    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
922                                 % (files[file]["package"],
923                                    files[file]["architecture"]))
924         self.cross_suite_version_check(q.getresult(), file,
925             files[file]["version"], sourceful=False)
926
927         # Check for any existing copies of the file
928         q = self.projectB.query("""
929 SELECT b.id FROM binaries b, architecture a
930  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
931    AND a.id = b.architecture"""
932                                 % (files[file]["package"],
933                                    files[file]["version"],
934                                    files[file]["architecture"]))
935         if q.getresult():
936             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
937
938         return self.reject_message
939
940     ################################################################################
941
942     def check_source_against_db(self, file):
943         self.reject_message = ""
944         dsc = self.pkg.dsc
945
946         # Ensure version is sane
947         q = self.projectB.query("""
948 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
949  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
950         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
951             sourceful=True)
952
953         return self.reject_message
954
955     ################################################################################
956
957     # **WARNING**
958     # NB: this function can remove entries from the 'files' index [if
959     # the .orig.tar.gz is a duplicate of the one in the archive]; if
960     # you're iterating over 'files' and call this function as part of
961     # the loop, be sure to add a check to the top of the loop to
962     # ensure you haven't just tried to dereference the deleted entry.
963     # **WARNING**
964
965     def check_dsc_against_db(self, file):
966         self.reject_message = ""
967         files = self.pkg.files
968         dsc_files = self.pkg.dsc_files
969         legacy_source_untouchable = self.pkg.legacy_source_untouchable
970         self.pkg.orig_tar_gz = None
971
972         # Try and find all files mentioned in the .dsc.  This has
973         # to work harder to cope with the multiple possible
974         # locations of an .orig.tar.gz.
975         # The ordering on the select is needed to pick the newest orig
976         # when it exists in multiple places.
977         for dsc_file in dsc_files.keys():
978             found = None
979             if files.has_key(dsc_file):
980                 actual_md5 = files[dsc_file]["md5sum"]
981                 actual_size = int(files[dsc_file]["size"])
982                 found = "%s in incoming" % (dsc_file)
983                 # Check the file does not already exist in the archive
984                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
985                 ql = q.getresult()
986                 # Strip out anything that isn't '%s' or '/%s$'
987                 for i in ql:
988                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
989                         ql.remove(i)
990
991                 # "[dak] has not broken them.  [dak] has fixed a
992                 # brokenness.  Your crappy hack exploited a bug in
993                 # the old dinstall.
994                 #
995                 # "(Come on!  I thought it was always obvious that
996                 # one just doesn't release different files with
997                 # the same name and version.)"
998                 #                        -- ajk@ on d-devel@l.d.o
999
1000                 if ql:
1001                     # Ignore exact matches for .orig.tar.gz
1002                     match = 0
1003                     if dsc_file.endswith(".orig.tar.gz"):
1004                         for i in ql:
1005                             if files.has_key(dsc_file) and \
1006                                int(files[dsc_file]["size"]) == int(i[0]) and \
1007                                files[dsc_file]["md5sum"] == i[1]:
1008                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1009                                 del files[dsc_file]
1010                                 self.pkg.orig_tar_gz = i[2] + i[3]
1011                                 match = 1
1012
1013                     if not match:
1014                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1015             elif dsc_file.endswith(".orig.tar.gz"):
1016                 # Check in the pool
1017                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1018                 ql = q.getresult()
1019                 # Strip out anything that isn't '%s' or '/%s$'
1020                 for i in ql:
1021                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1022                         ql.remove(i)
1023
1024                 if ql:
1025                     # Unfortunately, we may get more than one match here if,
1026                     # for example, the package was in potato but had an -sa
1027                     # upload in woody.  So we need to choose the right one.
1028
1029                     x = ql[0]; # default to something sane in case we don't match any or have only one
1030
1031                     if len(ql) > 1:
1032                         for i in ql:
1033                             old_file = i[0] + i[1]
1034                             old_file_fh = utils.open_file(old_file)
1035                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1036                             old_file_fh.close()
1037                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1038                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1039                                 x = i
1040                             else:
1041                                 legacy_source_untouchable[i[3]] = ""
1042
1043                     old_file = x[0] + x[1]
1044                     old_file_fh = utils.open_file(old_file)
1045                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1046                     old_file_fh.close()
1047                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1048                     found = old_file
1049                     suite_type = x[2]
1050                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1051                     # See install() in process-accepted...
1052                     self.pkg.orig_tar_id = x[3]
1053                     self.pkg.orig_tar_gz = old_file
1054                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1055                         self.pkg.orig_tar_location = "legacy"
1056                     else:
1057                         self.pkg.orig_tar_location = x[4]
1058                 else:
1059                     # Not there? Check the queue directories...
1060
1061                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1062                     # See process_it() in 'dak process-unchecked' for explanation of this
1063                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1064                     # ever make sense?
1065                     if os.path.exists(in_unchecked) and False:
1066                         return (self.reject_message, in_unchecked)
1067                     else:
1068                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1069                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1070                             if os.path.exists(in_otherdir):
1071                                 in_otherdir_fh = utils.open_file(in_otherdir)
1072                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1073                                 in_otherdir_fh.close()
1074                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1075                                 found = in_otherdir
1076                                 self.pkg.orig_tar_gz = in_otherdir
1077
1078                     if not found:
1079                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1080                         self.pkg.orig_tar_gz = -1
1081                         continue
1082             else:
1083                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1084                 continue
1085             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1086                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1087             if actual_size != int(dsc_files[dsc_file]["size"]):
1088                 self.reject("size for %s doesn't match %s." % (found, file))
1089
1090         return (self.reject_message, None)
1091
1092     def do_query(self, q):
1093         sys.stderr.write("query: \"%s\" ... " % (q))
1094         before = time.time()
1095         r = self.projectB.query(q)
1096         time_diff = time.time()-before
1097         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1098         return r