]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
move generate_contents_information to utils.py
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 # Queue utility functions for dak
5 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
6
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
20
21 ###############################################################################
22
23 import cPickle, errno, os, pg, re, stat, sys, time
24 import apt_inst, apt_pkg
25 import utils, database
26 from dak_exceptions import *
27
28 from types import *
29
30 ###############################################################################
31
32 re_isanum = re.compile (r"^\d+$")
33 re_default_answer = re.compile(r"\[(.*)\]")
34 re_fdnic = re.compile(r"\n\n")
35 re_bin_only_nmu = re.compile(r"\+b\d+$")
36
37 ################################################################################
38
39 # Determine what parts in a .changes are NEW
40
def determine_new(changes, files, projectB, warn=1):
    """Return a dictionary (package name -> info) of upload parts that
    are NEW, i.e. lack an override entry in every target suite.

    changes  -- parsed .changes dictionary (only "suite" is consulted)
    files    -- per-file dictionaries keyed by filename (must provide
                package/priority/section/component and a type usable by
                get_type())
    projectB -- open pg database connection used for override lookups
    warn     -- if true, print warnings for (old)stable uploads and for
                packages already present in another component

    Side effect: removes the "new" marker from entries of ``files``
    whose package turns out to have an override already.
    """
    new = {}

    # Build up a list of potentially new things
    for file_entry in files.keys():
        f = files[file_entry]
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        # Source packages always carry the pseudo-priority "source".
        if file_type == "dsc":
            priority = "source"
        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            # A package may appear several times (dsc + debs); keep the
            # binary's metadata in preference to the source's.
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component
        new[pkg]["files"].append(file_entry)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop anything that already has an override in one of the target
    # suites -- those parts are not NEW after all.
    for suite in changes["suite"].keys():
        suite_id = database.get_suite_id(suite)
        # NOTE(review): pkg is interpolated straight into the SQL below.
        # Package names originate from an uploaded .changes file, so this
        # relies on earlier validation of the name -- consider escaping.
        for pkg in new.keys():
            component_id = database.get_component_id(new[pkg]["component"])
            type_id = database.get_override_type_id(new[pkg]["type"])
            q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
            ql = q.getresult()
            if ql:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                # Safe in Python 2: new.keys() above returned a list
                # snapshot, so deleting while looping is fine.
                del new[pkg]

    if warn:
        if changes["suite"].has_key("stable"):
            print "WARNING: overrides will be added for stable!"
            if changes["suite"].has_key("oldstable"):
                print "WARNING: overrides will be added for OLDstable!"
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
101
102 ################################################################################
103
def get_type(f):
    """Return the override type ("dsc", "deb" or "udeb") for the file
    entry f.

    An explicit "dbtype" wins; otherwise any source-related extension
    maps to "dsc".  Calls utils.fubar() (which exits) on an unknown
    file type or when the override type is missing from the database.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Bug fix: this branch used to interpolate file_type, which is
        # unassigned here, raising UnboundLocalError instead of the
        # intended error message.  Report the offending f["type"].
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
119
120 ################################################################################
121
122 # check if section/priority values are valid
123
def check_valid(new):
    """Resolve section/priority ids for each NEW entry, invalidating
    (setting to -1) any section/priority combination that is not
    allowed for the entry's file type."""
    for name in new.keys():
        entry = new[name]
        sect = entry["section"]
        prio = entry["priority"]
        ftype = entry["type"]
        entry["section id"] = database.get_section_id(sect)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # Sanity checks: debian-installer sections may only hold udebs
        # (or the source package itself), and udebs may not live in any
        # other section.
        is_di = "debian-installer" in sect
        bad_section = (is_di and ftype not in ("udeb", "dsc")) or \
                      (not is_di and ftype == "udeb")
        if bad_section:
            entry["section id"] = -1
        # "source" priority and .dsc files must go hand in hand.
        if (prio == "source") != (ftype == "dsc"):
            entry["priority id"] = -1
138
139
140 ###############################################################################
141
142 # Convenience wrapper to carry around all the package information in
143
class Pkg:
    """Convenience attribute bag carrying all the per-upload package
    information (changes, dsc, files, ...)."""

    def __init__(self, **kwds):
        # Delegate to update() so construction and later updates share
        # one code path.
        self.update(**kwds)

    def update(self, **kwds):
        """Set each keyword argument as an attribute on this object."""
        for key, value in kwds.items():
            setattr(self, key, value)
150
151 ###############################################################################
152
153 class Upload:
154
155     def __init__(self, Cnf):
156         self.Cnf = Cnf
157         self.accept_count = 0
158         self.accept_bytes = 0L
159         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
160                        legacy_source_untouchable = {})
161
162         # Initialize the substitution template mapping global
163         Subst = self.Subst = {}
164         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
165         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
166         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
167         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
168
169         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
170         database.init(Cnf, self.projectB)
171
172     ###########################################################################
173
174     def init_vars (self):
175         self.pkg.changes.clear()
176         self.pkg.dsc.clear()
177         self.pkg.files.clear()
178         self.pkg.dsc_files.clear()
179         self.pkg.legacy_source_untouchable.clear()
180         self.pkg.orig_tar_id = None
181         self.pkg.orig_tar_location = ""
182         self.pkg.orig_tar_gz = None
183
184     ###########################################################################
185
186     def update_vars (self):
187         dump_filename = self.pkg.changes_file[:-8]+".dak"
188         dump_file = utils.open_file(dump_filename)
189         p = cPickle.Unpickler(dump_file)
190
191         self.pkg.changes.update(p.load())
192         self.pkg.dsc.update(p.load())
193         self.pkg.files.update(p.load())
194         self.pkg.dsc_files.update(p.load())
195         self.pkg.legacy_source_untouchable.update(p.load())
196
197         self.pkg.orig_tar_id = p.load()
198         self.pkg.orig_tar_location = p.load()
199
200         dump_file.close()
201
202     ###########################################################################
203
204     # This could just dump the dictionaries as is, but I'd like to
205     # avoid this so there's some idea of what process-accepted &
206     # process-new use from process-unchecked
207
208     def dump_vars(self, dest_dir):
209
210         changes = self.pkg.changes
211         dsc = self.pkg.dsc
212         files = self.pkg.files
213         dsc_files = self.pkg.dsc_files
214         legacy_source_untouchable = self.pkg.legacy_source_untouchable
215         orig_tar_id = self.pkg.orig_tar_id
216         orig_tar_location = self.pkg.orig_tar_location
217
218         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
219         dump_file = utils.open_file(dump_filename, 'w')
220         try:
221             os.chmod(dump_filename, 0664)
222         except OSError, e:
223             # chmod may fail when the dumpfile is not owned by the user
224             # invoking dak (like e.g. when NEW is processed by a member
225             # of ftpteam)
226             if errno.errorcode[e.errno] == 'EPERM':
227                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
228                 # security precaution, should never happen unless a weird
229                 # umask is set anywhere
230                 if perms & stat.S_IWOTH:
231                     utils.fubar("%s is world writable and chmod failed." % \
232                         (dump_filename,))
233                 # ignore the failed chmod otherwise as the file should
234                 # already have the right privileges and is just, at worst,
235                 # unreadable for world
236             else:
237                 raise
238
239         p = cPickle.Pickler(dump_file, 1)
240         d_changes = {}
241         d_dsc = {}
242         d_files = {}
243         d_dsc_files = {}
244
245         ## files
246         for file_entry in files.keys():
247             d_files[file_entry] = {}
248             for i in [ "package", "version", "architecture", "type", "size",
249                        "md5sum", "sha1sum", "sha256sum", "component",
250                        "location id", "source package", "source version",
251                        "maintainer", "dbtype", "files id", "new",
252                        "section", "priority", "othercomponents",
253                        "pool name", "original component" ]:
254                 if files[file_entry].has_key(i):
255                     d_files[file_entry][i] = files[file_entry][i]
256         ## changes
257         # Mandatory changes fields
258         for i in [ "distribution", "source", "architecture", "version",
259                    "maintainer", "urgency", "fingerprint", "changedby822",
260                    "changedby2047", "changedbyname", "maintainer822",
261                    "maintainer2047", "maintainername", "maintaineremail",
262                    "closes", "changes" ]:
263             d_changes[i] = changes[i]
264         # Optional changes fields
265         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
266                    "sponsoremail" ]:
267             if changes.has_key(i):
268                 d_changes[i] = changes[i]
269         ## dsc
270         for i in [ "source", "version", "maintainer", "fingerprint",
271                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
272             if dsc.has_key(i):
273                 d_dsc[i] = dsc[i]
274         ## dsc_files
275         for file_entry in dsc_files.keys():
276             d_dsc_files[file_entry] = {}
277             # Mandatory dsc_files fields
278             for i in [ "size", "md5sum" ]:
279                 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
280             # Optional dsc_files fields
281             for i in [ "files id" ]:
282                 if dsc_files[file_entry].has_key(i):
283                     d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
284
285         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
286                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
287             p.dump(i)
288         dump_file.close()
289
290     ###########################################################################
291
292     # Set up the per-package template substitution mappings
293
294     def update_subst (self, reject_message = ""):
295         Subst = self.Subst
296         changes = self.pkg.changes
297         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
298         if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
299             changes["architecture"] = { "Unknown" : "" }
300         # and maintainer2047 may not exist.
301         if not changes.has_key("maintainer2047"):
302             changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
303
304         Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
305         Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
306         Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
307
308         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
309         if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
310             Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
311             Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
312                                                      changes["maintainer2047"])
313             Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
314         else:
315             Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
316             Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
317             Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
318
319         if "sponsoremail" in changes:
320             Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
321
322         if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
323             Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
324
325         # Apply any global override of the Maintainer field
326         if self.Cnf.get("Dinstall::OverrideMaintainer"):
327             Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
328             Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
329
330         Subst["__REJECT_MESSAGE__"] = reject_message
331         Subst["__SOURCE__"] = changes.get("source", "Unknown")
332         Subst["__VERSION__"] = changes.get("version", "Unknown")
333
334     ###########################################################################
335
    def build_summaries(self):
        """Build human-readable summaries of this upload.

        Returns a (summary, short_summary) tuple: short_summary lists
        each file with its destination / byhand / NEW status; summary
        additionally carries the changelog excerpt (for byhand/NEW
        uploads), the override entries and the announcement text.

        Side effects: fills in files[...]["pool name"], defaults a
        missing "type" to "unknown", and may reset a malformed
        changes["distribution"] to {}.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # Include the package description pulled straight from
                # the .deb's control file.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Neither byhand nor NEW: show where in the pool the
                # file will end up.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
389
390     ###########################################################################
391
392     def close_bugs (self, summary, action):
393         changes = self.pkg.changes
394         Subst = self.Subst
395         Cnf = self.Cnf
396
397         bugs = changes["closes"].keys()
398
399         if not bugs:
400             return summary
401
402         bugs.sort()
403         summary += "Closing bugs: "
404         for bug in bugs:
405             summary += "%s " % (bug)
406             if action:
407                 Subst["__BUG_NUMBER__"] = bug
408                 if changes["distribution"].has_key("stable"):
409                     Subst["__STABLE_WARNING__"] = """
410 Note that this package is not part of the released stable Debian
411 distribution.  It may have dependencies on other unreleased software,
412 or other instabilities.  Please take care if you wish to install it.
413 The update will eventually make its way into the next released Debian
414 distribution."""
415                 else:
416                     Subst["__STABLE_WARNING__"] = ""
417                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
418                     utils.send_mail (mail_message)
419         if action:
420             self.Logger.log(["closing bugs"]+bugs)
421         summary += "\n"
422
423         return summary
424
425     ###########################################################################
426
427     def announce (self, short_summary, action):
428         Subst = self.Subst
429         Cnf = self.Cnf
430         changes = self.pkg.changes
431
432         # Only do announcements for source uploads with a recent dpkg-dev installed
433         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
434             return ""
435
436         lists_done = {}
437         summary = ""
438         Subst["__SHORT_SUMMARY__"] = short_summary
439
440         for dist in changes["distribution"].keys():
441             announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
442             if announce_list == "" or lists_done.has_key(announce_list):
443                 continue
444             lists_done[announce_list] = 1
445             summary += "Announcing to %s\n" % (announce_list)
446
447             if action:
448                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
449                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
450                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
451                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
452                 utils.send_mail (mail_message)
453
454         if Cnf.FindB("Dinstall::CloseBugs"):
455             summary = self.close_bugs(summary, action)
456
457         return summary
458
459     ###########################################################################
460
    def accept (self, summary, short_summary):
        """Accept an upload.

        Moves the .changes and all its files into Dir::Queue::Accepted,
        updates the accept counters, sends the accepted mail and the
        announcements (unless No-Mail is set), writes the DebBugs
        version-tracking files and registers the files for the
        auto-build queues.

        summary, short_summary -- output of build_summaries().
        NOTE(review): relies on self.Logger being attached by the
        calling script before accept() is invoked.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Persist the upload state next to the moved files so that
        # process-accepted can pick it up later.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a temp file first, then rename into place so
                # readers never observe a partially-written file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
528
529     ###########################################################################
530
    def queue_build (self, queue, path):
        """Make the upload's files available to the auto-builders.

        queue -- queue name (e.g. "accepted"), looked up (and created
                 if necessary) in the queue table
        path  -- directory the upload's files currently live in

        For every distribution listed in Dinstall::QueueBuildSuites the
        files are copied (security setup) or symlinked into
        Dir::QueueBuild and recorded in the queue_build table for later
        processing by apt-ftparchive.  Runs inside a single database
        transaction.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): dest is interpolated straight into the
                # SQL; filenames come from the upload and are assumed to
                # have been validated earlier in processing.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): assumes dsc_files always has a
                # .orig.tar.gz entry whenever orig_tar_id is set --
                # otherwise `filename` below would be unbound.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
586
587     ###########################################################################
588
589     def check_override (self):
590         Subst = self.Subst
591         changes = self.pkg.changes
592         files = self.pkg.files
593         Cnf = self.Cnf
594
595         # Abandon the check if:
596         #  a) it's a non-sourceful upload
597         #  b) override disparity checks have been disabled
598         #  c) we're not sending mail
599         if not changes["architecture"].has_key("source") or \
600            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
601            Cnf["Dinstall::Options::No-Mail"]:
602             return
603
604         summary = ""
605         file_keys = files.keys()
606         file_keys.sort()
607         for file_entry in file_keys:
608             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
609                 section = files[file_entry]["section"]
610                 override_section = files[file_entry]["override section"]
611                 if section.lower() != override_section.lower() and section != "-":
612                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
613                 priority = files[file_entry]["priority"]
614                 override_priority = files[file_entry]["override priority"]
615                 if priority != override_priority and priority != "-":
616                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
617
618         if summary == "":
619             return
620
621         Subst["__SUMMARY__"] = summary
622         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
623         utils.send_mail(mail_message)
624
625     ###########################################################################
626
    def force_reject (self, files):
        """Forcefully move files from the current directory to the
           reject directory.  If any file already exists in the reject
           directory it will be moved to the morgue to make way for
           the new file.

           files -- iterable of filenames relative to the current
                    directory (note: shadows self.pkg.files here).
        """

        Cnf = self.Cnf

        for file_entry in files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry,os.R_OK) == 0:
                continue
            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
            try:
                # O_CREAT|O_EXCL claims the destination atomically, so a
                # concurrent process cannot race us for the same name.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                # NOTE(review): errno.errorcode[e.errno] raises KeyError
                # for errnos without a symbolic name; comparing
                # e.errno == errno.EEXIST directly would be safer.
                if errno.errorcode[e.errno] == 'EEXIST':
                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    # Retry the atomic claim now that the old file is
                    # out of the way.
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
666
667     ###########################################################################
668
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files to the reject
        directory, write a <changes-basename>.reason file there and
        (unless mail is disabled) send the rejection mail.

        If 'manual' is set and no 'reject_message' was supplied, spawn
        $EDITOR so the operator can compose one interactively.

        Returns 1 if the operator abandoned the rejection, 0 on
        success; calls sys.exit(0) if the operator chose Quit.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # 'E' (edit) loops back into the editor until the operator
            # settles on Reject, Abandon or Quit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Re-prompt until the reply is one of the letters in the prompt.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        # Empty input selects the bracketed default ('R').
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # "<name>.changes" -> "<name>.reason" inside the reject directory.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL ensures we created the file ourselves, so the open raises
        # instead of following anything planted between unlink and open.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: sent from the archive's own address,
            # and the .reason file holds just the raw rejection text.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
741
742     ################################################################################
743
744     # Ensure that source exists somewhere in the archive for the binary
745     # upload being processed.
746     #
747     # (1) exact match                      => 1.0-3
748     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
749
750     def source_exists (self, package, source_version, suites = ["any"]):
751         okay = 1
752         for suite in suites:
753             if suite == "any":
754                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
755                     (package)
756             else:
757                 # source must exist in suite X, or in some other suite that's
758                 # mapped to X, recursively... silent-maps are counted too,
759                 # unreleased-maps aren't.
760                 maps = self.Cnf.ValueList("SuiteMappings")[:]
761                 maps.reverse()
762                 maps = [ m.split() for m in maps ]
763                 maps = [ (x[1], x[2]) for x in maps
764                                 if x[0] == "map" or x[0] == "silent-map" ]
765                 s = [suite]
766                 for x in maps:
767                     if x[1] in s and x[0] not in s:
768                         s.append(x[0])
769
770                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
771             q = self.projectB.query(que)
772
773             # Reduce the query results to a list of version numbers
774             ql = [ i[0] for i in q.getresult() ]
775
776             # Try (1)
777             if source_version in ql:
778                 continue
779
780             # Try (2)
781             orig_source_version = re_bin_only_nmu.sub('', source_version)
782             if orig_source_version in ql:
783                 continue
784
785             # No source found...
786             okay = 0
787             break
788         return okay
789
790     ################################################################################
791
792     def in_override_p (self, package, component, suite, binary_type, file):
793         files = self.pkg.files
794
795         if binary_type == "": # must be source
796             file_type = "dsc"
797         else:
798             file_type = binary_type
799
800         # Override suite name; used for example with proposed-updates
801         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
802             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
803
804         # Avoid <undef> on unknown distributions
805         suite_id = database.get_suite_id(suite)
806         if suite_id == -1:
807             return None
808         component_id = database.get_component_id(component)
809         type_id = database.get_override_type_id(file_type)
810
811         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
812                            % (package, suite_id, component_id, type_id))
813         result = q.getresult()
814         # If checking for a source package fall back on the binary override type
815         if file_type == "dsc" and not result:
816             deb_type_id = database.get_override_type_id("deb")
817             udeb_type_id = database.get_override_type_id("udeb")
818             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
819                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
820             result = q.getresult()
821
822         # Remember the section and priority so we can check them later if appropriate
823         if result:
824             files[file]["override section"] = result[0][0]
825             files[file]["override priority"] = result[0][1]
826
827         return result
828
829     ################################################################################
830
831     def reject (self, str, prefix="Rejected: "):
832         if str:
833             # Unlike other rejects we add new lines first to avoid trailing
834             # new lines when this message is passed back up to a caller.
835             if self.reject_message:
836                 self.reject_message += "\n"
837             self.reject_message += prefix + str
838
839     ################################################################################
840
841     def get_anyversion(self, query_result, suite):
842         anyversion=None
843         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
844         for (v, s) in query_result:
845             if s in [ x.lower() for x in anysuite ]:
846                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
847                     anyversion=v
848         return anyversion
849
850     ################################################################################
851
    def cross_suite_version_check(self, query_result, file, new_version,
            sourceful=False):
        """Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        'query_result' is a sequence of (version, suite_name) rows for
        the package already in the archive; 'new_version' is the version
        being uploaded.  Violations are accumulated via self.reject().
        When a MustBeOlderThan rule is violated but a
        Distribution-Version mapping exists, the upload may instead be
        propagated to the mapped suite (recorded in
        self.pkg.changes["propdistribution"]).
        """

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)
            for entry in query_result:
                existent_version = entry[0]
                suite = entry[1]
                # "Must be newer" only applies to sourceful uploads; a
                # binary-only upload legitimately reuses the source version.
                if suite in must_be_newer_than and sourceful and \
                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and \
                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
                    ch = self.pkg.changes
                    cansave = 0
                    if ch.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = ch["distribution-version"][suite]

                        add_version = self.get_anyversion(query_result, addsuite)
                        target_version = self.get_anyversion(query_result, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW;
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.reject("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!!
                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
915
916     ################################################################################
917
918     def check_binary_against_db(self, file):
919         self.reject_message = ""
920         files = self.pkg.files
921
922         # Ensure version is sane
923         q = self.projectB.query("""
924 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
925                                      architecture a
926  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
927    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
928                                 % (files[file]["package"],
929                                    files[file]["architecture"]))
930         self.cross_suite_version_check(q.getresult(), file,
931             files[file]["version"], sourceful=False)
932
933         # Check for any existing copies of the file
934         q = self.projectB.query("""
935 SELECT b.id FROM binaries b, architecture a
936  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
937    AND a.id = b.architecture"""
938                                 % (files[file]["package"],
939                                    files[file]["version"],
940                                    files[file]["architecture"]))
941         if q.getresult():
942             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
943
944         return self.reject_message
945
946     ################################################################################
947
948     def check_source_against_db(self, file):
949         self.reject_message = ""
950         dsc = self.pkg.dsc
951
952         # Ensure version is sane
953         q = self.projectB.query("""
954 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
955  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
956         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
957             sourceful=True)
958
959         return self.reject_message
960
961     ################################################################################
962
963     # **WARNING**
964     # NB: this function can remove entries from the 'files' index [if
965     # the .orig.tar.gz is a duplicate of the one in the archive]; if
966     # you're iterating over 'files' and call this function as part of
967     # the loop, be sure to add a check to the top of the loop to
968     # ensure you haven't just tried to dereference the deleted entry.
969     # **WARNING**
970
971     def check_dsc_against_db(self, file):
972         self.reject_message = ""
973         files = self.pkg.files
974         dsc_files = self.pkg.dsc_files
975         legacy_source_untouchable = self.pkg.legacy_source_untouchable
976         self.pkg.orig_tar_gz = None
977
978         # Try and find all files mentioned in the .dsc.  This has
979         # to work harder to cope with the multiple possible
980         # locations of an .orig.tar.gz.
981         # The ordering on the select is needed to pick the newest orig
982         # when it exists in multiple places.
983         for dsc_file in dsc_files.keys():
984             found = None
985             if files.has_key(dsc_file):
986                 actual_md5 = files[dsc_file]["md5sum"]
987                 actual_size = int(files[dsc_file]["size"])
988                 found = "%s in incoming" % (dsc_file)
989                 # Check the file does not already exist in the archive
990                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
991                 ql = q.getresult()
992                 # Strip out anything that isn't '%s' or '/%s$'
993                 for i in ql:
994                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
995                         ql.remove(i)
996
997                 # "[dak] has not broken them.  [dak] has fixed a
998                 # brokenness.  Your crappy hack exploited a bug in
999                 # the old dinstall.
1000                 #
1001                 # "(Come on!  I thought it was always obvious that
1002                 # one just doesn't release different files with
1003                 # the same name and version.)"
1004                 #                        -- ajk@ on d-devel@l.d.o
1005
1006                 if ql:
1007                     # Ignore exact matches for .orig.tar.gz
1008                     match = 0
1009                     if dsc_file.endswith(".orig.tar.gz"):
1010                         for i in ql:
1011                             if files.has_key(dsc_file) and \
1012                                int(files[dsc_file]["size"]) == int(i[0]) and \
1013                                files[dsc_file]["md5sum"] == i[1]:
1014                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1015                                 del files[dsc_file]
1016                                 self.pkg.orig_tar_gz = i[2] + i[3]
1017                                 match = 1
1018
1019                     if not match:
1020                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1021             elif dsc_file.endswith(".orig.tar.gz"):
1022                 # Check in the pool
1023                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1024                 ql = q.getresult()
1025                 # Strip out anything that isn't '%s' or '/%s$'
1026                 for i in ql:
1027                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1028                         ql.remove(i)
1029
1030                 if ql:
1031                     # Unfortunately, we may get more than one match here if,
1032                     # for example, the package was in potato but had an -sa
1033                     # upload in woody.  So we need to choose the right one.
1034
1035                     x = ql[0]; # default to something sane in case we don't match any or have only one
1036
1037                     if len(ql) > 1:
1038                         for i in ql:
1039                             old_file = i[0] + i[1]
1040                             old_file_fh = utils.open_file(old_file)
1041                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1042                             old_file_fh.close()
1043                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1044                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1045                                 x = i
1046                             else:
1047                                 legacy_source_untouchable[i[3]] = ""
1048
1049                     old_file = x[0] + x[1]
1050                     old_file_fh = utils.open_file(old_file)
1051                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1052                     old_file_fh.close()
1053                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1054                     found = old_file
1055                     suite_type = x[2]
1056                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1057                     # See install() in process-accepted...
1058                     self.pkg.orig_tar_id = x[3]
1059                     self.pkg.orig_tar_gz = old_file
1060                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1061                         self.pkg.orig_tar_location = "legacy"
1062                     else:
1063                         self.pkg.orig_tar_location = x[4]
1064                 else:
1065                     # Not there? Check the queue directories...
1066
1067                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1068                     # See process_it() in 'dak process-unchecked' for explanation of this
1069                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1070                     # ever make sense?
1071                     if os.path.exists(in_unchecked) and False:
1072                         return (self.reject_message, in_unchecked)
1073                     else:
1074                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1075                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1076                             if os.path.exists(in_otherdir):
1077                                 in_otherdir_fh = utils.open_file(in_otherdir)
1078                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1079                                 in_otherdir_fh.close()
1080                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1081                                 found = in_otherdir
1082                                 self.pkg.orig_tar_gz = in_otherdir
1083
1084                     if not found:
1085                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1086                         self.pkg.orig_tar_gz = -1
1087                         continue
1088             else:
1089                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1090                 continue
1091             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1092                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1093             if actual_size != int(dsc_files[dsc_file]["size"]):
1094                 self.reject("size for %s doesn't match %s." % (found, file))
1095
1096         return (self.reject_message, None)
1097
1098     def do_query(self, q):
1099         sys.stderr.write("query: \"%s\" ... " % (q))
1100         before = time.time()
1101         r = self.projectB.query(q)
1102         time_diff = time.time()-before
1103         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1104         return r