git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
ignore EPERM as a result of the chmod of a .dak file
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2
3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006  James Troup <james@nocrew.org>
5
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
10
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 # GNU General Public License for more details.
15
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
19
20 ###############################################################################
21
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
25 from dak_exceptions import *
26
27 from types import *
28
29 ###############################################################################
30
31 re_isanum = re.compile (r"^\d+$")
32 re_default_answer = re.compile(r"\[(.*)\]")
33 re_fdnic = re.compile(r"\n\n")
34 re_bin_only_nmu = re.compile(r"\+b\d+$")
35
36 ################################################################################
37
38 # Determine what parts in a .changes are NEW
39
40 def determine_new(changes, files, projectB, warn=1):
41     new = {}
42
43     # Build up a list of potentially new things
44     for file_entry in files.keys():
45         f = files[file_entry]
46         # Skip byhand elements
47         if f["type"] == "byhand":
48             continue
49         pkg = f["package"]
50         priority = f["priority"]
51         section = f["section"]
52         file_type = get_type(f)
53         component = f["component"]
54
55         if file_type == "dsc":
56             priority = "source"
57         if not new.has_key(pkg):
58             new[pkg] = {}
59             new[pkg]["priority"] = priority
60             new[pkg]["section"] = section
61             new[pkg]["type"] = file_type
62             new[pkg]["component"] = component
63             new[pkg]["files"] = []
64         else:
65             old_type = new[pkg]["type"]
66             if old_type != file_type:
67                 # source gets trumped by deb or udeb
68                 if old_type == "dsc":
69                     new[pkg]["priority"] = priority
70                     new[pkg]["section"] = section
71                     new[pkg]["type"] = file_type
72                     new[pkg]["component"] = component
73         new[pkg]["files"].append(file_entry)
74         if f.has_key("othercomponents"):
75             new[pkg]["othercomponents"] = f["othercomponents"]
76
77     for suite in changes["suite"].keys():
78         suite_id = database.get_suite_id(suite)
79         for pkg in new.keys():
80             component_id = database.get_component_id(new[pkg]["component"])
81             type_id = database.get_override_type_id(new[pkg]["type"])
82             q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
83             ql = q.getresult()
84             if ql:
85                 for file_entry in new[pkg]["files"]:
86                     if files[file_entry].has_key("new"):
87                         del files[file_entry]["new"]
88                 del new[pkg]
89
90     if warn:
91         if changes["suite"].has_key("stable"):
92             print "WARNING: overrides will be added for stable!"
93             if changes["suite"].has_key("oldstable"):
94                 print "WARNING: overrides will be added for OLDstable!"
95         for pkg in new.keys():
96             if new[pkg].has_key("othercomponents"):
97                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
98
99     return new
100
101 ################################################################################
102
def get_type(f):
    """Determine the override type ("dsc", or the file's "dbtype") for
    a file entry and validate it against the override type table.

    Aborts via utils.fubar() on an unknown file type or on a type the
    override database does not know about.
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Fixed: this used to reference 'file_type' before assignment,
        # raising a NameError instead of printing the offending type.
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = database.get_override_type_id(file_type)
    if type_id == -1:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
118
119 ################################################################################
120
121 # check if section/priority values are valid
122
def check_valid(new):
    """Look up and sanity-check section/priority for each NEW entry.

    Stores the database ids under "section id" / "priority id" in each
    entry of 'new'; an id of -1 marks an invalid combination (udebs
    outside debian-installer sections, non-dsc files with priority
    "source", and the respective inverses).
    """
    for entry in new.values():
        section = entry["section"]
        priority = entry["priority"]
        file_type = entry["type"]
        entry["section id"] = database.get_section_id(section)
        entry["priority id"] = database.get_priority_id(priority)
        # Sanity checks: debian-installer sections may only hold udebs
        # (plus their source), and udebs may only live there.
        is_di = "debian-installer" in section
        if (is_di and file_type not in ("udeb", "dsc")) or \
           (not is_di and file_type == "udeb"):
            entry["section id"] = -1
        # Priority is "source" if and only if this is a .dsc.
        if (priority == "source") != (file_type == "dsc"):
            entry["priority id"] = -1
137
138
139 ###############################################################################
140
141 # Convenience wrapper to carry around all the package information in
142
class Pkg:
    """Convenience attribute bag carrying all the package information.

    Every keyword argument given to the constructor (or later to
    update()) becomes an instance attribute of the same name.
    """
    def __init__(self, **kwds):
        for name, value in kwds.items():
            setattr(self, name, value)

    def update(self, **kwds):
        for name, value in kwds.items():
            setattr(self, name, value)
149
150 ###############################################################################
151
152 class Upload:
153
154     def __init__(self, Cnf):
155         self.Cnf = Cnf
156         self.accept_count = 0
157         self.accept_bytes = 0L
158         self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
159                        legacy_source_untouchable = {})
160
161         # Initialize the substitution template mapping global
162         Subst = self.Subst = {}
163         Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
164         Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
165         Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
166         Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
167
168         self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
169         database.init(Cnf, self.projectB)
170
171     ###########################################################################
172
173     def init_vars (self):
174         self.pkg.changes.clear()
175         self.pkg.dsc.clear()
176         self.pkg.files.clear()
177         self.pkg.dsc_files.clear()
178         self.pkg.legacy_source_untouchable.clear()
179         self.pkg.orig_tar_id = None
180         self.pkg.orig_tar_location = ""
181         self.pkg.orig_tar_gz = None
182
183     ###########################################################################
184
185     def update_vars (self):
186         dump_filename = self.pkg.changes_file[:-8]+".dak"
187         dump_file = utils.open_file(dump_filename)
188         p = cPickle.Unpickler(dump_file)
189
190         self.pkg.changes.update(p.load())
191         self.pkg.dsc.update(p.load())
192         self.pkg.files.update(p.load())
193         self.pkg.dsc_files.update(p.load())
194         self.pkg.legacy_source_untouchable.update(p.load())
195
196         self.pkg.orig_tar_id = p.load()
197         self.pkg.orig_tar_location = p.load()
198
199         dump_file.close()
200
201     ###########################################################################
202
203     # This could just dump the dictionaries as is, but I'd like to
204     # avoid this so there's some idea of what process-accepted &
205     # process-new use from process-unchecked
206
207     def dump_vars(self, dest_dir):
208
209         changes = self.pkg.changes
210         dsc = self.pkg.dsc
211         files = self.pkg.files
212         dsc_files = self.pkg.dsc_files
213         legacy_source_untouchable = self.pkg.legacy_source_untouchable
214         orig_tar_id = self.pkg.orig_tar_id
215         orig_tar_location = self.pkg.orig_tar_location
216
217         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
218         dump_file = utils.open_file(dump_filename, 'w')
219         try:
220             os.chmod(dump_filename, 0664)
221         except OSError, e:
222             # chmod may fail when the dumpfile is not owned by the user
223             # invoking dak (like e.g. when NEW is processed by a member
224             # of ftpteam)
225             if errno.errorcode[e.errno] == 'EPERM':
226                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
227                 # security precaution, should never happen unless a weird
228                 # umask is set anywhere
229                 if perms & stat.S_IWOTH:
230                     utils.fubar("%s is world writable and chmod failed." % \
231                         (dump_filename,))
232                 # ignore the failed chmod otherwise as the file should
233                 # already have the right privileges and is just, at worst,
234                 # unreadable for world
235             else:
236                 raise
237
238         p = cPickle.Pickler(dump_file, 1)
239         d_changes = {}
240         d_dsc = {}
241         d_files = {}
242         d_dsc_files = {}
243
244         ## files
245         for file_entry in files.keys():
246             d_files[file_entry] = {}
247             for i in [ "package", "version", "architecture", "type", "size",
248                        "md5sum", "sha1sum", "sha256sum", "component",
249                        "location id", "source package", "source version",
250                        "maintainer", "dbtype", "files id", "new",
251                        "section", "priority", "othercomponents",
252                        "pool name", "original component" ]:
253                 if files[file_entry].has_key(i):
254                     d_files[file_entry][i] = files[file_entry][i]
255         ## changes
256         # Mandatory changes fields
257         for i in [ "distribution", "source", "architecture", "version",
258                    "maintainer", "urgency", "fingerprint", "changedby822",
259                    "changedby2047", "changedbyname", "maintainer822",
260                    "maintainer2047", "maintainername", "maintaineremail",
261                    "closes", "changes" ]:
262             d_changes[i] = changes[i]
263         # Optional changes fields
264         for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version",
265                    "sponsoremail" ]:
266             if changes.has_key(i):
267                 d_changes[i] = changes[i]
268         ## dsc
269         for i in [ "source", "version", "maintainer", "fingerprint",
270                    "uploaders", "bts changelog", "dm-upload-allowed" ]:
271             if dsc.has_key(i):
272                 d_dsc[i] = dsc[i]
273         ## dsc_files
274         for file_entry in dsc_files.keys():
275             d_dsc_files[file_entry] = {}
276             # Mandatory dsc_files fields
277             for i in [ "size", "md5sum" ]:
278                 d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
279             # Optional dsc_files fields
280             for i in [ "files id" ]:
281                 if dsc_files[file_entry].has_key(i):
282                     d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
283
284         for i in [ d_changes, d_dsc, d_files, d_dsc_files,
285                    legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
286             p.dump(i)
287         dump_file.close()
288
289     ###########################################################################
290
291     # Set up the per-package template substitution mappings
292
    def update_subst (self, reject_message = ""):
        """Refresh the per-package entries of the template substitution
        map (self.Subst) from self.pkg.changes.

        Sets the __ARCHITECTURE__, __CHANGES_FILENAME__,
        __FILE_CONTENTS__, __MAINTAINER*__, __REJECT_MESSAGE__,
        __SOURCE__ and __VERSION__ keys.  Tolerates partially-parsed
        changes dictionaries (missing architecture / maintainer2047).
        """
        Subst = self.Subst
        changes = self.pkg.changes
        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
            changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not changes.has_key("maintainer2047"):
            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]

        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                     changes["maintainer2047"])
            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
        else:
            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")

        # NOTE: the order of the next three stanzas matters -- sponsor
        # and Bcc addresses are appended first, then a configured
        # override replaces the To/From wholesale.
        if "sponsoremail" in changes:
            Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]

        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if self.Cnf.get("Dinstall::OverrideMaintainer"):
            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]

        Subst["__REJECT_MESSAGE__"] = reject_message
        Subst["__SOURCE__"] = changes.get("source", "Unknown")
        Subst["__VERSION__"] = changes.get("version", "Unknown")
332
333     ###########################################################################
334
    def build_summaries(self):
        """Build the summary strings used in accept/announce mails.

        Returns a tuple (summary, short_summary).  short_summary lists
        the files (with byhand/new markers and pool destinations); the
        long summary additionally carries the changelog extract (for
        byhand/new uploads), the override entries and the announce
        information.  Also fills in files[...]["pool name"] as a side
        effect for non-byhand, non-new files.
        """
        changes = self.pkg.changes
        files = self.pkg.files

        byhand = summary = new = ""

        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
            changes["distribution"] = {}

        override_summary ="";
        file_keys = files.keys()
        file_keys.sort()
        for file_entry in file_keys:
            if files[file_entry].has_key("byhand"):
                byhand = 1
                summary += file_entry + " byhand\n"
            elif files[file_entry].has_key("new"):
                new = 1
                summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
                if files[file_entry].has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
                # For new binaries, quote the package description in the summary.
                if files[file_entry]["type"] == "deb":
                    deb_fh = utils.open_file(file_entry)
                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()
            else:
                # Neither byhand nor new: it goes to the pool.
                files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
                destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
                summary += file_entry + "\n  to " + destination + "\n"
                if not files[file_entry].has_key("type"):
                    files[file_entry]["type"] = "unknown"
                if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
                    override_sect = files[file_entry].get("override section", files[file_entry]["section"])
                    override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", changes.get("changes",""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
388
389     ###########################################################################
390
391     def close_bugs (self, summary, action):
392         changes = self.pkg.changes
393         Subst = self.Subst
394         Cnf = self.Cnf
395
396         bugs = changes["closes"].keys()
397
398         if not bugs:
399             return summary
400
401         bugs.sort()
402         summary += "Closing bugs: "
403         for bug in bugs:
404             summary += "%s " % (bug)
405             if action:
406                 Subst["__BUG_NUMBER__"] = bug
407                 if changes["distribution"].has_key("stable"):
408                     Subst["__STABLE_WARNING__"] = """
409 Note that this package is not part of the released stable Debian
410 distribution.  It may have dependencies on other unreleased software,
411 or other instabilities.  Please take care if you wish to install it.
412 The update will eventually make its way into the next released Debian
413 distribution."""
414                 else:
415                     Subst["__STABLE_WARNING__"] = ""
416                     mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
417                     utils.send_mail (mail_message)
418         if action:
419             self.Logger.log(["closing bugs"]+bugs)
420         summary += "\n"
421
422         return summary
423
424     ###########################################################################
425
426     def announce (self, short_summary, action):
427         Subst = self.Subst
428         Cnf = self.Cnf
429         changes = self.pkg.changes
430
431         # Only do announcements for source uploads with a recent dpkg-dev installed
432         if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
433             return ""
434
435         lists_done = {}
436         summary = ""
437         Subst["__SHORT_SUMMARY__"] = short_summary
438
439         for dist in changes["distribution"].keys():
440             announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
441             if announce_list == "" or lists_done.has_key(announce_list):
442                 continue
443             lists_done[announce_list] = 1
444             summary += "Announcing to %s\n" % (announce_list)
445
446             if action:
447                 Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
448                 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
449                     Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
450                 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
451                 utils.send_mail (mail_message)
452
453         if Cnf.FindB("Dinstall::CloseBugs"):
454             summary = self.close_bugs(summary, action)
455
456         return summary
457
458     ###########################################################################
459
    def accept (self, summary, short_summary):
        """Accept an upload: dump state, move the .changes and all its
        files into the accepted queue, send the accept mail / announce,
        write the BTS version-tracking files and hand the files to
        queue_build().

        'summary' and 'short_summary' are the strings produced by
        build_summaries().  Updates the accept_count/accept_bytes
        statistics as a side effect.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc

        print "Accepting."
        self.Logger.log(["Accepting changes",changes_file])

        # Persist state next to the .changes so process-accepted /
        # process-new can pick it up again.
        self.dump_vars(Cnf["Dir::Queue::Accepted"])

        # Move all the files into the accepted directory
        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
        file_keys = files.keys()
        for file_entry in file_keys:
            utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
            self.accept_bytes += float(files[file_entry]["size"])
        self.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not Cnf["Dinstall::Options::No-Mail"]:
            Subst["__SUITE__"] = ""
            Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)


        ## Helper stuff for DebBugs Version Tracking
        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if changes["architecture"].has_key("source") and \
               dsc.has_key("bts changelog"):

                # Write to a dot-prefixed temp file first, then rename,
                # so consumers never see a half-written file.
                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                    dotprefix=1, perms=0644)
                version_history = utils.open_file(temp_filename, 'w')
                version_history.write(dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                      changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)

            # Write out the binary -> source mapping.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644)
            debinfo = utils.open_file(temp_filename, 'w')
            for file_entry in file_keys:
                f = files[file_entry]
                if f["type"] == "deb":
                    line = " ".join([f["package"], f["version"],
                                     f["architecture"], f["source package"],
                                     f["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)

        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
527
528     ###########################################################################
529
    def queue_build (self, queue, path):
        """Make the upload's files available to the auto-builders.

        For every target suite listed in Dinstall::QueueBuildSuites,
        symlink (or, for security queues, copy) each file from 'path'
        into the queue-build directory and record it in the queue_build
        table.  If the upload references a pooled .orig.tar.gz, a
        symlink to it is created (or its existing entry refreshed).
        All DB work happens inside a single transaction.
        """
        Cnf = self.Cnf
        Subst = self.Subst
        files = self.pkg.files
        changes = self.pkg.changes
        changes_file = self.pkg.changes_file
        dsc = self.pkg.dsc
        file_keys = files.keys()

        ## Special support to enable clean auto-building of queued packages
        queue_id = database.get_or_set_queue_id(queue)

        self.projectB.query("BEGIN WORK")
        for suite in changes["distribution"].keys():
            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
                continue
            suite_id = database.get_suite_id(suite)
            dest_dir = Cnf["Dir::QueueBuild"]
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                dest_dir = os.path.join(dest_dir, suite)
            for file_entry in file_keys:
                src = os.path.join(path, file_entry)
                dest = os.path.join(dest_dir, file_entry)
                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                    # Copy it since the original won't be readable by www-data
                    utils.copy(src, dest)
                else:
                    # Create a symlink to it
                    os.symlink(src, dest)
                # Add it to the list of packages for later processing by apt-ftparchive
                # NOTE(review): 'dest' is interpolated straight into the
                # SQL; presumably safe as filenames come from checked
                # uploads -- verify against the callers.
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
            # If the .orig.tar.gz is in the pool, create a symlink to
            # it (if one doesn't already exist)
            if self.pkg.orig_tar_id:
                # Determine the .orig.tar.gz file name
                # NOTE(review): if no dsc_files entry ends in
                # .orig.tar.gz, 'filename' stays unbound and the line
                # below raises NameError; presumably orig_tar_id being
                # set guarantees one exists -- confirm.
                for dsc_file in self.pkg.dsc_files.keys():
                    if dsc_file.endswith(".orig.tar.gz"):
                        filename = dsc_file
                dest = os.path.join(dest_dir, filename)
                # If it doesn't exist, create a symlink
                if not os.path.exists(dest):
                    # Find the .orig.tar.gz in the pool
                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
                    ql = q.getresult()
                    if not ql:
                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
                    src = os.path.join(ql[0][0], ql[0][1])
                    os.symlink(src, dest)
                    # Add it to the list of packages for later processing by apt-ftparchive
                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
                # if it does, update things to ensure it's not removed prematurely
                else:
                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))

        self.projectB.query("COMMIT WORK")
585
586     ###########################################################################
587
588     def check_override (self):
589         Subst = self.Subst
590         changes = self.pkg.changes
591         files = self.pkg.files
592         Cnf = self.Cnf
593
594         # Abandon the check if:
595         #  a) it's a non-sourceful upload
596         #  b) override disparity checks have been disabled
597         #  c) we're not sending mail
598         if not changes["architecture"].has_key("source") or \
599            not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
600            Cnf["Dinstall::Options::No-Mail"]:
601             return
602
603         summary = ""
604         file_keys = files.keys()
605         file_keys.sort()
606         for file_entry in file_keys:
607             if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
608                 section = files[file_entry]["section"]
609                 override_section = files[file_entry]["override section"]
610                 if section.lower() != override_section.lower() and section != "-":
611                     summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
612                 priority = files[file_entry]["priority"]
613                 override_priority = files[file_entry]["override priority"]
614                 if priority != override_priority and priority != "-":
615                     summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
616
617         if summary == "":
618             return
619
620         Subst["__SUMMARY__"] = summary
621         mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
622         utils.send_mail(mail_message)
623
624     ###########################################################################
625
626     def force_reject (self, files):
627         """Forcefully move files from the current directory to the
628            reject directory.  If any file already exists in the reject
629            directory it will be moved to the morgue to make way for
630            the new file."""
631
632         Cnf = self.Cnf
633
634         for file_entry in files:
635             # Skip any files which don't exist or which we don't have permission to copy.
636             if os.access(file_entry,os.R_OK) == 0:
637                 continue
638             dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
639             try:
640                 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
641             except OSError, e:
642                 # File exists?  Let's try and move it to the morgue
643                 if errno.errorcode[e.errno] == 'EEXIST':
644                     morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
645                     try:
646                         morgue_file = utils.find_next_free(morgue_file)
647                     except NoFreeFilenameError:
648                         # Something's either gone badly Pete Tong, or
649                         # someone is trying to exploit us.
650                         utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
651                         return
652                     utils.move(dest_file, morgue_file, perms=0660)
653                     try:
654                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
655                     except OSError, e:
656                         # Likewise
657                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
658                         return
659                 else:
660                     raise
661             # If we got here, we own the destination file, so we can
662             # safely overwrite it.
663             utils.move(file_entry, dest_file, 1, perms=0660)
664             os.close(dest_fd)
665
666     ###########################################################################
667
    def do_reject (self, manual = 0, reject_message = ""):
        """Reject the current upload: move its files into the reject
        directory, write a <changes>.reason file there and (unless
        mail is disabled) send the rejection mail.

        manual         -- non-zero when a human is driving the rejection; if
                          no reject_message is given, $EDITOR is spawned so
                          one can be composed interactively.
        reject_message -- text explaining the rejection.

        Returns 1 if the operator abandoned the rejection, 0 on
        success; a 'Q'uit answer exits the whole program.
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            temp_filename = utils.temp_filename()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Re-edit until the operator stops answering 'E'dit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Keep prompting until a recognised single letter is given;
                # an empty answer takes the default from the [..] in the
                # prompt (i.e. 'R').
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        Cnf = self.Cnf
        Subst = self.Subst
        pkg = self.pkg

        # "<foo>.changes" -> "<foo>.reason" in the reject directory.
        reason_filename = pkg.changes_file[:-8] + ".reason"
        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename

        # Move all the files into the reject directory
        reject_files = pkg.files.keys() + [pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL makes the open fail if the file reappears between the
        # unlink above and here.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        if not manual:
            # Automatic rejection: the archive itself is the sender.
            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])

            Subst["__REJECTOR_ADDRESS__"] = user_email_address
            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not Cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.Logger.log(["rejected", pkg.changes_file])
        return 0
740
741     ################################################################################
742
743     # Ensure that source exists somewhere in the archive for the binary
744     # upload being processed.
745     #
746     # (1) exact match                      => 1.0-3
747     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
748
749     def source_exists (self, package, source_version, suites = ["any"]):
750         okay = 1
751         for suite in suites:
752             if suite == "any":
753                 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
754                     (package)
755             else:
756                 # source must exist in suite X, or in some other suite that's
757                 # mapped to X, recursively... silent-maps are counted too,
758                 # unreleased-maps aren't.
759                 maps = self.Cnf.ValueList("SuiteMappings")[:]
760                 maps.reverse()
761                 maps = [ m.split() for m in maps ]
762                 maps = [ (x[1], x[2]) for x in maps
763                                 if x[0] == "map" or x[0] == "silent-map" ]
764                 s = [suite]
765                 for x in maps:
766                     if x[1] in s and x[0] not in s:
767                         s.append(x[0])
768
769                 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
770             q = self.projectB.query(que)
771
772             # Reduce the query results to a list of version numbers
773             ql = [ i[0] for i in q.getresult() ]
774
775             # Try (1)
776             if source_version in ql:
777                 continue
778
779             # Try (2)
780             orig_source_version = re_bin_only_nmu.sub('', source_version)
781             if orig_source_version in ql:
782                 continue
783
784             # No source found...
785             okay = 0
786             break
787         return okay
788
789     ################################################################################
790
791     def in_override_p (self, package, component, suite, binary_type, file):
792         files = self.pkg.files
793
794         if binary_type == "": # must be source
795             file_type = "dsc"
796         else:
797             file_type = binary_type
798
799         # Override suite name; used for example with proposed-updates
800         if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
801             suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
802
803         # Avoid <undef> on unknown distributions
804         suite_id = database.get_suite_id(suite)
805         if suite_id == -1:
806             return None
807         component_id = database.get_component_id(component)
808         type_id = database.get_override_type_id(file_type)
809
810         q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
811                            % (package, suite_id, component_id, type_id))
812         result = q.getresult()
813         # If checking for a source package fall back on the binary override type
814         if file_type == "dsc" and not result:
815             deb_type_id = database.get_override_type_id("deb")
816             udeb_type_id = database.get_override_type_id("udeb")
817             q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
818                                % (package, suite_id, component_id, deb_type_id, udeb_type_id))
819             result = q.getresult()
820
821         # Remember the section and priority so we can check them later if appropriate
822         if result:
823             files[file]["override section"] = result[0][0]
824             files[file]["override priority"] = result[0][1]
825
826         return result
827
828     ################################################################################
829
830     def reject (self, str, prefix="Rejected: "):
831         if str:
832             # Unlike other rejects we add new lines first to avoid trailing
833             # new lines when this message is passed back up to a caller.
834             if self.reject_message:
835                 self.reject_message += "\n"
836             self.reject_message += prefix + str
837
838     ################################################################################
839
840     def get_anyversion(self, query_result, suite):
841         anyversion=None
842         anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
843         for (v, s) in query_result:
844             if s in [ x.lower() for x in anysuite ]:
845                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
846                     anyversion=v
847         return anyversion
848
849     ################################################################################
850
851     def cross_suite_version_check(self, query_result, file, new_version):
852         """Ensure versions are newer than existing packages in target
853         suites and that cross-suite version checking rules as
854         set out in the conf file are satisfied."""
855
856         # Check versions for each target suite
857         for target_suite in self.pkg.changes["distribution"].keys():
858             must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
859             must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
860             # Enforce "must be newer than target suite" even if conffile omits it
861             if target_suite not in must_be_newer_than:
862                 must_be_newer_than.append(target_suite)
863             for entry in query_result:
864                 existent_version = entry[0]
865                 suite = entry[1]
866                 if suite in must_be_newer_than and \
867                    apt_pkg.VersionCompare(new_version, existent_version) < 1:
868                     self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
869                 if suite in must_be_older_than and \
870                    apt_pkg.VersionCompare(new_version, existent_version) > -1:
871                     ch = self.pkg.changes
872                     cansave = 0
873                     if ch.get('distribution-version', {}).has_key(suite):
874                     # we really use the other suite, ignoring the conflicting one ...
875                         addsuite = ch["distribution-version"][suite]
876
877                         add_version = self.get_anyversion(query_result, addsuite)
878                         target_version = self.get_anyversion(query_result, target_suite)
879
880                         if not add_version:
881                             # not add_version can only happen if we map to a suite
882                             # that doesn't enhance the suite we're propup'ing from.
883                             # so "propup-ver x a b c; map a d" is a problem only if
884                             # d doesn't enhance a.
885                             #
886                             # i think we could always propagate in this case, rather
887                             # than complaining. either way, this isn't a REJECT issue
888                             #
889                             # And - we really should complain to the dorks who configured dak
890                             self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
891                             self.pkg.changes.setdefault("propdistribution", {})
892                             self.pkg.changes["propdistribution"][addsuite] = 1
893                             cansave = 1
894                         elif not target_version:
895                             # not targets_version is true when the package is NEW
896                             # we could just stick with the "...old version..." REJECT
897                             # for this, I think.
898                             self.reject("Won't propogate NEW packages.")
899                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
900                             # propogation would be redundant. no need to reject though.
901                             self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
902                             cansave = 1
903                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
904                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
905                             # propogate!!
906                             self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
907                             self.pkg.changes.setdefault("propdistribution", {})
908                             self.pkg.changes["propdistribution"][addsuite] = 1
909                             cansave = 1
910
911                     if not cansave:
912                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
913
914     ################################################################################
915
916     def check_binary_against_db(self, file):
917         self.reject_message = ""
918         files = self.pkg.files
919
920         # Ensure version is sane
921         q = self.projectB.query("""
922 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
923                                      architecture a
924  WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
925    AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
926                                 % (files[file]["package"],
927                                    files[file]["architecture"]))
928         self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
929
930         # Check for any existing copies of the file
931         q = self.projectB.query("""
932 SELECT b.id FROM binaries b, architecture a
933  WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
934    AND a.id = b.architecture"""
935                                 % (files[file]["package"],
936                                    files[file]["version"],
937                                    files[file]["architecture"]))
938         if q.getresult():
939             self.reject("%s: can not overwrite existing copy already in the archive." % (file))
940
941         return self.reject_message
942
943     ################################################################################
944
945     def check_source_against_db(self, file):
946         self.reject_message = ""
947         dsc = self.pkg.dsc
948
949         # Ensure version is sane
950         q = self.projectB.query("""
951 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
952  WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
953         self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
954
955         return self.reject_message
956
957     ################################################################################
958
959     # **WARNING**
960     # NB: this function can remove entries from the 'files' index [if
961     # the .orig.tar.gz is a duplicate of the one in the archive]; if
962     # you're iterating over 'files' and call this function as part of
963     # the loop, be sure to add a check to the top of the loop to
964     # ensure you haven't just tried to dereference the deleted entry.
965     # **WARNING**
966
967     def check_dsc_against_db(self, file):
968         self.reject_message = ""
969         files = self.pkg.files
970         dsc_files = self.pkg.dsc_files
971         legacy_source_untouchable = self.pkg.legacy_source_untouchable
972         self.pkg.orig_tar_gz = None
973
974         # Try and find all files mentioned in the .dsc.  This has
975         # to work harder to cope with the multiple possible
976         # locations of an .orig.tar.gz.
977         # The ordering on the select is needed to pick the newest orig
978         # when it exists in multiple places.
979         for dsc_file in dsc_files.keys():
980             found = None
981             if files.has_key(dsc_file):
982                 actual_md5 = files[dsc_file]["md5sum"]
983                 actual_size = int(files[dsc_file]["size"])
984                 found = "%s in incoming" % (dsc_file)
985                 # Check the file does not already exist in the archive
986                 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
987                 ql = q.getresult()
988                 # Strip out anything that isn't '%s' or '/%s$'
989                 for i in ql:
990                     if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
991                         ql.remove(i)
992
993                 # "[dak] has not broken them.  [dak] has fixed a
994                 # brokenness.  Your crappy hack exploited a bug in
995                 # the old dinstall.
996                 #
997                 # "(Come on!  I thought it was always obvious that
998                 # one just doesn't release different files with
999                 # the same name and version.)"
1000                 #                        -- ajk@ on d-devel@l.d.o
1001
1002                 if ql:
1003                     # Ignore exact matches for .orig.tar.gz
1004                     match = 0
1005                     if dsc_file.endswith(".orig.tar.gz"):
1006                         for i in ql:
1007                             if files.has_key(dsc_file) and \
1008                                int(files[dsc_file]["size"]) == int(i[0]) and \
1009                                files[dsc_file]["md5sum"] == i[1]:
1010                                 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
1011                                 del files[dsc_file]
1012                                 self.pkg.orig_tar_gz = i[2] + i[3]
1013                                 match = 1
1014
1015                     if not match:
1016                         self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
1017             elif dsc_file.endswith(".orig.tar.gz"):
1018                 # Check in the pool
1019                 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
1020                 ql = q.getresult()
1021                 # Strip out anything that isn't '%s' or '/%s$'
1022                 for i in ql:
1023                     if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
1024                         ql.remove(i)
1025
1026                 if ql:
1027                     # Unfortunately, we may get more than one match here if,
1028                     # for example, the package was in potato but had an -sa
1029                     # upload in woody.  So we need to choose the right one.
1030
1031                     x = ql[0]; # default to something sane in case we don't match any or have only one
1032
1033                     if len(ql) > 1:
1034                         for i in ql:
1035                             old_file = i[0] + i[1]
1036                             old_file_fh = utils.open_file(old_file)
1037                             actual_md5 = apt_pkg.md5sum(old_file_fh)
1038                             old_file_fh.close()
1039                             actual_size = os.stat(old_file)[stat.ST_SIZE]
1040                             if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1041                                 x = i
1042                             else:
1043                                 legacy_source_untouchable[i[3]] = ""
1044
1045                     old_file = x[0] + x[1]
1046                     old_file_fh = utils.open_file(old_file)
1047                     actual_md5 = apt_pkg.md5sum(old_file_fh)
1048                     old_file_fh.close()
1049                     actual_size = os.stat(old_file)[stat.ST_SIZE]
1050                     found = old_file
1051                     suite_type = x[2]
1052                     dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1053                     # See install() in process-accepted...
1054                     self.pkg.orig_tar_id = x[3]
1055                     self.pkg.orig_tar_gz = old_file
1056                     if suite_type == "legacy" or suite_type == "legacy-mixed":
1057                         self.pkg.orig_tar_location = "legacy"
1058                     else:
1059                         self.pkg.orig_tar_location = x[4]
1060                 else:
1061                     # Not there? Check the queue directories...
1062
1063                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1064                     # See process_it() in 'dak process-unchecked' for explanation of this
1065                     # in_unchecked check dropped by ajt 2007-08-28, how did that
1066                     # ever make sense?
1067                     if os.path.exists(in_unchecked) and False:
1068                         return (self.reject_message, in_unchecked)
1069                     else:
1070                         for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1071                             in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
1072                             if os.path.exists(in_otherdir):
1073                                 in_otherdir_fh = utils.open_file(in_otherdir)
1074                                 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1075                                 in_otherdir_fh.close()
1076                                 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1077                                 found = in_otherdir
1078                                 self.pkg.orig_tar_gz = in_otherdir
1079
1080                     if not found:
1081                         self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1082                         self.pkg.orig_tar_gz = -1
1083                         continue
1084             else:
1085                 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1086                 continue
1087             if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1088                 self.reject("md5sum for %s doesn't match %s." % (found, file))
1089             if actual_size != int(dsc_files[dsc_file]["size"]):
1090                 self.reject("size for %s doesn't match %s." % (found, file))
1091
1092         return (self.reject_message, None)
1093
1094     def do_query(self, q):
1095         sys.stderr.write("query: \"%s\" ... " % (q))
1096         before = time.time()
1097         r = self.projectB.query(q)
1098         time_diff = time.time()-before
1099         sys.stderr.write("took %.3f seconds.\n" % (time_diff))
1100         return r