]> git.decadent.org.uk Git - dak.git/blob - daklib/changes.py
Merge commit 'lamby/master' into merge
[dak.git] / daklib / changes.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Changes class for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @copyright: 2009  Mark Hymers <mhy@debian.org>
11 @license: GNU General Public License version 2 or later
12 """
13
14 # This program is free software; you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation; either version 2 of the License, or
17 # (at your option) any later version.
18
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
22 # GNU General Public License for more details.
23
24 # You should have received a copy of the GNU General Public License
25 # along with this program; if not, write to the Free Software
26 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
27
28 ###############################################################################
29
30 import os
31 import stat
32 import time
33
34 import datetime
35 from cPickle import Unpickler, Pickler
36 from errno import EPERM
37
38 from apt_inst import debExtractControl
39 from apt_pkg import ParseSection
40
41 from utils import open_file, fubar, poolify
42 from config import *
43 from dbconn import *
44
45 ###############################################################################
46
# Names exported by this module; extended below as each public constant
# (and, at the bottom of the file, the Changes class) is defined.
__all__ = []

###############################################################################

# Fields that must be present in every serialised .changes dictionary.
CHANGESFIELDS_MANDATORY = [
    "distribution", "source", "architecture", "version", "maintainer",
    "urgency", "fingerprint", "changedby822", "changedby2047",
    "changedbyname", "maintainer822", "maintainer2047", "maintainername",
    "maintaineremail", "closes", "changes",
]
__all__.append('CHANGESFIELDS_MANDATORY')

# Fields that may optionally appear in a .changes dictionaryary dump.
CHANGESFIELDS_OPTIONAL = [
    "changed-by", "filecontents", "format", "process-new note", "adv id",
    "distribution-version", "sponsoremail",
]
__all__.append('CHANGESFIELDS_OPTIONAL')

# Recognised per-file metadata keys for entries in Changes.files.
CHANGESFIELDS_FILES = [
    "package", "version", "architecture", "type", "size", "md5sum",
    "sha1sum", "sha256sum", "component", "location id", "source package",
    "source version", "maintainer", "dbtype", "files id", "new", "section",
    "priority", "othercomponents", "pool name", "original component",
]
__all__.append('CHANGESFIELDS_FILES')

# Recognised fields for the .dsc dictionary.
CHANGESFIELDS_DSC = [
    "source", "version", "maintainer", "fingerprint", "uploaders",
    "bts changelog", "dm-upload-allowed",
]
__all__.append('CHANGESFIELDS_DSC')

# Mandatory and optional keys for each entry in Changes.dsc_files.
CHANGESFIELDS_DSCFILES_MANDATORY = ["size", "md5sum"]
__all__.append('CHANGESFIELDS_DSCFILES_MANDATORY')

CHANGESFIELDS_DSCFILES_OPTIONAL = ["files id"]
__all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')

# Recognised keys for each entry in Changes.orig_files.
CHANGESFIELDS_ORIGFILES = ["id", "location"]
__all__.append('CHANGESFIELDS_ORIGFILES')
87
88 ###############################################################################
89
class Changes(object):
    """ Convenience wrapper to carry around all the package information """

    def __init__(self, **kwds):
        # kwds is accepted for call-site compatibility but not used; all
        # state is initialised by reset().
        self.reset()

    def reset(self):
        """Reset every attribute to its pristine, empty state."""
        # Name of the .changes file this object describes.
        self.changes_file = ""

        self.changes = {}      # fields parsed from the .changes file
        self.dsc = {}          # fields parsed from the .dsc (if any)
        self.files = {}        # per-file metadata, keyed by filename
        self.dsc_files = {}    # files listed in the .dsc, keyed by filename
        self.orig_files = {}   # original tarballs, keyed by filename

    def file_summary(self):
        """
        Build human-readable summaries of the files in this upload.

        @rtype: tuple
        @return: (byhand, new, summary, override_summary); the two booleans
                 flag the presence of byhand/NEW files, the two strings
                 describe each file and its override information.
        """
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if "distribution" not in self.changes or not \
               isinstance(self.changes["distribution"], dict):
            self.changes["distribution"] = {}

        byhand = False
        new = False
        summary = ""
        override_summary = ""

        for name, entry in sorted(self.files.items()):
            if "byhand" in entry:
                byhand = True
                summary += name + " byhand\n"

            elif "new" in entry:
                new = True
                summary += "(new) %s %s %s\n" % (name, entry["priority"], entry["section"])

                if "othercomponents" in entry:
                    summary += "WARNING: Already present in %s distribution.\n" % (entry["othercomponents"])

                if entry["type"] == "deb":
                    # Include the package description straight from the
                    # .deb's control file in the NEW summary.
                    deb_fh = open_file(name)
                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()

            else:
                entry["pool name"] = poolify(self.changes.get("source", ""), entry["component"])
                destination = entry["pool name"] + name
                summary += name + "\n  to " + destination + "\n"

                if "type" not in entry:
                    entry["type"] = "unknown"

                if entry["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = entry.get("override priority", entry["priority"])
                    override_sect = entry.get("override section", entry["section"])
                    override_summary += "%s - %s %s\n" % (name, override_prio, override_sect)

        return (byhand, new, summary, override_summary)

    def check_override(self):
        """
        Checks override entries for validity.

        Returns an empty string if there are no problems
        or the text of a warning if there are
        """

        summary = ""

        # Abandon the check if it's a non-sourceful upload
        if "source" not in self.changes["architecture"]:
            return summary

        for name, entry in sorted(self.files.items()):
            # Only already-known binary packages carry override data to check.
            if "new" not in entry and entry["type"] == "deb":
                if entry["section"] != "-":
                    if entry["section"].lower() != entry["override section"].lower():
                        summary += "%s: package says section is %s, override says %s.\n" % (name,
                                                                                            entry["section"],
                                                                                            entry["override section"])

                if entry["priority"] != "-":
                    if entry["priority"] != entry["override priority"]:
                        summary += "%s: package says priority is %s, override says %s.\n" % (name,
                                                                                             entry["priority"],
                                                                                             entry["override priority"])

        return summary

    def remove_known_changes(self, session=None):
        """
        Delete this upload's row from the known_changes table.

        @type session: SQLAlchemy session or None
        @param session: optional existing session; when None a private one
                        is opened and committed/closed here.
        """
        # Bug fix: privatetrans was previously never initialised, so passing
        # in an existing session raised a NameError at the check below.
        privatetrans = False
        if session is None:
            session = DBConn().session()
            privatetrans = True

        session.delete(get_knownchange(self.changes_file, session))

        if privatetrans:
            session.commit()
            session.close()


    def mark_missing_fields(self):
        """add "missing" in fields which we will require for the known_changes table"""
        for key in ['urgency', 'maintainer', 'fingerprint', 'changedby' ]:
            if key not in self.changes or not self.changes[key]:
                self.changes[key] = 'missing'

    def add_known_changes(self, dirpath, session=None):
        """
        Insert a row for this upload into the known_changes table.

        @type dirpath: string
        @param dirpath: directory the .changes file lives in

        @type session: SQLAlchemy session or None
        @param session: optional existing session; when None a private one
                        is opened and committed/closed here.
        """
        # NOTE(review): cnf is never used below; Config() is kept only in
        # case its instantiation has a required side effect — TODO confirm
        # it can be dropped.
        cnf = Config()
        privatetrans = False
        if session is None:
            session = DBConn().session()
            privatetrans = True

        changesfile = os.path.join(dirpath, self.changes_file)
        filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))

        # Make sure the fields the INSERT below relies on are populated.
        self.mark_missing_fields()

        session.execute(
            """INSERT INTO known_changes
              (changesname, seen, source, binaries, architecture, version,
              distribution, urgency, maintainer, fingerprint, changedby, date)
              VALUES (:changesfile,:filetime,:source,:binary, :architecture,
              :version,:distribution,:urgency,:maintainer,:fingerprint,:changedby,:date)""",
              { 'changesfile':changesfile,
                'filetime':filetime,
                'source':self.changes["source"],
                'binary':self.changes["binary"],
                'architecture':self.changes["architecture"],
                'version':self.changes["version"],
                'distribution':self.changes["distribution"],
                'urgency':self.changes["urgency"],
                'maintainer':self.changes["maintainer"],
                'fingerprint':self.changes["fingerprint"],
                'changedby':self.changes["changed-by"],
                'date':self.changes["date"]} )

        if privatetrans:
            session.commit()
            session.close()

    def load_dot_dak(self, changesfile):
        """
        Update ourself by reading a previously created cPickle .dak dumpfile.

        @type changesfile: string
        @param changesfile: path of the .changes file; the dump is expected
                            beside it with a .dak extension.

        @note: unpickling is only safe on trusted data; .dak files are
               written by dak itself (see write_dot_dak).
        """

        self.changes_file = changesfile
        # foo.changes -> foo.dak
        dump_filename = self.changes_file[:-8]+".dak"
        dump_file = open_file(dump_filename)

        p = Unpickler(dump_file)

        # The dump is a fixed sequence of pickled dictionaries, read back
        # in the order write_dot_dak() emits them.
        self.changes.update(p.load())
        self.dsc.update(p.load())
        self.files.update(p.load())
        self.dsc_files.update(p.load())

        next_obj = p.load()
        if isinstance(next_obj, dict):
            self.orig_files.update(next_obj)
        else:
            # Auto-convert old dak files to new format supporting
            # multiple tarballs
            orig_tar_gz = None
            for dsc_file in self.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
                    orig_tar_gz = dsc_file
            # NOTE(review): if no .orig.tar.gz is listed, orig_tar_gz stays
            # None and is used as a dict key below — TODO confirm intended.
            self.orig_files[orig_tar_gz] = {}
            if next_obj is not None:
                self.orig_files[orig_tar_gz]["id"] = next_obj
            next_obj = p.load()
            if next_obj is not None and next_obj != "":
                self.orig_files[orig_tar_gz]["location"] = next_obj
            if len(self.orig_files[orig_tar_gz]) == 0:
                del self.orig_files[orig_tar_gz]

        dump_file.close()

    def sanitised_files(self):
        """Return self.files reduced to the recognised per-file keys."""
        ret = {}
        for name, entry in self.files.items():
            ret[name] = {}
            for i in CHANGESFIELDS_FILES:
                if i in entry:
                    ret[name][i] = entry[i]

        return ret

    def sanitised_changes(self):
        """Return self.changes reduced to the recognised keys.

        Raises KeyError if a mandatory field is absent.
        """
        ret = {}
        # Mandatory changes fields
        for i in CHANGESFIELDS_MANDATORY:
            ret[i] = self.changes[i]

        # Optional changes fields
        for i in CHANGESFIELDS_OPTIONAL:
            if i in self.changes:
                ret[i] = self.changes[i]

        return ret

    def sanitised_dsc(self):
        """Return self.dsc reduced to the recognised keys."""
        ret = {}
        for i in CHANGESFIELDS_DSC:
            if i in self.dsc:
                ret[i] = self.dsc[i]

        return ret

    def sanitised_dsc_files(self):
        """Return self.dsc_files reduced to the recognised per-file keys.

        Raises KeyError if a mandatory field is absent from an entry.
        """
        ret = {}
        for name, entry in self.dsc_files.items():
            ret[name] = {}
            # Mandatory dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                ret[name][i] = entry[i]

            # Optional dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                if i in entry:
                    ret[name][i] = entry[i]

        return ret

    def sanitised_orig_files(self):
        """Return self.orig_files reduced to the recognised per-file keys."""
        ret = {}
        for name, entry in self.orig_files.items():
            ret[name] = {}
            # Optional orig_files fields
            for i in CHANGESFIELDS_ORIGFILES:
                if i in entry:
                    ret[name][i] = entry[i]

        return ret

    def write_dot_dak(self, dest_dir):
        """
        Dump ourself into a cPickle file.

        @type dest_dir: string
        @param dest_dir: Path where the dumpfile should be stored

        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
               there's some idea of what process-accepted & process-new use from
               process-unchecked. (JT)

        """

        # foo.changes -> foo.dak
        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
        dump_file = open_file(dump_filename, 'w')

        try:
            os.chmod(dump_filename, 0o664)
        except OSError as e:
            # chmod may fail when the dumpfile is not owned by the user
            # invoking dak (like e.g. when NEW is processed by a member
            # of ftpteam)
            if e.errno == EPERM:
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                # security precaution, should never happen unless a weird
                # umask is set anywhere
                if perms & stat.S_IWOTH:
                    fubar("%s is world writable and chmod failed." % \
                        (dump_filename,))
                # ignore the failed chmod otherwise as the file should
                # already have the right privileges and is just, at worst,
                # unreadable for world
            else:
                raise

        p = Pickler(dump_file, 1)

        # Keep this order in sync with load_dot_dak().
        p.dump(self.sanitised_changes())
        p.dump(self.sanitised_dsc())
        p.dump(self.sanitised_files())
        p.dump(self.sanitised_dsc_files())
        p.dump(self.sanitised_orig_files())

        dump_file.close()

    def unknown_files_fields(self, name):
        """Return the sorted keys of files[name] not in CHANGESFIELDS_FILES."""
        return sorted(list( set(self.files[name].keys()) -
                            set(CHANGESFIELDS_FILES)))

    def unknown_changes_fields(self):
        """Return the sorted keys of self.changes not recognised as
        mandatory or optional changes fields."""
        return sorted(list( set(self.changes.keys()) -
                            set(CHANGESFIELDS_MANDATORY + CHANGESFIELDS_OPTIONAL)))

    def unknown_dsc_fields(self):
        """Return the sorted keys of self.dsc not in CHANGESFIELDS_DSC."""
        return sorted(list( set(self.dsc.keys()) -
                            set(CHANGESFIELDS_DSC)))

    def unknown_dsc_files_fields(self, name):
        """Return the sorted keys of dsc_files[name] not recognised as
        mandatory or optional dsc_files fields."""
        return sorted(list( set(self.dsc_files[name].keys()) -
                            set(CHANGESFIELDS_DSCFILES_MANDATORY + CHANGESFIELDS_DSCFILES_OPTIONAL)))

    def str_files(self):
        """Return a list of display lines describing self.files."""
        r = []
        for name, entry in self.files.items():
            r.append("  %s:" % (name))
            for i in CHANGESFIELDS_FILES:
                if i in entry:
                    r.append("   %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_files_fields(name)
            if len(xfields) > 0:
                r.append("files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

        return r

    def str_changes(self):
        """Return a list of display lines describing self.changes.

        Raises KeyError if a mandatory field is absent.
        """
        r = []
        for i in CHANGESFIELDS_MANDATORY:
            val = self.changes[i]
            # Flatten multi-valued fields for display.
            if isinstance(val, list):
                val = " ".join(val)
            elif isinstance(val, dict):
                val = " ".join(val.keys())
            r.append('  %s: %s' % (i.capitalize(), val))

        for i in CHANGESFIELDS_OPTIONAL:
            if i in self.changes:
                r.append('  %s: %s' % (i.capitalize(), self.changes[i]))

        xfields = self.unknown_changes_fields()
        if len(xfields) > 0:
            r.append("Warning: changes still has the following unrecognised fields: %s" % ", ".join(xfields))

        return r

    def str_dsc(self):
        """Return a list of display lines describing self.dsc."""
        r = []
        for i in CHANGESFIELDS_DSC:
            if i in self.dsc:
                r.append('  %s: %s' % (i.capitalize(), self.dsc[i]))

        xfields = self.unknown_dsc_fields()
        if len(xfields) > 0:
            r.append("Warning: dsc still has the following unrecognised fields: %s" % ", ".join(xfields))

        return r

    def str_dsc_files(self):
        """Return a list of display lines describing self.dsc_files.

        Raises KeyError if a mandatory field is absent from an entry.
        """
        r = []
        for name, entry in self.dsc_files.items():
            r.append("  %s:" % (name))
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                r.append("   %s: %s" % (i.capitalize(), entry[i]))
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                if i in entry:
                    r.append("   %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_dsc_files_fields(name)
            if len(xfields) > 0:
                r.append("dsc_files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

        return r

    def __str__(self):
        """Render the whole upload as a multi-section summary string."""
        r = []

        r.append(" Changes:")
        r += self.str_changes()

        r.append("")

        r.append(" Dsc:")
        r += self.str_dsc()

        r.append("")

        r.append(" Files:")
        r += self.str_files()

        r.append("")

        r.append(" Dsc Files:")
        r += self.str_dsc_files()

        return "\n".join(r)
472
# Export the Changes class as part of the module's public API.
__all__.append('Changes')