]> git.decadent.org.uk Git - dak.git/blob - daklib/changes.py
Merge commit 'lamby/master' into merge
[dak.git] / daklib / changes.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Changes class for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @copyright: 2009  Mark Hymers <mhy@debian.org>
11 @license: GNU General Public License version 2 or later
12 """
13
14 # This program is free software; you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation; either version 2 of the License, or
17 # (at your option) any later version.
18
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
22 # GNU General Public License for more details.
23
24 # You should have received a copy of the GNU General Public License
25 # along with this program; if not, write to the Free Software
26 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
27
28 ###############################################################################
29
30 import os
31 import stat
32
33 import datetime
34 from cPickle import Unpickler, Pickler
35 from errno import EPERM
36
37 from apt_inst import debExtractControl
38 from apt_pkg import ParseSection
39
40 from utils import open_file, fubar, poolify
41 from config import *
42 from dbconn import *
43
44 ###############################################################################
45
46 __all__ = []
47
48 ###############################################################################
49
50 CHANGESFIELDS_MANDATORY = [ "distribution", "source", "architecture",
51         "version", "maintainer", "urgency", "fingerprint", "changedby822",
52         "changedby2047", "changedbyname", "maintainer822", "maintainer2047",
53         "maintainername", "maintaineremail", "closes", "changes" ]
54
55 __all__.append('CHANGESFIELDS_MANDATORY')
56
57 CHANGESFIELDS_OPTIONAL = [ "changed-by", "filecontents", "format",
58         "process-new note", "adv id", "distribution-version", "sponsoremail" ]
59
60 __all__.append('CHANGESFIELDS_OPTIONAL')
61
62 CHANGESFIELDS_FILES = [ "package", "version", "architecture", "type", "size",
63         "md5sum", "sha1sum", "sha256sum", "component", "location id",
64         "source package", "source version", "maintainer", "dbtype", "files id",
65         "new", "section", "priority", "othercomponents", "pool name",
66         "original component" ]
67
68 __all__.append('CHANGESFIELDS_FILES')
69
70 CHANGESFIELDS_DSC = [ "source", "version", "maintainer", "fingerprint",
71         "uploaders", "bts changelog", "dm-upload-allowed" ]
72
73 __all__.append('CHANGESFIELDS_DSC')
74
75 CHANGESFIELDS_DSCFILES_MANDATORY = [ "size", "md5sum" ]
76
77 __all__.append('CHANGESFIELDS_DSCFILES_MANDATORY')
78
79 CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]
80
81 __all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')
82
83 CHANGESFIELDS_ORIGFILES = [ "id", "location" ]
84
85 __all__.append('CHANGESFIELDS_ORIGFILES')
86
87 ###############################################################################
88
89 class Changes(object):
90     """ Convenience wrapper to carry around all the package information """
91
92     def __init__(self, **kwds):
93         self.reset()
94
95     def reset(self):
96         self.changes_file = ""
97
98         self.changes = {}
99         self.dsc = {}
100         self.files = {}
101         self.dsc_files = {}
102         self.orig_files = {}
103
104     def file_summary(self):
105         # changes["distribution"] may not exist in corner cases
106         # (e.g. unreadable changes files)
107         if not self.changes.has_key("distribution") or not \
108                isinstance(self.changes["distribution"], dict):
109             self.changes["distribution"] = {}
110
111         byhand = False
112         new = False
113         summary = ""
114         override_summary = ""
115
116         for name, entry in sorted(self.files.items()):
117             if entry.has_key("byhand"):
118                 byhand = True
119                 summary += name + " byhand\n"
120
121             elif entry.has_key("new"):
122                 new = True
123                 summary += "(new) %s %s %s\n" % (name, entry["priority"], entry["section"])
124
125                 if entry.has_key("othercomponents"):
126                     summary += "WARNING: Already present in %s distribution.\n" % (entry["othercomponents"])
127
128                 if entry["type"] == "deb":
129                     deb_fh = open_file(name)
130                     summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'
131                     deb_fh.close()
132
133             else:
134                 entry["pool name"] = poolify(self.changes.get("source", ""), entry["component"])
135                 destination = entry["pool name"] + name
136                 summary += name + "\n  to " + destination + "\n"
137
138                 if not entry.has_key("type"):
139                     entry["type"] = "unknown"
140
141                 if entry["type"] in ["deb", "udeb", "dsc"]:
142                     # (queue/unchecked), there we have override entries already, use them
143                     # (process-new), there we dont have override entries, use the newly generated ones.
144                     override_prio = entry.get("override priority", entry["priority"])
145                     override_sect = entry.get("override section", entry["section"])
146                     override_summary += "%s - %s %s\n" % (name, override_prio, override_sect)
147
148         return (byhand, new, summary, override_summary)
149
150     def check_override(self):
151         """
152         Checks override entries for validity.
153
154         Returns an empty string if there are no problems
155         or the text of a warning if there are
156         """
157
158         summary = ""
159
160         # Abandon the check if it's a non-sourceful upload
161         if not self.changes["architecture"].has_key("source"):
162             return summary
163
164         for name, entry in sorted(self.files.items()):
165             if not entry.has_key("new") and entry["type"] == "deb":
166                 if entry["section"] != "-":
167                     if entry["section"].lower() != entry["override section"].lower():
168                         summary += "%s: package says section is %s, override says %s.\n" % (name,
169                                                                                             entry["section"],
170                                                                                             entry["override section"])
171
172                 if entry["priority"] != "-":
173                     if entry["priority"] != entry["override priority"]:
174                         summary += "%s: package says priority is %s, override says %s.\n" % (name,
175                                                                                              entry["priority"],
176                                                                                              entry["override priority"])
177
178         return summary
179
180     def remove_known_changes(self, session=None):
181         if session is None:
182             session = DBConn().session()
183             privatetrans = True
184
185         session.delete(get_knownchange(self.changes_file, session))
186
187         if privatetrans:
188             session.commit()
189             session.close()
190
191
192     def mark_missing_fields(self):
193         """add "missing" in fields which we will require for the known_changes table"""
194         for key in ['urgency', 'maintainer', 'fingerprint', 'changed-by' ]:
195             if (not self.changes.has_key(key)) or (not self.changes[key]):
196                 self.changes[key]='missing'
197
198     def add_known_changes(self, dirpath, session=None):
199         """add "missing" in fields which we will require for the known_changes table"""
200         cnf = Config()
201         privatetrans = False
202         if session is None:
203             session = DBConn().session()
204             privatetrans = True
205
206         changesfile = os.path.join(dirpath, self.changes_file)
207         filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))
208
209         self.mark_missing_fields()
210
211         session.execute(
212             """INSERT INTO known_changes
213               (changesname, seen, source, binaries, architecture, version,
214               distribution, urgency, maintainer, fingerprint, changedby, date)
215               VALUES (:changesfile,:filetime,:source,:binary, :architecture,
216               :version,:distribution,:urgency,:maintainer,:fingerprint,:changedby,:date)""",
217               { 'changesfile':self.changes_file,
218                 'filetime':filetime,
219                 'source':self.changes["source"],
220                 'binary':self.changes["binary"],
221                 'architecture':self.changes["architecture"],
222                 'version':self.changes["version"],
223                 'distribution':self.changes["distribution"],
224                 'urgency':self.changes["urgency"],
225                 'maintainer':self.changes["maintainer"],
226                 'fingerprint':self.changes["fingerprint"],
227                 'changedby':self.changes["changed-by"],
228                 'date':self.changes["date"]} )
229
230         if privatetrans:
231             session.commit()
232             session.close()
233
234     def load_dot_dak(self, changesfile):
235         """
236         Update ourself by reading a previously created cPickle .dak dumpfile.
237         """
238
239         self.changes_file = changesfile
240         dump_filename = self.changes_file[:-8]+".dak"
241         dump_file = open_file(dump_filename)
242
243         p = Unpickler(dump_file)
244
245         self.changes.update(p.load())
246         self.dsc.update(p.load())
247         self.files.update(p.load())
248         self.dsc_files.update(p.load())
249
250         next_obj = p.load()
251         if isinstance(next_obj, dict):
252             self.orig_files.update(next_obj)
253         else:
254             # Auto-convert old dak files to new format supporting
255             # multiple tarballs
256             orig_tar_gz = None
257             for dsc_file in self.dsc_files.keys():
258                 if dsc_file.endswith(".orig.tar.gz"):
259                     orig_tar_gz = dsc_file
260             self.orig_files[orig_tar_gz] = {}
261             if next_obj != None:
262                 self.orig_files[orig_tar_gz]["id"] = next_obj
263             next_obj = p.load()
264             if next_obj != None and next_obj != "":
265                 self.orig_files[orig_tar_gz]["location"] = next_obj
266             if len(self.orig_files[orig_tar_gz]) == 0:
267                 del self.orig_files[orig_tar_gz]
268
269         dump_file.close()
270
271     def sanitised_files(self):
272         ret = {}
273         for name, entry in self.files.items():
274             ret[name] = {}
275             for i in CHANGESFIELDS_FILES:
276                 if entry.has_key(i):
277                     ret[name][i] = entry[i]
278
279         return ret
280
281     def sanitised_changes(self):
282         ret = {}
283         # Mandatory changes fields
284         for i in CHANGESFIELDS_MANDATORY:
285             ret[i] = self.changes[i]
286
287         # Optional changes fields
288         for i in CHANGESFIELDS_OPTIONAL:
289             if self.changes.has_key(i):
290                 ret[i] = self.changes[i]
291
292         return ret
293
294     def sanitised_dsc(self):
295         ret = {}
296         for i in CHANGESFIELDS_DSC:
297             if self.dsc.has_key(i):
298                 ret[i] = self.dsc[i]
299
300         return ret
301
302     def sanitised_dsc_files(self):
303         ret = {}
304         for name, entry in self.dsc_files.items():
305             ret[name] = {}
306             # Mandatory dsc_files fields
307             for i in CHANGESFIELDS_DSCFILES_MANDATORY:
308                 ret[name][i] = entry[i]
309
310             # Optional dsc_files fields
311             for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
312                 if entry.has_key(i):
313                     ret[name][i] = entry[i]
314
315         return ret
316
317     def sanitised_orig_files(self):
318         ret = {}
319         for name, entry in self.orig_files.items():
320             ret[name] = {}
321             # Optional orig_files fields
322             for i in CHANGESFIELDS_ORIGFILES:
323                 if entry.has_key(i):
324                     ret[name][i] = entry[i]
325
326         return ret
327
328     def write_dot_dak(self, dest_dir):
329         """
330         Dump ourself into a cPickle file.
331
332         @type dest_dir: string
333         @param dest_dir: Path where the dumpfile should be stored
334
335         @note: This could just dump the dictionaries as is, but I'd like to avoid this so
336                there's some idea of what process-accepted & process-new use from
337                process-unchecked. (JT)
338
339         """
340
341         dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
342         dump_file = open_file(dump_filename, 'w')
343
344         try:
345             os.chmod(dump_filename, 0664)
346         except OSError, e:
347             # chmod may fail when the dumpfile is not owned by the user
348             # invoking dak (like e.g. when NEW is processed by a member
349             # of ftpteam)
350             if e.errno == EPERM:
351                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
352                 # security precaution, should never happen unless a weird
353                 # umask is set anywhere
354                 if perms & stat.S_IWOTH:
355                     fubar("%s is world writable and chmod failed." % \
356                         (dump_filename,))
357                 # ignore the failed chmod otherwise as the file should
358                 # already have the right privileges and is just, at worst,
359                 # unreadable for world
360             else:
361                 raise
362
363         p = Pickler(dump_file, 1)
364
365         p.dump(self.sanitised_changes())
366         p.dump(self.sanitised_dsc())
367         p.dump(self.sanitised_files())
368         p.dump(self.sanitised_dsc_files())
369         p.dump(self.sanitised_orig_files())
370
371         dump_file.close()
372
373     def unknown_files_fields(self, name):
374         return sorted(list( set(self.files[name].keys()) -
375                             set(CHANGESFIELDS_FILES)))
376
377     def unknown_changes_fields(self):
378         return sorted(list( set(self.changes.keys()) -
379                             set(CHANGESFIELDS_MANDATORY + CHANGESFIELDS_OPTIONAL)))
380
381     def unknown_dsc_fields(self):
382         return sorted(list( set(self.dsc.keys()) -
383                             set(CHANGESFIELDS_DSC)))
384
385     def unknown_dsc_files_fields(self, name):
386         return sorted(list( set(self.dsc_files[name].keys()) -
387                             set(CHANGESFIELDS_DSCFILES_MANDATORY + CHANGESFIELDS_DSCFILES_OPTIONAL)))
388
389     def str_files(self):
390         r = []
391         for name, entry in self.files.items():
392             r.append("  %s:" % (name))
393             for i in CHANGESFIELDS_FILES:
394                 if entry.has_key(i):
395                     r.append("   %s: %s" % (i.capitalize(), entry[i]))
396             xfields = self.unknown_files_fields(name)
397             if len(xfields) > 0:
398                 r.append("files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))
399
400         return r
401
402     def str_changes(self):
403         r = []
404         for i in CHANGESFIELDS_MANDATORY:
405             val = self.changes[i]
406             if isinstance(val, list):
407                 val = " ".join(val)
408             elif isinstance(val, dict):
409                 val = " ".join(val.keys())
410             r.append('  %s: %s' % (i.capitalize(), val))
411
412         for i in CHANGESFIELDS_OPTIONAL:
413             if self.changes.has_key(i):
414                 r.append('  %s: %s' % (i.capitalize(), self.changes[i]))
415
416         xfields = self.unknown_changes_fields()
417         if len(xfields) > 0:
418             r.append("Warning: changes still has the following unrecognised fields: %s" % ", ".join(xfields))
419
420         return r
421
422     def str_dsc(self):
423         r = []
424         for i in CHANGESFIELDS_DSC:
425             if self.dsc.has_key(i):
426                 r.append('  %s: %s' % (i.capitalize(), self.dsc[i]))
427
428         xfields = self.unknown_dsc_fields()
429         if len(xfields) > 0:
430             r.append("Warning: dsc still has the following unrecognised fields: %s" % ", ".join(xfields))
431
432         return r
433
434     def str_dsc_files(self):
435         r = []
436         for name, entry in self.dsc_files.items():
437             r.append("  %s:" % (name))
438             for i in CHANGESFIELDS_DSCFILES_MANDATORY:
439                 r.append("   %s: %s" % (i.capitalize(), entry[i]))
440             for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
441                 if entry.has_key(i):
442                     r.append("   %s: %s" % (i.capitalize(), entry[i]))
443             xfields = self.unknown_dsc_files_fields(name)
444             if len(xfields) > 0:
445                 r.append("dsc_files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))
446
447         return r
448
449     def __str__(self):
450         r = []
451
452         r.append(" Changes:")
453         r += self.str_changes()
454
455         r.append("")
456
457         r.append(" Dsc:")
458         r += self.str_dsc()
459
460         r.append("")
461
462         r.append(" Files:")
463         r += self.str_files()
464
465         r.append("")
466
467         r.append(" Dsc Files:")
468         r += self.str_dsc_files()
469
470         return "\n".join(r)
471
# Export the Changes class alongside the field lists.
__all__ += ['Changes']