# daklib/changes.py — from dak.git (git.decadent.org.uk)
# Commit: moved inserts of known_changes to Changes() class; add insert
# of known_changes in process-*.
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Changes class for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009  Joerg Jaspert <joerg@debian.org>
10 @copyright: 2009  Mark Hymers <mhy@debian.org>
11 @license: GNU General Public License version 2 or later
12 """
13
14 # This program is free software; you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation; either version 2 of the License, or
17 # (at your option) any later version.
18
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
22 # GNU General Public License for more details.
23
24 # You should have received a copy of the GNU General Public License
25 # along with this program; if not, write to the Free Software
26 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
27
28 ###############################################################################
29
import datetime
import os
import stat
from cPickle import Unpickler, Pickler
from errno import EPERM

from apt_inst import debExtractControl
from apt_pkg import ParseSection

from utils import open_file, fubar, poolify
39
40 ###############################################################################
41
42 __all__ = []
43
44 ###############################################################################
45
46 CHANGESFIELDS_MANDATORY = [ "distribution", "source", "architecture",
47         "version", "maintainer", "urgency", "fingerprint", "changedby822",
48         "changedby2047", "changedbyname", "maintainer822", "maintainer2047",
49         "maintainername", "maintaineremail", "closes", "changes" ]
50
51 __all__.append('CHANGESFIELDS_MANDATORY')
52
53 CHANGESFIELDS_OPTIONAL = [ "changed-by", "filecontents", "format",
54         "process-new note", "adv id", "distribution-version", "sponsoremail" ]
55
56 __all__.append('CHANGESFIELDS_OPTIONAL')
57
58 CHANGESFIELDS_FILES = [ "package", "version", "architecture", "type", "size",
59         "md5sum", "sha1sum", "sha256sum", "component", "location id",
60         "source package", "source version", "maintainer", "dbtype", "files id",
61         "new", "section", "priority", "othercomponents", "pool name",
62         "original component" ]
63
64 __all__.append('CHANGESFIELDS_FILES')
65
66 CHANGESFIELDS_DSC = [ "source", "version", "maintainer", "fingerprint",
67         "uploaders", "bts changelog", "dm-upload-allowed" ]
68
69 __all__.append('CHANGESFIELDS_DSC')
70
71 CHANGESFIELDS_DSCFILES_MANDATORY = [ "size", "md5sum" ]
72
73 __all__.append('CHANGESFIELDS_DSCFILES_MANDATORY')
74
75 CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]
76
77 __all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')
78
79 CHANGESFIELDS_ORIGFILES = [ "id", "location" ]
80
81 __all__.append('CHANGESFIELDS_ORIGFILES')
82
83 ###############################################################################
84
class Changes(object):
    """
    Convenience wrapper to carry around all the package information for
    one upload: the parsed .changes / .dsc fields and per-file metadata.
    """

    def __init__(self, **kwds):
        # kwds is accepted for interface compatibility but ignored.
        self.reset()

    def reset(self):
        """Reset every attribute back to the empty state."""
        self.changes_file = ""

        self.changes = {}       # parsed fields of the .changes file
        self.dsc = {}           # parsed fields of the .dsc (source uploads)
        self.files = {}         # per-file data for files in the .changes
        self.dsc_files = {}     # per-file data for files in the .dsc
        self.orig_files = {}    # orig tarball info, keyed by filename

    def file_summary(self):
        """
        Build human-readable summaries of the files in this upload.

        @return: tuple (byhand, new, summary, override_summary) where
                 byhand/new are booleans flagging whether any byhand or
                 NEW files are present, summary describes every file and
                 override_summary lists the priority/section used for
                 deb/udeb/dsc override entries
        """
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if "distribution" not in self.changes or not \
               isinstance(self.changes["distribution"], dict):
            self.changes["distribution"] = {}

        byhand = False
        new = False
        summary = ""
        override_summary = ""

        for name, entry in sorted(self.files.items()):
            if "byhand" in entry:
                byhand = True
                summary += name + " byhand\n"

            elif "new" in entry:
                new = True
                summary += "(new) %s %s %s\n" % (name, entry["priority"], entry["section"])

                if "othercomponents" in entry:
                    summary += "WARNING: Already present in %s distribution.\n" % (entry["othercomponents"])

                if entry["type"] == "deb":
                    # Pull the Description straight out of the .deb's
                    # control file for the summary.
                    deb_fh = open_file(name)
                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()

            else:
                entry["pool name"] = poolify(self.changes.get("source", ""), entry["component"])
                destination = entry["pool name"] + name
                summary += name + "\n  to " + destination + "\n"

                if "type" not in entry:
                    entry["type"] = "unknown"

                if entry["type"] in ["deb", "udeb", "dsc"]:
                    # (queue/unchecked), there we have override entries already, use them
                    # (process-new), there we dont have override entries, use the newly generated ones.
                    override_prio = entry.get("override priority", entry["priority"])
                    override_sect = entry.get("override section", entry["section"])
                    override_summary += "%s - %s %s\n" % (name, override_prio, override_sect)

        return (byhand, new, summary, override_summary)

    def check_override(self):
        """
        Checks override entries for validity.

        Returns an empty string if there are no problems
        or the text of a warning if there are
        """

        summary = ""

        # Abandon the check if it's a non-sourceful upload
        if "source" not in self.changes["architecture"]:
            return summary

        for name, entry in sorted(self.files.items()):
            # Only already-known (non-NEW) binary packages can disagree
            # with their override entry.
            if "new" not in entry and entry["type"] == "deb":
                if entry["section"] != "-":
                    if entry["section"].lower() != entry["override section"].lower():
                        summary += "%s: package says section is %s, override says %s.\n" % (name,
                                                                                            entry["section"],
                                                                                            entry["override section"])

                if entry["priority"] != "-":
                    if entry["priority"] != entry["override priority"]:
                        summary += "%s: package says priority is %s, override says %s.\n" % (name,
                                                                                             entry["priority"],
                                                                                             entry["override priority"])

        return summary

    def remove_known_changes(self, session=None):
        """
        Delete this upload's row from the known_changes table.

        @param session: optional existing database session; if None a
                        private session is opened, committed and closed
        """
        # NOTE(review): DBConn and KnownChange have no visible import in
        # this file — confirm they are made available (daklib.dbconn).
        #
        # Bug fix: privatetrans was previously unbound whenever a session
        # was passed in, raising UnboundLocalError below.
        privatetrans = False
        if session is None:
            session = DBConn().session()
            privatetrans = True

        # Bug fix: Query.filter() does not accept keyword arguments;
        # filter_by() is the keyword form.
        session.query(KnownChange).filter_by(changesfile=self.changes_file).delete()

        if privatetrans:
            session.commit()
            session.close()

    def add_known_changes(self, queue, session=None):
        """
        Record this upload in the known_changes table.

        @type queue: string
        @param queue: queue name, used to look up Dir::Queue::<queue> in
                      the configuration to locate the .changes file

        @param session: optional existing database session; if None a
                        private session is opened, committed and closed
        """
        # NOTE(review): Config and DBConn have no visible import in this
        # file — confirm they are made available (daklib.config /
        # daklib.dbconn).
        cnf = Config()

        # Bug fix: privatetrans was previously unbound whenever a session
        # was passed in (same defect as remove_known_changes).
        privatetrans = False
        if session is None:
            session = DBConn().session()
            privatetrans = True

        dirpath = cnf["Dir::Queue::%s" % (queue)]
        changesfile = os.path.join(dirpath, self.changes_file)
        filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))

        # Bug fix: the VALUES list used to contain "'maintainer," (a stray
        # quote instead of the :maintainer bind parameter) and omitted
        # :fingerprint entirely — 11 values for 12 columns, even though
        # the parameter dict already supplies 'fingerprint'.
        session.execute(
            """INSERT INTO known_changes
              (changesname, seen, source, binaries, architecture, version,
              distribution, urgency, maintainer, fingerprint, changedby, date)
              VALUES (:changesfile,:filetime,:source,:binary, :architecture,
              :version,:distribution,:urgency,:maintainer,:fingerprint,
              :changedby,:date)""",
              { 'changesfile':changesfile,
                'filetime':filetime,
                'source':self.changes["source"],
                'binary':self.changes["binary"],
                'architecture':self.changes["architecture"],
                'version':self.changes["version"],
                'distribution':self.changes["distribution"],
                'urgency':self.changes["urgency"],
                'maintainer':self.changes["maintainer"],
                'fingerprint':self.changes["fingerprint"],
                'changedby':self.changes["changed-by"],
                'date':self.changes["date"]} )

        if privatetrans:
            session.commit()
            session.close()

    def load_dot_dak(self, changesfile):
        """
        Update ourself by reading a previously created cPickle .dak dumpfile.

        The dump contains five pickled objects in order: changes, dsc,
        files, dsc_files and the orig-tarball data.

        @type changesfile: string
        @param changesfile: path of the .changes file; the dump is
                            expected next to it with a .dak extension
        """

        self.changes_file = changesfile
        # Swap the ".changes" suffix (8 chars) for ".dak".
        dump_filename = self.changes_file[:-8]+".dak"
        dump_file = open_file(dump_filename)

        p = Unpickler(dump_file)

        self.changes.update(p.load())
        self.dsc.update(p.load())
        self.files.update(p.load())
        self.dsc_files.update(p.load())

        next_obj = p.load()
        if isinstance(next_obj, dict):
            # New-format dump: a dict of orig tarballs.
            self.orig_files.update(next_obj)
        else:
            # Auto-convert old dak files to new format supporting
            # multiple tarballs
            orig_tar_gz = None
            for dsc_file in self.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
                    orig_tar_gz = dsc_file
            self.orig_files[orig_tar_gz] = {}
            # Old format stored the files id and location as two further
            # pickled scalars.
            if next_obj is not None:
                self.orig_files[orig_tar_gz]["id"] = next_obj
            next_obj = p.load()
            if next_obj is not None and next_obj != "":
                self.orig_files[orig_tar_gz]["location"] = next_obj
            if len(self.orig_files[orig_tar_gz]) == 0:
                # Nothing useful recovered; drop the empty entry.
                del self.orig_files[orig_tar_gz]

        dump_file.close()

    def sanitised_files(self):
        """Return a copy of self.files restricted to the known fields."""
        ret = {}
        for name, entry in self.files.items():
            ret[name] = {}
            for i in CHANGESFIELDS_FILES:
                if i in entry:
                    ret[name][i] = entry[i]

        return ret

    def sanitised_changes(self):
        """Return a copy of self.changes restricted to the known fields."""
        ret = {}
        # Mandatory changes fields
        for i in CHANGESFIELDS_MANDATORY:
            ret[i] = self.changes[i]

        # Optional changes fields
        for i in CHANGESFIELDS_OPTIONAL:
            if i in self.changes:
                ret[i] = self.changes[i]

        return ret

    def sanitised_dsc(self):
        """Return a copy of self.dsc restricted to the known fields."""
        ret = {}
        for i in CHANGESFIELDS_DSC:
            if i in self.dsc:
                ret[i] = self.dsc[i]

        return ret

    def sanitised_dsc_files(self):
        """Return a copy of self.dsc_files restricted to the known fields."""
        ret = {}
        for name, entry in self.dsc_files.items():
            ret[name] = {}
            # Mandatory dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                ret[name][i] = entry[i]

            # Optional dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                if i in entry:
                    ret[name][i] = entry[i]

        return ret

    def sanitised_orig_files(self):
        """Return a copy of self.orig_files restricted to the known fields."""
        ret = {}
        for name, entry in self.orig_files.items():
            ret[name] = {}
            # Optional orig_files fields
            for i in CHANGESFIELDS_ORIGFILES:
                if i in entry:
                    ret[name][i] = entry[i]

        return ret

    def write_dot_dak(self, dest_dir):
        """
        Dump ourself into a cPickle file.

        @type dest_dir: string
        @param dest_dir: Path where the dumpfile should be stored

        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
               there's some idea of what process-accepted & process-new use from
               process-unchecked. (JT)

        """

        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
        dump_file = open_file(dump_filename, 'w')

        try:
            os.chmod(dump_filename, 0o664)
        except OSError as e:
            # chmod may fail when the dumpfile is not owned by the user
            # invoking dak (like e.g. when NEW is processed by a member
            # of ftpteam)
            if e.errno == EPERM:
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                # security precaution, should never happen unless a weird
                # umask is set anywhere
                if perms & stat.S_IWOTH:
                    fubar("%s is world writable and chmod failed." % \
                        (dump_filename,))
                # ignore the failed chmod otherwise as the file should
                # already have the right privileges and is just, at worst,
                # unreadable for world
            else:
                raise

        # Protocol 1 to match what load_dot_dak expects to read back.
        p = Pickler(dump_file, 1)

        p.dump(self.sanitised_changes())
        p.dump(self.sanitised_dsc())
        p.dump(self.sanitised_files())
        p.dump(self.sanitised_dsc_files())
        p.dump(self.sanitised_orig_files())

        dump_file.close()

    def unknown_files_fields(self, name):
        """Return the fields of files[name] not in CHANGESFIELDS_FILES."""
        return sorted(list( set(self.files[name].keys()) -
                            set(CHANGESFIELDS_FILES)))

    def unknown_changes_fields(self):
        """Return the fields of changes not in the mandatory/optional lists."""
        return sorted(list( set(self.changes.keys()) -
                            set(CHANGESFIELDS_MANDATORY + CHANGESFIELDS_OPTIONAL)))

    def unknown_dsc_fields(self):
        """Return the fields of dsc not in CHANGESFIELDS_DSC."""
        return sorted(list( set(self.dsc.keys()) -
                            set(CHANGESFIELDS_DSC)))

    def unknown_dsc_files_fields(self, name):
        """Return the fields of dsc_files[name] not in the known lists."""
        return sorted(list( set(self.dsc_files[name].keys()) -
                            set(CHANGESFIELDS_DSCFILES_MANDATORY + CHANGESFIELDS_DSCFILES_OPTIONAL)))

    def str_files(self):
        """Return a list of display lines describing self.files."""
        r = []
        for name, entry in self.files.items():
            r.append("  %s:" % (name))
            for i in CHANGESFIELDS_FILES:
                if i in entry:
                    r.append("   %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_files_fields(name)
            if len(xfields) > 0:
                r.append("files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

        return r

    def str_changes(self):
        """Return a list of display lines describing self.changes."""
        r = []
        for i in CHANGESFIELDS_MANDATORY:
            val = self.changes[i]
            # Flatten list/dict values into a single space-joined string.
            if isinstance(val, list):
                val = " ".join(val)
            elif isinstance(val, dict):
                val = " ".join(val.keys())
            r.append('  %s: %s' % (i.capitalize(), val))

        for i in CHANGESFIELDS_OPTIONAL:
            if i in self.changes:
                r.append('  %s: %s' % (i.capitalize(), self.changes[i]))

        xfields = self.unknown_changes_fields()
        if len(xfields) > 0:
            r.append("Warning: changes still has the following unrecognised fields: %s" % ", ".join(xfields))

        return r

    def str_dsc(self):
        """Return a list of display lines describing self.dsc."""
        r = []
        for i in CHANGESFIELDS_DSC:
            if i in self.dsc:
                r.append('  %s: %s' % (i.capitalize(), self.dsc[i]))

        xfields = self.unknown_dsc_fields()
        if len(xfields) > 0:
            r.append("Warning: dsc still has the following unrecognised fields: %s" % ", ".join(xfields))

        return r

    def str_dsc_files(self):
        """Return a list of display lines describing self.dsc_files."""
        r = []
        for name, entry in self.dsc_files.items():
            r.append("  %s:" % (name))
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                r.append("   %s: %s" % (i.capitalize(), entry[i]))
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                if i in entry:
                    r.append("   %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_dsc_files_fields(name)
            if len(xfields) > 0:
                r.append("dsc_files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

        return r

    def __str__(self):
        """Render the whole upload as a multi-section report."""
        r = []

        r.append(" Changes:")
        r += self.str_changes()

        r.append("")

        r.append(" Dsc:")
        r += self.str_dsc()

        r.append("")

        r.append(" Files:")
        r += self.str_files()

        r.append("")

        r.append(" Dsc Files:")
        r += self.str_dsc_files()

        return "\n".join(r)
457
458 __all__.append('Changes')