@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@copyright: 2009 Mark Hymers <mhy@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

###############################################################################
import os
import stat

from cPickle import Unpickler, Pickler
from errno import EPERM

from apt_inst import debExtractControl
from apt_pkg import ParseSection

from utils import open_file, fubar, poolify

###############################################################################

__all__ = []

###############################################################################
CHANGESFIELDS_MANDATORY = [ "distribution", "source", "architecture",
    "version", "maintainer", "urgency", "fingerprint", "changedby822",
    "changedby2047", "changedbyname", "maintainer822", "maintainer2047",
    "maintainername", "maintaineremail", "closes", "changes" ]

__all__.append('CHANGESFIELDS_MANDATORY')

CHANGESFIELDS_OPTIONAL = [ "changed-by", "filecontents", "format",
    "process-new note", "adv id", "distribution-version", "sponsoremail" ]

__all__.append('CHANGESFIELDS_OPTIONAL')

CHANGESFIELDS_FILES = [ "package", "version", "architecture", "type", "size",
    "md5sum", "sha1sum", "sha256sum", "component", "location id",
    "source package", "source version", "maintainer", "dbtype", "files id",
    "new", "section", "priority", "othercomponents", "pool name",
    "original component" ]

__all__.append('CHANGESFIELDS_FILES')

CHANGESFIELDS_DSC = [ "source", "version", "maintainer", "fingerprint",
    "uploaders", "bts changelog", "dm-upload-allowed" ]

__all__.append('CHANGESFIELDS_DSC')

CHANGESFIELDS_DSCFILES_MANDATORY = [ "size", "md5sum" ]

__all__.append('CHANGESFIELDS_DSCFILES_MANDATORY')

CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]

__all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')

CHANGESFIELDS_ORIGFILES = [ "id", "location" ]

__all__.append('CHANGESFIELDS_ORIGFILES')
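
# Note: the CHANGESFIELDS_* lists above define which keys the sanitised_*()
# helpers copy into the .dak pickle dumps written by Changes.write_dot_dak(),
# and which keys the str_*()/unknown_*() helpers treat as recognised.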

###############################################################################

class Changes(object):
    """ Convenience wrapper to carry around all the package information """

    def __init__(self, **kwds):
        self.changes_file = ""
        self.changes = {}
        self.dsc = {}
        self.files = {}
        self.dsc_files = {}
        self.orig_files = {}

    def file_summary(self):
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not self.changes.has_key("distribution") or not \
               isinstance(self.changes["distribution"], dict):
            self.changes["distribution"] = {}

        byhand = False
        new = False
        summary = ""
        override_summary = ""

        for name, entry in sorted(self.files.items()):
            if entry.has_key("byhand"):
                byhand = True
                summary += name + " byhand\n"

            elif entry.has_key("new"):
                new = True
                summary += "(new) %s %s %s\n" % (name, entry["priority"], entry["section"])

                if entry.has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (entry["othercomponents"])

                if entry["type"] == "deb":
                    deb_fh = open_file(name)
                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'
                    deb_fh.close()

            else:
                entry["pool name"] = poolify(self.changes.get("source", ""), entry["component"])
                destination = entry["pool name"] + name
                summary += name + "\n to " + destination + "\n"

            if not entry.has_key("type"):
                entry["type"] = "unknown"

            if entry["type"] in ["deb", "udeb", "dsc"]:
                # In queue/unchecked we already have override entries, so use them;
                # in process-new we don't, so fall back to the newly generated ones.
                override_prio = entry.get("override priority", entry["priority"])
                override_sect = entry.get("override section", entry["section"])
                override_summary += "%s - %s %s\n" % (name, override_prio, override_sect)

        return (byhand, new, summary, override_summary)

    def check_override(self):
        """
        Checks override entries for validity.

        Returns an empty string if there are no problems
        or the text of a warning if there are.
        """

        summary = ""

        # Abandon the check if it's a non-sourceful upload
        if not self.changes["architecture"].has_key("source"):
            return summary

        for name, entry in sorted(self.files.items()):
            if not entry.has_key("new") and entry["type"] == "deb":
                if entry["section"] != "-":
                    if entry["section"].lower() != entry["override section"].lower():
                        summary += "%s: package says section is %s, override says %s.\n" % (name,
                            entry["section"], entry["override section"])

                if entry["priority"] != "-":
                    if entry["priority"] != entry["override priority"]:
                        summary += "%s: package says priority is %s, override says %s.\n" % (name,
                            entry["priority"], entry["override priority"])

        return summary

    def load_dot_dak(self, changesfile):
        """
        Update ourself by reading a previously created cPickle .dak dumpfile.
        """

        self.changes_file = changesfile
        dump_filename = self.changes_file[:-8] + ".dak"
        dump_file = open_file(dump_filename)

        p = Unpickler(dump_file)

        self.changes.update(p.load())
        self.dsc.update(p.load())
        self.files.update(p.load())
        self.dsc_files.update(p.load())

        next_obj = p.load()
        if isinstance(next_obj, dict):
            self.orig_files.update(next_obj)
        else:
            # Auto-convert old dak files to new format supporting
            # multiple orig tarballs
            orig_tar_gz = None
            for dsc_file in self.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
                    orig_tar_gz = dsc_file
            self.orig_files[orig_tar_gz] = {}
            if next_obj != None:
                self.orig_files[orig_tar_gz]["id"] = next_obj
            next_obj = p.load()
            if next_obj != None and next_obj != "":
                self.orig_files[orig_tar_gz]["location"] = next_obj
            if len(self.orig_files[orig_tar_gz]) == 0:
                del self.orig_files[orig_tar_gz]

        dump_file.close()
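
    # Note: the pickle stream read above mirrors what write_dot_dak() emits:
    # five consecutive objects (changes, dsc, files, dsc_files, orig_files).
    # Older dumps stored the orig.tar.gz "id" and "location" as two separate
    # scalar values, which the fallback branch above converts on the fly.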

    def sanitised_files(self):
        ret = {}
        for name, entry in self.files.items():
            ret[name] = {}
            for i in CHANGESFIELDS_FILES:
                if entry.has_key(i):
                    ret[name][i] = entry[i]

        return ret

    def sanitised_changes(self):
        ret = {}
        # Mandatory changes fields
        for i in CHANGESFIELDS_MANDATORY:
            ret[i] = self.changes[i]

        # Optional changes fields
        for i in CHANGESFIELDS_OPTIONAL:
            if self.changes.has_key(i):
                ret[i] = self.changes[i]

        return ret

    def sanitised_dsc(self):
        ret = {}
        for i in CHANGESFIELDS_DSC:
            if self.dsc.has_key(i):
                ret[i] = self.dsc[i]

        return ret

    def sanitised_dsc_files(self):
        ret = {}
        for name, entry in self.dsc_files.items():
            ret[name] = {}
            # Mandatory dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                ret[name][i] = entry[i]

            # Optional dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                if entry.has_key(i):
                    ret[name][i] = entry[i]

        return ret

    def sanitised_orig_files(self):
        ret = {}
        for name, entry in self.orig_files.items():
            ret[name] = {}
            # Optional orig_files fields
            for i in CHANGESFIELDS_ORIGFILES:
                if entry.has_key(i):
                    ret[name][i] = entry[i]

        return ret

    def write_dot_dak(self, dest_dir):
        """
        Dump ourself into a cPickle file.

        @type dest_dir: string
        @param dest_dir: Path where the dumpfile should be stored

        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
               there's some idea of what process-accepted & process-new use from
               process-unchecked. (JT)
        """

        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
        dump_file = open_file(dump_filename, 'w')

        try:
            os.chmod(dump_filename, 0664)
        except OSError, (errno, strerror):
            # chmod may fail when the dumpfile is not owned by the user
            # invoking dak (like e.g. when NEW is processed by a member
            # of the ftp team)
            if errno == EPERM:
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                # security precaution, should never happen unless a weird
                # umask is set anywhere
                if perms & stat.S_IWOTH:
                    fubar("%s is world writable and chmod failed." % \
                        (dump_filename,))
                # ignore the failed chmod otherwise as the file should
                # already have the right privileges and is just, at worst,
                # unreadable for world
            else:
                raise

        p = Pickler(dump_file, 1)

        p.dump(self.sanitised_changes())
        p.dump(self.sanitised_dsc())
        p.dump(self.sanitised_files())
        p.dump(self.sanitised_dsc_files())
        p.dump(self.sanitised_orig_files())

        dump_file.close()

    def unknown_files_fields(self, name):
        return sorted(list( set(self.files[name].keys()) -
                            set(CHANGESFIELDS_FILES)))

    def unknown_changes_fields(self):
        return sorted(list( set(self.changes.keys()) -
                            set(CHANGESFIELDS_MANDATORY + CHANGESFIELDS_OPTIONAL)))

    def unknown_dsc_fields(self):
        return sorted(list( set(self.dsc.keys()) -
                            set(CHANGESFIELDS_DSC)))

    def unknown_dsc_files_fields(self, name):
        return sorted(list( set(self.dsc_files[name].keys()) -
                            set(CHANGESFIELDS_DSCFILES_MANDATORY + CHANGESFIELDS_DSCFILES_OPTIONAL)))

    def str_files(self):
        r = []
        for name, entry in self.files.items():
            r.append(" %s:" % (name))
            for i in CHANGESFIELDS_FILES:
                if entry.has_key(i):
                    r.append("  %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_files_fields(name)
            if len(xfields) > 0:
                r.append("files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

        return r

    def str_changes(self):
        r = []
        for i in CHANGESFIELDS_MANDATORY:
            val = self.changes[i]
            if isinstance(val, list):
                val = " ".join(val)
            elif isinstance(val, dict):
                val = " ".join(val.keys())
            r.append(' %s: %s' % (i.capitalize(), val))

        for i in CHANGESFIELDS_OPTIONAL:
            if self.changes.has_key(i):
                r.append(' %s: %s' % (i.capitalize(), self.changes[i]))

        xfields = self.unknown_changes_fields()
        if len(xfields) > 0:
            r.append("Warning: changes still has the following unrecognised fields: %s" % ", ".join(xfields))

        return r

    def str_dsc(self):
        r = []
        for i in CHANGESFIELDS_DSC:
            if self.dsc.has_key(i):
                r.append(' %s: %s' % (i.capitalize(), self.dsc[i]))

        xfields = self.unknown_dsc_fields()
        if len(xfields) > 0:
            r.append("Warning: dsc still has the following unrecognised fields: %s" % ", ".join(xfields))

        return r

    def str_dsc_files(self):
        r = []
        for name, entry in self.dsc_files.items():
            r.append(" %s:" % (name))
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                r.append("  %s: %s" % (i.capitalize(), entry[i]))
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                if entry.has_key(i):
                    r.append("  %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_dsc_files_fields(name)
            if len(xfields) > 0:
                r.append("dsc_files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

        return r

    def __str__(self):
        r = []

        r.append(" Changes:")
        r += self.str_changes()

        r.append(" Dsc:")
        r += self.str_dsc()

        r.append(" Files:")
        r += self.str_files()

        r.append(" Dsc Files:")
        r += self.str_dsc_files()

        return "\n".join(r)

__all__.append('Changes')
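
# Illustrative usage sketch (the changes filename and paths below are
# hypothetical, and a matching "<name>.dak" dump is assumed to exist next
# to the .changes file):
#
#   upload = Changes()
#   upload.load_dot_dak("hello_2.10-1_amd64.changes")
#   print upload
#   upload.write_dot_dak("/srv/queue/done")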