7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @copyright: 2009 Mark Hymers <mhy@debian.org>
11 @license: GNU General Public License version 2 or later
14 # This program is free software; you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation; either version 2 of the License, or
17 # (at your option) any later version.
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 # GNU General Public License for more details.
24 # You should have received a copy of the GNU General Public License
25 # along with this program; if not, write to the Free Software
26 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
28 ###############################################################################
32 from cPickle import Unpickler, Pickler
33 from errno import EPERM
35 from apt_inst import debExtractControl
36 from apt_pkg import ParseSection
38 from utils import open_file, fubar, poolify
40 ###############################################################################
44 ###############################################################################
# Field-name whitelists for the dictionaries carried by a Changes object.
# They are used by the sanitised_*() methods to filter what gets written
# into the cPickle ".dak" dump, and by the unknown_*()/str_*() methods to
# report anything outside these known sets.
# NOTE(review): __all__ is defined earlier in the file (not visible in
# this chunk).

# .changes fields that must always be present in a dump.
CHANGESFIELDS_MANDATORY = [ "distribution", "source", "architecture",
    "version", "maintainer", "urgency", "fingerprint", "changedby822",
    "changedby2047", "changedbyname", "maintainer822", "maintainer2047",
    "maintainername", "maintaineremail", "closes", "changes" ]

__all__.append('CHANGESFIELDS_MANDATORY')

# .changes fields that may be present but are not required.
CHANGESFIELDS_OPTIONAL = [ "changed-by", "filecontents", "format",
    "process-new note", "adv id", "distribution-version", "sponsoremail" ]

__all__.append('CHANGESFIELDS_OPTIONAL')

# Per-file fields kept for each entry of the "files" dictionary.
CHANGESFIELDS_FILES = [ "package", "version", "architecture", "type", "size",
    "md5sum", "sha1sum", "sha256sum", "component", "location id",
    "source package", "source version", "maintainer", "dbtype", "files id",
    "new", "section", "priority", "othercomponents", "pool name",
    "original component" ]

__all__.append('CHANGESFIELDS_FILES')

# Fields kept from the parsed .dsc file.
CHANGESFIELDS_DSC = [ "source", "version", "maintainer", "fingerprint",
    "uploaders", "bts changelog", "dm-upload-allowed" ]

__all__.append('CHANGESFIELDS_DSC')

# Per-file fields for dsc_files entries: these two are mandatory ...
CHANGESFIELDS_DSCFILES_MANDATORY = [ "size", "md5sum" ]

__all__.append('CHANGESFIELDS_DSCFILES_MANDATORY')

# ... and this one is optional (only present once the file is in the DB).
CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]

__all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')
79 ###############################################################################
class Changes(object):
    """ Convenience wrapper to carry around all the package information """
    # NOTE(review): several statements of this class are not visible in this
    # chunk of the file; comments below describe only what the visible code
    # demonstrates.

    def __init__(self, **kwds):
        # Path of the .changes file this object describes.
        self.changes_file = ""
        # Database id / pool location of the original upstream tarball,
        # filled in later by the queue-processing code (not visible here).
        self.orig_tar_id = None
        self.orig_tar_location = ""
        self.orig_tar_gz = None

    def file_summary(self):
        """Build human-readable summaries of the files in this upload.

        Returns a (byhand, new, summary, override_summary) tuple; the
        initialisation of those accumulators is not visible in this chunk.
        """
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not self.changes.has_key("distribution") or not \
               isinstance(self.changes["distribution"], dict):
            self.changes["distribution"] = {}

        override_summary = ""

        for name, entry in sorted(self.files.items()):
            # BYHAND uploads are only listed by name.
            if entry.has_key("byhand"):
                summary += name + " byhand\n"

            # Files not yet known to the archive ("NEW").
            elif entry.has_key("new"):
                summary += "(new) %s %s %s\n" % (name, entry["priority"], entry["section"])

                if entry.has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (entry["othercomponents"])

                # For binary packages, include the control-file Description.
                if entry["type"] == "deb":
                    deb_fh = open_file(name)
                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'

            # Known files: show where in the pool they will end up.
            # NOTE(review): these lines presumably sit in an else-branch
            # whose "else:" is not visible in this chunk — confirm against
            # the full file.
            entry["pool name"] = poolify(self.changes.get("source", ""), entry["component"])
            destination = entry["pool name"] + name
            summary += name + "\n to " + destination + "\n"

            if not entry.has_key("type"):
                entry["type"] = "unknown"

            if entry["type"] in ["deb", "udeb", "dsc"]:
                # (queue/unchecked), there we have override entries already, use them
                # (process-new), there we dont have override entries, use the newly generated ones.
                override_prio = entry.get("override priority", entry["priority"])
                override_sect = entry.get("override section", entry["section"])
                override_summary += "%s - %s %s\n" % (name, override_prio, override_sect)

        return (byhand, new, summary, override_summary)

    def check_override(self):
        """
        Checks override entries for validity.

        Returns an empty string if there are no problems
        or the text of a warning if there are
        """
        # Abandon the check if it's a non-sourceful upload
        if not self.changes["architecture"].has_key("source"):

        for name, entry in sorted(self.files.items()):
            # Only already-known (non-NEW) binary packages are compared
            # against their override entries.
            if not entry.has_key("new") and entry["type"] == "deb":
                # "-" means the package did not state a section/priority.
                if entry["section"] != "-":
                    if entry["section"].lower() != entry["override section"].lower():
                        summary += "%s: package says section is %s, override says %s.\n" % (name,
                            entry["override section"])
                if entry["priority"] != "-":
                    if entry["priority"] != entry["override priority"]:
                        summary += "%s: package says priority is %s, override says %s.\n" % (name,
                            entry["override priority"])

    def load_dot_dak(self, changesfile):
        """
        Update ourself by reading a previously created cPickle .dak dumpfile.
        """
        self.changes_file = changesfile
        # The dump lives next to the .changes file: the [:-8] slice drops
        # the 8-character ".changes" suffix before appending ".dak".
        dump_filename = self.changes_file[:-8]+".dak"
        dump_file = open_file(dump_filename)

        p = Unpickler(dump_file)

        # Load order must match the dump order in write_dot_dak() below.
        self.changes.update(p.load())
        self.dsc.update(p.load())
        self.files.update(p.load())
        self.dsc_files.update(p.load())

        self.orig_tar_id = p.load()
        self.orig_tar_location = p.load()

    def sanitised_files(self):
        """Return a copy of self.files reduced to the known field names."""
        for name, entry in self.files.items():
            for i in CHANGESFIELDS_FILES:
                ret[name][i] = entry[i]

    def sanitised_changes(self):
        """Return a copy of self.changes reduced to the known field names."""
        # Mandatory changes fields
        for i in CHANGESFIELDS_MANDATORY:
            ret[i] = self.changes[i]

        # Optional changes fields
        for i in CHANGESFIELDS_OPTIONAL:
            if self.changes.has_key(i):
                ret[i] = self.changes[i]

    def sanitised_dsc(self):
        """Return a copy of self.dsc reduced to the known field names."""
        for i in CHANGESFIELDS_DSC:
            if self.dsc.has_key(i):

    def sanitised_dsc_files(self):
        """Return a copy of self.dsc_files reduced to the known field names."""
        for name, entry in self.dsc_files.items():
            # Mandatory dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                ret[name][i] = entry[i]

            # Optional dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                ret[name][i] = entry[i]

    def write_dot_dak(self, dest_dir):
        """
        Dump ourself into a cPickle file.

        @type dest_dir: string
        @param dest_dir: Path where the dumpfile should be stored

        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
               there's some idea of what process-accepted & process-new use from
               process-unchecked. (JT)
        """
        # Dump goes to dest_dir, named after the .changes file with the
        # 8-character ".changes" suffix replaced by ".dak".
        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
        dump_file = open_file(dump_filename, 'w')

        # Group-writable (0664) so other archive runs may refresh the dump.
        # NOTE(review): presumably wrapped in a try/except whose lines are
        # not visible in this chunk.
        os.chmod(dump_filename, 0664)

        # chmod may fail when the dumpfile is not owned by the user
        # invoking dak (like e.g. when NEW is processed by a member
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
        # security precaution, should never happen unless a weird
        # umask is set anywhere
        if perms & stat.S_IWOTH:
            fubar("%s is world writable and chmod failed." % \
        # ignore the failed chmod otherwise as the file should
        # already have the right privileges and is just, at worst,
        # unreadable for world

        # Pickle protocol 1; dump order must match load_dot_dak() above.
        p = Pickler(dump_file, 1)

        p.dump(self.sanitised_changes())
        p.dump(self.sanitised_dsc())
        p.dump(self.sanitised_files())
        p.dump(self.sanitised_dsc_files())
        p.dump(self.orig_tar_id)
        p.dump(self.orig_tar_location)

    def unknown_files_fields(self, name):
        """Sorted list of files[name] keys not in CHANGESFIELDS_FILES."""
        return sorted(list( set(self.files[name].keys()) -
                            set(CHANGESFIELDS_FILES)))

    def unknown_changes_fields(self):
        """Sorted list of changes keys outside the mandatory+optional sets."""
        return sorted(list( set(self.changes.keys()) -
                            set(CHANGESFIELDS_MANDATORY + CHANGESFIELDS_OPTIONAL)))

    def unknown_dsc_fields(self):
        """Sorted list of dsc keys not in CHANGESFIELDS_DSC."""
        return sorted(list( set(self.dsc.keys()) -
                            set(CHANGESFIELDS_DSC)))

    def unknown_dsc_files_fields(self, name):
        """Sorted list of dsc_files[name] keys outside the known sets."""
        return sorted(list( set(self.dsc_files[name].keys()) -
                            set(CHANGESFIELDS_DSCFILES_MANDATORY + CHANGESFIELDS_DSCFILES_OPTIONAL)))

        # (interior of str_files — its "def" line is not visible in this
        # chunk; renders one "Key: value" line per known field of each file
        # and warns about unrecognised keys)
        for name, entry in self.files.items():
            r.append(" %s:" % (name))
            for i in CHANGESFIELDS_FILES:
                    r.append(" %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_files_fields(name)
            r.append("files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

    def str_changes(self):
        """Render self.changes as indented "Key: value" lines."""
        for i in CHANGESFIELDS_MANDATORY:
            val = self.changes[i]
            # Flatten list/dict values to a space-separated string.
            if isinstance(val, list):
            elif isinstance(val, dict):
                val = " ".join(val.keys())
            r.append(' %s: %s' % (i.capitalize(), val))

        for i in CHANGESFIELDS_OPTIONAL:
            if self.changes.has_key(i):
                r.append(' %s: %s' % (i.capitalize(), self.changes[i]))

        xfields = self.unknown_changes_fields()
        r.append("Warning: changes still has the following unrecognised fields: %s" % ", ".join(xfields))

        # (interior of str_dsc — its "def" line is not visible in this
        # chunk; renders known dsc fields plus a warning for unknown ones)
        for i in CHANGESFIELDS_DSC:
            if self.dsc.has_key(i):
                r.append(' %s: %s' % (i.capitalize(), self.dsc[i]))

        xfields = self.unknown_dsc_fields()
        r.append("Warning: dsc still has the following unrecognised fields: %s" % ", ".join(xfields))

    def str_dsc_files(self):
        """Render each dsc_files entry with its known fields."""
        for name, entry in self.dsc_files.items():
            r.append(" %s:" % (name))
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                r.append(" %s: %s" % (i.capitalize(), entry[i]))
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                    r.append(" %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_dsc_files_fields(name)
            r.append("dsc_files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))

        # (interior of __str__ — its "def" line is not visible in this
        # chunk; concatenates the per-section string renderings)
        r.append(" Changes:")
        r += self.str_changes()

        r += self.str_files()

        r.append(" Dsc Files:")
        r += self.str_dsc_files()
# Export the Changes wrapper class as part of the module's public API.
__all__.append('Changes')