7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @copyright: 2009 Mark Hymers <mhy@debian.org>
11 @license: GNU General Public License version 2 or later
14 # This program is free software; you can redistribute it and/or modify
15 # it under the terms of the GNU General Public License as published by
16 # the Free Software Foundation; either version 2 of the License, or
17 # (at your option) any later version.
19 # This program is distributed in the hope that it will be useful,
20 # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22 # GNU General Public License for more details.
24 # You should have received a copy of the GNU General Public License
25 # along with this program; if not, write to the Free Software
26 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
28 ###############################################################################
34 from cPickle import Unpickler, Pickler
35 from errno import EPERM
37 from apt_inst import debExtractControl
38 from apt_pkg import ParseSection
40 from utils import open_file, fubar, poolify
44 ###############################################################################
48 ###############################################################################
# Field whitelists used by the sanitised_*() methods below to decide which
# keys of the in-memory dictionaries get written to the .dak pickle dump,
# and by the unknown_*_fields() methods to report unrecognised keys.

# Fields copied from self.changes unconditionally (a missing one raises).
CHANGESFIELDS_MANDATORY = [ "distribution", "source", "architecture",
    "version", "maintainer", "urgency", "fingerprint", "changedby822",
    "changedby2047", "changedbyname", "maintainer822", "maintainer2047",
    "maintainername", "maintaineremail", "closes", "changes" ]

__all__.append('CHANGESFIELDS_MANDATORY')

# Fields copied from self.changes only when present.
CHANGESFIELDS_OPTIONAL = [ "changed-by", "filecontents", "format",
    "process-new note", "adv id", "distribution-version", "sponsoremail" ]

__all__.append('CHANGESFIELDS_OPTIONAL')

# Recognised per-file keys of self.files entries.
CHANGESFIELDS_FILES = [ "package", "version", "architecture", "type", "size",
    "md5sum", "sha1sum", "sha256sum", "component", "location id",
    "source package", "source version", "maintainer", "dbtype", "files id",
    "new", "section", "priority", "othercomponents", "pool name",
    "original component" ]

__all__.append('CHANGESFIELDS_FILES')

# Fields copied from self.dsc when present.
CHANGESFIELDS_DSC = [ "source", "version", "maintainer", "fingerprint",
    "uploaders", "bts changelog", "dm-upload-allowed" ]

__all__.append('CHANGESFIELDS_DSC')

# Per-file keys of self.dsc_files entries: mandatory vs. optional.
CHANGESFIELDS_DSCFILES_MANDATORY = [ "size", "md5sum" ]

__all__.append('CHANGESFIELDS_DSCFILES_MANDATORY')

CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]

__all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')

# Recognised keys of self.orig_files entries.
CHANGESFIELDS_ORIGFILES = [ "id", "location" ]

__all__.append('CHANGESFIELDS_ORIGFILES')
87 ###############################################################################
class Changes(object):
    """ Convenience wrapper to carry around all the package information """

    def __init__(self, **kwds):
        # Path/basename of the .changes file this object describes.
        self.changes_file = ""
        # NOTE(review): the remaining attribute initialisation (the
        # changes/dsc/files/dsc_files/orig_files dictionaries used by the
        # methods below) is elided from this excerpt.
    def file_summary(self):
        """Build a human-readable summary of self.files.

        Returns a tuple (byhand, new, summary, override_summary).
        NOTE(review): the initialisation of byhand/new/summary (and the
        assignments that set the flags True) is elided from this excerpt.
        """
        # changes["distribution"] may not exist in corner cases
        # (e.g. unreadable changes files)
        if not self.changes.has_key("distribution") or not \
               isinstance(self.changes["distribution"], dict):
            self.changes["distribution"] = {}

        override_summary = ""

        for name, entry in sorted(self.files.items()):
            # Files flagged for manual (byhand) processing.
            if entry.has_key("byhand"):
                summary += name + " byhand\n"

            # Files NEW to the archive: show their claimed priority/section.
            elif entry.has_key("new"):
                summary += "(new) %s %s %s\n" % (name, entry["priority"], entry["section"])

                if entry.has_key("othercomponents"):
                    summary += "WARNING: Already present in %s distribution.\n" % (entry["othercomponents"])

                # For binary packages, append the long Description taken
                # from the .deb's control file.
                if entry["type"] == "deb":
                    deb_fh = open_file(name)
                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'

                # NOTE(review): an else: branch for already-known files is
                # elided here — the lines below show their pool destination.
                entry["pool name"] = poolify(self.changes.get("source", ""), entry["component"])
                destination = entry["pool name"] + name
                summary += name + "\n to " + destination + "\n"

            # Default the type so the membership test below cannot KeyError.
            if not entry.has_key("type"):
                entry["type"] = "unknown"

            if entry["type"] in ["deb", "udeb", "dsc"]:
                # (queue/unchecked), there we have override entries already, use them
                # (process-new), there we don't have override entries, use the newly generated ones.
                override_prio = entry.get("override priority", entry["priority"])
                override_sect = entry.get("override section", entry["section"])
                override_summary += "%s - %s %s\n" % (name, override_prio, override_sect)

        return (byhand, new, summary, override_summary)
    def check_override(self):
        """
        Checks override entries for validity.

        Returns an empty string if there are no problems
        or the text of a warning if there are
        """
        # NOTE(review): the initialisation of summary and the early/final
        # return statements are elided from this excerpt.

        # Abandon the check if it's a non-sourceful upload
        if not self.changes["architecture"].has_key("source"):
            # (early return elided in this excerpt)

        for name, entry in sorted(self.files.items()):
            # Only compare overrides for binary packages that are not NEW.
            if not entry.has_key("new") and entry["type"] == "deb":
                # "-" means the package did not state a section/priority.
                if entry["section"] != "-":
                    if entry["section"].lower() != entry["override section"].lower():
                        summary += "%s: package says section is %s, override says %s.\n" % (name,
                            # (the entry["section"] argument line is elided in this excerpt)
                            entry["override section"])

                if entry["priority"] != "-":
                    if entry["priority"] != entry["override priority"]:
                        summary += "%s: package says priority is %s, override says %s.\n" % (name,
                            # (the entry["priority"] argument line is elided in this excerpt)
                            entry["override priority"])
    def remove_known_changes(self, session=None):
        # Delete this .changes file's row from the known_changes table.
        # NOTE(review): the "if session is None:" guard around the session
        # creation (and any commit/close handling) is elided from this excerpt.
        session = DBConn().session()

        session.delete(get_knownchange(self.changes_file, session))
192 def mark_missing_fields(self):
193 """add "missing" in fields which we will require for the known_changes table"""
194 for key in ['urgency', 'maintainer', 'fingerprint', 'changed-by' ]:
195 if (not self.changes.has_key(key)) or (not self.changes[key]):
196 self.changes[key]='missing'
    def add_known_changes(self, dirpath, session=None):
        """Insert a row for this .changes file into the known_changes table.

        @type dirpath: string
        @param dirpath: directory containing self.changes_file

        NOTE(review): the "if session is None:" guard, the
        session.execute( call wrapping the SQL below, the 'filetime'
        bind-parameter entry, and any commit are elided from this excerpt.
        """
        session = DBConn().session()

        changesfile = os.path.join(dirpath, self.changes_file)
        # Use the file's ctime as the "seen" timestamp for the row.
        filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))

        # Guarantee the required fields at least hold 'missing' before the INSERT.
        self.mark_missing_fields()

        """INSERT INTO known_changes
        (changesname, seen, source, binaries, architecture, version,
        distribution, urgency, maintainer, fingerprint, changedby, date)
        VALUES (:changesfile,:filetime,:source,:binary, :architecture,
        :version,:distribution,:urgency,:maintainer,:fingerprint,:changedby,:date)""",
        { 'changesfile':self.changes_file,
          'source':self.changes["source"],
          'binary':self.changes["binary"],
          'architecture':self.changes["architecture"],
          'version':self.changes["version"],
          'distribution':self.changes["distribution"],
          'urgency':self.changes["urgency"],
          'maintainer':self.changes["maintainer"],
          'fingerprint':self.changes["fingerprint"],
          'changedby':self.changes["changed-by"],
          'date':self.changes["date"]} )
    def load_dot_dak(self, changesfile):
        """
        Update ourself by reading a previously created cPickle .dak dumpfile.
        """
        self.changes_file = changesfile
        # The dump sits next to the .changes file: strip the ".changes"
        # suffix (8 characters) and append ".dak".
        dump_filename = self.changes_file[:-8]+".dak"
        dump_file = open_file(dump_filename)

        p = Unpickler(dump_file)

        # The dump holds a fixed sequence of pickled dictionaries.
        self.changes.update(p.load())
        self.dsc.update(p.load())
        self.files.update(p.load())
        self.dsc_files.update(p.load())

        # NOTE(review): the "next_obj = p.load()" call feeding the test
        # below is elided from this excerpt.
        if isinstance(next_obj, dict):
            # New-style dump: orig_files is stored directly as a dict.
            self.orig_files.update(next_obj)
        # NOTE(review): an else: branch handling old-style dumps is elided
        # here; the lines below belong to it.
        # Auto-convert old dak files to new format supporting
        # multiple orig tarballs: locate the .orig.tar.gz among dsc_files
        # and rebuild its orig_files entry from the bare id/location values.
        for dsc_file in self.dsc_files.keys():
            if dsc_file.endswith(".orig.tar.gz"):
                orig_tar_gz = dsc_file
        self.orig_files[orig_tar_gz] = {}
        # (a "next_obj != None" guard and a further p.load() are elided here)
        self.orig_files[orig_tar_gz]["id"] = next_obj
        if next_obj != None and next_obj != "":
            self.orig_files[orig_tar_gz]["location"] = next_obj
        # Drop the entry again if the old dump carried neither id nor location.
        if len(self.orig_files[orig_tar_gz]) == 0:
            del self.orig_files[orig_tar_gz]
    def sanitised_files(self):
        """Return a copy of self.files restricted to CHANGESFIELDS_FILES keys.

        NOTE(review): the ret initialisation, per-name dict creation, the
        has_key guard and the final return are elided from this excerpt.
        """
        for name, entry in self.files.items():
            # Copy only the recognised per-file fields.
            for i in CHANGESFIELDS_FILES:
                ret[name][i] = entry[i]
    def sanitised_changes(self):
        """Build a dict of self.changes restricted to the recognised fields.

        Mandatory fields are copied unconditionally (so a missing one will
        raise); optional fields only when present.  NOTE(review): the ret
        initialisation and the final return are elided from this excerpt.
        """
        # Mandatory changes fields
        for i in CHANGESFIELDS_MANDATORY:
            ret[i] = self.changes[i]

        # Optional changes fields
        for i in CHANGESFIELDS_OPTIONAL:
            if self.changes.has_key(i):
                ret[i] = self.changes[i]
    def sanitised_dsc(self):
        """Copy the recognised CHANGESFIELDS_DSC fields out of self.dsc.

        NOTE(review): the ret initialisation, the copy statement inside
        the guard, and the final return are elided from this excerpt.
        """
        for i in CHANGESFIELDS_DSC:
            if self.dsc.has_key(i):
                # (presumably ret[i] = self.dsc[i] — elided in this excerpt)
    def sanitised_dsc_files(self):
        """Per-name copy of self.dsc_files restricted to recognised fields.

        NOTE(review): ret initialisation, per-name dict creation, the
        optional-field presence guard and the final return are elided
        from this excerpt.
        """
        for name, entry in self.dsc_files.items():
            # Mandatory dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                ret[name][i] = entry[i]

            # Optional dsc_files fields
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                # (presence guard elided in this excerpt)
                ret[name][i] = entry[i]
    def sanitised_orig_files(self):
        """Per-name copy of self.orig_files restricted to CHANGESFIELDS_ORIGFILES.

        NOTE(review): ret initialisation, per-name dict creation, the
        presence guard and the final return are elided from this excerpt.
        """
        for name, entry in self.orig_files.items():
            # Optional orig_files fields
            for i in CHANGESFIELDS_ORIGFILES:
                # (presence guard elided in this excerpt)
                ret[name][i] = entry[i]
    def write_dot_dak(self, dest_dir):
        """
        Dump ourself into a cPickle file.

        @type dest_dir: string
        @param dest_dir: Path where the dumpfile should be stored

        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
        there's some idea of what process-accepted & process-new use from
        process-unchecked. (JT)
        """
        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
        dump_file = open_file(dump_filename, 'w')

        # NOTE(review): a try: wrapping the chmod and its except: clause
        # are elided from this excerpt.
        os.chmod(dump_filename, 0664)
        # chmod may fail when the dumpfile is not owned by the user
        # invoking dak (like e.g. when NEW is processed by a member
        # of another group); inspect the existing permissions instead.
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
        # security precaution, should never happen unless a weird
        # umask is set anywhere
        if perms & stat.S_IWOTH:
            # (the dump_filename argument to this call is elided)
            fubar("%s is world writable and chmod failed." % \
        # ignore the failed chmod otherwise as the file should
        # already have the right privileges and is just, at worst,
        # unreadable for world

        # Pickle protocol 1; the dump is a fixed sequence of sanitised dicts
        # matching the load order in load_dot_dak().
        p = Pickler(dump_file, 1)

        p.dump(self.sanitised_changes())
        p.dump(self.sanitised_dsc())
        p.dump(self.sanitised_files())
        p.dump(self.sanitised_dsc_files())
        p.dump(self.sanitised_orig_files())
373 def unknown_files_fields(self, name):
374 return sorted(list( set(self.files[name].keys()) -
375 set(CHANGESFIELDS_FILES)))
377 def unknown_changes_fields(self):
378 return sorted(list( set(self.changes.keys()) -
379 set(CHANGESFIELDS_MANDATORY + CHANGESFIELDS_OPTIONAL)))
381 def unknown_dsc_fields(self):
382 return sorted(list( set(self.dsc.keys()) -
383 set(CHANGESFIELDS_DSC)))
385 def unknown_dsc_files_fields(self, name):
386 return sorted(list( set(self.dsc_files[name].keys()) -
387 set(CHANGESFIELDS_DSCFILES_MANDATORY + CHANGESFIELDS_DSCFILES_OPTIONAL)))
        # --- interior of str_files() (the def line, "r = []" and the final
        # return are elided from this excerpt) ---
        # Render every file entry, then warn about unrecognised keys.
        for name, entry in self.files.items():
            r.append(" %s:" % (name))
            for i in CHANGESFIELDS_FILES:
                # (presence guard elided in this excerpt)
                r.append(" %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_files_fields(name)
            # (non-empty guard elided in this excerpt)
            r.append("files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))
    def str_changes(self):
        """Render self.changes as a list of display lines.

        NOTE(review): the "r = []" initialisation, the list-join branch
        body and the final return are elided from this excerpt.
        """
        for i in CHANGESFIELDS_MANDATORY:
            val = self.changes[i]
            # Flatten container values to a single displayable string.
            if isinstance(val, list):
                # (presumably val = " ".join(val) — elided in this excerpt)
            elif isinstance(val, dict):
                val = " ".join(val.keys())
            r.append(' %s: %s' % (i.capitalize(), val))

        for i in CHANGESFIELDS_OPTIONAL:
            if self.changes.has_key(i):
                r.append(' %s: %s' % (i.capitalize(), self.changes[i]))

        xfields = self.unknown_changes_fields()
        # (non-empty guard elided in this excerpt)
        r.append("Warning: changes still has the following unrecognised fields: %s" % ", ".join(xfields))
        # --- interior of str_dsc() (the def line, "r = []" and the final
        # return are elided from this excerpt) ---
        # Render each recognised dsc field, then warn about the rest.
        for i in CHANGESFIELDS_DSC:
            if self.dsc.has_key(i):
                r.append(' %s: %s' % (i.capitalize(), self.dsc[i]))

        xfields = self.unknown_dsc_fields()
        # (non-empty guard elided in this excerpt)
        r.append("Warning: dsc still has the following unrecognised fields: %s" % ", ".join(xfields))
    def str_dsc_files(self):
        """Render self.dsc_files as a list of display lines.

        NOTE(review): the "r = []" initialisation, the optional-field
        presence guard and the final return are elided from this excerpt.
        """
        for name, entry in self.dsc_files.items():
            r.append(" %s:" % (name))
            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
                r.append(" %s: %s" % (i.capitalize(), entry[i]))
            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                # (presence guard elided in this excerpt)
                r.append(" %s: %s" % (i.capitalize(), entry[i]))
            xfields = self.unknown_dsc_files_fields(name)
            # (non-empty guard elided in this excerpt)
            r.append("dsc_files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))
452 r.append(" Changes:")
453 r += self.str_changes()
463 r += self.str_files()
467 r.append(" Dsc Files:")
468 r += self.str_dsc_files()
472 __all__.append('Changes')