- dump_file.close()
-
- ###########################################################################
-
-
def dump_vars(self, dest_dir):
    """
    Dump our Pkg object into a cPickle file.

    @type dest_dir: string
    @param dest_dir: Path where the dumpfile should be stored

    @note: This could just dump the dictionaries as is, but I'd like to avoid this so
           there's some idea of what process-accepted & process-new use from
           process-unchecked. (JT)
    """

    def _subset(src, keys):
        # Copy only the whitelisted keys that are actually present in src.
        return dict((k, src[k]) for k in keys if k in src)

    changes = self.pkg.changes
    dsc = self.pkg.dsc
    files = self.pkg.files
    dsc_files = self.pkg.dsc_files
    orig_tar_id = self.pkg.orig_tar_id
    orig_tar_location = self.pkg.orig_tar_location

    # The dump lives next to the .changes file: strip the ".changes"
    # suffix (8 chars) and append ".dak".
    dump_filename = os.path.join(dest_dir, self.pkg.changes_file[:-8] + ".dak")
    dump_file = utils.open_file(dump_filename, 'w')

    # BUGFIX: close the dump file on every path. The original only closed
    # it on success, leaking the handle if chmod handling or pickling raised.
    try:
        try:
            # NOTE(review): 0o664 / "as e" require Python >= 2.6 — the
            # original used the pre-2.6 spellings (0664, "except X, e").
            os.chmod(dump_filename, 0o664)
        except OSError as e:
            # chmod may fail when the dumpfile is not owned by the user
            # invoking dak (like e.g. when NEW is processed by a member
            # of ftpteam)
            if e.errno == errno.EPERM:
                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
                # security precaution, should never happen unless a weird
                # umask is set anywhere
                if perms & stat.S_IWOTH:
                    utils.fubar("%s is world writable and chmod failed." % \
                        (dump_filename,))
                # ignore the failed chmod otherwise as the file should
                # already have the right privileges and is just, at worst,
                # unreadable for world
            else:
                raise

        p = cPickle.Pickler(dump_file, 1)

        ## files: optional per-file whitelist
        d_files = {}
        for file_entry in files.keys():
            d_files[file_entry] = _subset(files[file_entry],
                [ "package", "version", "architecture", "type", "size",
                  "md5sum", "sha1sum", "sha256sum", "component",
                  "location id", "source package", "source version",
                  "maintainer", "dbtype", "files id", "new",
                  "section", "priority", "othercomponents",
                  "pool name", "original component" ])

        ## changes
        # Mandatory changes fields: copied unconditionally so a missing
        # key raises KeyError, exactly as the original direct lookup did.
        d_changes = dict((i, changes[i]) for i in
            [ "distribution", "source", "architecture", "version",
              "maintainer", "urgency", "fingerprint", "changedby822",
              "changedby2047", "changedbyname", "maintainer822",
              "maintainer2047", "maintainername", "maintaineremail",
              "closes", "changes" ])
        # Optional changes fields
        d_changes.update(_subset(changes,
            [ "changed-by", "filecontents", "format", "process-new note",
              "adv id", "distribution-version", "sponsoremail" ]))

        ## dsc: all fields optional
        d_dsc = _subset(dsc,
            [ "source", "version", "maintainer", "fingerprint",
              "uploaders", "bts changelog", "dm-upload-allowed" ])

        ## dsc_files
        d_dsc_files = {}
        for file_entry in dsc_files.keys():
            entry = dsc_files[file_entry]
            # Mandatory dsc_files fields (KeyError if absent, as before)
            d = { "size": entry["size"], "md5sum": entry["md5sum"] }
            # Optional dsc_files fields
            if "files id" in entry:
                d["files id"] = entry["files id"]
            d_dsc_files[file_entry] = d

        # Pickle the six payloads in the order process-accepted &
        # process-new expect to read them back.
        for i in [ d_changes, d_dsc, d_files, d_dsc_files,
                   orig_tar_id, orig_tar_location ]:
            p.dump(i)
    finally:
        dump_file.close()