X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=813782e23ef38e0dfc7c35818eba74d3f00a37c2;hb=e00d86bf93fe8e20b456caae7c099b153b77678f;hp=f35ee18cbd4b37c950e57cafe999a053d6b7aa35;hpb=1d79ca7f4979f4b1d1b7741b4d2ab8af8db7c347;p=dak.git

diff --git a/daklib/queue.py b/daklib/queue.py
index f35ee18c..813782e2 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -129,7 +129,7 @@ def check_valid(new):
         new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
         # Sanity checks
         di = section.find("debian-installer") != -1
-        if (di and file_type != "udeb") or (not di and file_type == "udeb"):
+        if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
             new[pkg]["section id"] = -1
         if (priority == "source" and file_type != "dsc") or \
            (priority != "source" and file_type == "dsc"):
@@ -171,8 +171,11 @@ class Upload:
     ###########################################################################

     def init_vars (self):
-        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
-            exec "self.pkg.%s.clear();" % (i)
+        self.pkg.changes.clear()
+        self.pkg.dsc.clear()
+        self.pkg.files.clear()
+        self.pkg.dsc_files.clear()
+        self.pkg.legacy_source_untouchable.clear()
         self.pkg.orig_tar_id = None
         self.pkg.orig_tar_location = ""
         self.pkg.orig_tar_gz = None
@@ -183,10 +186,16 @@ class Upload:
         dump_filename = self.pkg.changes_file[:-8]+".dak"
         dump_file = utils.open_file(dump_filename)
         p = cPickle.Unpickler(dump_file)
-        for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
-            exec "self.pkg.%s.update(p.load());" % (i)
-        for i in [ "orig_tar_id", "orig_tar_location" ]:
-            exec "self.pkg.%s = p.load();" % (i)
+
+        self.pkg.changes.update(p.load())
+        self.pkg.dsc.update(p.load())
+        self.pkg.files.update(p.load())
+        self.pkg.dsc_files.update(p.load())
+        self.pkg.legacy_source_untouchable.update(p.load())
+
+        self.pkg.orig_tar_id = p.load()
+        self.pkg.orig_tar_location = p.load()
+
         dump_file.close()

     ###########################################################################
@@ -196,31 +205,50 @@ class Upload:
     # process-new use from process-unchecked

     def dump_vars(self, dest_dir):
-        for i in [ "changes", "dsc", "files", "dsc_files",
-                   "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
-            exec "%s = self.pkg.%s;" % (i,i)
+
+        changes = self.pkg.changes
+        dsc = self.pkg.dsc
+        files = self.pkg.files
+        dsc_files = self.pkg.dsc_files
+        legacy_source_untouchable = self.pkg.legacy_source_untouchable
+        orig_tar_id = self.pkg.orig_tar_id
+        orig_tar_location = self.pkg.orig_tar_location
+
         dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
         dump_file = utils.open_file(dump_filename, 'w')
         try:
-            os.chmod(dump_filename, 0660)
+            os.chmod(dump_filename, 0664)
         except OSError, e:
+            # chmod may fail when the dumpfile is not owned by the user
+            # invoking dak (like e.g. when NEW is processed by a member
+            # of ftpteam)
             if errno.errorcode[e.errno] == 'EPERM':
                 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
-                if perms & stat.S_IROTH:
-                    utils.fubar("%s is world readable and chmod failed." % (dump_filename))
+                # security precaution, should never happen unless a weird
+                # umask is set anywhere
+                if perms & stat.S_IWOTH:
+                    utils.fubar("%s is world writable and chmod failed." % \
+                        (dump_filename,))
+                # ignore the failed chmod otherwise as the file should
+                # already have the right privileges and is just, at worst,
+                # unreadable for world
             else:
                 raise

         p = cPickle.Pickler(dump_file, 1)
-        for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
-            exec "%s = {}" % i
+        d_changes = {}
+        d_dsc = {}
+        d_files = {}
+        d_dsc_files = {}
+
         ## files
         for file_entry in files.keys():
             d_files[file_entry] = {}
             for i in [ "package", "version", "architecture", "type", "size",
-                       "md5sum", "component", "location id", "source package",
-                       "source version", "maintainer", "dbtype", "files id",
-                       "new", "section", "priority", "othercomponents",
+                       "md5sum", "sha1sum", "sha256sum", "component",
+                       "location id", "source package", "source version",
+                       "maintainer", "dbtype", "files id", "new",
+                       "section", "priority", "othercomponents",
                        "pool name", "original component" ]:
                 if files[file_entry].has_key(i):
                     d_files[file_entry][i] = files[file_entry][i]
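The rewritten update_vars()/dump_vars() pair above relies on the pickle stream being read back in exactly the order it was written: each explicit p.load() in update_vars() must correspond, position for position, to a dump() in dump_vars(). A minimal sketch of that contract, using a hypothetical example.dak path and placeholder values rather than dak's own helpers:

    import cPickle

    # Write two values in a fixed order (protocol 1, as in Pickler(dump_file, 1) above).
    dump_file = open("example.dak", "wb")               # hypothetical path
    p = cPickle.Pickler(dump_file, 1)
    p.dump({"source": "hello", "version": "1.0-1"})     # stands in for d_changes
    p.dump(None)                                        # stands in for orig_tar_id
    dump_file.close()

    # Read them back with one load() per dump(), in the same order,
    # mirroring the explicit p.load() calls in update_vars().
    dump_file = open("example.dak", "rb")
    u = cPickle.Unpickler(dump_file)
    changes = u.load()        # first dump()  -> first load()
    orig_tar_id = u.load()    # second dump() -> second load()
    dump_file.close()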