-def add_tiffani (files, path, indexstem):
- index = "%s.diff/Index" % (indexstem)
- filepath = "%s/%s" % (path, index)
- if os.path.exists(filepath):
- #print "ALERT: there was a tiffani file %s" % (filepath)
- files.append(index)
-
-def compressnames (tree,type,file):
- compress = AptCnf.get("%s::%s::Compress" % (tree,type), AptCnf.get("Default::%s::Compress" % (type), ". gzip"))
- result = []
- cl = compress.split()
- uncompress = ("." not in cl)
- for mode in compress.split():
- if mode == ".":
- result.append(file)
- elif mode == "gzip":
- if uncompress:
- result.append("<zcat/.gz>" + file)
- uncompress = 0
- result.append(file + ".gz")
- elif mode == "bzip2":
- if uncompress:
- result.append("<bzcat/.bz2>" + file)
- uncompress = 0
- result.append(file + ".bz2")
- return result
-
-def create_temp_file (cmd):
- f = tempfile.TemporaryFile()
- r = popen2.popen2(cmd)
- r[1].close()
- r = r[0]
- size = 0
- while 1:
- x = r.readline()
- if not x:
- r.close()
- del x,r
- break
- f.write(x)
- size += len(x)
- f.flush()
- f.seek(0)
- return (size, f)
-
-def print_md5sha_files (tree, files, hashop):
- path = Cnf["Dir::Root"] + tree + "/"
- for name in files:
- try:
- if name[0] == "<":
- j = name.index("/")
- k = name.index(">")
- (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
- (size, file_handle) = create_temp_file("%s %s%s%s" %
- (cat, path, name, ext))
- else:
- size = os.stat(path + name)[stat.ST_SIZE]
- file_handle = daklib.utils.open_file(path + name)
- except daklib.utils.cant_open_exc:
- print "ALERT: Couldn't open " + path + name
- else:
- hash = hashop(file_handle)
- file_handle.close()
- out.write(" %s %8d %s\n" % (hash, size, name))
-
-def print_md5_files (tree, files):
- print_md5sha_files (tree, files, apt_pkg.md5sum)
-
-def print_sha1_files (tree, files):
- print_md5sha_files (tree, files, apt_pkg.sha1sum)
-
-def print_sha256_files (tree, files):
- print_md5sha_files (tree, files, apt_pkg.sha256sum)
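+ # Architectures for this suite, skipping "all" and "source"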
+ architectures = get_suite_architectures(suite.suite_name, skipall=True, skipsrc=True, session=session)
+
+ # attribs is a tuple of (Release field name, database attribute name) pairs
+ # used to fill in the main Release file
+ attribs = ( ('Origin', 'origin'),
+ ('Label', 'label'),
+ ('Suite', 'release_suite_output'),
+ ('Version', 'version'),
+ ('Codename', 'codename'),
+ ('Changelogs', 'changelog_url'),
+ )
+
+ # A "Sub" Release file has slightly different fields
+ subattribs = ( ('Archive', 'suite_name'),
+ ('Origin', 'origin'),
+ ('Label', 'label'),
+ ('Version', 'version') )
+
+ # Boolean fields: if the value is true in the database, write "yes" into the Release file
+ boolattrs = ( ('NotAutomatic', 'notautomatic'),
+ ('ButAutomaticUpgrades', 'butautomaticupgrades'),
+ ('Acquire-By-Hash', 'byhash'),
+ )
+
+ cnf = Config()
+
+ suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")
+
+ self.create_output_directories()
+ self.create_release_symlinks()
+
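+ # Write the main Release file as "Release.new" and rename it into place once complete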
+ outfile = os.path.join(self.suite_release_path(), "Release")
+ out = open(outfile + ".new", "w")
+
+ for key, dbfield in attribs:
+ # Hack to skip NULL Version/Changelogs fields, as we used to do.
+ # We should probably just always ignore anything which is None
+ if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
+ continue
+
+ out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+ out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
+
+ if suite.validtime:
+ validtime = float(suite.validtime)
+ out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()+validtime))))
+
+ for key, dbfield in boolattrs:
+ if getattr(suite, dbfield, False):
+ out.write("%s: yes\n" % (key))
+
+ out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))
+
+ components = [ c.component_name for c in suite.components ]
+
+ out.write("Components: %s\n" % (" ".join(components)))
+
+ # For exact compatibility with old g-r, write out Description here instead
+ # of with the rest of the DB fields above
+ if getattr(suite, 'description') is not None:
+ out.write("Description: %s\n" % suite.description)
+
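+ # Write a "Sub" Release file into each matching per-architecture directory under every component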
+ for comp in components:
+ for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
+ if not re_gensubrelease.match(dirpath):
+ continue
+
+ subfile = os.path.join(dirpath, "Release")
+ subrel = open(subfile + '.new', "w")
+
+ for key, dbfield in subattribs:
+ if getattr(suite, dbfield) is not None:
+ subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+ for key, dbfield in boolattrs:
+ if getattr(suite, dbfield, False):
+ subrel.write("%s: yes\n" % (key))
+
+ subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+
+ # Urgh, but until we have all the suite/component/arch stuff in the DB,
+ # this'll have to do
+ arch = os.path.split(dirpath)[-1]
+ if arch.startswith('binary-'):
+ arch = arch[7:]
+
+ subrel.write("Architecture: %s\n" % (arch))
+ subrel.close()
+
+ os.rename(subfile + '.new', subfile)
+
+ # Now that we have done the groundwork, we can go ahead and add the files with
+ # their checksums to the main Release file
+ oldcwd = os.getcwd()
+
+ os.chdir(self.suite_path())
+
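+ # Only generate the checksum types enabled for this suite in the database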
+ hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]
+
+ fileinfo = {}
+
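+ # uncompnotseen maps an uncompressed filename to (file class, compressed filename)
+ # for compressed files whose uncompressed counterpart has not been seen on disk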
+ uncompnotseen = {}
+
+ for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
+ for entry in filenames:
+ # Skip things we don't want to include
+ if not re_includeinrelease.match(entry):
+ continue
+
+ if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
+ continue
+
+ filename = os.path.join(dirpath.lstrip('./'), entry)
+ fileinfo[filename] = {}
+ contents = open(filename, 'r').read()
+
+ # If we find a compressed file and haven't yet seen its uncompressed
+ # counterpart, remember how to regenerate the uncompressed contents
+ # for later use
+ if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
+ uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
+ elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
+ uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
+ elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
+ uncompnotseen[filename[:-3]] = (XzFile, filename)
+
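+ # Record the size and each configured checksum of the file contents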
+ fileinfo[filename]['len'] = len(contents)
+
+ for hf in hashes:
+ fileinfo[filename][hf.release_field] = hf.func(contents)
+
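+ # Add size and checksums for uncompressed versions that only exist compressed on disk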
+ for filename, comp in uncompnotseen.items():
+ # If we've already seen the uncompressed file, we don't
+ # need to do anything again
+ if filename in fileinfo:
+ continue
+
+ fileinfo[filename] = {}
+
+ # comp[0] is the file class used to open it, comp[1] is the filename of the compressed file
+ contents = comp[0](comp[1], 'r').read()
+
+ fileinfo[filename]['len'] = len(contents)
+
+ for hf in hashes:
+ fileinfo[filename][hf.release_field] = hf.func(contents)
+
+
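+ # Write one checksum block per hash type, listing checksum, size and name for every file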
+ for field in sorted(h.release_field for h in hashes):
+ out.write('%s:\n' % field)
+ for filename in sorted(fileinfo.keys()):
+ out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))
+
+ out.close()
+ os.rename(outfile + '.new', outfile)
+
+ self._update_hashfile_table(session, fileinfo, hashes)
+ if suite.byhash:
+ self._make_byhash_links(fileinfo, hashes)
+
+ sign_release_dir(suite, os.path.dirname(outfile))
+
+ os.chdir(oldcwd)
+
+ return