- if AptCnf.has_key("tree::%s" % (tree)):
- if AptCnf.has_key("tree::%s::Contents" % (tree)):
- pass
- else:
- for x in os.listdir("%s/%s" % (Cnf["Dir::Root"], tree)):
- if x.startswith('Contents-'):
- files.append(x)
-
- for sec in AptCnf["tree::%s::Sections" % (tree)].split():
- for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
- if arch == "source":
- filepath = "%s/%s/Sources" % (sec, arch)
- for cfile in compressnames("tree::%s" % (tree), "Sources", filepath):
- files.append(cfile)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
- else:
- disks = "%s/disks-%s" % (sec, arch)
- diskspath = Cnf["Dir::Root"]+tree+"/"+disks
- if os.path.exists(diskspath):
- for dir in os.listdir(diskspath):
- if os.path.exists("%s/%s/md5sum.txt" % (diskspath, dir)):
- files.append("%s/%s/md5sum.txt" % (disks, dir))
-
- filepath = "%s/binary-%s/Packages" % (sec, arch)
- for cfile in compressnames("tree::%s" % (tree), "Packages", filepath):
- files.append(cfile)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-
- if arch == "source":
- rel = "%s/%s/Release" % (sec, arch)
- else:
- rel = "%s/binary-%s/Release" % (sec, arch)
- relpath = Cnf["Dir::Root"]+tree+"/"+rel
-
- try:
- if os.access(relpath, os.F_OK):
- if os.stat(relpath).st_nlink > 1:
- os.unlink(relpath)
- release = open(relpath, "w")
- #release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w")
- except IOError:
- utils.fubar("Couldn't write to " + relpath)
-
- release.write("Archive: %s\n" % (suite))
- if version != "":
- release.write("Version: %s\n" % (version))
- if suite_suffix:
- release.write("Component: %s/%s\n" % (suite_suffix,sec))
- else:
- release.write("Component: %s\n" % (sec))
- release.write("Origin: %s\n" % (origin))
- release.write("Label: %s\n" % (label))
- if notautomatic != "":
- release.write("NotAutomatic: %s\n" % (notautomatic))
- release.write("Architecture: %s\n" % (arch))
- release.close()
- files.append(rel)
-
- if AptCnf.has_key("tree::%s/main" % (tree)):
- for dis in ["main", "contrib", "non-free"]:
- if not AptCnf.has_key("tree::%s/%s" % (tree, dis)): continue
- sec = AptCnf["tree::%s/%s::Sections" % (tree,dis)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (tree)
-
- for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
- if arch != "source": # always true
- for cfile in compressnames("tree::%s/%s" % (tree,dis),
- "Packages",
- "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
- files.append(cfile)
- elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
- usetree = AptCnf["tree::%s::FakeDI" % (tree)]
- sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (usetree)
-
- for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
- if arch != "source": # always true
- for cfile in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
- files.append(cfile)
-
- elif AptCnf.has_key("bindirectory::%s" % (tree)):
- for cfile in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
- files.append(cfile.replace(tree+"/","",1))
- for cfile in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
- files.append(cfile.replace(tree+"/","",1))
- else:
- print "ALERT: no tree/bindirectory for %s" % (tree)
-
- out.write("MD5Sum:\n")
- print_md5_files(tree, files)
- out.write("SHA1:\n")
- print_sha1_files(tree, files)
- out.write("SHA256:\n")
- print_sha256_files(tree, files)
+ out.write("Components: %s\n" % (" ".join(components)))
+
+ # For exact compatibility with old g-r, write out Description here instead
+ # of with the rest of the DB fields above
+ if getattr(suite, 'description') is not None:
+ out.write("Description: %s\n" % suite.description)
+
+ for comp in components:
+ for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
+ if not re_gensubrelease.match(dirpath):
+ continue
+
+ subfile = os.path.join(dirpath, "Release")
+ subrel = open(subfile + '.new', "w")
+
+ for key, dbfield in subattribs:
+ if getattr(suite, dbfield) is not None:
+ subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+ for key, dbfield in boolattrs:
+ if getattr(suite, dbfield, False):
+ subrel.write("%s: yes\n" % (key))
+
+ subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+
+ # Urgh, but until we have all the suite/component/arch stuff in the DB,
+ # this'll have to do
+ arch = os.path.split(dirpath)[-1]
+ if arch.startswith('binary-'):
+ arch = arch[7:]
+
+ subrel.write("Architecture: %s\n" % (arch))
+ subrel.close()
+
+ os.rename(subfile + '.new', subfile)
+
+ # Now that we have done the groundwork, we can go ahead and add the files with
+ # their checksums to the main Release file
+ oldcwd = os.getcwd()
+
+ os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))
+
+ hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
+ 'SHA1' : apt_pkg.sha1sum,
+ 'SHA256' : apt_pkg.sha256sum }
+
+ fileinfo = {}
+
+ uncompnotseen = {}
+
+ for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
+ for entry in filenames:
+ # Skip things we don't want to include
+ if not re_includeinrelease.match(entry):
+ continue
+
+ if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
+ continue
+
+ filename = os.path.join(dirpath.lstrip('./'), entry)
+ fileinfo[filename] = {}
+ contents = open(filename, 'r').read()
+
+ # If we find a file for which we have a compressed version and
+ # haven't yet seen the uncompressed one, store the possibility
+ # for future use
+ if entry.endswith(".gz") and entry[:-3] not in uncompnotseen.keys():
+ uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
+ elif entry.endswith(".bz2") and entry[:-4] not in uncompnotseen.keys():
+ uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
+ elif entry.endswith(".xz") and entry[:-3] not in uncompnotseen.keys():
+ uncompnotseen[filename[:-3]] = (XzFile, filename)
+
+ fileinfo[filename]['len'] = len(contents)
+
+ for hf, func in hashfuncs.items():
+ fileinfo[filename][hf] = func(contents)
+
+ for filename, comp in uncompnotseen.items():
+ # If we've already seen the uncompressed file, we don't
+ # need to do anything again
+ if filename in fileinfo.keys():
+ continue
+
+ # Skip uncompressed Contents files as they're huge, take ages to
+ # checksum and we checksum the compressed ones anyway
+ if os.path.basename(filename).startswith("Contents"):
+ continue
+
+ fileinfo[filename] = {}
+
+ # File handler is comp[0], filename of compressed file is comp[1]
+ contents = comp[0](comp[1], 'r').read()
+
+ fileinfo[filename]['len'] = len(contents)
+
+ for hf, func in hashfuncs.items():
+ fileinfo[filename][hf] = func(contents)
+
+
+ for h in sorted(hashfuncs.keys()):
+ out.write('%s:\n' % h)
+ for filename in sorted(fileinfo.keys()):
+ out.write(" %s %8d %s\n" % (fileinfo[filename][h], fileinfo[filename]['len'], filename))