-    out.write("Origin: %s\n" % (origin))
-    out.write("Label: %s\n" % (label))
-    out.write("Suite: %s\n" % (suite))
-    if version != "":
-        out.write("Version: %s\n" % (version))
-    if codename != "":
-        out.write("Codename: %s\n" % (codename))
-    out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
-    if notautomatic != "":
-        out.write("NotAutomatic: %s\n" % (notautomatic))
-    out.write("Architectures: %s\n" % (" ".join(filter(utils.real_arch, SuiteBlock.ValueList("Architectures")))))
-    if components:
-        out.write("Components: %s\n" % (" ".join(components)))
-
-    if description:
-        out.write("Description: %s\n" % (description))
-
-    files = []
-
-    if AptCnf.has_key("tree::%s" % (tree)):
-        for sec in AptCnf["tree::%s::Sections" % (tree)].split():
-            for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
-                if arch == "source":
-                    filepath = "%s/%s/Sources" % (sec, arch)
-                    for file in compressnames("tree::%s" % (tree), "Sources", filepath):
-                        files.append(file)
-                    add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-                else:
-                    disks = "%s/disks-%s" % (sec, arch)
-                    diskspath = Cnf["Dir::Root"]+tree+"/"+disks
-                    if os.path.exists(diskspath):
-                        for dir in os.listdir(diskspath):
-                            if os.path.exists("%s/%s/md5sum.txt" % (diskspath, dir)):
-                                files.append("%s/%s/md5sum.txt" % (disks, dir))
-
-                    filepath = "%s/binary-%s/Packages" % (sec, arch)
-                    for file in compressnames("tree::%s" % (tree), "Packages", filepath):
-                        files.append(file)
-                    add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-
-                if arch == "source":
-                    rel = "%s/%s/Release" % (sec, arch)
+        out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))
+
+        components = [ c.component_name for c in suite.components ]
+
+        out.write("Components: %s\n" % (" ".join(components)))
+
+        # For exact compatibility with old g-r, write out Description here instead
+        # of with the rest of the DB fields above
+        if getattr(suite, 'description') is not None:
+            out.write("Description: %s\n" % suite.description)
+
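+        # Write a sub-Release file in every per-architecture directory beneath
+        # each component, carrying the suite metadata plus Component/Architecture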
+        for comp in components:
+            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
+                if not re_gensubrelease.match(dirpath):
+                    continue
+
+                subfile = os.path.join(dirpath, "Release")
+                subrel = open(subfile + '.new', "w")
+
+                for key, dbfield in subattribs:
+                    if getattr(suite, dbfield) is not None:
+                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+                for key, dbfield in boolattrs:
+                    if getattr(suite, dbfield, False):
+                        subrel.write("%s: yes\n" % (key))
+
+                subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+
+                # Urgh, but until we have all the suite/component/arch stuff in the DB,
+                # this'll have to do
+                arch = os.path.split(dirpath)[-1]
+                if arch.startswith('binary-'):
+                    arch = arch[7:]
+
+                subrel.write("Architecture: %s\n" % (arch))
+                subrel.close()
+
+                os.rename(subfile + '.new', subfile)
+
+        # Now that we have done the groundwork, add the files and their
+        # checksums to the main Release file
+        oldcwd = os.getcwd()
+
+        os.chdir(self.suite_path())
+
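+        # Only generate the checksum types this suite is configured to use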
+        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]
+
+        fileinfo = {}
+
+        uncompnotseen = {}
+
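+        # Walk the suite tree, recording the length and checksums of every
+        # index file that belongs in the Release file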
+        for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
+            for entry in filenames:
+                # Skip things we don't want to include
+                if not re_includeinrelease.match(entry):
+                    continue
+
+                if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
+                    continue
+
+                filename = os.path.join(dirpath.lstrip('./'), entry)
+                fileinfo[filename] = {}
+                contents = open(filename, 'r').read()
+
+                # If we find a compressed file and haven't yet seen its
+                # uncompressed counterpart, remember how to decompress it
+                # for future use
+                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
+                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
+                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
+                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
+                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
+                    uncompnotseen[filename[:-3]] = (XzFile, filename)
+
+                fileinfo[filename]['len'] = len(contents)
+
+                for hf in hashes:
+                    fileinfo[filename][hf.release_field] = hf.func(contents)
+
+        for filename, comp in uncompnotseen.items():
+            # If we've already seen the uncompressed file, we don't
+            # need to do anything again
+            if filename in fileinfo:
+                continue
+
+            fileinfo[filename] = {}
+
+            # File handler is comp[0], filename of compressed file is comp[1]
+            contents = comp[0](comp[1], 'r').read()
+
+            fileinfo[filename]['len'] = len(contents)
+
+            for hf in hashes:
+                fileinfo[filename][hf.release_field] = hf.func(contents)
+
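+        # Emit one block per checksum type, listing "<hash> <size> <path>" for each file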
+        for field in sorted(h.release_field for h in hashes):
+            out.write('%s:\n' % field)
+            for filename in sorted(fileinfo.keys()):
+                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))
+
+        out.close()
+        os.rename(outfile + '.new', outfile)
+
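+        # Update the by-hash bookkeeping: mark everything unreferenced first,
+        # then re-reference the hash files that are still in use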
+        if suite.byhash:
+            query = """
+                UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
+                WHERE suite_id = :id AND unreferenced IS NULL"""
+            session.execute(query, {'id': suite.suite_id})
+
+            for filename in fileinfo:
+                if not os.path.exists(filename):
+                    # probably an uncompressed index we didn't generate
+                    continue
+
+                for h in hashes:
+                    field = h.release_field
+                    hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
+                    query = "SELECT 1 FROM hashfile WHERE path = :p AND suite_id = :id"
+                    q = session.execute(
+                        query,
+                        {'p': hashfile, 'id': suite.suite_id})
+                    if q.rowcount:
+                        session.execute('''
+                            UPDATE hashfile SET unreferenced = NULL
+                            WHERE path = :p and suite_id = :id''',
+                            {'p': hashfile, 'id': suite.suite_id})