From: Anthony Towns
Date: Sat, 29 Jul 2006 17:56:35 +0000 (+1000)
Subject: support for ~ in versions; use _ as an internal separator instead
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=1bcac7a38c0b55aa9a8ec984c44661f92e7bc536;p=dak.git

support for ~ in versions; use _ as an internal separator instead
---

diff --git a/dak/control_suite.py b/dak/control_suite.py
index b22f4e05..3a94ca5d 100755
--- a/dak/control_suite.py
+++ b/dak/control_suite.py
@@ -77,10 +77,10 @@ def get_id (package, version, architecture):
     ql = q.getresult()

     if not ql:
-        daklib.utils.warn("Couldn't find '%s~%s~%s'." % (package, version, architecture))
+        daklib.utils.warn("Couldn't find '%s_%s_%s'." % (package, version, architecture))
         return None
     if len(ql) > 1:
-        daklib.utils.warn("Found more than one match for '%s~%s~%s'." % (package, version, architecture))
+        daklib.utils.warn("Found more than one match for '%s_%s_%s'." % (package, version, architecture))
         return None
     id = ql[0][0]
     return id
@@ -178,13 +178,13 @@ def process_file (file, suite, action):
             # Take action
             if action == "add":
                 if assoication_id:
-                    daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+                    daklib.utils.warn("'%s_%s_%s' already exists in suite %s." % (package, version, architecture, suite))
                     continue
                 else:
                     q = projectB.query("INSERT INTO src_associations (suite, source) VALUES (%s, %s)" % (suite_id, id))
             elif action == "remove":
                 if assoication_id == None:
-                    daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+                    daklib.utils.warn("'%s_%s_%s' doesn't exist in suite %s." % (package, version, architecture, suite))
                     continue
                 else:
                     q = projectB.query("DELETE FROM src_associations WHERE id = %s" % (assoication_id))
@@ -199,13 +199,13 @@ def process_file (file, suite, action):
             # Take action
             if action == "add":
                 if assoication_id:
-                    daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+                    daklib.utils.warn("'%s_%s_%s' already exists in suite %s." % (package, version, architecture, suite))
                     continue
                 else:
                     q = projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%s, %s)" % (suite_id, id))
             elif action == "remove":
                 if assoication_id == None:
-                    daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+                    daklib.utils.warn("'%s_%s_%s' doesn't exist in suite %s." % (package, version, architecture, suite))
                     continue
                 else:
                     q = projectB.query("DELETE FROM bin_associations WHERE id = %s" % (assoication_id))
diff --git a/dak/cruft_report.py b/dak/cruft_report.py
index 2a3974e5..19333265 100755
--- a/dak/cruft_report.py
+++ b/dak/cruft_report.py
@@ -202,7 +202,7 @@ def do_dubious_nbs(dubious_nbs):
 def do_obsolete_source(duplicate_bins, bin2source):
     obsolete = {}
     for key in duplicate_bins.keys():
-        (source_a, source_b) = key.split('~')
+        (source_a, source_b) = key.split('_')
         for source in [ source_a, source_b ]:
             if not obsolete.has_key(source):
                 if not source_binaries.has_key(source):
@@ -340,7 +340,7 @@ def main ():
             if bin_pkgs.has_key(binary):
                 key_list = [ source, bin_pkgs[binary] ]
                 key_list.sort()
-                key = '~'.join(key_list)
+                key = '_'.join(key_list)
                 duplicate_bins.setdefault(key, [])
                 duplicate_bins[key].append(binary)
             bin_pkgs[binary] = source
@@ -390,7 +390,7 @@ def main ():
                 if previous_source != source:
                     key_list = [ source, previous_source ]
                     key_list.sort()
-                    key = '~'.join(key_list)
+                    key = '_'.join(key_list)
                     duplicate_bins.setdefault(key, [])
                     if package not in duplicate_bins[key]:
                         duplicate_bins[key].append(package)
@@ -445,7 +445,7 @@ def main ():
         keys = duplicate_bins.keys()
         keys.sort()
         for key in keys:
-            (source_a, source_b) = key.split("~")
+            (source_a, source_b) = key.split("_")
             print " o %s & %s => %s" % (source_a, source_b, ", ".join(duplicate_bins[key]))
         print

diff --git a/dak/import_archive.py b/dak/import_archive.py
index ad69419f..f064b4ae 100755
--- a/dak/import_archive.py
+++ b/dak/import_archive.py
@@ -307,7 +307,7 @@ def get_location_path(directory):
 def get_or_set_files_id (filename, size, md5sum, location_id):
     global files_id_cache, files_id_serial, files_query_cache

-    cache_key = "~".join((filename, size, md5sum, repr(location_id)))
+    cache_key = "_".join((filename, size, md5sum, repr(location_id)))
     if not files_id_cache.has_key(cache_key):
         files_id_serial += 1
         files_query_cache.write("%d\t%s\t%s\t%s\t%d\t\\N\n" % (files_id_serial, filename, size, md5sum, location_id))
@@ -363,7 +363,7 @@ def process_sources (filename, suite, component, archive):
             (md5sum, size, filename) = line.strip().split()
             # Don't duplicate .orig.tar.gz's
             if filename.endswith(".orig.tar.gz"):
-                cache_key = "%s~%s~%s" % (filename, size, md5sum)
+                cache_key = "%s_%s_%s" % (filename, size, md5sum)
                 if orig_tar_gz_cache.has_key(cache_key):
                     id = orig_tar_gz_cache[cache_key]
                 else:
@@ -376,9 +376,9 @@ def process_sources (filename, suite, component, archive):
             if filename.endswith(".dsc"):
                 files_id = id
         filename = directory + package + '_' + no_epoch_version + '.dsc'
-        cache_key = "%s~%s" % (package, version)
+        cache_key = "%s_%s" % (package, version)
         if not source_cache.has_key(cache_key):
-            nasty_key = "%s~%s" % (package, version)
+            nasty_key = "%s_%s" % (package, version)
             source_id_serial += 1
             if not source_cache_for_binaries.has_key(nasty_key):
                 source_cache_for_binaries[nasty_key] = source_id_serial
@@ -438,16 +438,16 @@ def process_packages (filename, suite, component, archive):
         filename = poolify (filename, location)
         if architecture == "all":
             filename = re_arch_from_filename.sub("binary-all", filename)
-        cache_key = "%s~%s" % (source, source_version)
+        cache_key = "%s_%s" % (source, source_version)
         source_id = source_cache_for_binaries.get(cache_key, None)
         size = Scanner.Section["size"]
         md5sum = Scanner.Section["md5sum"]
         files_id = get_or_set_files_id (filename, size, md5sum, location_id)
         type = "deb"; # FIXME
-        cache_key = "%s~%s~%s~%d~%d~%d~%d" % (package, version, repr(source_id), architecture_id, location_id, files_id, suite_id)
+        cache_key = "%s_%s_%s_%d_%d_%d_%d" % (package, version, repr(source_id), architecture_id, location_id, files_id, suite_id)
         if not arch_all_cache.has_key(cache_key):
             arch_all_cache[cache_key] = 1
-            cache_key = "%s~%s~%s~%d" % (package, version, repr(source_id), architecture_id)
+            cache_key = "%s_%s_%s_%d" % (package, version, repr(source_id), architecture_id)
             if not binary_cache.has_key(cache_key):
                 if not source_id:
                     source_id = "\N"
diff --git a/dak/make_maintainers.py b/dak/make_maintainers.py
index 86c5f190..077c2483 100755
--- a/dak/make_maintainers.py
+++ b/dak/make_maintainers.py
@@ -139,7 +139,7 @@ def main():
             lhs = split[0]
             maintainer = fix_maintainer(" ".join(split[1:]))
             if lhs.find('~') != -1:
-                (package, version) = lhs.split('~')
+                (package, version) = lhs.split('~', 1)
             else:
                 package = lhs
                 version = '*'
diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py
index 36b091f2..cf603124 100755
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -39,7 +39,7 @@ from types import *

 ################################################################################

-re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:]+$")
+re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:~]+$")
 re_valid_pkg_name = re.compile(r"^[\dA-Za-z][\dA-Za-z\+\-\.]+$")
 re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \([^\(\) \t]+\)")
 re_strip_revision = re.compile(r"-([^-]+)$")
diff --git a/dak/stats.py b/dak/stats.py
index df608214..f7414edd 100755
--- a/dak/stats.py
+++ b/dak/stats.py
@@ -73,7 +73,7 @@ def daily_install_stats():
     stats = {}
     file = daklib.utils.open_file("2001-11")
     for line in file.readlines():
-        split = line.strip().split('~')
+        split = line.strip().split('|')
         program = split[1]
         if program != "katie" and program != "process-accepted":
             continue
diff --git a/daklib/database.py b/daklib/database.py
index f511d27d..2aad6017 100644
--- a/daklib/database.py
+++ b/daklib/database.py
@@ -185,7 +185,7 @@ def get_component_id (component):
 def get_location_id (location, component, archive):
     global location_id_cache

-    cache_key = location + '~' + component + '~' + location
+    cache_key = location + '_' + component + '_' + location
     if location_id_cache.has_key(cache_key):
         return location_id_cache[cache_key]

@@ -208,7 +208,7 @@ def get_source_id (source, version):
     global source_id_cache

-    cache_key = source + '~' + version + '~'
+    cache_key = source + '_' + version + '_'
     if source_id_cache.has_key(cache_key):
         return source_id_cache[cache_key]

@@ -278,7 +278,7 @@ def get_or_set_fingerprint_id (fingerprint):
 def get_files_id (filename, size, md5sum, location_id):
     global files_id_cache

-    cache_key = "%s~%d" % (filename, location_id)
+    cache_key = "%s_%d" % (filename, location_id)
     if files_id_cache.has_key(cache_key):
         return files_id_cache[cache_key]

@@ -331,7 +331,7 @@ def set_files_id (filename, size, md5sum, location_id):
     ##
     ##q = projectB.query("SELECT id FROM files WHERE id = currval('files_id_seq')")
     ##ql = q.getresult()[0]
-    ##cache_key = "%s~%d" % (filename, location_id)
+    ##cache_key = "%s_%d" % (filename, location_id)
     ##files_id_cache[cache_key] = ql[0]
     ##return files_id_cache[cache_key]

diff --git a/docs/README.assumptions b/docs/README.assumptions
index b036c3d7..2c33055b 100644
--- a/docs/README.assumptions
+++ b/docs/README.assumptions
@@ -2,7 +2,7 @@ Assumptions
 -----------

 o Usernames do not contain ",". [dak import-users-from-passwd]
-o Package names do not contain "~" [dak cruft-report]
+o Package names and versions do not contain "_" [dak cruft-report]
 o Suites are case-independent in conf files, but forced lower case in use. [dak make-suite-file-list]
 o Components are case-sensitive. [dak make-suite-file-list]
 o There's always source of some sort
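
A note on the rationale (illustration only, not part of the patch; the package
name and version used below are made-up examples): Debian version strings may
legitimately contain "~", e.g. pre-release versions such as "1.0~beta1", which
is exactly what the relaxed re_valid_version in dak/process_unchecked.py now
admits. Once such versions exist, an internal cache key joined with "~" can no
longer be split back unambiguously, hence the switch to "_", which package
names and versions never contain (see the README.assumptions change). A minimal
sketch of the failure mode, assuming nothing beyond the regex taken from the
patch:

    import re

    # Relaxed pattern from dak/process_unchecked.py: "~" is now accepted.
    re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:~]+$")

    package, version = "dak", "1.0~beta1"      # hypothetical example values
    assert re_valid_version.match(version)

    old_key = "~".join((package, version))     # "dak~1.0~beta1"
    new_key = "_".join((package, version))     # "dak_1.0~beta1"

    # Splitting the old-style key on "~" mangles the version ...
    assert old_key.split("~") == ["dak", "1.0", "beta1"]
    # ... while the new "_" separator round-trips cleanly.
    assert new_key.split("_") == [package, version]

    # dak/make_maintainers.py keeps "~" as its on-disk separator but now
    # splits at most once, so a "~" inside the version part survives:
    assert "dak~1.0~beta1".split("~", 1) == ["dak", "1.0~beta1"]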