ql = q.getresult()
if not ql:
- daklib.utils.warn("Couldn't find '%s~%s~%s'." % (package, version, architecture))
+ daklib.utils.warn("Couldn't find '%s_%s_%s'." % (package, version, architecture))
return None
if len(ql) > 1:
- daklib.utils.warn("Found more than one match for '%s~%s~%s'." % (package, version, architecture))
+ daklib.utils.warn("Found more than one match for '%s_%s_%s'." % (package, version, architecture))
return None
id = ql[0][0]
return id
# Take action
if action == "add":
if assoication_id:
- daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s_%s_%s' already exists in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("INSERT INTO src_associations (suite, source) VALUES (%s, %s)" % (suite_id, id))
elif action == "remove":
if assoication_id == None:
- daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s_%s_%s' doesn't exist in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("DELETE FROM src_associations WHERE id = %s" % (assoication_id))
# Take action
if action == "add":
if assoication_id:
- daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s_%s_%s' already exists in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%s, %s)" % (suite_id, id))
elif action == "remove":
if assoication_id == None:
- daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s_%s_%s' doesn't exist in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("DELETE FROM bin_associations WHERE id = %s" % (assoication_id))
def do_obsolete_source(duplicate_bins, bin2source):
obsolete = {}
for key in duplicate_bins.keys():
- (source_a, source_b) = key.split('~')
+ (source_a, source_b) = key.split('_')
for source in [ source_a, source_b ]:
if not obsolete.has_key(source):
if not source_binaries.has_key(source):
if bin_pkgs.has_key(binary):
key_list = [ source, bin_pkgs[binary] ]
key_list.sort()
- key = '~'.join(key_list)
+ key = '_'.join(key_list)
duplicate_bins.setdefault(key, [])
duplicate_bins[key].append(binary)
bin_pkgs[binary] = source
if previous_source != source:
key_list = [ source, previous_source ]
key_list.sort()
- key = '~'.join(key_list)
+ key = '_'.join(key_list)
duplicate_bins.setdefault(key, [])
if package not in duplicate_bins[key]:
duplicate_bins[key].append(package)
keys = duplicate_bins.keys()
keys.sort()
for key in keys:
- (source_a, source_b) = key.split("~")
+ (source_a, source_b) = key.split("_")
print " o %s & %s => %s" % (source_a, source_b, ", ".join(duplicate_bins[key]))
print
def get_or_set_files_id (filename, size, md5sum, location_id):
global files_id_cache, files_id_serial, files_query_cache
- cache_key = "~".join((filename, size, md5sum, repr(location_id)))
+ cache_key = "_".join((filename, size, md5sum, repr(location_id)))
if not files_id_cache.has_key(cache_key):
files_id_serial += 1
files_query_cache.write("%d\t%s\t%s\t%s\t%d\t\\N\n" % (files_id_serial, filename, size, md5sum, location_id))
(md5sum, size, filename) = line.strip().split()
# Don't duplicate .orig.tar.gz's
if filename.endswith(".orig.tar.gz"):
- cache_key = "%s~%s~%s" % (filename, size, md5sum)
+ cache_key = "%s_%s_%s" % (filename, size, md5sum)
if orig_tar_gz_cache.has_key(cache_key):
id = orig_tar_gz_cache[cache_key]
else:
if filename.endswith(".dsc"):
files_id = id
filename = directory + package + '_' + no_epoch_version + '.dsc'
- cache_key = "%s~%s" % (package, version)
+ cache_key = "%s_%s" % (package, version)
if not source_cache.has_key(cache_key):
- nasty_key = "%s~%s" % (package, version)
+ nasty_key = "%s_%s" % (package, version)
source_id_serial += 1
if not source_cache_for_binaries.has_key(nasty_key):
source_cache_for_binaries[nasty_key] = source_id_serial
filename = poolify (filename, location)
if architecture == "all":
filename = re_arch_from_filename.sub("binary-all", filename)
- cache_key = "%s~%s" % (source, source_version)
+ cache_key = "%s_%s" % (source, source_version)
source_id = source_cache_for_binaries.get(cache_key, None)
size = Scanner.Section["size"]
md5sum = Scanner.Section["md5sum"]
files_id = get_or_set_files_id (filename, size, md5sum, location_id)
type = "deb"; # FIXME
- cache_key = "%s~%s~%s~%d~%d~%d~%d" % (package, version, repr(source_id), architecture_id, location_id, files_id, suite_id)
+ cache_key = "%s_%s_%s_%d_%d_%d_%d" % (package, version, repr(source_id), architecture_id, location_id, files_id, suite_id)
if not arch_all_cache.has_key(cache_key):
arch_all_cache[cache_key] = 1
- cache_key = "%s~%s~%s~%d" % (package, version, repr(source_id), architecture_id)
+ cache_key = "%s_%s_%s_%d" % (package, version, repr(source_id), architecture_id)
if not binary_cache.has_key(cache_key):
if not source_id:
source_id = "\N"
lhs = split[0]
maintainer = fix_maintainer(" ".join(split[1:]))
if lhs.find('~') != -1:
- (package, version) = lhs.split('~')
+ (package, version) = lhs.split('~', 1)
else:
package = lhs
version = '*'
################################################################################
-re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:]+$")
+re_valid_version = re.compile(r"^([0-9]+:)?[0-9A-Za-z\.\-\+:~]+$")
re_valid_pkg_name = re.compile(r"^[\dA-Za-z][\dA-Za-z\+\-\.]+$")
re_changelog_versions = re.compile(r"^\w[-+0-9a-z.]+ \([^\(\) \t]+\)")
re_strip_revision = re.compile(r"-([^-]+)$")
stats = {}
file = daklib.utils.open_file("2001-11")
for line in file.readlines():
- split = line.strip().split('~')
+ split = line.strip().split('|')
program = split[1]
if program != "katie" and program != "process-accepted":
continue
def get_location_id (location, component, archive):
global location_id_cache
- cache_key = location + '~' + component + '~' + location
+ cache_key = location + '_' + component + '_' + location
if location_id_cache.has_key(cache_key):
return location_id_cache[cache_key]
def get_source_id (source, version):
global source_id_cache
- cache_key = source + '~' + version + '~'
+ cache_key = source + '_' + version + '_'
if source_id_cache.has_key(cache_key):
return source_id_cache[cache_key]
def get_files_id (filename, size, md5sum, location_id):
global files_id_cache
- cache_key = "%s~%d" % (filename, location_id)
+ cache_key = "%s_%d" % (filename, location_id)
if files_id_cache.has_key(cache_key):
return files_id_cache[cache_key]
##
##q = projectB.query("SELECT id FROM files WHERE id = currval('files_id_seq')")
##ql = q.getresult()[0]
- ##cache_key = "%s~%d" % (filename, location_id)
+ ##cache_key = "%s_%d" % (filename, location_id)
##files_id_cache[cache_key] = ql[0]
##return files_id_cache[cache_key]
-----------
o Usernames do not contain ",". [dak import-users-from-passwd]
-o Package names do not contain "~" [dak cruft-report]
+o Package names and versions do not contain "_". [dak cruft-report]
o Suites are case-independent in conf files, but forced lower case in use. [dak make-suite-file-list]
o Components are case-sensitive. [dak make-suite-file-list]
o There's always source of some sort