###############################################################################
import errno, fcntl, os, sys, time, re
-import apt_pkg
+import apt_pkg, tarfile, commands
from daklib import database
from daklib import logging
from daklib import queue
else:
os.unlink(self.log_filename)
+
+###############################################################################
+
+def generate_contents_information(filename):
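+    """
+    Return a list of the files shipped inside the .deb at `filename`.
+
+    A usage sketch (hypothetical package name; must be run from the
+    directory holding the .deb, since "ar x" extracts into the cwd):
+
+        contents = generate_contents_information("hello_1.0-1_i386.deb")
+        # -> ["usr/bin/hello", "usr/share/doc/hello/copyright", ...]
+    """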
+ # Generate all the contents for the database
+ cmd = "ar t %s" % (filename)
+ (result, output) = commands.getstatusoutput(cmd)
+ if result != 0:
+ reject("%s: 'ar t' invocation failed." % (filename))
+ reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+ # Ugh ... this is ugly ... Code ripped from process_unchecked.py
+ chunks = output.split('\n')
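+    # Per deb(5), the archive members are debian-binary, control.tar.*
+    # and data.tar.* in that order, so chunks[2] names the data tarball.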
+ cmd = "ar x %s %s" % (filename, chunks[2])
+ (result, output) = commands.getstatusoutput(cmd)
+ if result != 0:
+        reject("%s: 'ar x' invocation failed." % (filename))
+ reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+    # Got the deb's data tarball; now walk it to determine which files
+    # the package ships.
+    if chunks[2] == "data.tar.gz":
+        data = tarfile.open("data.tar.gz", "r:gz")
+    elif chunks[2] == "data.tar.bz2":
+        data = tarfile.open("data.tar.bz2", "r:bz2")
+    else:
+        os.remove(chunks[2])
+        reject("couldn't find data.tar.*")
+        return []
+
+ contents = []
+ for tarinfo in data:
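+        # Record regular entries only; tarinfo.name starts with "./",
+        # hence the [2:] slice below.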
+ if not tarinfo.isdir():
+ contents.append(tarinfo.name[2:])
+
+    data.close()
+    os.remove(chunks[2])
+ return contents
+
###############################################################################
def reject (str, prefix="Rejected: "):
# Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
projectB.query("BEGIN WORK")
+ # Ensure that we have all the hashes we need below.
+ rejmsg = utils.ensure_hashes(changes, dsc, files, dsc_files)
+ if len(rejmsg) > 0:
+ # There were errors. Print them and SKIP the changes.
+ for msg in rejmsg:
+ utils.warn(msg)
+ return
+
# Add the .dsc file to the DB
for file in files.keys():
if files[file]["type"] == "dsc":
filename = files[file]["pool name"] + file
dsc_component = files[file]["component"]
dsc_location_id = files[file]["location id"]
+ if dsc.has_key("dm-upload-allowed") and dsc["dm-upload-allowed"] == "yes":
+ dm_upload_allowed = "true"
+ else:
+ dm_upload_allowed = "false"
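+            # The new dm_upload_allowed column is boolean, hence the unquoted
+            # "true"/"false" literals interpolated into the INSERT below.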
if not files[file].has_key("files id") or not files[file]["files id"]:
files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
- projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, '%s', %s)"
- % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id))
+ projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)"
+ % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id, dm_upload_allowed))
for suite in changes["distribution"].keys():
suite_id = database.get_suite_id(suite)
# files id is stored in dsc_files by check_dsc().
files_id = dsc_files[dsc_file].get("files id", None)
if files_id == None:
- files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+ files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
if files_id == None:
- files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+ files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[dsc_file]["sha1sum"], files[dsc_file]["sha256sum"], dsc_location_id)
projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id))
# Add the src_uploaders to the DB
- if dsc.get("dm-upload-allowed", "no") == "yes":
- uploader_ids = [maintainer_id]
- if dsc.has_key("uploaders"):
- for u in dsc["uploaders"].split(","):
- u = u.replace("'", "\\'")
- u = u.strip()
- uploader_ids.append(
- database.get_or_set_maintainer_id(u))
- added_ids = {}
- for u in uploader_ids:
- if added_ids.has_key(u):
- utils.warn("Already saw uploader %s for source %s" % (u, package))
- continue
- added_ids[u]=1
- projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u))
+ uploader_ids = [maintainer_id]
+ if dsc.has_key("uploaders"):
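+        # e.g. "Foo Bar <foo@debian.org>, Baz O'Quux <baz@debian.org>"
+        # yields two maintainer ids; the apostrophe is escaped for the
+        # SQL INSERT below.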
+ for u in dsc["uploaders"].split(","):
+ u = u.replace("'", "\\'")
+ u = u.strip()
+ uploader_ids.append(
+ database.get_or_set_maintainer_id(u))
+ added_ids = {}
+ for u in uploader_ids:
+ if added_ids.has_key(u):
+ utils.warn("Already saw uploader %s for source %s" % (u, package))
+ continue
+ added_ids[u]=1
+ projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u))
# Add the .deb files to the DB
source = files[file]["source package"]
source_version = files[file]["source version"]
filename = files[file]["pool name"] + file
+ contents = generate_contents_information(file)
if not files[file].has_key("location id") or not files[file]["location id"]:
files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
+ files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], files[file]["location id"])
source_id = database.get_source_id (source, source_version)
if source_id:
projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
suite_id = database.get_suite_id(suite)
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
+ # insert contents into the database
+ q = projectB.query("SELECT currval('binaries_id_seq')")
+ bin_id = int(q.getresult()[0][0])
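+            # currval() returns the id the INSERT INTO binaries above just
+            # assigned, scoped to this session.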
+            for content_file in contents:
+                database.insert_content_path(bin_id, content_file)
+
# If the .orig.tar.gz is in a legacy directory we need to poolify
# it, so that apt-get source (and anything else that goes by the
# "Directory:" field in the Sources.gz file) works.
if new_files_id == None:
utils.copy(old_filename, Cnf["Dir::Pool"] + new_filename)
new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, file_sha1sum, file_sha256sum, dsc_location_id)
- projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, source_id, orig_tar_id))
+ projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, database.get_source_id(changes["source"], changes["version"]), orig_tar_id))
# Install the files into the pool
for file in files.keys():
utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
for dest in copy_dot_dak.keys():
utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)
-
projectB.query("COMMIT WORK")
# Move the .changes into the 'done' directory