From: Michael Casadevall Date: Sat, 3 Jan 2009 02:46:15 +0000 (-0500) Subject: Merge branch 'master' into content_generation X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=2b4d5bf75e28a6b5fa5afbd58defdf661c893033;hp=-c;p=dak.git Merge branch 'master' into content_generation --- 2b4d5bf75e28a6b5fa5afbd58defdf661c893033 diff --combined dak/dak.py index 77999abe,e8a7df03..92753ecc --- a/dak/dak.py +++ b/dak/dak.py @@@ -105,8 -105,6 +105,8 @@@ def init() "Generate lists of packages per suite for apt-ftparchive"), ("generate-releases", "Generate Release files"), + ("generate-contents", + "Generate Contents files"), ("generate-index-diffs", "Generate .diff/Index files"), ("clean-suites", @@@ -146,6 -144,8 +146,8 @@@ "Sync PostgreSQL users with passwd file"), ("init-db", "Update the database to match the conf file"), + ("update-db", + "Update the database schema to the latest revision"), ("init-dirs", "Initial setup of the archive"), ("make-maintainers", diff --combined dak/process_accepted.py index 4dd5b69d,ea238ef7..fa66a0c4 --- a/dak/process_accepted.py +++ b/dak/process_accepted.py @@@ -30,7 -30,7 +30,7 @@@ ############################################################################### import errno, fcntl, os, sys, time, re -import apt_pkg +import apt_pkg, tarfile, commands from daklib import database from daklib import logging from daklib import queue @@@ -96,43 -96,6 +96,43 @@@ class Urgency_Log else: os.unlink(self.log_filename) + +############################################################################### + +def generate_contents_information(filename): + # Generate all the contents for the database + cmd = "ar t %s" % (filename) + (result, output) = commands.getstatusoutput(cmd) + if result != 0: + reject("%s: 'ar t' invocation failed." % (filename)) + reject(utils.prefix_multi_line_string(output, " [ar output:] "), "") + + # Ugh ... this is ugly ... Code ripped from process_unchecked.py + chunks = output.split('\n') + cmd = "ar x %s %s" % (filename, chunks[2]) + (result, output) = commands.getstatusoutput(cmd) + if result != 0: + reject("%s: 'ar x' invocation failed." % (filename)) + reject(utils.prefix_multi_line_string(output, " [ar output:] "), "") + + # Got deb tarballs, now let's go through and determine what bits + # and pieces the deb had ...
+ if chunks[2] == "data.tar.gz": + data = tarfile.open("data.tar.gz", "r:gz") + elif chunks[2] == "data.tar.bz2": + data = tarfile.open("data.tar.bz2", "r:bz2") + else: + os.remove(chunks[2]) + reject("couldn't find data.tar.*") + + contents = [] + for tarinfo in data: + if not tarinfo.isdir(): + contents.append(tarinfo.name[2:]) + + os.remove(chunks[2]) + return contents + ############################################################################### def reject (str, prefix="Rejected: "): @@@ -335,10 -298,14 +335,14 @@@ def install () filename = files[file]["pool name"] + file dsc_component = files[file]["component"] dsc_location_id = files[file]["location id"] + if dsc.has_key("dm-upload-allowed") and dsc["dm-upload-allowed"] == "yes": + dm_upload_allowed = "true" + else: + dm_upload_allowed = "false" if not files[file].has_key("files id") or not files[file]["files id"]: files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id) - projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, '%s', %s)" - % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id)) + projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)" + % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id, dm_upload_allowed)) for suite in changes["distribution"].keys(): suite_id = database.get_suite_id(suite) @@@ -359,21 -326,20 +363,20 @@@ projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id)) # Add the src_uploaders to the DB - if dsc.get("dm-upload-allowed", "no") == "yes": - uploader_ids = [maintainer_id] - if dsc.has_key("uploaders"): - for u in dsc["uploaders"].split(","): - u = u.replace("'", "\\'") - u = u.strip() - uploader_ids.append( - database.get_or_set_maintainer_id(u)) - added_ids = {} - for u in uploader_ids: - if added_ids.has_key(u): - utils.warn("Already saw uploader %s for source %s" % (u, package)) - continue - added_ids[u]=1 - projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u)) + uploader_ids = [maintainer_id] + if dsc.has_key("uploaders"): + for u in dsc["uploaders"].split(","): + u = u.replace("'", "\\'") + u = u.strip() + uploader_ids.append( + database.get_or_set_maintainer_id(u)) + added_ids = {} + for u in uploader_ids: + if added_ids.has_key(u): + utils.warn("Already saw uploader %s for source %s" % (u, package)) + continue + added_ids[u]=1 + projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u)) # Add the .deb files to the DB @@@ -391,7 -357,6 +394,7 @@@ source = files[file]["source package"] source_version = files[file]["source version"] filename = files[file]["pool name"] + file + contents = generate_contents_information(file) if not files[file].has_key("location id") or not files[file]["location id"]: files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i()) if not files[file].has_key("files id") or not files[file]["files id"]: @@@ -406,12 -371,6 +409,12 @@@ suite_id = database.get_suite_id(suite) projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d,
currval('binaries_id_seq'))" % (suite_id)) + # insert contents into the database + q = projectB.query("SELECT currval('binaries_id_seq')") + bin_id = int(q.getresult()[0][0]) + for file in contents: + database.insert_content_path(bin_id, file) + # If the .orig.tar.gz is in a legacy directory we need to poolify # it, so that apt-get source (and anything else that goes by the # "Directory:" field in the Sources.gz file) works. @@@ -474,6 -433,7 +477,6 @@@ utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest) for dest in copy_dot_dak.keys(): utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest) - projectB.query("COMMIT WORK") # Move the .changes into the 'done' directory
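
For reference, below is a rough standalone sketch (not part of the patch) of the contents-extraction approach that the new generate_contents_information() helper implements: list the ar members of a .deb, extract the data tarball, and collect the file paths it contains. The function name deb_contents and the use of subprocess/tempfile/"r:*" here are illustrative substitutions for the patch's commands.getstatusoutput and dak's reject() machinery.

    import os
    import subprocess
    import tarfile
    import tempfile

    def deb_contents(deb_path):
        deb_path = os.path.abspath(deb_path)
        # "ar t" lists the members of the .deb archive
        # (debian-binary, control.tar.*, data.tar.*).
        listing = subprocess.run(["ar", "t", deb_path], capture_output=True,
                                 text=True, check=True).stdout.split()
        data_member = next(m for m in listing if m.startswith("data.tar"))
        with tempfile.TemporaryDirectory() as tmpdir:
            # "ar x" extracts the data tarball into the working directory.
            subprocess.run(["ar", "x", deb_path, data_member],
                           cwd=tmpdir, check=True)
            # "r:*" lets tarfile pick the decompressor (gz, bz2, ...).
            with tarfile.open(os.path.join(tmpdir, data_member), "r:*") as data:
                # Strip the leading "./" as the patch does with name[2:].
                return [info.name[2:] if info.name.startswith("./") else info.name
                        for info in data if not info.isdir()]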