]> git.decadent.org.uk Git - dak.git/commitdiff
Merge branch 'master' into content_generation
authorMichael Casadevall <sonicmctails@gmail.com>
Sat, 3 Jan 2009 02:46:15 +0000 (21:46 -0500)
committerMichael Casadevall <sonicmctails@gmail.com>
Sat, 3 Jan 2009 02:46:15 +0000 (21:46 -0500)
1  2 
dak/dak.py
dak/process_accepted.py

diff --combined dak/dak.py
index 77999abea3beb614faf5d8d2c8d506298a63f029,e8a7df03a3c52d2998d24dd2e73b7110230e1385..92753ecc81d94004ee2b88db7822b68915a365a3
@@@ -105,8 -105,6 +105,8 @@@ def init()
           "Generate lists of packages per suite for apt-ftparchive"),
          ("generate-releases",
           "Generate Release files"),
 +        ("generate-contents",
 +         "Generate contents files"),
          ("generate-index-diffs",
           "Generate .diff/Index files"),
          ("clean-suites",
           "Sync PostgreSQL users with passwd file"),
          ("init-db",
           "Update the database to match the conf file"),
+         ("update-db",
+          "Updates database schema to latest revision"),
          ("init-dirs",
           "Initial setup of the archive"),
          ("make-maintainers",
diff --combined dak/process_accepted.py
index 4dd5b69d20f2bde74937027f9d8c5a72c5db4f12,ea238ef783d1eed36ab0861a27633e820a3cb2d4..fa66a0c42975c44df0f8ed777efb8f8b72804ba1
@@@ -30,7 -30,7 +30,7 @@@
  ###############################################################################
  
  import errno, fcntl, os, sys, time, re
 -import apt_pkg
 +import apt_pkg, tarfile, commands
  from daklib import database
  from daklib import logging
  from daklib import queue
@@@ -96,43 -96,6 +96,43 @@@ class Urgency_Log
          else:
              os.unlink(self.log_filename)
  
 +
 +###############################################################################
 +
 +def generate_contents_information(filename):
 +    # Generate all the contents for the database
 +    cmd = "ar t %s" % (filename)
 +    (result, output) = commands.getstatusoutput(cmd)
 +    if result != 0:
 +        reject("%s: 'ar t' invocation failed." % (filename))
 +        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
 +
 +    # Ugh ... this is ugly ... Code ripped from process_unchecked.py
 +    chunks = output.split('\n')
 +    cmd = "ar x %s %s" % (filename, chunks[2])
 +    (result, output) = commands.getstatusoutput(cmd)
 +    if result != 0:
 +        reject("%s: 'ar x' invocation failed." % (filename))
 +        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
 +
 +    # Got deb tarballs, now lets go through and determine what bits
 +    # and pieces the deb had ...
 +    if chunks[2] == "data.tar.gz":
 +        data = tarfile.open("data.tar.gz", "r:gz")
 +    elif chunks[2] == "data.tar.bz2":
 +        data = tarfile.open("data.tar.bz2", "r:bz2")
 +    else:
 +        os.remove(chunks[2])
 +        reject("couldn't find data.tar.*")
 +
 +    contents = []
 +    for tarinfo in data:
 +        if not tarinfo.isdir():
 +            contents.append(tarinfo.name[2:])
 +
 +    os.remove(chunks[2])
 +    return contents
 +
  ###############################################################################
  
  def reject (str, prefix="Rejected: "):
@@@ -335,10 -298,14 +335,14 @@@ def install ()
              filename = files[file]["pool name"] + file
              dsc_component = files[file]["component"]
              dsc_location_id = files[file]["location id"]
+             if dsc.has_key("dm-upload-allowed") and  dsc["dm-upload-allowed"] == "yes":
+                 dm_upload_allowed = "true"
+             else:
+                 dm_upload_allowed = "false"
              if not files[file].has_key("files id") or not files[file]["files id"]:
                  files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
-             projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, '%s', %s)"
-                            % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id))
+             projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)"
+                            % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id, dm_upload_allowed))
  
              for suite in changes["distribution"].keys():
                  suite_id = database.get_suite_id(suite)
                  projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id))
  
              # Add the src_uploaders to the DB
-             if dsc.get("dm-upload-allowed", "no") == "yes":
-                 uploader_ids = [maintainer_id]
-                 if dsc.has_key("uploaders"):
-                     for u in dsc["uploaders"].split(","):
-                         u = u.replace("'", "\\'")
-                         u = u.strip()
-                         uploader_ids.append(
-                             database.get_or_set_maintainer_id(u))
-                 added_ids = {}
-                 for u in uploader_ids:
-                     if added_ids.has_key(u):
-                         utils.warn("Already saw uploader %s for source %s" % (u, package))
-                         continue
-                     added_ids[u]=1
-                     projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u))
+             uploader_ids = [maintainer_id]
+             if dsc.has_key("uploaders"):
+                 for u in dsc["uploaders"].split(","):
+                     u = u.replace("'", "\\'")
+                     u = u.strip()
+                     uploader_ids.append(
+                         database.get_or_set_maintainer_id(u))
+             added_ids = {}
+             for u in uploader_ids:
+                 if added_ids.has_key(u):
+                     utils.warn("Already saw uploader %s for source %s" % (u, package))
+                     continue
+                 added_ids[u]=1
+                 projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u))
  
  
      # Add the .deb files to the DB
              source = files[file]["source package"]
              source_version = files[file]["source version"]
              filename = files[file]["pool name"] + file
 +            contents = generate_contents_information(file)
              if not files[file].has_key("location id") or not files[file]["location id"]:
                  files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
              if not files[file].has_key("files id") or not files[file]["files id"]:
                  suite_id = database.get_suite_id(suite)
                  projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
  
 +            # insert contents into the database
 +            q = projectB.query("SELECT currval('binaries_id_seq')")
 +            bin_id = int(q.getresult()[0][0])
 +            for file in contents:
 +                database.insert_content_path(bin_id, file)
 +
      # If the .orig.tar.gz is in a legacy directory we need to poolify
      # it, so that apt-get source (and anything else that goes by the
      # "Directory:" field in the Sources.gz file) works.
          utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
      for dest in copy_dot_dak.keys():
          utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)
 -
      projectB.query("COMMIT WORK")
  
      # Move the .changes into the 'done' directory