###############################################################################
-import errno, fcntl, os, sys, time, re
-import apt_pkg
+import errno
+import fcntl
+import os
+import sys
+import time
+import re
+import commands
+import apt_pkg
from daklib import database
from daklib import logging
from daklib import queue
else:
os.unlink(self.log_filename)
+
###############################################################################
+
def reject (str, prefix="Rejected: "):
global reject_message
if str:
('h',"help","Dinstall::Options::Help"),
('n',"no-action","Dinstall::Options::No-Action"),
('p',"no-lock", "Dinstall::Options::No-Lock"),
- ('s',"no-mail", "Dinstall::Options::No-Mail")]
+ ('s',"no-mail", "Dinstall::Options::No-Mail"),
+ ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
- for i in ["automatic", "help", "no-action", "no-lock", "no-mail", "version"]:
+ for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
+ "version", "directory"]:
if not Cnf.has_key("Dinstall::Options::%s" % (i)):
Cnf["Dinstall::Options::%s" % (i)] = ""
if Options["Help"]:
usage()
+ # If we have a directory flag, use it to find our files
+ if Cnf["Dinstall::Options::Directory"] != "":
+ # Note that we clobber the list of files we were given in this case
+ # so warn if the user has done both
+ if len(changes_files) > 0:
+ utils.warn("Directory provided so ignoring files given on command line")
+
+ changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])
+
Upload = queue.Upload(Cnf)
projectB = Upload.projectB
suite_id = database.get_suite_id(suite)
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
+ if not database.copy_temporary_contents(package, version, files[newfile]):
+ reject("Missing contents for package")
+
+ orig_tar_id = Upload.pkg.orig_tar_id
+ orig_tar_location = Upload.pkg.orig_tar_location
+
# If this is a sourceful diff only upload that is moving
# cross-component we need to copy the .orig.tar.gz into the new
# component too for the same reasons as above.
utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
for dest in copy_dot_dak.keys():
utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)
-
projectB.query("COMMIT WORK")
# Move the .changes into the 'done' directory
################################################################################
-import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback
+import commands
+import errno
+import fcntl
+import os
+import re
+import shutil
+import stat
+import sys
+import time
+import tempfile
+import traceback
+import tarfile
import apt_inst, apt_pkg
-from daklib import database
+from debian_bundle import deb822
+from daklib.dbconn import DBConn
+from daklib.binary import Binary
from daklib import logging
from daklib import queue
from daklib import utils
('h',"help","Dinstall::Options::Help"),
('n',"no-action","Dinstall::Options::No-Action"),
('p',"no-lock", "Dinstall::Options::No-Lock"),
- ('s',"no-mail", "Dinstall::Options::No-Mail")]
+ ('s',"no-mail", "Dinstall::Options::No-Mail"),
+ ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
- "override-distribution", "version"]:
+ "override-distribution", "version", "directory"]:
Cnf["Dinstall::Options::%s" % (i)] = ""
changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
if Options["Help"]:
usage()
+ # If we have a directory flag, use it to find our files
+ if Cnf["Dinstall::Options::Directory"] != "":
+ # Note that we clobber the list of files we were given in this case
+ # so warn if the user has done both
+ if len(changes_files) > 0:
+ utils.warn("Directory provided so ignoring files given on command line")
+
+ changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])
+
Upload = queue.Upload(Cnf)
changes = Upload.pkg.changes
################################################################################
+def create_tmpdir():
+ """
+ Create a temporary directory that can be used for unpacking files into for
+ checking
+ """
+ tmpdir = tempfile.mkdtemp()
+ return tmpdir
+
+################################################################################
+
def copy_to_holding(filename):
global in_holding
except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
return 0
+ except ChangesUnicodeError:
+ reject("%s: changes file not proper utf-8" % (filename))
+ return 0
# Parse the Files field from the .changes into another dictionary
try:
################################################################################
-def check_deb_ar(filename):
- """
- Sanity check the ar of a .deb, i.e. that there is:
-
- 1. debian-binary
- 2. control.tar.gz
- 3. data.tar.gz or data.tar.bz2
-
- in that order, and nothing else.
- """
- cmd = "ar t %s" % (filename)
- (result, output) = commands.getstatusoutput(cmd)
- if result != 0:
- reject("%s: 'ar t' invocation failed." % (filename))
- reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
- chunks = output.split('\n')
- if len(chunks) != 3:
- reject("%s: found %d chunks, expected 3." % (filename, len(chunks)))
- if chunks[0] != "debian-binary":
- reject("%s: first chunk is '%s', expected 'debian-binary'." % (filename, chunks[0]))
- if chunks[1] != "control.tar.gz":
- reject("%s: second chunk is '%s', expected 'control.tar.gz'." % (filename, chunks[1]))
- if chunks[2] not in [ "data.tar.bz2", "data.tar.gz" ]:
- reject("%s: third chunk is '%s', expected 'data.tar.gz' or 'data.tar.bz2'." % (filename, chunks[2]))
-
-################################################################################
-
def check_files():
global reprocess
has_binaries = 0
has_source = 0
+ cursor = DBConn().cursor()
+ # Check for packages that have moved from one component to another
+ # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
+ cursor.execute("""PREPARE moved_pkg_q AS
+ SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
+ component c, architecture a, files f
+ WHERE b.package = $1 AND s.suite_name = $2
+ AND (a.arch_string = $3 OR a.arch_string = 'all')
+ AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
+ AND f.location = l.id
+ AND l.component = c.id
+ AND b.file = f.id""")
+
for f in file_keys:
# Ensure the file does not already exist in one of the accepted directories
for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
# Check the version and for file overwrites
reject(Upload.check_binary_against_db(f),"")
- check_deb_ar(f)
+ Binary(f).scan_package()
# Checks for a source package...
else:
# Validate the component
component = files[f]["component"]
- component_id = database.get_component_id(component)
+ component_id = DBConn().get_component_id(component)
if component_id == -1:
reject("file '%s' has unknown component '%s'." % (f, component))
continue
# Determine the location
location = Cnf["Dir::Pool"]
- location_id = database.get_location_id (location, component, archive)
+ location_id = DBConn().get_location_id(location, component, archive)
if location_id == -1:
reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
files[f]["location id"] = location_id
# Check the md5sum & size against existing files (if any)
files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
- files_id = database.get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
+ files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
if files_id == -1:
reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
elif files_id == -2:
files[f]["files id"] = files_id
# Check for packages that have moved from one component to another
- q = Upload.projectB.query("""
-SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
- component c, architecture a, files f
- WHERE b.package = '%s' AND s.suite_name = '%s'
- AND (a.arch_string = '%s' OR a.arch_string = 'all')
- AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
- AND f.location = l.id AND l.component = c.id AND b.file = f.id"""
- % (files[f]["package"], suite,
- files[f]["architecture"]))
- ql = q.getresult()
+        files[f]['suite'] = suite
+        cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
+        ql = cursor.fetchone()
        if ql:
-            files[f]["othercomponents"] = ql[0][0]
+            # fetchone() returns a single row (a sequence of columns), unlike
+            # the old getresult() list-of-rows, so index the column directly;
+            # ql[0][0] would now be the first *character* of the component name.
+            files[f]["othercomponents"] = ql[0]
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
except InvalidDscError, line:
reject("%s: syntax error on line %s." % (dsc_filename, line))
+ except ChangesUnicodeError:
+ reject("%s: dsc file not proper utf-8." % (dsc_filename))
+
# Build up the file list of files mentioned by the .dsc
try:
dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
or pkg.orig_tar_gz == -1:
return
- # Create a temporary directory to extract the source into
- if Options["No-Action"]:
- tmpdir = tempfile.mkdtemp()
- else:
- # We're in queue/holding and can create a random directory.
- tmpdir = "%s" % (os.getpid())
- os.mkdir(tmpdir)
+ tmpdir = create_tmpdir()
# Move into the temporary directory
cwd = os.getcwd()
################################################################################
def lookup_uid_from_fingerprint(fpr):
- q = Upload.projectB.query("SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
- qs = q.getresult()
- if len(qs) == 0:
- return (None, None, None)
+ """
+ Return the uid,name,isdm for a given gpg fingerprint
+
+ @ptype fpr: string
+ @param fpr: a 40 byte GPG fingerprint
+
+ @return (uid, name, isdm)
+ """
+ cursor = DBConn().cursor()
+ cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
+ qs = cursor.fetchone()
+ if qs:
+ return qs
else:
- return qs[0]
+ return (None, None, None)
def check_signed_by_key():
"""Ensure the .changes is signed by an authorized uploader."""
if not sponsored and not may_nmu:
source_ids = []
- q = Upload.projectB.query("SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.dm_upload_allowed = 'yes'" % (changes["source"]))
+ cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
highest_sid, highest_version = None, None
should_reject = True
- for si in q.getresult():
+ while True:
+ si = cursor.fetchone()
+ if not si:
+ break
+
if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
highest_sid = si[0]
highest_version = si[1]
if highest_sid == None:
reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
else:
- q = Upload.projectB.query("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
- for m in q.getresult():
+
+ cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
+
+ while True:
+ m = cursor.fetchone()
+ if not m:
+ break
+
(rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
if email == uid_email or name == uid_name:
should_reject=False
for b in changes["binary"].keys():
for suite in changes["distribution"].keys():
- suite_id = database.get_suite_id(suite)
- q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id))
- for s in q.getresult():
+ suite_id = DBConn().get_suite_id(suite)
+
+ cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )
+ while True:
+ s = cursor.fetchone()
+ if not s:
+ break
+
if s[0] != changes["source"]:
reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
################################################################################
def is_unembargo ():
-    q = Upload.projectB.query(
-      "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" %
-      (changes["source"], changes["version"]))
-    ql = q.getresult()
-    if ql:
+    cursor = DBConn().cursor()
+    cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
+    if cursor.fetchone():
return 1
oldcwd = os.getcwd()
if changes["architecture"].has_key("source"):
if Options["No-Action"]: return 1
-    Upload.projectB.query(
-      "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" %
-      (changes["source"], changes["version"]))
+    # Use the same %(source)s key as the SELECT above (the changes dict has a
+    # "source" key, not "package") and leave the placeholders unquoted -- the
+    # DB-API driver quotes bound parameters itself; hand-quoting them inserts
+    # literal quote characters around the already-quoted value.
+    cursor.execute( "INSERT INTO disembargo (package, version) VALUES (%(source)s, %(version)s)",
+                    changes )
+    cursor.execute( "COMMIT" )
return 1
return 0
return 0
if not changes["architecture"].has_key("source"):
-    pusuite = database.get_suite_id("proposed-updates")
-    q = Upload.projectB.query(
-      "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
-      (changes["source"], changes["version"], pusuite))
-    ql = q.getresult()
-    if ql:
+    pusuite = DBConn().get_suite_id("proposed-updates")
+    cursor = DBConn().cursor()
+    # Placeholders must not be hand-quoted ('%(version)s' would compare
+    # against a doubly-quoted literal), pyformat only supports %(name)s (not
+    # %(name)d), and the bound name must be pusuite -- pasuite was a typo
+    # that raised NameError at runtime.
+    cursor.execute( """SELECT 1 FROM source s
+                       JOIN src_associations sa ON (s.id = sa.source)
+                       WHERE s.source = %(source)s
+                       AND s.version = %(version)s
+                       AND sa.suite = %(suite)s""",
+                    {'source' : changes['source'],
+                     'version' : changes['version'],
+                     'suite' : pusuite})
+
+    if cursor.fetchone():
# source is already in proposed-updates so no need to hold
return 0
return 0
if not changes["architecture"].has_key("source"):
-    pusuite = database.get_suite_id("oldstable-proposed-updates")
-    q = Upload.projectB.query(
-      "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
-      (changes["source"], changes["version"], pusuite))
-    ql = q.getresult()
-    if ql:
-        # source is already in oldstable-proposed-updates so no need to hold
+    pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
+    cursor = DBConn().cursor()
+    # The original hunk had a stray fourth quote (""""SELECT) that put a
+    # literal " into the SQL, used a bare %d placeholder (invalid with a
+    # dict parameter under pyformat), and referenced pasuite instead of the
+    # pusuite assigned above (NameError at runtime).
+    cursor.execute( """SELECT 1 FROM source s
+                       JOIN src_associations sa ON (s.id = sa.source)
+                       WHERE s.source = %(source)s
+                       AND s.version = %(version)s
+                       AND sa.suite = %(suite)s""",
+                    {'source' : changes['source'],
+                     'version' : changes['version'],
+                     'suite' : pusuite})
+    if cursor.fetchone():
+        # source is already in oldstable-proposed-updates so no need to hold
return 0
return 1
import apt_pkg
import database
import time
+import tarfile
import re
import string
import email as modemail
changes_in = open_file(filename)
content = changes_in.read()
changes_in.close()
+ try:
+ unicode(content, 'utf-8')
+ except UnicodeError:
+ raise ChangesUnicodeError, "Changes file not proper utf-8"
return parse_deb822(content, signing_rules)
################################################################################
################################################################################
+def get_changes_files(dir):
+    """
+    Takes a directory and lists all .changes files in it (as well as chdir'ing
+    to the directory; this is due to broken behaviour on the part of p-u/p-a
+    when you're not in the right place)
+
+    Returns a list of filenames
+    """
+    try:
+        # Much of the rest of p-u/p-a depends on being in the right place
+        os.chdir(dir)
+        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
+    except OSError, e:
+        # fubar() terminates the process, so changes_files cannot be
+        # referenced before assignment below.
+        fubar("Failed to read list from directory %s (%s)" % (dir, e))
+
+    return changes_files
+
+################################################################################
+
apt_pkg.init()
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
################################################################################
+
+def generate_contents_information(filename):
+    """
+    Generate a list of files contained in a .deb
+
+    @type filename: string
+    @param filename: the path to a .deb
+
+    @rtype: list
+    @return: a list of files in the data.tar.* portion of the .deb
+    """
+    # NOTE(review): reject() and utils.prefix_multi_line_string() look like
+    # they were ripped from process_unchecked.py -- confirm both names are
+    # actually in scope in the module this hunk lands in.
+    cmd = "ar t %s" % (filename)
+    (result, output) = commands.getstatusoutput(cmd)
+    if result != 0:
+        reject("%s: 'ar t' invocation failed." % (filename))
+        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+    # Ugh ... this is ugly ... Code ripped from process_unchecked.py
+    # A well-formed .deb lists debian-binary, control.tar.gz, data.tar.*;
+    # chunks[2] is therefore the data member (IndexError on a truncated ar).
+    chunks = output.split('\n')
+
+    contents = []
+    try:
+        cmd = "ar x %s %s" % (filename, chunks[2])
+        (result, output) = commands.getstatusoutput(cmd)
+        if result != 0:
+            reject("%s: '%s' invocation failed." % (filename, cmd))
+            reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+        # Got deb tarballs, now lets go through and determine what bits
+        # and pieces the deb had ...
+        if chunks[2] == "data.tar.gz":
+            data = tarfile.open("data.tar.gz", "r:gz")
+        elif chunks[2] == "data.tar.bz2":
+            data = tarfile.open("data.tar.bz2", "r:bz2")
+        else:
+            # Bail out here: falling through would hit the loop below with
+            # `data` unbound (NameError in the original version).
+            reject("couldn't find data.tar.*")
+            return contents
+
+        try:
+            for tarinfo in data:
+                if not tarinfo.isdir():
+                    # strip the leading "./" the tar members carry
+                    contents.append(tarinfo.name[2:])
+        finally:
+            data.close()
+
+    finally:
+        # chunks[2] is a plain file extracted by 'ar x', not a directory:
+        # shutil.rmtree() would raise on it, and removing it twice (as the
+        # original did) fails on the second call. One os.remove() suffices.
+        if os.path.exists( chunks[2] ):
+            os.remove( chunks[2] )
+
+    return contents
+
+###############################################################################