import stat
import sys
import time
-import tempfile
import traceback
import tarfile
-import apt_inst, apt_pkg
+import apt_inst
+import apt_pkg
from debian_bundle import deb822
from daklib.dbconn import DBConn
from daklib.binary import Binary
('h',"help","Dinstall::Options::Help"),
('n',"no-action","Dinstall::Options::No-Action"),
('p',"no-lock", "Dinstall::Options::No-Lock"),
- ('s',"no-mail", "Dinstall::Options::No-Mail")]
+ ('s',"no-mail", "Dinstall::Options::No-Mail"),
+ ('d',"directory", "Dinstall::Options::Directory", "HasArg")]
for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
- "override-distribution", "version"]:
+ "override-distribution", "version", "directory"]:
Cnf["Dinstall::Options::%s" % (i)] = ""
changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
if Options["Help"]:
usage()
+ # If we have a directory flag, use it to find our files
+ if Cnf["Dinstall::Options::Directory"] != "":
+ # Note that we clobber the list of files we were given in this case
+ # so warn if the user has done both
+ if len(changes_files) > 0:
+ utils.warn("Directory provided so ignoring files given on command line")
+
+ changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])
+
Upload = queue.Upload(Cnf)
changes = Upload.pkg.changes
################################################################################
-def create_tmpdir():
- """
- Create a temporary directory that can be used for unpacking files into for
- checking
- """
- tmpdir = tempfile.mkdtemp()
- return tmpdir
-
-################################################################################
-
def copy_to_holding(filename):
global in_holding
except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
return 0
+ except ChangesUnicodeError:
+ reject("%s: changes file not proper utf-8" % (filename))
+ return 0
# Parse the Files field from the .changes into another dictionary
try:
(source, dest) = args[1:3]
if changes["distribution"].has_key(source):
for arch in changes["architecture"].keys():
- if arch not in database.get_suite_architectures(source):
+ if arch not in DBConn().get_suite_architectures(source):
reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
del changes["distribution"][source]
changes["distribution"][dest] = 1
cursor = DBConn().cursor()
# Check for packages that have moved from one component to another
# STU: this should probably be changed to not join on architecture, suite tables but instead to use their cached name->id mappings from DBConn
- cursor.execute("""PREPARE moved_pkg_q AS
+ DBConn().prepare("moved_pkg_q", """
+ PREPARE moved_pkg_q(text,text,text) AS
SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
component c, architecture a, files f
WHERE b.package = $1 AND s.suite_name = $2
default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
architecture = control.Find("Architecture")
upload_suite = changes["distribution"].keys()[0]
- if architecture not in database.get_suite_architectures(default_suite) and architecture not in database.get_suite_architectures(upload_suite):
+ if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite):
reject("Unknown architecture '%s'." % (architecture))
# Ensure the architecture of the .deb is one of the ones
# Check the version and for file overwrites
reject(Upload.check_binary_against_db(f),"")
- Binary(f).scan_package()
+ Binary(f, reject).scan_package()
# Checks for a source package...
else:
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
except InvalidDscError, line:
reject("%s: syntax error on line %s." % (dsc_filename, line))
+ except ChangesUnicodeError:
+ reject("%s: dsc file not proper utf-8." % (dsc_filename))
+
# Build up the file list of files mentioned by the .dsc
try:
dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
or pkg.orig_tar_gz == -1:
return
- tmpdir = create_tmpdir()
+ tmpdir = utils.temp_dirname()
# Move into the temporary directory
cwd = os.getcwd()
"""
Return the uid,name,isdm for a given gpg fingerprint
- @ptype fpr: string
+ @type fpr: string
@param fpr: a 40 byte GPG fingerprint
- @return (uid, name, isdm)
+ @return: (uid, name, isdm)
"""
cursor = DBConn().cursor()
cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
if qs:
return qs
else:
- return (None, None, None)
+ return (None, None, False)
def check_signed_by_key():
"""Ensure the .changes is signed by an authorized uploader."""
uid_name = ""
# match claimed name with actual name:
- if uid == None:
+ if uid is None:
+        # This is fundamentally broken, but we need to refactor how we get
+        # the UIDs/Fingerprints in order to fix it properly
uid, uid_email = changes["fingerprint"], uid
may_nmu, may_sponsor = 1, 1
# XXX by default new dds don't have a fingerprint/uid in the db atm,
# and can't get one in there if we don't allow nmu/sponsorship
- elif is_dm is "t":
- uid_email = uid
- may_nmu, may_sponsor = 0, 0
- else:
+ elif is_dm is False:
+ # If is_dm is False, we allow full upload rights
uid_email = "%s@debian.org" % (uid)
may_nmu, may_sponsor = 1, 1
+ else:
+ # Assume limited upload rights unless we've discovered otherwise
+ uid_email = uid
+ may_nmu, may_sponsor = 0, 0
+
if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
sponsored = 0
if sponsored and not may_sponsor:
reject("%s is not authorised to sponsor uploads" % (uid))
+ cursor = DBConn().cursor()
if not sponsored and not may_nmu:
source_ids = []
cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
cursor.execute( """SELECT 1 FROM source s
JOIN src_associations sa ON (s.id = sa.source)
WHERE s.source = %(source)s
- AND s.version = '%(version)s'
- AND sa.suite = %(suite)d""",
+ AND s.version = %(version)s
+ AND sa.suite = %(suite)s""",
{'source' : changes['source'],
'version' : changes['version'],
- 'suite' : pasuite})
+ 'suite' : pusuite})
if cursor.fetchone():
# source is already in proposed-updates so no need to hold
if not changes["architecture"].has_key("source"):
pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
cursor = DBConn().cursor()
- cursor.execute( """"SELECT 1 FROM source s
- JOIN src_associations sa ON (s.id = sa.source)
- WHERE s.source = %(source)s
- AND s.version = %(version)s
- AND sa.suite = %d""",
- {'source' : changes['source'],
+ cursor.execute( """SELECT 1 FROM source s
+ JOIN src_associations sa ON (s.id = sa.source)
+ WHERE s.source = %(source)s
+ AND s.version = %(version)s
+ AND sa.suite = %(suite)s""",
+ {'source' : changes['source'],
'version' : changes['version'],
- 'suite' : pasuite})
+ 'suite' : pusuite})
if cursor.fetchone():
return 0
Logger.log(["Moving to new", pkg.changes_file])
Upload.dump_vars(Cnf["Dir::Queue::New"])
- move_to_dir(Cnf["Dir::Queue::New"])
+ move_to_dir(Cnf["Dir::Queue::New"], perms=0640, changesperms=0644)
if not Options["No-Mail"]:
print "Sending new ack."