# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import commands
+import codecs
+import datetime
import email.Header
import os
import pwd
+import grp
import select
import socket
import shutil
import tempfile
import traceback
import stat
+import apt_inst
import apt_pkg
import time
import re
-import string
import email as modemail
import subprocess
-
-from dbconn import DBConn, get_architecture, get_component, get_suite
+import ldap
+import errno
+
+import daklib.config as config
+import daklib.daksubprocess
+from dbconn import DBConn, get_architecture, get_component, get_suite, \
+ get_override_type, Keyring, session_wrapper, \
+ get_active_keyring_paths, get_primary_keyring_path, \
+ get_suite_architectures, get_or_set_metadatakey, DBSource, \
+ Component, Override, OverrideType
+from sqlalchemy import desc
from dak_exceptions import *
+from gpg import SignedFile
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_taint_free, \
- re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
- re_is_orig_source
+ re_re_mark, re_whitespace_comment, re_issource, \
+ re_is_orig_source, re_build_dep_arch, re_parse_maintainer
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
-default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
-# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
-# all situations under lenny's Python.
-import commands
+# Monkeypatch commands.getstatusoutput as it may not return the correct exit
+# code in lenny's Python. This also affects commands.getoutput and
+# commands.getstatus.
def dak_getstatusoutput(cmd):
- pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
+ pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- output = "".join(pipe.stdout.readlines())
+ output = pipe.stdout.read()
+
+ pipe.wait()
+
+ if output[-1:] == '\n':
+ output = output[:-1]
ret = pipe.wait()
if ret is None:
try:
f = open(filename, mode)
except IOError:
- raise CantOpenError, filename
+ raise CantOpenError(filename)
return f
################################################################################
def our_raw_input(prompt=""):
if prompt:
- sys.stdout.write(prompt)
+ while 1:
+ try:
+ sys.stdout.write(prompt)
+ break
+ except IOError:
+ pass
sys.stdout.flush()
try:
ret = raw_input()
################################################################################
-def extract_component_from_section(section):
+def extract_component_from_section(section, session=None):
component = ""
if section.find('/') != -1:
# Expand default component
if component == "":
- if Cnf.has_key("Component::%s" % section):
- component = section
- else:
- component = "main"
+ component = "main"
return (section, component)
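+
+# e.g. a bare section defaults to the main component:
+#   extract_component_from_section('utils') == ('utils', 'main')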
################################################################################
-def parse_deb822(contents, signing_rules=0):
+def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
+ require_signature = True
+    if keyrings is None:
+ keyrings = []
+ require_signature = False
+
+ signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
+ contents = signed_file.contents
+
error = ""
changes = {}
lines = contents.splitlines(True)
if len(lines) == 0:
- raise ParseChangesError, "[Empty changes file]"
+ raise ParseChangesError("[Empty changes file]")
# Reindex by line number so we can easily verify the format of
# .dsc files...
index += 1
indexed_lines[index] = line[:-1]
- inside_signature = 0
-
num_of_lines = len(indexed_lines.keys())
index = 0
first = -1
while index < num_of_lines:
index += 1
line = indexed_lines[index]
- if line == "":
- if signing_rules == 1:
- index += 1
- if index > num_of_lines:
- raise InvalidDscError, index
- line = indexed_lines[index]
- if not line.startswith("-----BEGIN PGP SIGNATURE"):
- raise InvalidDscError, index
- inside_signature = 0
- break
- else:
- continue
- if line.startswith("-----BEGIN PGP SIGNATURE"):
+ if line == "" and signing_rules == 1:
+ if index != num_of_lines:
+ raise InvalidDscError(index)
break
- if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
- inside_signature = 1
- if signing_rules == 1:
- while index < num_of_lines and line != "":
- index += 1
- line = indexed_lines[index]
- continue
- # If we're not inside the signed data, don't process anything
- if signing_rules >= 0 and not inside_signature:
- continue
slf = re_single_line_field.match(line)
if slf:
field = slf.groups()[0].lower()
mlf = re_multi_line_field.match(line)
if mlf:
if first == -1:
- raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
+ raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
if first == 1 and changes[field] != "":
changes[field] += '\n'
first = 0
continue
error += line
- if signing_rules == 1 and inside_signature:
- raise InvalidDscError, index
-
- changes["filecontents"] = "".join(lines)
+ changes["filecontents"] = armored_contents
if changes.has_key("source"):
# Strip the source version in brackets from the source field,
changes["source-version"] = srcver.group(2)
if error:
- raise ParseChangesError, error
+ raise ParseChangesError(error)
return changes
################################################################################
-def parse_changes(filename, signing_rules=0):
+def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
"""
Parses a changes file and returns a dictionary where each field is a
key. The mandatory first argument is the filename of the .changes
"-----BEGIN PGP SIGNATURE-----".
"""
- changes_in = open_file(filename)
- content = changes_in.read()
- changes_in.close()
+ with open_file(filename) as changes_in:
+ content = changes_in.read()
try:
unicode(content, 'utf-8')
except UnicodeError:
- raise ChangesUnicodeError, "Changes file not proper utf-8"
- return parse_deb822(content, signing_rules)
+ raise ChangesUnicodeError("Changes file not proper utf-8")
+ changes = parse_deb822(content, signing_rules, keyrings=keyrings)
-################################################################################
-
-def hash_key(hashname):
- return '%ssum' % hashname
-
-################################################################################
-
-def create_hash(where, files, hashname, hashfunc):
- """
- create_hash extends the passed files dict with the given hash by
- iterating over all files on disk and passing them to the hashing
- function given.
- """
-
- rejmsg = []
- for f in files.keys():
- try:
- file_handle = open_file(f)
- except CantOpenError:
- rejmsg.append("Could not open file %s for checksumming" % (f))
- continue
- files[f][hash_key(hashname)] = hashfunc(file_handle)
+ if not dsc_file:
+ # Finally ensure that everything needed for .changes is there
+ must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
+ 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
- file_handle.close()
- return rejmsg
+        missingfields = []
+        for keyword in must_keywords:
+            if not changes.has_key(keyword.lower()):
+                missingfields.append(keyword)
-################################################################################
+        if len(missingfields):
+            raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))
-def check_hash(where, files, hashname, hashfunc):
- """
- check_hash checks the given hash in the files dict against the actual
- files on disk. The hash values need to be present consistently in
- all file entries. It does not modify its input in any way.
- """
-
- rejmsg = []
- for f in files.keys():
- file_handle = None
- try:
- try:
- file_handle = open_file(f)
-
- # Check for the hash entry, to not trigger a KeyError.
- if not files[f].has_key(hash_key(hashname)):
- rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
- where))
- continue
-
- # Actually check the hash for correctness.
- if hashfunc(file_handle) != files[f][hash_key(hashname)]:
- rejmsg.append("%s: %s check failed in %s" % (f, hashname,
- where))
- except CantOpenError:
- # TODO: This happens when the file is in the pool.
- # warn("Cannot open file %s" % f)
- continue
- finally:
- if file_handle:
- file_handle.close()
- return rejmsg
+ return changes
################################################################################
-def check_size(where, files):
- """
- check_size checks the file sizes in the passed files dict against the
- files on disk.
- """
-
- rejmsg = []
- for f in files.keys():
- try:
- entry = os.stat(f)
- except OSError, exc:
- if exc.errno == 2:
- # TODO: This happens when the file is in the pool.
- continue
- raise
-
- actual_size = entry[stat.ST_SIZE]
- size = int(files[f]["size"])
- if size != actual_size:
- rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
- % (f, actual_size, size, where))
- return rejmsg
+def hash_key(hashname):
+ return '%ssum' % hashname
################################################################################
-def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
+def check_dsc_files(dsc_filename, dsc, dsc_files):
"""
Verify that the files listed in the Files field of the .dsc are
those expected given the announced Format.
"""
rejmsg = []
- # Parse the file if needed
- if dsc is None:
- dsc = parse_changes(dsc_filename, signing_rules=1);
-
- if dsc_files is None:
- dsc_files = build_file_list(dsc, is_a_dsc=1)
-
# Ensure .dsc lists proper set of source files according to the format
# announced
has = defaultdict(lambda: 0)
(r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
(r'diff.gz', ('debian_diff',)),
(r'tar.gz', ('native_tar_gz', 'native_tar')),
- (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
- (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
- (r'tar\.(gz|bz2)', ('native_tar',)),
- (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
+ (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
+ (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
+ (r'tar\.(gz|bz2|xz)', ('native_tar',)),
+ (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
)
- for f in dsc_files.keys():
+ for f in dsc_files:
m = re_issource.match(f)
if not m:
rejmsg.append("%s: %s in Files field not recognised as source."
################################################################################
-def check_hash_fields(what, manifest):
- """
- check_hash_fields ensures that there are no checksum fields in the
- given dict that we do not know about.
- """
-
- rejmsg = []
- hashes = map(lambda x: x[0], known_hashes)
- for field in manifest:
- if field.startswith("checksums-"):
- hashname = field.split("-",1)[1]
- if hashname not in hashes:
- rejmsg.append("Unsupported checksum field for %s "\
- "in %s" % (hashname, what))
- return rejmsg
-
-################################################################################
-
-def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
- if format >= version:
- # The version should contain the specified hash.
- func = check_hash
-
- # Import hashes from the changes
- rejmsg = parse_checksums(".changes", files, changes, hashname)
- if len(rejmsg) > 0:
- return rejmsg
- else:
- # We need to calculate the hash because it can't possibly
- # be in the file.
- func = create_hash
- return func(".changes", files, hashname, hashfunc)
-
-# We could add the orig which might be in the pool to the files dict to
-# access the checksums easily.
-
-def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
- """
- ensure_dsc_hashes' task is to ensure that each and every *present* hash
- in the dsc is correct, i.e. identical to the changes file and if necessary
- the pool. The latter task is delegated to check_hash.
- """
-
- rejmsg = []
- if not dsc.has_key('Checksums-%s' % (hashname,)):
- return rejmsg
- # Import hashes from the dsc
- parse_checksums(".dsc", dsc_files, dsc, hashname)
- # And check it...
- rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
- return rejmsg
-
-################################################################################
-
-def parse_checksums(where, files, manifest, hashname):
- rejmsg = []
- field = 'checksums-%s' % hashname
- if not field in manifest:
- return rejmsg
- for line in manifest[field].split('\n'):
- if not line:
- break
- clist = line.strip().split(' ')
- if len(clist) == 3:
- checksum, size, checkfile = clist
- else:
- rejmsg.append("Cannot parse checksum line [%s]" % (line))
- continue
- if not files.has_key(checkfile):
- # TODO: check for the file's entry in the original files dict, not
- # the one modified by (auto)byhand and other weird stuff
- # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
- # (file, hashname, where))
- continue
- if not files[checkfile]["size"] == size:
- rejmsg.append("%s: size differs for files and checksums-%s entry "\
- "in %s" % (checkfile, hashname, where))
- continue
- files[checkfile][hash_key(hashname)] = checksum
- for f in files.keys():
- if not files[f].has_key(hash_key(hashname)):
- rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
- hashname, where))
- return rejmsg
-
-################################################################################
-
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
else:
(md5, size, name) = s
except ValueError:
- raise ParseChangesError, i
+ raise ParseChangesError(i)
if section == "":
section = "-"
(section, component) = extract_component_from_section(section)
- files[name] = Dict(size=size, section=section,
+ files[name] = dict(size=size, section=section,
priority=priority, component=component)
files[name][hashname] = md5
################################################################################
-def send_mail (message, filename=""):
- """sendmail wrapper, takes _either_ a message string or a file as arguments"""
+def send_mail (message, filename="", whitelists=None):
+ """sendmail wrapper, takes _either_ a message string or a file as arguments
+
+ @type whitelists: list of (str or None)
+    @param whitelists: paths to whitelist files. C{None} or an empty list whitelists
+ everything, otherwise an address is whitelisted if it is
+ included in any of the lists.
+ In addition a global whitelist can be specified in
+ Dinstall::MailWhiteList.
+ """
+
+ maildir = Cnf.get('Dir::Mail')
+ if maildir:
+ path = os.path.join(maildir, datetime.datetime.now().isoformat())
+ path = find_next_free(path)
+ with open(path, 'w') as fh:
+ print >>fh, message,
+
+ # Check whether we're supposed to be sending mail
+ if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
+ return
# If we've been passed a string dump it into a temporary file
if message:
os.write (fd, message)
os.close (fd)
- if Cnf.has_key("Dinstall::MailWhiteList") and \
- Cnf["Dinstall::MailWhiteList"] != "":
- message_in = open_file(filename)
- message_raw = modemail.message_from_file(message_in)
- message_in.close();
+ if whitelists is None or None in whitelists:
+ whitelists = []
+ if Cnf.get('Dinstall::MailWhiteList', ''):
+ whitelists.append(Cnf['Dinstall::MailWhiteList'])
+ if len(whitelists) != 0:
+ with open_file(filename) as message_in:
+ message_raw = modemail.message_from_file(message_in)
whitelist = [];
- whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
- try:
+ for path in whitelists:
+ with open_file(path, 'r') as whitelist_in:
for line in whitelist_in:
if not re_whitespace_comment.match(line):
if re_re_mark.match(line):
whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
else:
whitelist.append(re.compile(re.escape(line.strip())))
- finally:
- whitelist_in.close()
# Fields to check.
fields = ["To", "Bcc", "Cc"]
mail_whitelisted = 1
break
if not mail_whitelisted:
- print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
+ print "Skipping {0} since it's not whitelisted".format(item)
continue
match.append(item)
if len(match) == 0:
del message_raw[field]
else:
- message_raw.replace_header(field, string.join(match, ", "))
+ message_raw.replace_header(field, ', '.join(match))
# Change message fields in order if we don't have a To header
if not message_raw.has_key("To"):
os.unlink (filename);
return;
- fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
+ fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
os.write (fd, message_raw.as_string(True));
os.close (fd);
# Invoke sendmail
(result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
if (result != 0):
- raise SendmailFailedError, output
+ raise SendmailFailedError(output)
# Clean up any temporary files
if message:
################################################################################
-def poolify (source, component):
- if component:
- component += '/'
+def poolify (source, component=None):
if source[:3] == "lib":
- return component + source[:4] + '/' + source + '/'
+ return source[:4] + '/' + source + '/'
else:
- return component + source[:1] + '/' + source + '/'
+ return source[:1] + '/' + source + '/'
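+
+# Pool layout sketch (exact behaviour of the branches above):
+#   >>> poolify('bash')
+#   'b/bash/'
+#   >>> poolify('libapt-pkg')
+#   'liba/libapt-pkg/'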
################################################################################
-def move (src, dest, overwrite = 0, perms = 0664):
+def move (src, dest, overwrite = 0, perms = 0o664):
if os.path.exists(dest) and os.path.isdir(dest):
dest_dir = dest
else:
dest_dir = os.path.dirname(dest)
- if not os.path.exists(dest_dir):
+ if not os.path.lexists(dest_dir):
umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
+ os.makedirs(dest_dir, 0o2775)
os.umask(umask)
#print "Moving %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
dest += '/' + os.path.basename(src)
# Don't overwrite unless forced to
- if os.path.exists(dest):
+ if os.path.lexists(dest):
if not overwrite:
fubar("Can't move %s to %s - file already exists." % (src, dest))
else:
os.chmod(dest, perms)
os.unlink(src)
-def copy (src, dest, overwrite = 0, perms = 0664):
+def copy (src, dest, overwrite = 0, perms = 0o664):
if os.path.exists(dest) and os.path.isdir(dest):
dest_dir = dest
else:
dest_dir = os.path.dirname(dest)
if not os.path.exists(dest_dir):
umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
+ os.makedirs(dest_dir, 0o2775)
os.umask(umask)
#print "Copying %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
dest += '/' + os.path.basename(src)
# Don't overwrite unless forced to
- if os.path.exists(dest):
+ if os.path.lexists(dest):
if not overwrite:
raise FileExistsError
else:
################################################################################
-def where_am_i ():
- res = socket.gethostbyaddr(socket.gethostname())
- database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
- if database_hostname:
- return database_hostname
- else:
- return res[0]
-
def which_conf_file ():
- res = socket.gethostbyaddr(socket.gethostname())
+ if os.getenv('DAK_CONFIG'):
+ return os.getenv('DAK_CONFIG')
+
+ res = socket.getfqdn()
# In case we allow local config files per user, try if one exists
- if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
+ if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
homedir = os.getenv("HOME")
    confpath = os.path.join(homedir, "etc/dak.conf")
if os.path.exists(confpath):
- apt_pkg.ReadConfigFileISC(Cnf,default_config)
+ apt_pkg.read_config_file_isc(Cnf,confpath)
# We are still in here, so there is no local config file or we do
# not allow local files. Do the normal stuff.
- if Cnf.get("Config::" + res[0] + "::DakConfig"):
- return Cnf["Config::" + res[0] + "::DakConfig"]
- else:
- return default_config
+ if Cnf.get("Config::" + res + "::DakConfig"):
+ return Cnf["Config::" + res + "::DakConfig"]
-def which_apt_conf_file ():
- res = socket.gethostbyaddr(socket.gethostname())
- # In case we allow local config files per user, try if one exists
- if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
- homedir = os.getenv("HOME")
- confpath = os.path.join(homedir, "/etc/dak.conf")
- if os.path.exists(confpath):
- apt_pkg.ReadConfigFileISC(Cnf,default_config)
-
- if Cnf.get("Config::" + res[0] + "::AptConfig"):
- return Cnf["Config::" + res[0] + "::AptConfig"]
- else:
- return default_apt_config
-
-def which_alias_file():
- hostname = socket.gethostbyaddr(socket.gethostname())[0]
- aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
- if os.path.exists(aliasfn):
- return aliasfn
- else:
- return None
+ return default_config
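+
+# Lookup order, summarised: $DAK_CONFIG wins; with
+# Config::<fqdn>::AllowLocalConfig an existing per-user config is merged
+# into Cnf; then Config::<fqdn>::DakConfig; else /etc/dak/dak.conf.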
################################################################################
-def TemplateSubst(map, filename):
+def TemplateSubst(subst_map, filename):
""" Perform a substition of template """
- templatefile = open_file(filename)
- template = templatefile.read()
- for x in map.keys():
- template = template.replace(x, str(map[x]))
- templatefile.close()
+ with open_file(filename) as templatefile:
+ template = templatefile.read()
+ for k, v in subst_map.iteritems():
+ template = template.replace(k, str(v))
return template
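+
+# e.g. (key and template path illustrative):
+#   TemplateSubst({'__SUBJECT__': 'ACCEPTED'},
+#                 Cnf['Dir::Templates'] + '/process-unchecked.accepted')
+# replaces each occurrence of a key in the template with str(value).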
################################################################################
################################################################################
-def cc_fix_changes (changes):
- o = changes.get("architecture", "")
- if o:
- del changes["architecture"]
- changes["architecture"] = {}
- for j in o.split():
- changes["architecture"][j] = 1
-
-def changes_compare (a, b):
- """ Sort by source name, source version, 'have source', and then by filename """
- try:
- a_changes = parse_changes(a)
- except:
- return -1
-
- try:
- b_changes = parse_changes(b)
- except:
- return 1
-
- cc_fix_changes (a_changes)
- cc_fix_changes (b_changes)
-
- # Sort by source name
- a_source = a_changes.get("source")
- b_source = b_changes.get("source")
- q = cmp (a_source, b_source)
- if q:
- return q
-
- # Sort by source version
- a_version = a_changes.get("version", "0")
- b_version = b_changes.get("version", "0")
- q = apt_pkg.VersionCompare(a_version, b_version)
- if q:
- return q
-
- # Sort by 'have source'
- a_has_source = a_changes["architecture"].get("source")
- b_has_source = b_changes["architecture"].get("source")
- if a_has_source and not b_has_source:
- return -1
- elif b_has_source and not a_has_source:
- return 1
-
- # Fall back to sort by filename
- return cmp(a, b)
-
-################################################################################
-
def find_next_free (dest, too_many=100):
extra = 0
orig_dest = dest
- while os.path.exists(dest) and extra < too_many:
+ while os.path.lexists(dest) and extra < too_many:
dest = orig_dest + '.' + repr(extra)
extra += 1
if extra >= too_many:
################################################################################
-def validate_changes_file_arg(filename, require_changes=1):
- """
- 'filename' is either a .changes or .dak file. If 'filename' is a
- .dak file, it's changed to be the corresponding .changes file. The
- function then checks if the .changes file a) exists and b) is
- readable and returns the .changes filename if so. If there's a
- problem, the next action depends on the option 'require_changes'
- argument:
-
- - If 'require_changes' == -1, errors are ignored and the .changes
- filename is returned.
- - If 'require_changes' == 0, a warning is given and 'None' is returned.
- - If 'require_changes' == 1, a fatal error is raised.
-
- """
- error = None
-
- orig_filename = filename
- if filename.endswith(".dak"):
- filename = filename[:-4]+".changes"
-
- if not filename.endswith(".changes"):
- error = "invalid file type; not a changes file"
- else:
- if not os.access(filename,os.R_OK):
- if os.path.exists(filename):
- error = "permission denied"
- else:
- error = "file not found"
-
- if error:
- if require_changes == 1:
- fubar("%s: %s." % (orig_filename, error))
- elif require_changes == 0:
- warn("Skipping %s - %s" % (orig_filename, error))
- return None
- else: # We only care about the .dak file
- return filename
- else:
- return filename
-
-################################################################################
-
-def real_arch(arch):
- return (arch != "source" and arch != "all")
-
-################################################################################
-
def join_with_commas_and(list):
if len(list) == 0: return "nothing"
if len(list) == 1: return list[0]
suite_ids_list = []
for suitename in split_args(Options["Suite"]):
suite = get_suite(suitename, session=session)
- if suite.suite_id is None:
- warn("suite '%s' not recognised." % (suite.suite_name))
+ if not suite or suite.suite_id is None:
+ warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
else:
suite_ids_list.append(suite.suite_id)
if suite_ids_list:
################################################################################
-# Inspired(tm) by Bryn Keller's print_exc_plus (See
-# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
-
-def print_exc():
- tb = sys.exc_info()[2]
- while tb.tb_next:
- tb = tb.tb_next
- stack = []
- frame = tb.tb_frame
- while frame:
- stack.append(frame)
- frame = frame.f_back
- stack.reverse()
- traceback.print_exc()
- for frame in stack:
- print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
- frame.f_code.co_filename,
- frame.f_lineno)
- for key, value in frame.f_locals.items():
- print "\t%20s = " % key,
- try:
- print value
- except:
- print "<unable to print>"
-
-################################################################################
-
-def try_with_debug(function):
- try:
- function()
- except SystemExit:
- raise
- except:
- print_exc()
-
-################################################################################
-
def arch_compare_sw (a, b):
"""
Function for use in sorting lists of architectures.
################################################################################
-def split_args (s, dwim=1):
+def split_args (s, dwim=True):
"""
Split command line arguments which can be separated by either commas
or whitespace. If dwim is set, it will complain about string ending
################################################################################
-def Dict(**dict): return dict
-
-########################################
-
-def gpgv_get_status_output(cmd, status_read, status_write):
- """
- Our very own version of commands.getouputstatus(), hacked to support
- gpgv's status fd.
- """
-
- cmd = ['/bin/sh', '-c', cmd]
- p2cread, p2cwrite = os.pipe()
- c2pread, c2pwrite = os.pipe()
- errout, errin = os.pipe()
- pid = os.fork()
- if pid == 0:
- # Child
- os.close(0)
- os.close(1)
- os.dup(p2cread)
- os.dup(c2pwrite)
- os.close(2)
- os.dup(errin)
- for i in range(3, 256):
- if i != status_write:
- try:
- os.close(i)
- except:
- pass
- try:
- os.execvp(cmd[0], cmd)
- finally:
- os._exit(1)
-
- # Parent
- os.close(p2cread)
- os.dup2(c2pread, c2pwrite)
- os.dup2(errout, errin)
-
- output = status = ""
- while 1:
- i, o, e = select.select([c2pwrite, errin, status_read], [], [])
- more_data = []
- for fd in i:
- r = os.read(fd, 8196)
- if len(r) > 0:
- more_data.append(fd)
- if fd == c2pwrite or fd == errin:
- output += r
- elif fd == status_read:
- status += r
- else:
- fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
- if not more_data:
- pid, exit_status = os.waitpid(pid, 0)
- try:
- os.close(status_write)
- os.close(status_read)
- os.close(c2pread)
- os.close(c2pwrite)
- os.close(p2cwrite)
- os.close(errin)
- os.close(errout)
- except:
- pass
- break
-
- return output, status, exit_status
-
-################################################################################
-
-def process_gpgv_output(status):
- # Process the status-fd output
- keywords = {}
- internal_error = ""
- for line in status.split('\n'):
- line = line.strip()
- if line == "":
- continue
- split = line.split()
- if len(split) < 2:
- internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
- continue
- (gnupg, keyword) = split[:2]
- if gnupg != "[GNUPG:]":
- internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
- continue
- args = split[2:]
- if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
- internal_error += "found duplicate status token ('%s').\n" % (keyword)
- continue
- else:
- keywords[keyword] = args
-
- return (keywords, internal_error)
-
-################################################################################
-
-def retrieve_key (filename, keyserver=None, keyring=None):
- """
- Retrieve the key that signed 'filename' from 'keyserver' and
- add it to 'keyring'. Returns nothing on success, or an error message
- on error.
- """
-
- # Defaults for keyserver and keyring
- if not keyserver:
- keyserver = Cnf["Dinstall::KeyServer"]
- if not keyring:
- keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
-
- # Ensure the filename contains no shell meta-characters or other badness
- if not re_taint_free.match(filename):
- return "%s: tainted filename" % (filename)
-
- # Invoke gpgv on the file
- status_read, status_write = os.pipe()
- cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
- (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
-
- # Process the status-fd output
- (keywords, internal_error) = process_gpgv_output(status)
- if internal_error:
- return internal_error
-
- if not keywords.has_key("NO_PUBKEY"):
- return "didn't find expected NO_PUBKEY in gpgv status-fd output"
-
- fingerprint = keywords["NO_PUBKEY"][0]
- # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
- # it'll try to create a lockfile in /dev. A better solution might
- # be a tempfile or something.
- cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
- % (Cnf["Dinstall::SigningKeyring"])
- cmd += " --keyring %s --keyserver %s --recv-key %s" \
- % (keyring, keyserver, fingerprint)
- (result, output) = commands.getstatusoutput(cmd)
- if (result != 0):
- return "'%s' failed with exit code %s" % (cmd, result)
-
- return ""
-
-################################################################################
-
def gpg_keyring_args(keyrings=None):
if not keyrings:
- keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
+ keyrings = get_active_keyring_paths()
return " ".join(["--keyring %s" % x for x in keyrings])
################################################################################
-def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
- """
- Check the signature of a file and return the fingerprint if the
- signature is valid or 'None' if it's not. The first argument is the
- filename whose signature should be checked. The second argument is a
- reject function and is called when an error is found. The reject()
- function must allow for two arguments: the first is the error message,
- the second is an optional prefix string. It's possible for reject()
- to be called more than once during an invocation of check_signature().
- The third argument is optional and is the name of the files the
- detached signature applies to. The fourth argument is optional and is
- a *list* of keyrings to use. 'autofetch' can either be None, True or
- False. If None, the default behaviour specified in the config will be
- used.
- """
-
- rejects = []
-
- # Ensure the filename contains no shell meta-characters or other badness
- if not re_taint_free.match(sig_filename):
- rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
- return (None, rejects)
-
- if data_filename and not re_taint_free.match(data_filename):
- rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
- return (None, rejects)
-
- if not keyrings:
- keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
-
- # Autofetch the signing key if that's enabled
- if autofetch == None:
- autofetch = Cnf.get("Dinstall::KeyAutoFetch")
- if autofetch:
- error_msg = retrieve_key(sig_filename)
- if error_msg:
- rejects.append(error_msg)
- return (None, rejects)
-
- # Build the command line
- status_read, status_write = os.pipe()
- cmd = "gpgv --status-fd %s %s %s %s" % (
- status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
-
- # Invoke gpgv on the file
- (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
-
- # Process the status-fd output
- (keywords, internal_error) = process_gpgv_output(status)
-
- # If we failed to parse the status-fd output, let's just whine and bail now
- if internal_error:
- rejects.append("internal error while performing signature check on %s." % (sig_filename))
- rejects.append(internal_error, "")
- rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
- return (None, rejects)
-
- # Now check for obviously bad things in the processed output
- if keywords.has_key("KEYREVOKED"):
- rejects.append("The key used to sign %s has been revoked." % (sig_filename))
- if keywords.has_key("BADSIG"):
- rejects.append("bad signature on %s." % (sig_filename))
- if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
- rejects.append("failed to check signature on %s." % (sig_filename))
- if keywords.has_key("NO_PUBKEY"):
- args = keywords["NO_PUBKEY"]
- if len(args) >= 1:
- key = args[0]
- rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
- if keywords.has_key("BADARMOR"):
- rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
- if keywords.has_key("NODATA"):
- rejects.append("no signature found in %s." % (sig_filename))
- if keywords.has_key("EXPKEYSIG"):
- args = keywords["EXPKEYSIG"]
- if len(args) >= 1:
- key = args[0]
- rejects.append("Signature made by expired key 0x%s" % (key))
- if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
- args = keywords["KEYEXPIRED"]
- expiredate=""
- if len(args) >= 1:
- timestamp = args[0]
- if timestamp.count("T") == 0:
- try:
- expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
- except ValueError:
- expiredate = "unknown (%s)" % (timestamp)
- else:
- expiredate = timestamp
- rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
-
- if len(rejects) > 0:
- return (None, rejects)
-
- # Next check gpgv exited with a zero return code
- if exit_status:
- rejects.append("gpgv failed while checking %s." % (sig_filename))
- if status.strip():
- rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
- else:
- rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
- return (None, rejects)
-
- # Sanity check the good stuff we expect
- if not keywords.has_key("VALIDSIG"):
- rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
- else:
- args = keywords["VALIDSIG"]
- if len(args) < 1:
- rejects.append("internal error while checking signature on %s." % (sig_filename))
- else:
- fingerprint = args[0]
- if not keywords.has_key("GOODSIG"):
- rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
- if not keywords.has_key("SIG_ID"):
- rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
-
- # Finally ensure there's not something we don't recognise
- known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
- SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
- NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
-
- for keyword in keywords.keys():
- if not known_keywords.has_key(keyword):
- rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
-
- if len(rejects) > 0:
- return (None, rejects)
- else:
- return (fingerprint, [])
-
-################################################################################
-
def gpg_get_key_addresses(fingerprint):
"""retreive email addresses from gpg key uids for a given fingerprint"""
addresses = key_uid_email_cache.get(fingerprint)
if addresses != None:
return addresses
- addresses = set()
- cmd = "gpg --no-default-keyring %s --fingerprint %s" \
- % (gpg_keyring_args(), fingerprint)
- (result, output) = commands.getstatusoutput(cmd)
- if result == 0:
+ addresses = list()
+ try:
+ with open(os.devnull, "wb") as devnull:
+ output = daklib.daksubprocess.check_output(
+ ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() +
+ ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
+ except subprocess.CalledProcessError:
+ pass
+ else:
for l in output.split('\n'):
- m = re_gpg_uid.match(l)
- if m:
- addresses.add(m.group(1))
+ parts = l.split(':')
+ if parts[0] not in ("uid", "pub"):
+ continue
+ try:
+ uid = parts[9]
+ except IndexError:
+ continue
+ try:
+ # Do not use unicode_escape, because it is locale-specific
+ uid = codecs.decode(uid, "string_escape").decode("utf-8")
+ except UnicodeDecodeError:
+ uid = uid.decode("latin1") # does not fail
+ m = re_parse_maintainer.match(uid)
+ if not m:
+ continue
+ address = m.group(2)
+ address = address.encode("utf8") # dak still uses bytes
+ if address.endswith('@debian.org'):
+ # prefer @debian.org addresses
+ # TODO: maybe not hardcode the domain
+ addresses.insert(0, address)
+ else:
+ addresses.append(address)
key_uid_email_cache[fingerprint] = addresses
return addresses
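+
+# Sketch of a --with-colons uid record parsed above (hypothetical key);
+# field 10 carries the user id:
+#   uid:u::::1293227696::DEADBEEF::John Doe <jdoe@debian.org>::
+# yields 'jdoe@debian.org', sorted to the front as a @debian.org address.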
################################################################################
-# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
-
-def wrap(paragraph, max_length, prefix=""):
- line = ""
- s = ""
- have_started = 0
- words = paragraph.split()
-
- for word in words:
- word_size = len(word)
- if word_size > max_length:
- if have_started:
- s += line + '\n' + prefix
- s += word + '\n' + prefix
- else:
- if have_started:
- new_length = len(line) + word_size + 1
- if new_length > max_length:
- s += line + '\n' + prefix
- line = word
- else:
- line += ' ' + word
- else:
- line = word
- have_started = 1
+def get_logins_from_ldap(fingerprint='*'):
+ """retrieve login from LDAP linked to a given fingerprint"""
+
+ LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
+ LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
+ l = ldap.open(LDAPServer)
+ l.simple_bind_s('','')
+ Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
+ '(keyfingerprint=%s)' % fingerprint,
+ ['uid', 'keyfingerprint'])
+ login = {}
+ for elem in Attrs:
+ login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
+ return login
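+
+# Illustrative result (fingerprint and login hypothetical):
+#   {'0123456789ABCDEF0123456789ABCDEF01234567': 'jdoe'}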
- if have_started:
- s += line
+################################################################################
- return s
+def get_users_from_ldap():
+ """retrieve login and user names from LDAP"""
+
+ LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
+ LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
+ l = ldap.open(LDAPServer)
+ l.simple_bind_s('','')
+ Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
+ '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
+ users = {}
+ for elem in Attrs:
+ elem = elem[1]
+ name = []
+ for k in ('cn', 'mn', 'sn'):
+ try:
+ if elem[k][0] != '-':
+ name.append(elem[k][0])
+ except KeyError:
+ pass
+ users[' '.join(name)] = elem['uid'][0]
+ return users
################################################################################
################################################################################
-def temp_filename(directory=None, prefix="dak", suffix=""):
+def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
"""
Return a secure and unique filename by pre-creating it.
- If 'directory' is non-null, it will be the directory the file is pre-created in.
- If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
- If 'suffix' is non-null, the filename will end with it.
- Returns a pair (fd, name).
+ @type directory: str
+ @param directory: If non-null it will be the directory the file is pre-created in.
+
+ @type prefix: str
+ @param prefix: The filename will be prefixed with this string
+
+ @type suffix: str
+ @param suffix: The filename will end with this string
+
+    @type mode: int
+ @param mode: If set the file will get chmodded to those permissions
+
+ @type group: str
+ @param group: If set the file will get chgrped to the specified group.
+
+    @rtype: tuple
+ @return: Returns a pair (fd, name)
"""
- return tempfile.mkstemp(suffix, prefix, directory)
+ (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
+ if mode:
+ os.chmod(tfname, mode)
+ if group:
+ gid = grp.getgrnam(group).gr_gid
+ os.chown(tfname, -1, gid)
+ return (tfd, tfname)
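+
+# Usage sketch (group name hypothetical); the caller must close fd and
+# remove the file when done:
+#   (fd, name) = temp_filename(suffix='.changes', mode=0o640, group='dak')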
################################################################################
-def temp_dirname(parent=None, prefix="dak", suffix=""):
+def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
"""
Return a secure and unique directory by pre-creating it.
- If 'parent' is non-null, it will be the directory the directory is pre-created in.
- If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
- If 'suffix' is non-null, the filename will end with it.
- Returns a pathname to the new directory
+ @type parent: str
+ @param parent: If non-null it will be the directory the directory is pre-created in.
+
+ @type prefix: str
+ @param prefix: The filename will be prefixed with this string
+
+ @type suffix: str
+ @param suffix: The filename will end with this string
+
+    @type mode: int
+ @param mode: If set the file will get chmodded to those permissions
+
+ @type group: str
+ @param group: If set the file will get chgrped to the specified group.
+
+    @rtype: str
+    @return: Returns the pathname of the new directory
+
"""
- return tempfile.mkdtemp(suffix, prefix, parent)
+ tfname = tempfile.mkdtemp(suffix, prefix, parent)
+ if mode:
+ os.chmod(tfname, mode)
+ if group:
+ gid = grp.getgrnam(group).gr_gid
+ os.chown(tfname, -1, gid)
+ return tfname
################################################################################
################################################################################
-def get_changes_files(dir):
+def get_changes_files(from_dir):
"""
Takes a directory and lists all .changes files in it (as well as chdir'ing
to the directory; this is due to broken behaviour on the part of p-u/p-a
"""
try:
# Much of the rest of p-u/p-a depends on being in the right place
- os.chdir(dir)
- changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
- except OSError, e:
- fubar("Failed to read list from directory %s (%s)" % (dir, e))
+ os.chdir(from_dir)
+ changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
+ except OSError as e:
+ fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
return changes_files
################################################################################
-apt_pkg.init()
+Cnf = config.Config().Cnf
+
+################################################################################
+
+def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
+ """
+ Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
+    Well, actually it parses a local copy, but let's document the source
+ somewhere ;)
+
+ returns a dict associating source package name with a list of open wnpp
+ bugs (Yes, there might be more than one)
+ """
-Cnf = apt_pkg.newConfiguration()
-apt_pkg.ReadConfigFileISC(Cnf,default_config)
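+
+    # A hypothetical sample line and its parse (see the loop below):
+    #   "somepackage: O 123456|ITA 654321"
+    # would yield wnpp['somepackage'] == ['123456', '654321'].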
+    try:
+        with open(file) as f:
+            lines = f.readlines()
+    except IOError:
+        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
+        lines = []
+ wnpp = {}
-if which_conf_file() != default_config:
- apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+ for line in lines:
+        split_line = line.split(": ", 1)
+        if len(split_line) > 1:
+            wnpp[split_line[0]] = split_line[1].split("|")
+
+ for source in wnpp.keys():
+ bugs = []
+ for wnpp_bug in wnpp[source]:
+            bug_match = re.search(r"\d+", wnpp_bug)
+            if bug_match:
+                bugs.append(bug_match.group())
+ wnpp[source] = bugs
+ return wnpp
-###############################################################################
+################################################################################
-def ensure_orig_files(changes, dest_dir, session):
+def get_packages_from_ftp(root, suite, component, architecture):
"""
- Ensure that dest_dir contains all the orig tarballs for the specified
- changes. If it does not, symlink them into place.
+ Returns an object containing apt_pkg-parseable data collected by
+    aggregating the Packages.gz files gathered for the given architecture.
- Returns a 2-tuple (already_exists, symlinked) containing a list of files
- that were already there and a list of files that were symlinked into place.
+ @type root: string
+ @param root: path to ftp archive root directory
+
+ @type suite: string
+ @param suite: suite to extract files from
+
+ @type component: string
+ @param component: component to extract files from
+
+ @type architecture: string
+ @param architecture: architecture to extract files from
+
+ @rtype: TagFile
+ @return: apt_pkg class containing package data
"""
+ filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
+ (fd, temp_file) = temp_filename()
+ (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
+ if (result != 0):
+ fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+ filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
+ if os.path.exists(filename):
+ (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
+ if (result != 0):
+ fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+ packages = open_file(temp_file)
+ Packages = apt_pkg.TagFile(packages)
+ os.unlink(temp_file)
+ return Packages
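+
+# Usage sketch (root path hypothetical):
+#   Packages = get_packages_from_ftp('/srv/ftp-master.debian.org/ftp',
+#                                    'unstable', 'main', 'amd64')
+#   for stanza in Packages: print stanza['Package']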
- exists, symlinked = [], []
+################################################################################
- for dsc_file in changes.dsc_files:
+def deb_extract_control(fh):
+ """extract DEBIAN/control from a binary package"""
+ return apt_inst.DebFile(fh).control.extractdata("control")
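+
+# e.g. (filename hypothetical):
+#   control = deb_extract_control(open('pool/main/f/foo/foo_1.0_amd64.deb'))
+# returns the raw DEBIAN/control contents as a string.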
- # Skip all files that are not orig tarballs
- if not re_is_orig_source.match(dsc_file):
- continue
+################################################################################
- # Skip orig files not identified in the pool
- if not (dsc_file in changes.orig_files and
- 'id' in changes.orig_files[dsc_file]):
- continue
+def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
+ """mail addresses to contact for an upload
- dest = os.path.join(dest_dir, dsc_file)
+ @type maintainer: str
+ @param maintainer: Maintainer field of the .changes file
- if os.path.exists(dest):
- exists.append(dest)
- continue
+ @type changed_by: str
+ @param changed_by: Changed-By field of the .changes file
+
+ @type fingerprint: str
+ @param fingerprint: fingerprint of the key used to sign the upload
+
+ @rtype: list of str
+ @return: list of RFC 2047-encoded mail addresses to contact regarding
+ this upload
+ """
+ addresses = [maintainer]
+ if changed_by != maintainer:
+ addresses.append(changed_by)
+
+ fpr_addresses = gpg_get_key_addresses(fingerprint)
+ if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
+ addresses.append(fpr_addresses[0])
+
+ encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
+ return encoded_addresses
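+
+# Illustrative behaviour (addresses hypothetical): for maintainer
+# 'A <a@example.org>' and changed_by 'B <b@example.org>' both are listed;
+# if neither address belongs to the signing key, the key's preferred uid
+# address is appended as well.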
+
+################################################################################
+
+def call_editor(text="", suffix=".txt"):
+ """run editor and return the result as a string
- orig_file_id = changes.orig_files[dsc_file]['id']
+ @type text: str
+ @param text: initial text
- c = session.execute(
- 'SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id',
- {'id': orig_file_id}
- )
+ @type suffix: str
+ @param suffix: extension for temporary file
+
+ @rtype: str
+ @return: string with the edited text
+ """
+ editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
+ tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
+ try:
+ print >>tmp, text,
+ tmp.close()
+ daklib.daksubprocess.check_call([editor, tmp.name])
+        with open(tmp.name, 'r') as fh:
+            return fh.read()
+ finally:
+ os.unlink(tmp.name)
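+
+# Usage sketch: runs $VISUAL or $EDITOR (falling back to vi) on the text
+# and returns the edited result:
+#   note = call_editor('initial text\n', suffix='.md')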
- res = c.fetchone()
- if not res:
- return "[INTERNAL ERROR] Couldn't find id %s in files table." % orig_file_id
+################################################################################
- src = os.path.join(res[0], res[1])
- os.symlink(src, dest)
- symlinked.append(dest)
+def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False):
+ dbsuite = get_suite(suite, session)
+ overridesuite = dbsuite
+ if dbsuite.overridesuite is not None:
+ overridesuite = get_suite(dbsuite.overridesuite, session)
+ dep_problem = 0
+ p2c = {}
+ all_broken = defaultdict(lambda: defaultdict(set))
+ if arches:
+ all_arches = set(arches)
+ else:
+ all_arches = set(x.arch_string for x in get_suite_architectures(suite))
+ all_arches -= set(["source", "all"])
+ removal_set = set(removals)
+ metakey_d = get_or_set_metadatakey("Depends", session)
+ metakey_p = get_or_set_metadatakey("Provides", session)
+ params = {
+ 'suite_id': dbsuite.suite_id,
+ 'metakey_d_id': metakey_d.key_id,
+ 'metakey_p_id': metakey_p.key_id,
+ }
+ for architecture in all_arches | set(['all']):
+ deps = {}
+ sources = {}
+ virtual_packages = {}
+ params['arch_id'] = get_architecture(architecture, session).arch_id
+
+ statement = '''
+ SELECT b.package, s.source, c.name as component,
+ (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
+ (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
+ FROM binaries b
+ JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
+ JOIN source s ON b.source = s.id
+ JOIN files_archive_map af ON b.file = af.file_id
+ JOIN component c ON af.component_id = c.id
+ WHERE b.architecture = :arch_id'''
+ query = session.query('package', 'source', 'component', 'depends', 'provides'). \
+ from_statement(statement).params(params)
+ for package, source, component, depends, provides in query:
+ sources[package] = source
+ p2c[package] = component
+ if depends is not None:
+ deps[package] = depends
+ # Maintain a counter for each virtual package. If a
+ # Provides: exists, set the counter to 0 and count all
+ # provides by a package not in the list for removal.
+ # If the counter stays 0 at the end, we know that only
+ # the to-be-removed packages provided this virtual
+ # package.
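+            # (For instance, if only a to-be-removed package provides
+            # 'mail-transport-agent', its counter stays 0 and the virtual
+            # package is treated as to-be-removed below.)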
+ if provides is not None:
+ for virtual_pkg in provides.split(","):
+ virtual_pkg = virtual_pkg.strip()
+ if virtual_pkg == package: continue
+ if not virtual_packages.has_key(virtual_pkg):
+ virtual_packages[virtual_pkg] = 0
+ if package not in removals:
+ virtual_packages[virtual_pkg] += 1
+
+ # If a virtual package is only provided by the to-be-removed
+ # packages, treat the virtual package as to-be-removed too.
+ removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])
+
+ # Check binary dependencies (Depends)
+ for package in deps:
+        if package in removal_set: continue
+ try:
+ parsed_dep = apt_pkg.parse_depends(deps[package])
+ except ValueError as e:
+ print "Error for package %s: %s" % (package, e)
+ parsed_dep = []
+ for dep in parsed_dep:
+ # Check for partial breakage. If a package has a ORed
+ # dependency, there is only a dependency problem if all
+ # packages in the ORed depends will be removed.
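+                # (For instance, with 'Depends: a | b' removing only 'a'
+                # is fine; removing both 'a' and 'b' breaks the package.)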
+ unsat = 0
+ for dep_package, _, _ in dep:
+                    if dep_package in removal_set:
+ unsat += 1
+ if unsat == len(dep):
+ component = p2c[package]
+ source = sources[package]
+ if component != "main":
+ source = "%s/%s" % (source, component)
+ all_broken[source][package].add(architecture)
+ dep_problem = 1
+
+ if all_broken and not quiet:
+ if cruft:
+ print " - broken Depends:"
+ else:
+ print "# Broken Depends:"
+ for source, bindict in sorted(all_broken.items()):
+ lines = []
+ for binary, arches in sorted(bindict.items()):
+ if arches == all_arches or 'all' in arches:
+ lines.append(binary)
+ else:
+ lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
+ if cruft:
+ print ' %s: %s' % (source, lines[0])
+ else:
+ print '%s: %s' % (source, lines[0])
+ for line in lines[1:]:
+ if cruft:
+ print ' ' + ' ' * (len(source) + 2) + line
+ else:
+ print ' ' * (len(source) + 2) + line
+ if not cruft:
+ print
+
+ # Check source dependencies (Build-Depends and Build-Depends-Indep)
+ all_broken = defaultdict(set)
+ metakey_bd = get_or_set_metadatakey("Build-Depends", session)
+ metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
+ params = {
+ 'suite_id': dbsuite.suite_id,
+ 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
+ }
+ statement = '''
+ SELECT s.source, string_agg(sm.value, ', ') as build_dep
+ FROM source s
+ JOIN source_metadata sm ON s.id = sm.src_id
+ WHERE s.id in
+ (SELECT source FROM src_associations
+ WHERE suite = :suite_id)
+ AND sm.key_id in :metakey_ids
+ GROUP BY s.id, s.source'''
+ query = session.query('source', 'build_dep').from_statement(statement). \
+ params(params)
+ for source, build_dep in query:
+ if source in removals: continue
+ parsed_dep = []
+ if build_dep is not None:
+ # Remove [arch] information since we want to see breakage on all arches
+ build_dep = re_build_dep_arch.sub("", build_dep)
+ try:
+ parsed_dep = apt_pkg.parse_src_depends(build_dep)
+ except ValueError as e:
+ print "Error for source %s: %s" % (source, e)
+ for dep in parsed_dep:
+ unsat = 0
+ for dep_package, _, _ in dep:
+                if dep_package in removal_set:
+ unsat += 1
+ if unsat == len(dep):
+ component, = session.query(Component.component_name) \
+ .join(Component.overrides) \
+ .filter(Override.suite == overridesuite) \
+ .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
+ .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
+ .first()
+ key = source
+ if component != "main":
+ key = "%s/%s" % (source, component)
+ all_broken[key].add(pp_deps(dep))
+ dep_problem = 1
+
+ if all_broken and not quiet:
+ if cruft:
+ print " - broken Build-Depends:"
+ else:
+ print "# Broken Build-Depends:"
+ for source, bdeps in sorted(all_broken.items()):
+ bdeps = sorted(bdeps)
+ if cruft:
+ print ' %s: %s' % (source, bdeps[0])
+ else:
+ print '%s: %s' % (source, bdeps[0])
+ for bdep in bdeps[1:]:
+ if cruft:
+ print ' ' + ' ' * (len(source) + 2) + bdep
+ else:
+ print ' ' * (len(source) + 2) + bdep
+ if not cruft:
+ print
- return (exists, symlinked)
+ return dep_problem