import email as modemail
import subprocess
-from dbconn import DBConn, get_architecture, get_component, get_suite
+from dbconn import DBConn, get_architecture, get_component, get_suite, \
+ get_override_type, Keyring, session_wrapper, \
+ get_active_keyring_paths, get_primary_keyring_path
+from sqlalchemy import desc
from dak_exceptions import *
+from gpg import SignedFile
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_taint_free, \
- re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
+ re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
+ re_is_orig_source
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- output = "".join(pipe.stdout.readlines())
+ output = pipe.stdout.read()
+
+ pipe.wait()
if output[-1:] == '\n':
output = output[:-1]
try:
f = open(filename, mode)
except IOError:
- raise CantOpenError, filename
+ raise CantOpenError(filename)
return f
################################################################################
def our_raw_input(prompt=""):
if prompt:
- sys.stdout.write(prompt)
+ while 1:
+ try:
+ sys.stdout.write(prompt)
+ break
+ except IOError:
+ pass
sys.stdout.flush()
try:
ret = raw_input()
################################################################################
-def extract_component_from_section(section):
+def extract_component_from_section(section, session=None):
component = ""
if section.find('/') != -1:
# Expand default component
if component == "":
- if Cnf.has_key("Component::%s" % section):
- component = section
- else:
+ comp = get_component(section, session)
+ if comp is None:
component = "main"
+ else:
+ component = comp.component_name
return (section, component)
################################################################################
-def parse_deb822(contents, signing_rules=0):
+def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
+ require_signature = True
+ if keyrings == None:
+ keyrings = []
+ require_signature = False
+
+ signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
+ contents = signed_file.contents
+
error = ""
changes = {}
lines = contents.splitlines(True)
if len(lines) == 0:
- raise ParseChangesError, "[Empty changes file]"
+ raise ParseChangesError("[Empty changes file]")
# Reindex by line number so we can easily verify the format of
# .dsc files...
index += 1
indexed_lines[index] = line[:-1]
- inside_signature = 0
-
num_of_lines = len(indexed_lines.keys())
index = 0
first = -1
while index < num_of_lines:
index += 1
line = indexed_lines[index]
- if line == "":
- if signing_rules == 1:
- index += 1
- if index > num_of_lines:
- raise InvalidDscError, index
- line = indexed_lines[index]
- if not line.startswith("-----BEGIN PGP SIGNATURE"):
- raise InvalidDscError, index
- inside_signature = 0
- break
- else:
- continue
- if line.startswith("-----BEGIN PGP SIGNATURE"):
+ if line == "" and signing_rules == 1:
+ if index != num_of_lines:
+ raise InvalidDscError(index)
break
- if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
- inside_signature = 1
- if signing_rules == 1:
- while index < num_of_lines and line != "":
- index += 1
- line = indexed_lines[index]
- continue
- # If we're not inside the signed data, don't process anything
- if signing_rules >= 0 and not inside_signature:
- continue
slf = re_single_line_field.match(line)
if slf:
field = slf.groups()[0].lower()
mlf = re_multi_line_field.match(line)
if mlf:
if first == -1:
- raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
+ raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
if first == 1 and changes[field] != "":
changes[field] += '\n'
first = 0
continue
error += line
- if signing_rules == 1 and inside_signature:
- raise InvalidDscError, index
-
- changes["filecontents"] = "".join(lines)
+ changes["filecontents"] = armored_contents
if changes.has_key("source"):
# Strip the source version in brackets from the source field,
changes["source-version"] = srcver.group(2)
if error:
- raise ParseChangesError, error
+ raise ParseChangesError(error)
return changes
################################################################################
-def parse_changes(filename, signing_rules=0):
-def parse_changes(filename, signing_rules=0):
+def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    try:
        unicode(content, 'utf-8')
    except UnicodeError:
-        raise ChangesUnicodeError, "Changes file not proper utf-8"
-    return parse_deb822(content, signing_rules)
+        raise ChangesUnicodeError("Changes file not proper utf-8")
+    changes = parse_deb822(content, signing_rules, keyrings=keyrings)
+
+    if not dsc_file:
+        # Finally ensure that everything needed for .changes is there
+        must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
+                         'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
+
+        missingfields = []
+        for keyword in must_keywords:
+            if not changes.has_key(keyword.lower()):
+                missingfields.append(keyword)
+
+        # Reject .changes files missing any field Policy 5.5 requires.
+        if len(missingfields):
+            raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))
+
+    return changes
################################################################################
for f in files.keys():
try:
entry = os.stat(f)
- except OSError, exc:
+ except OSError as exc:
if exc.errno == 2:
# TODO: This happens when the file is in the pool.
continue
# Parse the file if needed
if dsc is None:
- dsc = parse_changes(dsc_filename, signing_rules=1);
+ dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);
if dsc_files is None:
dsc_files = build_file_list(dsc, is_a_dsc=1)
(r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
(r'diff.gz', ('debian_diff',)),
(r'tar.gz', ('native_tar_gz', 'native_tar')),
- (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
- (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
- (r'tar\.(gz|bz2)', ('native_tar',)),
- (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
+ (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
+ (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
+ (r'tar\.(gz|bz2|xz)', ('native_tar',)),
+ (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
)
for f in dsc_files.keys():
files[checkfile][hash_key(hashname)] = checksum
for f in files.keys():
if not files[f].has_key(hash_key(hashname)):
- rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
- hashname, where))
+ rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
return rejmsg
################################################################################
else:
(md5, size, name) = s
except ValueError:
- raise ParseChangesError, i
+ raise ParseChangesError(i)
if section == "":
section = "-"
################################################################################
+# see http://bugs.debian.org/619131
+def build_package_list(dsc, session = None):
+    """
+    Build a dict describing the binary packages announced in the
+    Package-List field of a .dsc.  Returns {} when the field is absent.
+    Each entry maps a package name to a dict with priority, section,
+    type, component and (empty) files keys.
+    """
+    if not dsc.has_key("package-list"):
+        return {}
+
+    packages = {}
+
+    for line in dsc["package-list"].split("\n"):
+        # An empty line ends the list (e.g. a trailing newline in the field).
+        if not line:
+            break
+
+        # Each line is: name type section priority [extras...]
+        fields = line.split()
+        name = fields[0]
+        package_type = fields[1]
+        # Forward the session so component lookup can hit the database.
+        (section, component) = extract_component_from_section(fields[2], session)
+        priority = fields[3]
+
+        # Validate type if we have a session
+        if session and get_override_type(package_type, session) is None:
+            # Maybe just warn and ignore? exit(1) might be a bit hard...
+            # NB: we are inside utils itself, so call fubar directly --
+            # "utils.fubar" would be an unresolved name here.
+            fubar("invalid type (%s) in Package-List." % (package_type))
+
+        # A "dsc" placeholder entry may be refined by a later binary entry.
+        if name not in packages or packages[name]["type"] == "dsc":
+            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
+
+    return packages
+
+################################################################################
+
def send_mail (message, filename=""):
"""sendmail wrapper, takes _either_ a message string or a file as arguments"""
+ # Check whether we're supposed to be sending mail
+ if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
+ return
+
# If we've been passed a string dump it into a temporary file
if message:
(fd, filename) = tempfile.mkstemp()
os.unlink (filename);
return;
- fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
+ fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
os.write (fd, message_raw.as_string(True));
os.close (fd);
# Invoke sendmail
(result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
if (result != 0):
- raise SendmailFailedError, output
+ raise SendmailFailedError(output)
# Clean up any temporary files
if message:
################################################################################
-def move (src, dest, overwrite = 0, perms = 0664):
+def move (src, dest, overwrite = 0, perms = 0o664):
if os.path.exists(dest) and os.path.isdir(dest):
dest_dir = dest
else:
dest_dir = os.path.dirname(dest)
if not os.path.exists(dest_dir):
umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
+ os.makedirs(dest_dir, 0o2775)
os.umask(umask)
#print "Moving %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
os.chmod(dest, perms)
os.unlink(src)
-def copy (src, dest, overwrite = 0, perms = 0664):
+def copy (src, dest, overwrite = 0, perms = 0o664):
if os.path.exists(dest) and os.path.isdir(dest):
dest_dir = dest
else:
dest_dir = os.path.dirname(dest)
if not os.path.exists(dest_dir):
umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
+ os.makedirs(dest_dir, 0o2775)
os.umask(umask)
#print "Copying %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
################################################################################
def where_am_i ():
- res = socket.gethostbyaddr(socket.gethostname())
- database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
+ res = socket.getfqdn()
+ database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
if database_hostname:
return database_hostname
else:
- return res[0]
+ return res
def which_conf_file ():
-    if os.getenv("DAK_CONFIG"):
-        print(os.getenv("DAK_CONFIG"))
-        return os.getenv("DAK_CONFIG")
-    else:
-        res = socket.gethostbyaddr(socket.gethostname())
-        # In case we allow local config files per user, try if one exists
-        if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
-            homedir = os.getenv("HOME")
-            confpath = os.path.join(homedir, "/etc/dak.conf")
-            if os.path.exists(confpath):
-                apt_pkg.ReadConfigFileISC(Cnf,default_config)
-
-        # We are still in here, so there is no local config file or we do
-        # not allow local files. Do the normal stuff.
-        if Cnf.get("Config::" + res[0] + "::DakConfig"):
-            return Cnf["Config::" + res[0] + "::DakConfig"]
-        else:
-            return default_config
+    if os.getenv('DAK_CONFIG'):
+        return os.getenv('DAK_CONFIG')
+
+    res = socket.getfqdn()
+    # In case we allow local config files per user, try if one exists
+    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
+        homedir = os.getenv("HOME")
+        # os.path.join discards homedir when the second component is
+        # absolute, so "/etc/dak.conf" always resolved to the system path.
+        # Use a relative component to really look under $HOME.
+        confpath = os.path.join(homedir, "etc/dak.conf")
+        if os.path.exists(confpath):
+            apt_pkg.ReadConfigFileISC(Cnf,confpath)
+
+    # We are still in here, so there is no local config file or we do
+    # not allow local files. Do the normal stuff.
+    if Cnf.get("Config::" + res + "::DakConfig"):
+        return Cnf["Config::" + res + "::DakConfig"]
+
+    return default_config
def which_apt_conf_file ():
-    res = socket.gethostbyaddr(socket.gethostname())
+    res = socket.getfqdn()
     # In case we allow local config files per user, try if one exists
-    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
+    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
         homedir = os.getenv("HOME")
-        confpath = os.path.join(homedir, "/etc/dak.conf")
+        # An absolute second component makes os.path.join ignore homedir;
+        # use a relative one so the per-user config is really consulted.
+        confpath = os.path.join(homedir, "etc/dak.conf")
         if os.path.exists(confpath):
-            apt_pkg.ReadConfigFileISC(Cnf,default_config)
+            # Read the local config we just found, not the default one
+            # (matches the equivalent fix in which_conf_file).
+            apt_pkg.ReadConfigFileISC(Cnf,confpath)
-    if Cnf.get("Config::" + res[0] + "::AptConfig"):
-        return Cnf["Config::" + res[0] + "::AptConfig"]
+    if Cnf.get("Config::" + res + "::AptConfig"):
+        return Cnf["Config::" + res + "::AptConfig"]
     else:
         return default_apt_config
def which_alias_file():
- hostname = socket.gethostbyaddr(socket.gethostname())[0]
+ hostname = socket.getfqdn()
aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
if os.path.exists(aliasfn):
return aliasfn
suite_ids_list = []
for suitename in split_args(Options["Suite"]):
suite = get_suite(suitename, session=session)
- if suite.suite_id is None:
- warn("suite '%s' not recognised." % (suite.suite_name))
+ if not suite or suite.suite_id is None:
+ warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
else:
suite_ids_list.append(suite.suite_id)
if suite_ids_list:
if not keyserver:
keyserver = Cnf["Dinstall::KeyServer"]
if not keyring:
- keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
+ keyring = get_primary_keyring_path()
# Ensure the filename contains no shell meta-characters or other badness
if not re_taint_free.match(filename):
def gpg_keyring_args(keyrings=None):
if not keyrings:
- keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
+ keyrings = get_active_keyring_paths()
return " ".join(["--keyring %s" % x for x in keyrings])
################################################################################
-
-def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
+@session_wrapper
+def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
"""
Check the signature of a file and return the fingerprint if the
signature is valid or 'None' if it's not. The first argument is the
return (None, rejects)
if not keyrings:
- keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
+ keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]
# Autofetch the signing key if that's enabled
if autofetch == None:
if exit_status:
rejects.append("gpgv failed while checking %s." % (sig_filename))
if status.strip():
- rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
+ rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
else:
- rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
+ rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
return (None, rejects)
# Sanity check the good stuff we expect
# Much of the rest of p-u/p-a depends on being in the right place
os.chdir(from_dir)
changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
- except OSError, e:
+ except OSError as e:
fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
return changes_files
apt_pkg.init()
Cnf = apt_pkg.newConfiguration()
-apt_pkg.ReadConfigFileISC(Cnf,default_config)
+if not os.getenv("DAK_TEST"):
+ apt_pkg.ReadConfigFileISC(Cnf,default_config)
if which_conf_file() != default_config:
apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+
+################################################################################
+
+def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
+    """
+    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
+    Well, actually it parses a local copy, but let's document the source
+    somewhere ;)
+
+    returns a dict associating source package name with a list of open wnpp
+    bugs (Yes, there might be more than one)
+    """
+
+    try:
+        f = open(file)
+        try:
+            lines = f.readlines()
+        finally:
+            # Always release the file handle, even if readlines() fails.
+            f.close()
+    except IOError:
+        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
+        lines = []
+
+    wnpp = {}
+
+    for line in lines:
+        splited_line = line.split(": ", 1)
+        if len(splited_line) > 1:
+            wnpp[splited_line[0]] = splited_line[1].split("|")
+
+    for source in wnpp.keys():
+        bugs = []
+        for wnpp_bug in wnpp[source]:
+            # Pull out the first run of digits (the bug number); skip
+            # malformed entries instead of crashing on a failed match
+            # (.group() on None raises AttributeError).
+            match = re.search(r"(\d+)", wnpp_bug)
+            if match:
+                bugs.append(match.group())
+        wnpp[source] = bugs
+    return wnpp
+
+################################################################################
+
+def get_packages_from_ftp(root, suite, component, architecture):
+    """
+    Returns an object containing apt_pkg-parseable data collected by
+    aggregating Packages.gz files gathered for each architecture.
+
+    @type root: string
+    @param root: path to ftp archive root directory
+
+    @type suite: string
+    @param suite: suite to extract files from
+
+    @type component: string
+    @param component: component to extract files from
+
+    @type architecture: string
+    @param architecture: architecture to extract files from
+
+    @rtype: TagFile
+    @return: apt_pkg class containing package data
+    """
+    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
+    (fd, temp_file) = temp_filename()
+    # We only need the path -- gunzip writes to the file by name -- so close
+    # the descriptor right away instead of leaking it.  Assumes
+    # temp_filename() returns an os-level fd like tempfile.mkstemp -- TODO confirm.
+    os.close(fd)
+    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
+    if (result != 0):
+        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+    # Append the debian-installer packages, when present for this arch.
+    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
+    if os.path.exists(filename):
+        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
+        if (result != 0):
+            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+    # NB: ParseTagFile reads lazily from the open file object, so the handle
+    # must stay open; unlinking the already-open temp file is safe on POSIX.
+    packages = open_file(temp_file)
+    Packages = apt_pkg.ParseTagFile(packages)
+    os.unlink(temp_file)
+    return Packages