# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-import codecs
import commands
import email.Header
import os
import re
import string
import email as modemail
+import subprocess
from dbconn import DBConn, get_architecture, get_component, get_suite
from dak_exceptions import *
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
- re_multi_line_field, re_srchasver, re_verwithext, \
- re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
- re_whitespace_comment
+ re_multi_line_field, re_srchasver, re_taint_free, \
+ re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
+ re_is_orig_source
+
+from formats import parse_format, validate_changes_format
+from srcformats import get_format_from_string
+from collections import defaultdict
################################################################################
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
+# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
+# all situations under lenny's Python.
+import commands
+def dak_getstatusoutput(cmd):
+    """Drop-in replacement for commands.getstatusoutput().
+
+    Runs cmd through the shell with stderr folded into stdout and
+    returns an (exitstatus, output) tuple, working around lenny's
+    Python always reporting a "0" exit code.
+    """
+    proc = subprocess.Popen(cmd, shell=True, universal_newlines=True,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+    captured = proc.stdout.read()
+
+    status = proc.wait()
+    if status is None:
+        status = 0
+
+    return status, captured
+commands.getstatusoutput = dak_getstatusoutput
+
################################################################################
def html_escape(s):
################################################################################
+def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
+    """
+    Verify that the files listed in the Files field of the .dsc are
+    those expected given the announced Format.
+
+    @type dsc_filename: string
+    @param dsc_filename: path of .dsc file
+
+    @type dsc: dict
+    @param dsc: the content of the .dsc parsed by C{parse_changes()}
+
+    @type dsc_files: dict
+    @param dsc_files: the file list returned by C{build_file_list()}
+
+    @rtype: list
+    @return: all errors detected
+    """
+    rejmsg = []
+
+    # Parse the file if needed
+    if dsc is None:
+        dsc = parse_changes(dsc_filename, signing_rules=1)
+
+    if dsc_files is None:
+        dsc_files = build_file_list(dsc, is_a_dsc=1)
+
+    # Ensure .dsc lists proper set of source files according to the format
+    # announced
+    has = defaultdict(lambda: 0)
+
+    # Ordered lookup: first pattern that matches the file-type suffix wins.
+    ftype_lookup = (
+        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
+        (r'diff.gz', ('debian_diff',)),
+        (r'tar.gz', ('native_tar_gz', 'native_tar')),
+        (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
+        (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
+        (r'tar\.(gz|bz2)', ('native_tar',)),
+        (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
+    )
+
+    for f in dsc_files.keys():
+        m = re_issource.match(f)
+        if not m:
+            rejmsg.append("%s: %s in Files field not recognised as source."
+                          % (dsc_filename, f))
+            continue
+
+        # Populate 'has' dictionary by resolving keys in lookup table
+        matched = False
+        for regex, keys in ftype_lookup:
+            if re.match(regex, m.group(3)):
+                matched = True
+                for key in keys:
+                    has[key] += 1
+                break
+
+        # File does not match anything in lookup table; reject.
+        # (Was a call to an undefined reject() helper, which would have
+        # raised NameError instead of recording the rejection message.)
+        if not matched:
+            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
+
+    # Check for multiple files
+    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
+        if has[file_type] > 1:
+            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
+
+    # Source format specific tests
+    try:
+        format = get_format_from_string(dsc['format'])
+        rejmsg.extend([
+            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
+        ])
+
+    except UnknownFormatError:
+        # Not an error here for now
+        pass
+
+    return rejmsg
+
+################################################################################
+
def check_hash_fields(what, manifest):
"""
check_hash_fields ensures that there are no checksum fields in the
for line in manifest[field].split('\n'):
if not line:
break
- checksum, size, checkfile = line.strip().split(' ')
+ clist = line.strip().split(' ')
+ if len(clist) == 3:
+ checksum, size, checkfile = clist
+ else:
+ rejmsg.append("Cannot parse checksum line [%s]" % (line))
+ continue
if not files.has_key(checkfile):
# TODO: check for the file's entry in the original files dict, not
# the one modified by (auto)byhand and other weird stuff
if not changes.has_key(field):
raise NoFilesFieldError
- # Make sure we recognise the format of the Files: field
- format = re_verwithext.search(changes.get("format", "0.0"))
- if not format:
- raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
-
- format = format.groups()
- if format[1] == None:
- format = int(float(format[0])), 0, format[2]
- else:
- format = int(format[0]), int(format[1]), format[2]
- if format[2] == None:
- format = format[:2]
-
- if is_a_dsc:
- # format = (1,0) are the only formats we currently accept,
- # format = (0,0) are missing format headers of which we still
- # have some in the archive.
- if format != (1,0) and format != (0,0):
- raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
- else:
- if (format < (1,5) or format > (1,8)):
- raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
- if field != "files" and format < (1,8):
- raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
+ # Validate .changes Format: field
+ if not is_a_dsc:
+ validate_changes_format(parse_format(changes['format']), field)
includes_section = (not is_a_dsc) and field == "files"
templatefile = open_file(filename)
template = templatefile.read()
for x in map.keys():
- template = template.replace(x,map[x])
+ template = template.replace(x, str(map[x]))
templatefile.close()
return template
def whoami ():
return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
+def getusername ():
+    """Return the login name of the user this process runs as."""
+    return pwd.getpwuid(os.getuid()).pw_name
+
################################################################################
def size_type (c):
suite_ids_list = []
for suitename in split_args(Options["Suite"]):
suite = get_suite(suitename, session=session)
- if suite_id is None:
- warn("suite '%s' not recognised." % (suitename))
+ if suite.suite_id is None:
+ warn("suite '%s' not recognised." % (suite.suite_name))
else:
suite_ids_list.append(suite.suite_id)
if suite_ids_list:
apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
###############################################################################
+
+def ensure_orig_files(changes, dest_dir, session):
+    """
+    Ensure that dest_dir contains all the orig tarballs for the specified
+    changes. If it does not, symlink them into place.
+
+    Returns a 2-tuple (already_exists, symlinked) containing a list of files
+    that were already there and a list of files that were symlinked into place.
+
+    NOTE(review): if a listed orig file id is missing from the files table
+    this returns an error *string* instead of the documented 2-tuple --
+    callers must cope with both shapes; consider raising instead. TODO confirm
+    with callers before changing.
+    """
+
+    exists, symlinked = [], []
+
+    for dsc_file in changes.dsc_files:
+
+        # Skip all files that are not orig tarballs
+        if not re_is_orig_source.match(dsc_file):
+            continue
+
+        # Skip orig files not identified in the pool
+        if not (dsc_file in changes.orig_files and
+                'id' in changes.orig_files[dsc_file]):
+            continue
+
+        dest = os.path.join(dest_dir, dsc_file)
+
+        # Already present in dest_dir -- record and move on
+        if os.path.exists(dest):
+            exists.append(dest)
+            continue
+
+        orig_file_id = changes.orig_files[dsc_file]['id']
+
+        # Resolve the pool path of the orig tarball from its file id
+        c = session.execute(
+            'SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id',
+            {'id': orig_file_id}
+        )
+
+        res = c.fetchone()
+        if not res:
+            return "[INTERNAL ERROR] Couldn't find id %s in files table." % orig_file_id
+
+        src = os.path.join(res[0], res[1])
+        os.symlink(src, dest)
+        symlinked.append(dest)
+
+    return (exists, symlinked)