SecurityQueueHandling "true";
SecurityQueueBuild "true";
DefaultSuite "stable";
- SuiteSuffix "updates";
+ SuiteSuffix "updates/";
OverrideMaintainer "dak@security.debian.org";
LegacyStableHasNoSections "false";
};
do_dists
fi
-dak contents -l 10000 scan
+dak contents -l 10000 scan-binary
+dak contents -l 1000 scan-source
pg_timestamp postunchecked
from daklib.config import Config
from daklib.dbconn import *
-from daklib.contents import ContentsScanner, ContentsWriter
+from daklib.contents import BinaryContentsScanner, ContentsWriter, \
+ SourceContentsScanner
from daklib import daklog
from daklib import utils
generate
generate Contents-$arch.gz files
- scan
- scan the debs in the existing pool and load contents into the bin_contents table
+ scan-source
+ scan the source packages in the existing pool and load contents into
+ the src_contents table
+
+ scan-binary
+ scan the (u)debs in the existing pool and load contents into the
+ bin_contents table
OPTIONS
-h, --help
-f, --force
write Contents files for suites marked as untouchable, too
-OPTIONS for scan
+OPTIONS for scan-source and scan-binary
-l, --limit=NUMBER
maximum number of packages to scan
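+
+EXAMPLES (as invoked by the cron job in this patch)
+ dak contents -l 1000 scan-source
+ dak contents -l 10000 scan-binary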
"""
################################################################################
-def scan_all(cnf, limit):
- Logger = daklog.Logger(cnf.Cnf, 'contents scan')
- result = ContentsScanner.scan_all(limit)
+def binary_scan_all(cnf, limit):
+ Logger = daklog.Logger(cnf.Cnf, 'contents scan-binary')
+ result = BinaryContentsScanner.scan_all(limit)
+ processed = '%(processed)d packages processed' % result
+ remaining = '%(remaining)d packages remaining' % result
+ Logger.log([processed, remaining])
+ Logger.close()
+
+################################################################################
+
+def source_scan_all(cnf, limit):
+ Logger = daklog.Logger(cnf.Cnf, 'contents scan-source')
+ result = SourceContentsScanner.scan_all(limit)
processed = '%(processed)d packages processed' % result
remaining = '%(remaining)d packages remaining' % result
Logger.log([processed, remaining])
if len(options['Limit']) > 0:
limit = int(options['Limit'])
- if args[0] == 'scan':
- scan_all(cnf, limit)
+ if args[0] == 'scan-source':
+ source_scan_all(cnf, limit)
+ return
+
+ if args[0] == 'scan-binary':
+ binary_scan_all(cnf, limit)
return
suite_names = utils.split_args(options['Suite'])
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Allow us to mark keyrings as no longer in use
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2011 Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from socket import gethostname
+
+################################################################################
+def do_update(self):
+ """
+ Allow us to mark keyrings as no longer in use
+ """
+ print __doc__
+ try:
+ c = self.db.cursor()
+
+ c.execute("ALTER TABLE keyrings ADD COLUMN active BOOL DEFAULT TRUE")
+ c.execute("UPDATE config SET value = '50' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, 'Unable to apply update 50, rollback issued. Error message: %s' % (str(msg))
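+
+# Note: numbered update modules such as this one are applied via
+# 'dak update-db', which compares the db_revision value set above
+# against the updates that are available.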
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Fix table for source contents.
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2011 Torsten Werner <twerner@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from socket import gethostname
+
+################################################################################
+def do_update(self):
+ """
+ Fix table for source contents.
+ """
+ print __doc__
+ try:
+ c = self.db.cursor()
+
+ c.execute("""
+ CREATE INDEX ON src_contents (source_id)""")
+ c.execute("""
+ ALTER TABLE src_contents ADD COLUMN created TIMESTAMP WITH TIME ZONE
+ NOT NULL DEFAULT now()""")
+ c.execute("""
+ ALTER TABLE src_contents ADD COLUMN modified TIMESTAMP WITH TIME ZONE
+ NOT NULL DEFAULT now()""")
+ c.execute("""
+ CREATE TRIGGER modified_src_contents BEFORE UPDATE ON src_contents
+ FOR EACH ROW EXECUTE PROCEDURE tfunc_set_modified()""")
+
+ c.execute("UPDATE config SET value = '51' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, 'Unable to apply update 51, rollback issued. Error message: %s' % (str(msg))
from daklib import utils
import apt_pkg, os, stat, sys
-def fetch(query, args, session):
- return [path + filename for (path, filename) in \
- session.execute(query, args).fetchall()]
-
-def getSources(suite, component, session, timestamp):
- extra_cond = ""
- if timestamp:
- extra_cond = "AND extract(epoch from sa.created) > %d" % timestamp
- query = """
- SELECT l.path, f.filename
- FROM source s
- JOIN src_associations sa
- ON s.id = sa.source AND sa.suite = :suite %s
- JOIN files f
- ON s.file = f.id
- JOIN location l
- ON f.location = l.id AND l.component = :component
- ORDER BY filename
- """ % extra_cond
- args = { 'suite': suite.suite_id,
- 'component': component.component_id }
- return fetch(query, args, session)
-
-def getBinaries(suite, component, architecture, type, session, timestamp):
- extra_cond = ""
- if timestamp:
- extra_cond = "AND extract(epoch from ba.created) > %d" % timestamp
- query = """
-CREATE TEMP TABLE b_candidates (
- source integer,
- file integer,
- architecture integer);
-
-INSERT INTO b_candidates (source, file, architecture)
- SELECT b.source, b.file, b.architecture
- FROM binaries b
- JOIN bin_associations ba ON b.id = ba.bin
- WHERE b.type = :type AND ba.suite = :suite AND
- b.architecture IN (2, :architecture) %s;
-
-CREATE TEMP TABLE gf_candidates (
- filename text,
- path text,
- architecture integer,
- src integer,
- source text);
-
-INSERT INTO gf_candidates (filename, path, architecture, src, source)
- SELECT f.filename, l.path, bc.architecture, bc.source as src, s.source
- FROM b_candidates bc
- JOIN source s ON bc.source = s.id
- JOIN files f ON bc.file = f.id
- JOIN location l ON f.location = l.id
- WHERE l.component = :component;
-
-WITH arch_any AS
-
- (SELECT path, filename FROM gf_candidates
- WHERE architecture > 2),
-
- arch_all_with_any AS
- (SELECT path, filename FROM gf_candidates
- WHERE architecture = 2 AND
- src IN (SELECT src FROM gf_candidates WHERE architecture > 2)),
-
- arch_all_without_any AS
- (SELECT path, filename FROM gf_candidates
- WHERE architecture = 2 AND
- source NOT IN (SELECT DISTINCT source FROM gf_candidates WHERE architecture > 2)),
-
- filelist AS
- (SELECT * FROM arch_any
- UNION
- SELECT * FROM arch_all_with_any
- UNION
- SELECT * FROM arch_all_without_any)
-
- SELECT * FROM filelist ORDER BY filename
- """ % extra_cond
- args = { 'suite': suite.suite_id,
- 'component': component.component_id,
- 'architecture': architecture.arch_id,
- 'type': type }
- return fetch(query, args, session)
+from daklib.lists import getSources, getBinaries
def listPath(suite, component, architecture = None, type = None,
incremental_mode = False):
(file, timestamp) = listPath(suite, component,
incremental_mode = incremental_mode)
session = DBConn().session()
- for filename in getSources(suite, component, session, timestamp):
+ for _, filename in getSources(suite, component, session, timestamp):
file.write(filename + '\n')
session.close()
file.close()
(file, timestamp) = listPath(suite, component, architecture, type,
incremental_mode)
session = DBConn().session()
- for filename in getBinaries(suite, component, architecture, type,
+ for _, filename in getBinaries(suite, component, architecture, type,
session, timestamp):
file.write(filename + '\n')
session.close()
#!/usr/bin/env python
-""" Create all the Release files
+"""
+Create all the Release files
@contact: Debian FTPMaster <ftpmaster@debian.org>
-@Copyright: 2001, 2002, 2006 Anthony Towns <ajt@debian.org>
-@copyright: 2009, 2011 Joerg Jaspert <joerg@debian.org>
+@copyright: 2011 Joerg Jaspert <joerg@debian.org>
+@copyright: 2011 Mark Hymers <mhy@debian.org>
@license: GNU General Public License version 2 or later
+
"""
+
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-# ``Bored now''
+################################################################################
+
+# <mhy> I wish they wouldnt leave biscuits out, thats just tempting. Damnit.
################################################################################
import sys
import os
+import os.path
import stat
import time
import gzip
import bz2
import apt_pkg
+from tempfile import mkstemp, mkdtemp
+import commands
+from multiprocessing import Pool, TimeoutError
+from sqlalchemy.orm import object_session
-from daklib import utils
+from daklib import utils, daklog
+from daklib.regexes import re_gensubrelease, re_includeinrelease
from daklib.dak_exceptions import *
from daklib.dbconn import *
from daklib.config import Config
################################################################################
-
-Cnf = None
-out = None
-AptCnf = None
+Logger = None #: Our logging object
+results = [] #: Results of the subprocesses
################################################################################
def usage (exit_code=0):
- print """Usage: dak generate-releases [OPTION]... [SUITE]...
-Generate Release files (for SUITE).
+ """ Usage information"""
- -h, --help show this help and exit
- -a, --apt-conf FILE use FILE instead of default apt.conf
- -f, --force-touch ignore Untouchable directives in dak.conf
+ print """Usage: dak generate-releases [OPTIONS]
+Generate the Release files
-If no SUITE is given Release files are generated for all suites."""
+ -s, --suite=SUITE(s) process this suite
+ Default: All suites not marked 'untouchable'
+ -f, --force Allow processing of untouchable suites
+ CAREFUL: Only to be used at (point) release time!
+ -h, --help show this help and exit
+SUITE can be a space-separated list, e.g.
+ --suite=unstable testing
+ """
sys.exit(exit_code)
-################################################################################
+########################################################################
-def add_tiffani (files, path, indexstem):
- index = "%s.diff/Index" % (indexstem)
- filepath = "%s/%s" % (path, index)
- if os.path.exists(filepath):
- #print "ALERT: there was a tiffani file %s" % (filepath)
- files.append(index)
-
-def gen_i18n_index (files, tree, sec):
- path = Cnf["Dir::Root"] + tree + "/"
- i18n_path = "%s/i18n" % (sec)
- if os.path.exists("%s/%s" % (path, i18n_path)):
- index = "%s/Index" % (i18n_path)
- out = open("%s/%s" % (path, index), "w")
- out.write("SHA1:\n")
- for x in os.listdir("%s/%s" % (path, i18n_path)):
- if x.startswith('Translation-'):
- f = open("%s/%s/%s" % (path, i18n_path, x), "r")
- size = os.fstat(f.fileno())[6]
- f.seek(0)
- sha1sum = apt_pkg.sha1sum(f)
- f.close()
- out.write(" %s %7d %s\n" % (sha1sum, size, x))
- out.close()
- files.append(index)
-
-def compressnames (tree,type,file):
- compress = AptCnf.get("%s::%s::Compress" % (tree,type), AptCnf.get("Default::%s::Compress" % (type), ". gzip"))
- result = []
- cl = compress.split()
- uncompress = ("." not in cl)
- for mode in compress.split():
- if mode == ".":
- result.append(file)
- elif mode == "gzip":
- if uncompress:
- result.append("<zcat/.gz>" + file)
- uncompress = 0
- result.append(file + ".gz")
- elif mode == "bzip2":
- if uncompress:
- result.append("<bzcat/.bz2>" + file)
- uncompress = 0
- result.append(file + ".bz2")
- return result
-
-decompressors = { 'zcat' : gzip.GzipFile,
- 'bzip2' : bz2.BZ2File }
-
-hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
- 'SHA1' : apt_pkg.sha1sum,
- 'SHA256' : apt_pkg.sha256sum }
-
-def print_hash_files (tree, files, hashop):
- path = Cnf["Dir::Root"] + tree + "/"
- for name in files:
- hashvalue = ""
- hashlen = 0
- try:
- if name[0] == "<":
- j = name.index("/")
- k = name.index(">")
- (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
- file_handle = decompressors[ cat ]( "%s%s%s" % (path, name, ext) )
- contents = file_handle.read()
- hashvalue = hashfuncs[ hashop ](contents)
- hashlen = len(contents)
+def get_result(arg):
+ global results
+ if arg:
+ results.append(arg)
+
+def sign_release_dir(dirname):
+ cnf = Config()
+
+ if cnf.has_key("Dinstall::SigningKeyring"):
+ keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
+ if cnf.has_key("Dinstall::SigningPubKeyring"):
+ keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
+
+ arguments = "--no-options --batch --no-tty --armour"
+ signkeyids = cnf.signingkeyids.split()
+
+ relname = os.path.join(dirname, 'Release')
+
+ dest = os.path.join(dirname, 'Release.gpg')
+ if os.path.exists(dest):
+ os.unlink(dest)
+
+ inlinedest = os.path.join(dirname, 'InRelease')
+ if os.path.exists(inlinedest):
+ os.unlink(inlinedest)
+
+ for keyid in signkeyids:
+ if keyid != "":
+ defkeyid = "--default-key %s" % keyid
else:
- try:
- file_handle = utils.open_file(path + name)
- hashvalue = hashfuncs[ hashop ](file_handle)
- hashlen = os.stat(path + name).st_size
- except:
- raise
- else:
- if file_handle:
- file_handle.close()
-
- except CantOpenError:
- print "ALERT: Couldn't open " + path + name
- except IOError:
- print "ALERT: IOError when reading %s" % (path + name)
- raise
- else:
- out.write(" %s %8d %s\n" % (hashvalue, hashlen, name))
-
-def write_release_file (relpath, suite, component, origin, label, arch, version="", suite_suffix="", notautomatic="", butautomaticupgrades=""):
- try:
- if os.access(relpath, os.F_OK):
- if os.stat(relpath).st_nlink > 1:
- os.unlink(relpath)
- release = open(relpath, "w")
- except IOError:
- utils.fubar("Couldn't write to " + relpath)
-
- release.write("Archive: %s\n" % (suite))
- if version != "":
- release.write("Version: %s\n" % (version))
-
- if suite_suffix:
- release.write("Component: %s/%s\n" % (suite_suffix,component))
- else:
- release.write("Component: %s\n" % (component))
+ defkeyid = ""
- release.write("Origin: %s\n" % (origin))
- release.write("Label: %s\n" % (label))
- if notautomatic != "":
- release.write("NotAutomatic: %s\n" % (notautomatic))
- if butautomaticupgrades != "":
- release.write("ButAutomaticUpgrades: %s\n" % (butautomaticupgrades))
- release.write("Architecture: %s\n" % (arch))
- release.close()
+ os.system("gpg %s %s %s --detach-sign <%s >>%s" %
+ (keyring, defkeyid, arguments, relname, dest))
+
+ os.system("gpg %s %s %s --clearsign <%s >>%s" %
+ (keyring, defkeyid, arguments, relname, inlinedest))
+
+class ReleaseWriter(object):
+ def __init__(self, suite):
+ self.suite = suite
+
+ def generate_release_files(self):
+ """
+ Generate Release files for the given suite
+
+ @type suite: string
+ @param suite: Suite name
+ """
+
+ suite = self.suite
+ session = object_session(suite)
+
+ architectures = get_suite_architectures(suite.suite_name, skipall=True, skipsrc=True, session=session)
+
+ # Attribs contains a tuple of field names and the database names to use to
+ # fill them in
+ attribs = ( ('Origin', 'origin'),
+ ('Label', 'label'),
+ ('Suite', 'suite_name'),
+ ('Version', 'version'),
+ ('Codename', 'codename') )
+
+ # A "Sub" Release file has slightly different fields
+ subattribs = ( ('Origin', 'origin'),
+ ('Label', 'label'),
+ ('Archive', 'suite_name'),
+ ('Version', 'version') )
+
+ # Boolean stuff. If we find it true in database, write out "yes" into the release file
+ boolattrs = ( ('NotAutomatic', 'notautomatic'),
+ ('ButAutomaticUpgrades', 'butautomaticupgrades') )
+
+ cnf = Config()
+
+ suite_suffix = "%s" % (cnf.Find("Dinstall::SuiteSuffix"))
+
+ outfile = os.path.join(cnf["Dir::Root"], 'dists', "%s/%s" % (suite.suite_name, suite_suffix), "Release")
+ out = open(outfile, "w")
+
+ for key, dbfield in attribs:
+ if getattr(suite, dbfield) is not None:
+ out.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+ out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
+
+ if suite.validtime:
+ validtime=float(suite.validtime)
+ out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()+validtime))))
+
+ for key, dbfield in boolattrs:
+ if getattr(suite, dbfield, False):
+ out.write("%s: yes\n" % (key))
+
+ out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))
+
+ ## FIXME: Components need to be adjusted to whatever will be in the db
+ ## Needs putting in the DB
+ components = ['main', 'contrib', 'non-free']
+
+ out.write("Components: %s\n" % ( " ".join(map(lambda x: "%s%s" % (suite_suffix, x), components ))))
+
+ # For exact compatibility with old g-r, write out Description here instead
+ # of with the rest of the DB fields above
+ if getattr(suite, 'description') is not None:
+ out.write("Description: %s\n" % suite.description)
+
+ for comp in components:
+ for dirpath, dirnames, filenames in os.walk("%sdists/%s/%s" % (cnf["Dir::Root"], suite.suite_name, comp), topdown=True):
+ if not re_gensubrelease.match(dirpath):
+ continue
+
+ subfile = os.path.join(dirpath, "Release")
+ subrel = open(subfile, "w")
+
+ for key, dbfield in subattribs:
+ if getattr(suite, dbfield) is not None:
+ subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))
+
+ for key, dbfield in boolattrs:
+ if getattr(suite, dbfield, False):
+ subrel.write("%s: yes\n" % (key))
+
+ subrel.write("Component: %s%s\n" % (suite_suffix, comp))
+ subrel.close()
+
+ # Now that we have done the groundwork, we can go ahead and add the files
+ # with their checksums to the main Release file
+ oldcwd = os.getcwd()
+
+ os.chdir("%sdists/%s/%s" % (cnf["Dir::Root"], suite.suite_name, suite_suffix))
+
+ hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
+ 'SHA1' : apt_pkg.sha1sum,
+ 'SHA256' : apt_pkg.sha256sum }
+
+ fileinfo = {}
+
+ uncompnotseen = {}
+
+ for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
+ for entry in filenames:
+ # Skip things we don't want to include
+ if not re_includeinrelease.match(entry):
+ continue
+
+ if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
+ continue
+
+ filename = os.path.join(dirpath.lstrip('./'), entry)
+ fileinfo[filename] = {}
+ contents = open(filename, 'r').read()
+
+ # If we find a file for which we have a compressed version and
+ # haven't yet seen the uncompressed one, store the possibility
+ # for future use
+ if entry.endswith(".gz") and entry[:-3] not in uncompnotseen.keys():
+ uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
+ elif entry.endswith(".bz2") and entry[:-4] not in uncompnotseen.keys():
+ uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
+
+ fileinfo[filename]['len'] = len(contents)
+
+ for hf, func in hashfuncs.items():
+ fileinfo[filename][hf] = func(contents)
+
+ for filename, comp in uncompnotseen.items():
+ # If we've already seen the uncompressed file, we don't
+ # need to do anything again
+ if filename in fileinfo.keys():
+ continue
+
+ # Skip uncompressed Contents files as they're huge, take ages to
+ # checksum and we checksum the compressed ones anyways
+ if os.path.basename(filename).startswith("Contents"):
+ continue
+
+ fileinfo[filename] = {}
+
+ # File handler is comp[0], filename of compressed file is comp[1]
+ contents = comp[0](comp[1], 'r').read()
+
+ fileinfo[filename]['len'] = len(contents)
+
+ for hf, func in hashfuncs.items():
+ fileinfo[filename][hf] = func(contents)
+
+
+ for h in sorted(hashfuncs.keys()):
+ out.write('%s:\n' % h)
+ for filename in sorted(fileinfo.keys()):
+ out.write(" %s %8d %s\n" % (fileinfo[filename][h], fileinfo[filename]['len'], filename))
+
+ out.close()
+
+ sign_release_dir(os.path.dirname(outfile))
+
+ os.chdir(oldcwd)
+
+ return
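+
+# Minimal usage sketch (illustrative only, not part of the patch): the
+# writer only needs a Suite object attached to a session, because
+# generate_release_files() recovers the session via object_session().
+# 'unstable' is an example suite name.
+#
+# suite = DBConn().session().query(Suite).filter_by(suite_name='unstable').one()
+# ReleaseWriter(suite).generate_release_files()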
-################################################################################
def main ():
- global Cnf, AptCnf, out
- out = sys.stdout
+ global Logger, results
- Cnf = utils.get_conf()
cnf = Config()
+ for i in ["Help", "Suite", "Force"]:
+ if not cnf.has_key("Generate-Releases::Options::%s" % (i)):
+ cnf["Generate-Releases::Options::%s" % (i)] = ""
+
Arguments = [('h',"help","Generate-Releases::Options::Help"),
- ('a',"apt-conf","Generate-Releases::Options::Apt-Conf", "HasArg"),
- ('f',"force-touch","Generate-Releases::Options::Force-Touch"),
- ]
- for i in [ "help", "apt-conf", "force-touch" ]:
- if not Cnf.has_key("Generate-Releases::Options::%s" % (i)):
- Cnf["Generate-Releases::Options::%s" % (i)] = ""
+ ('s',"suite","Generate-Releases::Options::Suite"),
+ ('f',"force","Generate-Releases::Options::Force")]
- suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
- Options = Cnf.SubTree("Generate-Releases::Options")
+ suite_names = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
+ Options = cnf.SubTree("Generate-Releases::Options")
if Options["Help"]:
usage()
- if not Options["Apt-Conf"]:
- Options["Apt-Conf"] = utils.which_apt_conf_file()
-
- AptCnf = apt_pkg.newConfiguration()
- apt_pkg.ReadConfigFileISC(AptCnf, Options["Apt-Conf"])
+ Logger = daklog.Logger(cnf, 'generate-releases')
- if not suites:
- suites = Cnf.SubTree("Suite").List()
+ session = DBConn().session()
- for suitename in suites:
- print "Processing: " + suitename
- SuiteBlock = Cnf.SubTree("Suite::" + suitename)
- suiteobj = get_suite(suitename.lower())
- if not suiteobj:
- print "ALERT: Cannot find suite %s!" % (suitename.lower())
- continue
+ if Options["Suite"]:
+ suites = []
+ for s in suite_names:
+ suite = get_suite(s.lower(), session)
+ if suite:
+ suites.append(suite)
+ else:
+ print "cannot find suite %s" % s
+ Logger.log(['cannot find suite %s' % s])
+ else:
+ suites = session.query(Suite).filter(Suite.untouchable == False).all()
- # Use the canonical name
- suite = suiteobj.suite_name.lower()
+ # For each given suite, run one process
+ results = []
- if suiteobj.untouchable and not Options["Force-Touch"]:
- print "Skipping: " + suite + " (untouchable)"
- continue
+ # Setup a multiprocessing Pool. As many workers as we have CPU cores.
+ pool = Pool()
- origin = suiteobj.origin
- label = suiteobj.label or suiteobj.origin
- codename = suiteobj.codename or ""
- version = ""
- if suiteobj.version and suiteobj.version != '-':
- version = suiteobj.version
- description = suiteobj.description or ""
-
- architectures = get_suite_architectures(suite, skipall=True, skipsrc=True)
-
- if suiteobj.notautomatic:
- notautomatic = "yes"
- else:
- notautomatic = ""
-
- if suiteobj.butautomaticupgrades:
- butautomaticupgrades = "yes"
- else:
- butautomaticupgrades = ""
-
- if SuiteBlock.has_key("Components"):
- components = SuiteBlock.ValueList("Components")
- else:
- components = []
-
- suite_suffix = Cnf.Find("Dinstall::SuiteSuffix")
- if components and suite_suffix:
- longsuite = suite + "/" + suite_suffix
- else:
- longsuite = suite
-
- tree = SuiteBlock.get("Tree", "dists/%s" % (longsuite))
-
- if AptCnf.has_key("tree::%s" % (tree)):
- pass
- elif AptCnf.has_key("bindirectory::%s" % (tree)):
- pass
- else:
- aptcnf_filename = os.path.basename(utils.which_apt_conf_file())
- print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
+ for s in suites:
+ if s.untouchable and not Options["Force"]:
+ print "Skipping %s (untouchable)" % s.suite_name
continue
- print Cnf["Dir::Root"] + tree + "/Release"
- out = open(Cnf["Dir::Root"] + tree + "/Release", "w")
+ print "Processing %s" % s.suite_name
+ Logger.log(['Processing release file for Suite: %s' % (s.suite_name)])
+ pool.apply_async(generate_helper, (s.suite_id, ), callback=get_result)
- out.write("Origin: %s\n" % (suiteobj.origin))
- out.write("Label: %s\n" % (label))
- out.write("Suite: %s\n" % (suite))
- if version != "":
- out.write("Version: %s\n" % (version))
- if codename != "":
- out.write("Codename: %s\n" % (codename))
- out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
+ # No more work will be added to our pool, close it and then wait for all to finish
+ pool.close()
+ pool.join()
- if suiteobj.validtime:
- validtime=float(suiteobj.validtime)
- out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()+validtime))))
+ retcode = 0
- if notautomatic != "":
- out.write("NotAutomatic: %s\n" % (notautomatic))
- if butautomaticupgrades != "":
- out.write("ButAutomaticUpgrades: %s\n" % (butautomaticupgrades))
- out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))
- if components:
- out.write("Components: %s\n" % (" ".join(components)))
+ if len(results) > 0:
+ Logger.log(['Release file generation broken: %s' % (results)])
+ print "Release file generation broken:\n", '\n'.join(results)
+ retcode = 1
- if description:
- out.write("Description: %s\n" % (description))
+ Logger.close()
- files = []
+ sys.exit(retcode)
- if AptCnf.has_key("tree::%s" % (tree)):
- if AptCnf.has_key("tree::%s::Contents" % (tree)):
- pass
- else:
- for x in os.listdir("%s/%s" % (Cnf["Dir::Root"], tree)):
- if x.startswith('Contents-'):
- if x.endswith('.diff'):
- files.append("%s/Index" % (x))
- else:
- files.append(x)
-
- for sec in AptCnf["tree::%s::Sections" % (tree)].split():
- for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
- if arch == "source":
- filepath = "%s/%s/Sources" % (sec, arch)
- for cfile in compressnames("tree::%s" % (tree), "Sources", filepath):
- files.append(cfile)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
- else:
- installer = "%s/installer-%s" % (sec, arch)
- installerpath = Cnf["Dir::Root"]+tree+"/"+installer
- if os.path.exists(installerpath):
- for directory in os.listdir(installerpath):
- if os.path.exists("%s/%s/images/MD5SUMS" % (installerpath, directory)):
- files.append("%s/%s/images/MD5SUMS" % (installer, directory))
-
- filepath = "%s/binary-%s/Packages" % (sec, arch)
- for cfile in compressnames("tree::%s" % (tree), "Packages", filepath):
- files.append(cfile)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-
- if arch == "source":
- rel = "%s/%s/Release" % (sec, arch)
- else:
- rel = "%s/binary-%s/Release" % (sec, arch)
- relpath = Cnf["Dir::Root"]+tree+"/"+rel
- write_release_file(relpath, suite, sec, origin, label, arch, version, suite_suffix, notautomatic, butautomaticupgrades)
- files.append(rel)
- gen_i18n_index(files, tree, sec)
-
- if AptCnf.has_key("tree::%s/main" % (tree)):
- for dis in ["main", "contrib", "non-free"]:
- if not AptCnf.has_key("tree::%s/%s" % (tree, dis)): continue
- sec = AptCnf["tree::%s/%s::Sections" % (tree,dis)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (tree)
-
- for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
- if arch != "source": # always true
- rel = "%s/%s/binary-%s/Release" % (dis, sec, arch)
- relpath = Cnf["Dir::Root"]+tree+"/"+rel
- write_release_file(relpath, suite, dis, origin, label, arch, version, suite_suffix, notautomatic, butautomaticupgrades)
- files.append(rel)
- for cfile in compressnames("tree::%s/%s" % (tree,dis),
- "Packages",
- "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
- files.append(cfile)
- elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
- usetree = AptCnf["tree::%s::FakeDI" % (tree)]
- sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (usetree)
-
- for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
- if arch != "source": # always true
- for cfile in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
- files.append(cfile)
-
- elif AptCnf.has_key("bindirectory::%s" % (tree)):
- for cfile in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
- files.append(cfile.replace(tree+"/","",1))
- for cfile in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
- files.append(cfile.replace(tree+"/","",1))
- else:
- print "ALERT: no tree/bindirectory for %s" % (tree)
-
- for hashvalue in cnf.SubTree("Generate-Releases").List():
- if suite in [ i.lower() for i in cnf.ValueList("Generate-Releases::%s" % (hashvalue)) ]:
- out.write("%s:\n" % (hashvalue))
- print_hash_files(tree, files, hashvalue)
+def generate_helper(suite_id):
+ '''
+ This function is called in a new subprocess.
+ '''
+ session = DBConn().session()
+ suite = Suite.get(suite_id, session)
+ try:
+ rw = ReleaseWriter(suite)
+ rw.generate_release_files()
+ except Exception, e:
+ return str(e)
- out.close()
- if Cnf.has_key("Dinstall::SigningKeyring"):
- keyring = "--secret-keyring \"%s\"" % Cnf["Dinstall::SigningKeyring"]
- if Cnf.has_key("Dinstall::SigningPubKeyring"):
- keyring += " --keyring \"%s\"" % Cnf["Dinstall::SigningPubKeyring"]
-
- arguments = "--no-options --batch --no-tty --armour"
- signkeyids=cnf.signingkeyids.split()
-
- dest = Cnf["Dir::Root"] + tree + "/Release.gpg"
- if os.path.exists(dest):
- os.unlink(dest)
- inlinedest = Cnf["Dir::Root"] + tree + "/InRelease"
- if os.path.exists(inlinedest):
- os.unlink(inlinedest)
-
- for keyid in signkeyids:
- if keyid != "":
- defkeyid = "--default-key %s" % keyid
- else:
- defkeyid = ""
- os.system("gpg %s %s %s --detach-sign <%s >>%s" %
- (keyring, defkeyid, arguments,
- Cnf["Dir::Root"] + tree + "/Release", dest))
- os.system("gpg %s %s %s --clearsign <%s >>%s" %
- (keyring, defkeyid, arguments,
- Cnf["Dir::Root"] + tree + "/Release", inlinedest))
+ return
#######################################################################################
# Make a copy of distribution we can happily trample on
changes["suite"] = copy.copy(changes["distribution"])
+ # Try to get an included dsc
+ dsc = None
+ (status, _) = upload.load_dsc()
+ if status:
+ dsc = upload.pkg.dsc
+
# The main NEW processing loop
done = 0
+ new = {}
while not done:
# Find out what's new
- new, byhand = determine_new(upload.pkg.changes_file, changes, files, session=session)
+ new, byhand = determine_new(upload.pkg.changes_file, changes, files, dsc=dsc, session=session, new=new)
if not new:
break
u.logger = Logger
origchanges = os.path.abspath(u.pkg.changes_file)
+ # Try to get an included dsc
+ dsc = None
+ (status, _) = u.load_dsc()
+ if status:
+ dsc = u.pkg.dsc
+
cnf = Config()
bcc = "X-DAK: dak process-new"
if cnf.has_key("Dinstall::Bcc"):
if not recheck(u, session):
return
- new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, session=session)
+ new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, dsc=dsc, session=session)
if byhand:
do_byhand(u, session)
elif new:
u.check_source_against_db(deb_filename, session)
u.pkg.changes["suite"] = u.pkg.changes["distribution"]
- new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, session)
+ new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, dsc=u.pkg.dsc, session=session)
outfile = open(os.path.join(cnf["Show-New::HTMLPath"],htmlname),"w")
from tempfile import mkdtemp
import os.path
+import signal
-class ContentsWriter(object):
+class BinaryContentsWriter(object):
'''
- ContentsWriter writes the Contents-$arch.gz files.
+ BinaryContentsWriter writes the Contents-$arch.gz files.
'''
def __init__(self, suite, architecture, overridetype, component = None):
- '''
- The constructor clones its arguments into a new session object to make
- sure that the new ContentsWriter object can be executed in a different
- thread.
- '''
self.suite = suite
self.architecture = architecture
self.overridetype = overridetype
gzip.stdin.close()
output_file.close()
gzip.wait()
- try:
- os.remove(final_filename)
- except:
- pass
+ os.chmod(temp_filename, 0664)
os.rename(temp_filename, final_filename)
- os.chmod(final_filename, 0664)
+
+class SourceContentsWriter(object):
+ '''
+ SourceContentsWriter writes the Contents-source.gz files.
+ '''
+ def __init__(self, suite, component):
+ self.suite = suite
+ self.component = component
+ self.session = suite.session()
+
+ def query(self):
+ '''
+ Returns a query object that is doing most of the work.
+ '''
+ params = {
+ 'suite_id': self.suite.suite_id,
+ 'component_id': self.component.component_id,
+ }
+
+ sql = '''
+create temp table newest_sources (
+ id integer primary key,
+ source text);
+
+create index sources_binaries_by_source on newest_sources (source);
+
+insert into newest_sources (id, source)
+ select distinct on (source) s.id, s.source from source s
+ join files f on f.id = s.file
+ join location l on l.id = f.location
+ where s.id in (select source from src_associations where suite = :suite_id)
+ and l.component = :component_id
+ order by source, version desc;
+
+select sc.file, string_agg(s.source, ',' order by s.source) as pkglist
+ from newest_sources s, src_contents sc
+ where s.id = sc.source_id group by sc.file'''
+
+ return self.session.query("file", "pkglist").from_statement(sql). \
+ params(params)
+
+ def formatline(self, filename, package_list):
+ '''
+ Returns a formatted string for the filename argument.
+ '''
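+ # e.g. formatline('usr/bin/foo', 'bar,foo') -> 'usr/bin/foo\tbar,foo\n'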
+ return "%s\t%s\n" % (filename, package_list)
+
+ def fetch(self):
+ '''
+ Yields a new line of the Contents-source.gz file in filename order.
+ '''
+ for filename, package_list in self.query().yield_per(100):
+ yield self.formatline(filename, package_list)
+ # end transaction to return connection to pool
+ self.session.rollback()
+
+ def get_list(self):
+ '''
+ Returns a list of lines for the Contents-source.gz file.
+ '''
+ return list(self.fetch())
+
+ def output_filename(self):
+ '''
+ Returns the name of the output file.
+ '''
+ values = {
+ 'root': Config()['Dir::Root'],
+ 'suite': self.suite.suite_name,
+ 'component': self.component.component_name
+ }
+ return "%(root)s/dists/%(suite)s/%(component)s/Contents-source.gz" % values
+
+ def write_file(self):
+ '''
+ Write the output file.
+ '''
+ command = ['gzip', '--rsyncable']
+ final_filename = self.output_filename()
+ temp_filename = final_filename + '.new'
+ output_file = open(temp_filename, 'w')
+ gzip = Popen(command, stdin = PIPE, stdout = output_file)
+ for item in self.fetch():
+ gzip.stdin.write(item)
+ gzip.stdin.close()
+ output_file.close()
+ gzip.wait()
+ os.chmod(temp_filename, 0664)
+ os.rename(temp_filename, final_filename)
+
+
+def binary_helper(suite_id, arch_id, overridetype_id, component_id = None):
+ '''
+ This function is called in a new subprocess and multiprocessing wants a top
+ level function.
+ '''
+ session = DBConn().session()
+ suite = Suite.get(suite_id, session)
+ architecture = Architecture.get(arch_id, session)
+ overridetype = OverrideType.get(overridetype_id, session)
+ log_message = [suite.suite_name, architecture.arch_string, overridetype.overridetype]
+ if component_id is None:
+ component = None
+ else:
+ component = Component.get(component_id, session)
+ log_message.append(component.component_name)
+ contents_writer = BinaryContentsWriter(suite, architecture, overridetype, component)
+ contents_writer.write_file()
+ return log_message
+
+def source_helper(suite_id, component_id):
+ '''
+ This function is called in a new subprocess and multiprocessing wants a top
+ level function.
+ '''
+ session = DBConn().session()
+ suite = Suite.get(suite_id, session)
+ component = Component.get(component_id, session)
+ log_message = [suite.suite_name, 'source', component.component_name]
+ contents_writer = SourceContentsWriter(suite, component)
+ contents_writer.write_file()
+ return log_message
+
+class ContentsWriter(object):
+ '''
+ Loop over all suites, architectures, overridetypes, and components to write
+ all contents files.
+ '''
@classmethod
def log_result(class_, result):
'''
deb_id = get_override_type('deb', session).overridetype_id
udeb_id = get_override_type('udeb', session).overridetype_id
main_id = get_component('main', session).component_id
+ contrib_id = get_component('contrib', session).component_id
non_free_id = get_component('non-free', session).component_id
pool = Pool()
for suite in suite_query:
suite_id = suite.suite_id
+ # handle source packages
+ pool.apply_async(source_helper, (suite_id, main_id),
+ callback = class_.log_result)
+ pool.apply_async(source_helper, (suite_id, contrib_id),
+ callback = class_.log_result)
+ pool.apply_async(source_helper, (suite_id, non_free_id),
+ callback = class_.log_result)
for architecture in suite.get_architectures(skipsrc = True, skipall = True):
arch_id = architecture.arch_id
# handle 'deb' packages
- pool.apply_async(generate_helper, (suite_id, arch_id, deb_id), \
+ pool.apply_async(binary_helper, (suite_id, arch_id, deb_id), \
callback = class_.log_result)
# handle 'udeb' packages for 'main' and 'non-free'
- pool.apply_async(generate_helper, (suite_id, arch_id, udeb_id, main_id), \
+ pool.apply_async(binary_helper, (suite_id, arch_id, udeb_id, main_id), \
callback = class_.log_result)
- pool.apply_async(generate_helper, (suite_id, arch_id, udeb_id, non_free_id), \
+ pool.apply_async(binary_helper, (suite_id, arch_id, udeb_id, non_free_id), \
callback = class_.log_result)
pool.close()
pool.join()
session.close()
-def generate_helper(suite_id, arch_id, overridetype_id, component_id = None):
- '''
- This function is called in a new subprocess.
- '''
- session = DBConn().session()
- suite = Suite.get(suite_id, session)
- architecture = Architecture.get(arch_id, session)
- overridetype = OverrideType.get(overridetype_id, session)
- log_message = [suite.suite_name, architecture.arch_string, overridetype.overridetype]
- if component_id is None:
- component = None
- else:
- component = Component.get(component_id, session)
- log_message.append(component.component_name)
- contents_writer = ContentsWriter(suite, architecture, overridetype, component)
- contents_writer.write_file()
- return log_message
-
-class ContentsScanner(object):
+class BinaryContentsScanner(object):
'''
- ContentsScanner provides a threadsafe method scan() to scan the contents of
- a DBBinary object.
+ BinaryContentsScanner provides a threadsafe method scan() to scan the
+ contents of a DBBinary object.
'''
def __init__(self, binary_id):
'''
processed = query.count()
pool = Pool()
for binary in query.yield_per(100):
- pool.apply_async(scan_helper, (binary.binary_id, ))
+ pool.apply_async(binary_scan_helper, (binary.binary_id, ))
pool.close()
pool.join()
remaining = remaining()
session.close()
return { 'processed': processed, 'remaining': remaining }
-def scan_helper(binary_id):
+def binary_scan_helper(binary_id):
'''
This function runs in a subprocess.
'''
- scanner = ContentsScanner(binary_id)
+ scanner = BinaryContentsScanner(binary_id)
scanner.scan()
+def subprocess_setup():
+ # Python installs a SIGPIPE handler by default. This is usually not what
+ # non-Python subprocesses expect.
+ signal.signal(signal.SIGPIPE, signal.SIG_DFL)
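+ # (Used below as the preexec_fn of check_call() so that dpkg-source
+ # runs with the default SIGPIPE disposition.)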
+
class UnpackedSource(object):
'''
UnpackedSource extracts a source package into a temporary location and
The dscfilename is a name of a DSC file that will be extracted.
'''
self.root_directory = os.path.join(mkdtemp(), 'root')
- command = ('dpkg-source', '--no-copy', '--no-check', '-x', dscfilename,
- self.root_directory)
- # dpkg-source does not have a --quiet option
- devnull = open(os.devnull, 'w')
- check_call(command, stdout = devnull, stderr = devnull)
- devnull.close()
+ command = ('dpkg-source', '--no-copy', '--no-check', '-q', '-x',
+ dscfilename, self.root_directory)
+ check_call(command, preexec_fn = subprocess_setup)
def get_root_directory(self):
'''
Enforce cleanup.
'''
self.cleanup()
+
+
+class SourceContentsScanner(object):
+ '''
+ SourceContentsScanner provides a method scan() to scan the contents of a
+ DBSource object.
+ '''
+ def __init__(self, source_id):
+ '''
+ The argument source_id is the id of the DBSource object that
+ should be scanned.
+ '''
+ self.source_id = source_id
+
+ def scan(self):
+ '''
+ This method does the actual scan and fills in the associated SrcContents
+ property. It commits any changes to the database.
+ '''
+ session = DBConn().session()
+ source = session.query(DBSource).get(self.source_id)
+ fileset = set(source.scan_contents())
+ for filename in fileset:
+ source.contents.append(SrcContents(file = filename))
+ session.commit()
+ session.close()
+
+ @classmethod
+ def scan_all(class_, limit = None):
+ '''
+ The class method scan_all() scans all source using multiple processes.
+ The number of sources to be scanned can be limited with the limit
+ argument. Returns the number of processed and remaining packages as a
+ dict.
+ '''
+ session = DBConn().session()
+ query = session.query(DBSource).filter(DBSource.contents == None)
+ remaining = query.count # note: not called yet; evaluated after the pool finishes
+ if limit is not None:
+ query = query.limit(limit)
+ processed = query.count()
+ pool = Pool()
+ for source in query.yield_per(100):
+ pool.apply_async(source_scan_helper, (source.source_id, ))
+ pool.close()
+ pool.join()
+ remaining = remaining()
+ session.close()
+ return { 'processed': processed, 'remaining': remaining }
+
+def source_scan_helper(source_id):
+ '''
+ This function runs in a subprocess.
+ '''
+ try:
+ scanner = SourceContentsScanner(source_id)
+ scanner.scan()
+ except Exception, e:
+ print e
+
################################################################################
+class SrcContents(ORMObject):
+ def __init__(self, file = None, source = None):
+ self.file = file
+ self.source = source
+
+ def properties(self):
+ return ['file', 'source']
+
+__all__.append('SrcContents')
+
+################################################################################
+
from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
def properties(self):
return ['source', 'source_id', 'maintainer', 'changedby', \
'fingerprint', 'poolfile', 'version', 'suites_count', \
- 'install_date', 'binaries_count']
+ 'install_date', 'binaries_count', 'uploaders_count']
def not_null_constraints(self):
return ['source', 'version', 'install_date', 'maintainer', \
metadata = association_proxy('key', 'value')
+ def scan_contents(self):
+ '''
+ Returns a set of names of non-directory files. The path names are
+ normalized after converting them from either utf-8 or iso8859-1
+ encoding.
+ '''
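+ # e.g. an ISO 8859-1 name '\xe9toile' is re-encoded to the UTF-8 byte
+ # sequence '\xc3\xa9toile' before being added to the result set.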
+ fullpath = self.poolfile.fullpath
+ from daklib.contents import UnpackedSource
+ unpacked = UnpackedSource(fullpath)
+ fileset = set()
+ for name in unpacked.get_all_filenames():
+ # enforce proper utf-8 encoding
+ try:
+ name.decode('utf-8')
+ except UnicodeDecodeError:
+ name = name.decode('iso8859-1').encode('utf-8')
+ fileset.add(name)
+ return fileset
+
__all__.append('DBSource')
@session_wrapper
session.add(df)
# Add the src_uploaders to the DB
- uploader_ids = [source.maintainer_id]
+ source.uploaders = [source.maintainer]
if u.pkg.dsc.has_key("uploaders"):
for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
up = up.strip()
- uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
-
- added_ids = {}
- for up_id in uploader_ids:
- if added_ids.has_key(up_id):
- import utils
- utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
- continue
-
- added_ids[up_id]=1
-
- su = SrcUploader()
- su.maintainer_id = up_id
- su.source_id = source.source_id
- session.add(su)
+ source.uploaders.append(get_or_set_maintainer(up, session))
session.flush()
################################################################################
-class SrcUploader(object):
- def __init__(self, *args, **kwargs):
- pass
-
- def __repr__(self):
- return '<SrcUploader %s>' % self.uploader_id
-
-__all__.append('SrcUploader')
-
-################################################################################
-
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
('SuiteID', 'suite_id'),
('Version', 'version'),
'source_acl',
'source_metadata',
'src_associations',
+ 'src_contents',
'src_format',
'src_uploaders',
'suite',
primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
suites = relation(Suite, secondary=self.tbl_src_associations,
backref=backref('sources', lazy='dynamic')),
- srcuploaders = relation(SrcUploader),
+ uploaders = relation(Maintainer,
+ secondary=self.tbl_src_uploaders),
key = relation(SourceMetadata, cascade='all',
collection_class=attribute_mapped_collection('key'))),
extension = validator)
properties = dict(src_format_id = self.tbl_src_format.c.id,
format_name = self.tbl_src_format.c.format_name))
- mapper(SrcUploader, self.tbl_src_uploaders,
- properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
- source_id = self.tbl_src_uploaders.c.source,
- source = relation(DBSource,
- primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
- maintainer_id = self.tbl_src_uploaders.c.maintainer,
- maintainer = relation(Maintainer,
- primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
-
mapper(Suite, self.tbl_suite,
properties = dict(suite_id = self.tbl_suite.c.id,
policy_queue = relation(PolicyQueue),
backref=backref('contents', lazy='dynamic', cascade='all')),
file = self.tbl_bin_contents.c.file))
+ mapper(SrcContents, self.tbl_src_contents,
+ properties = dict(
+ source = relation(DBSource,
+ backref=backref('contents', lazy='dynamic', cascade='all')),
+ file = self.tbl_src_contents.c.file))
+
mapper(MetadataKey, self.tbl_metadata_keys,
properties = dict(
key_id = self.tbl_metadata_keys.c.key_id,
--- /dev/null
+#!/usr/bin/python
+
+"""
+Helper functions for list generating commands (Packages, Sources).
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009-2011 Torsten Werner <twerner@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+from dbconn import get_architecture
+
+def fetch(query, args, session):
+ for (id, path, filename) in session.execute(query, args).fetchall():
+ yield (id, path + filename)
+
+def getSources(suite, component, session, timestamp = None):
+ '''
+ Calculates the sources in suite and component optionally limited by
+ sources newer than timestamp. Returns a generator that yields a
+ tuple of source id and full pathname to the dsc file. See function
+ writeSourceList() in dak/generate_filelist.py for an example that
+ uses this function.
+ '''
+ extra_cond = ""
+ if timestamp:
+ extra_cond = "AND extract(epoch from sa.created) > %d" % timestamp
+ query = """
+ SELECT s.id, l.path, f.filename
+ FROM source s
+ JOIN src_associations sa
+ ON s.id = sa.source AND sa.suite = :suite %s
+ JOIN files f
+ ON s.file = f.id
+ JOIN location l
+ ON f.location = l.id AND l.component = :component
+ ORDER BY filename
+ """ % extra_cond
+ args = { 'suite': suite.suite_id,
+ 'component': component.component_id }
+ return fetch(query, args, session)
+
+def getBinaries(suite, component, architecture, type, session, timestamp = None):
+ '''
+ Calculates the binaries in suite and component of architecture and
+ type 'deb' or 'udeb' optionally limited to binaries newer than
+ timestamp. Returns a generator that yields a tuple of binary id and
+ full pathname to the (u)deb file. See function writeBinaryList() in
+ dak/generate_filelist.py for an example that uses this function.
+ '''
+ extra_cond = ""
+ if timestamp:
+ extra_cond = "AND extract(epoch from ba.created) > %d" % timestamp
+ query = """
+CREATE TEMP TABLE b_candidates (
+ id integer,
+ source integer,
+ file integer,
+ architecture integer);
+
+INSERT INTO b_candidates (id, source, file, architecture)
+ SELECT b.id, b.source, b.file, b.architecture
+ FROM binaries b
+ JOIN bin_associations ba ON b.id = ba.bin
+ WHERE b.type = :type AND ba.suite = :suite AND
+ b.architecture IN (:arch_all, :architecture) %s;
+
+CREATE TEMP TABLE gf_candidates (
+ id integer,
+ filename text,
+ path text,
+ architecture integer,
+ src integer,
+ source text);
+
+INSERT INTO gf_candidates (id, filename, path, architecture, src, source)
+ SELECT bc.id, f.filename, l.path, bc.architecture, bc.source as src, s.source
+ FROM b_candidates bc
+ JOIN source s ON bc.source = s.id
+ JOIN files f ON bc.file = f.id
+ JOIN location l ON f.location = l.id
+ WHERE l.component = :component;
+
+WITH arch_any AS
+
+ (SELECT id, path, filename FROM gf_candidates
+ WHERE architecture <> :arch_all),
+
+ arch_all_with_any AS
+ (SELECT id, path, filename FROM gf_candidates
+ WHERE architecture = :arch_all AND
+ src IN (SELECT src FROM gf_candidates WHERE architecture <> :arch_all)),
+
+ arch_all_without_any AS
+ (SELECT id, path, filename FROM gf_candidates
+ WHERE architecture = :arch_all AND
+ source NOT IN (SELECT DISTINCT source FROM gf_candidates WHERE architecture <> :arch_all)),
+
+ filelist AS
+ (SELECT * FROM arch_any
+ UNION
+ SELECT * FROM arch_all_with_any
+ UNION
+ SELECT * FROM arch_all_without_any)
+
+ SELECT * FROM filelist ORDER BY filename
+ """ % extra_cond
+ args = { 'suite': suite.suite_id,
+ 'component': component.component_id,
+ 'architecture': architecture.arch_id,
+ 'arch_all': get_architecture('all', session).arch_id,
+ 'type': type }
+ return fetch(query, args, session)
+
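+# Usage sketch, mirroring writeSourceList() in the dak/generate_filelist.py
+# hunk above: both helpers return a generator of (id, pathname) tuples, so
+# callers that only need the path unpack and discard the id:
+#
+# for _, filename in getSources(suite, component, session, timestamp):
+#     file.write(filename + '\n')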
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
-from utils import parse_changes, check_dsc_files
+from utils import parse_changes, check_dsc_files, build_package_set
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource
# Determine what parts in a .changes are NEW
-def determine_new(filename, changes, files, warn=1, session = None):
+def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
"""
Determine what parts in a C{changes} file are NEW.
@type warn: bool
@param warn: Warn if overrides are added for (old)stable
+ @type dsc: Upload.Pkg.dsc dict
+ @param dsc: (optional) the .dsc dictionary from an included dsc file
+
+ @type new: dict
+ @param new: (optional) NEW entries as returned by a previous call to this
+ function; override information may have changed in the meantime
+
@rtype: dict
@return: dictionary of NEW components.
"""
# TODO: This should all use the database instead of parsing the changes
# file again
- new = {}
+ if new is None:
+ new = {}
byhand = {}
dbchg = get_dbchange(filename, session)
if dbchg is None:
print "Warning: cannot find changes file in database; won't check byhand"
+ # Try to get the Package-Set field from an included .dsc file (if possible).
+ if dsc:
+ for package, entry in build_package_set(dsc, session).items():
+ if not new.has_key(package):
+ new[package] = entry
+
# Build up a list of potentially new things
for name, f in files.items():
# Keep a record of byhand elements
self.rejects.append("source only uploads are not supported.")
###########################################################################
- def check_dsc(self, action=True, session=None):
- """Returns bool indicating whether or not the source changes are valid"""
- # Ensure there is source to check
- if not self.pkg.changes["architecture"].has_key("source"):
- return True
- # Find the .dsc
+ def __dsc_filename(self):
+ """
+ Returns: (Status, Dsc_Filename)
+ where
+ Status: Boolean; True when there was no error, False otherwise
+ Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
+ """
dsc_filename = None
- for f, entry in self.pkg.files.items():
- if entry["type"] == "dsc":
+
+ # find the dsc
+ for name, entry in self.pkg.files.items():
+ if entry.has_key("type") and entry["type"] == "dsc":
if dsc_filename:
- self.rejects.append("can not process a .changes file with multiple .dsc's.")
- return False
+ return False, "cannot process a .changes file with multiple .dsc's."
else:
- dsc_filename = f
+ dsc_filename = name
- # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
if not dsc_filename:
- self.rejects.append("source uploads must contain a dsc file")
- return False
+ return False, "source uploads must contain a dsc file"
+
+ return True, dsc_filename
+
+ def load_dsc(self, action=True, signing_rules=1):
+ """
+ Find and load the dsc from self.pkg.files into self.pkg.dsc
+
+ Returns: (Status, Reason)
+ where
+ Status: Boolean; True when there was no error, False otherwise
+ Reason: String; When Status is False this describes the error
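+
+ Typical call site (see the dak/process_new.py hunk in this patch):
+ (status, _) = upload.load_dsc()
+ if status:
+ dsc = upload.pkg.dsc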
+ """
+
+ # find the dsc
+ (status, dsc_filename) = self.__dsc_filename()
+ if not status:
+ # If status is false, dsc_filename has the reason
+ return False, dsc_filename
- # Parse the .dsc file
try:
- self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
+ self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
except CantOpenError:
- # if not -n copy_to_holding() will have done this for us...
if not action:
- self.rejects.append("%s: can't read file." % (dsc_filename))
+ return False, "%s: can't read file." % (dsc_filename)
except ParseChangesError, line:
- self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
+ return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
except InvalidDscError, line:
- self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
+ return False, "%s: syntax error on line %s." % (dsc_filename, line)
except ChangesUnicodeError:
- self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
+ return False, "%s: dsc file not proper utf-8." % (dsc_filename)
+
+ return True, None
+
+ ###########################################################################
+
+ def check_dsc(self, action=True, session=None):
+ """Returns bool indicating whether or not the source changes are valid"""
+ # Ensure there is source to check
+ if not self.pkg.changes["architecture"].has_key("source"):
+ return True
+
+ (status, reason) = self.load_dsc(action=action)
+ if not status:
+ self.rejects.append(reason)
+ return False
+ (status, dsc_filename) = self.__dsc_filename()
+ if not status:
+ # If status is false, dsc_filename has the reason
+ self.rejects.append(dsc_filename)
+ return False
# Build up the file list of files mentioned by the .dsc
try:
# If we do not have a tagfile, don't do anything
tagfile = cnf.get("Dinstall::LintianTags")
- if tagfile is None:
+ if not tagfile:
return
# Parse the yaml file
# Check any one-off upload blocks
self.check_upload_blocks(fpr, session)
- # Start with DM as a special case
+ # If the source_acl is None, source is never allowed
+ if fpr.source_acl is None:
+ if self.pkg.changes["architecture"].has_key("source"):
+ rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+ rej += '\nPlease contact ftpmaster if you think this is incorrect'
+ self.rejects.append(rej)
+ return
+ # Do DM as a special case
# DM is a special case unfortunately, so we check it first
# (keys with no source access get more access than DMs in one
# way; DMs can only upload for their packages whether source
# or binary, whereas keys with no access might be able to
# upload some binaries)
- if fpr.source_acl.access_level == 'dm':
+ elif fpr.source_acl.access_level == 'dm':
self.check_dm_upload(fpr, session)
else:
- # Check source-based permissions for other types
- if self.pkg.changes["architecture"].has_key("source") and \
- fpr.source_acl.access_level is None:
- rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
- rej += '\nPlease contact ftpmaster if you think this is incorrect'
- self.rejects.append(rej)
- return
# If not a DM, we allow full upload rights
uid_email = "%s@debian.org" % (fpr.uid.uid)
self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
if len(tmparches.keys()) > 0:
if fpr.binary_reject:
- rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
- rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+ rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+ if len(tmparches.keys()) == 1:
+ rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
+ else:
+ rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
self.rejects.append(rej)
else:
# TODO: This is where we'll implement reject vs throw away binaries later
## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
## non-developer maintainers cannot NMU or hijack packages)
- # srcuploaders includes the maintainer
+ # the uploaders list includes the maintainer
accept = False
- for sup in r.srcuploaders:
- (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+ for uploader in r.uploaders:
+ (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
# Eww - I hope we never have two people with the same name in Debian
if email == fpr.uid.uid or name == fpr.uid.name:
accept = True
# in process-upload
re_match_expired = re.compile(r"^The key used to sign .+ has expired on .+$")
+
+# in generate-releases
+re_gensubrelease = re.compile(r".*/(binary-[0-9a-z-]+|source)$")
+re_includeinrelease = re.compile(r"(Contents-[0-9a-z-]+\.gz|Index|Packages(\.gz|\.bz2)?|Sources(\.gz|\.bz2)?|MD5SUMS|Release)$")
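+# Illustrative matches: re_gensubrelease matches paths such as
+# "dists/unstable/main/binary-amd64" or "dists/unstable/main/source";
+# re_includeinrelease matches file names such as "Packages.gz",
+# "Sources.bz2", "Contents-amd64.gz" and "Release".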
import email as modemail
import subprocess
-from dbconn import DBConn, get_architecture, get_component, get_suite
+from dbconn import DBConn, get_architecture, get_component, get_suite, get_override_type, Keyring
from dak_exceptions import *
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
################################################################################
+# see http://bugs.debian.org/619131
+def build_package_set(dsc, session = None):
+ if not dsc.has_key("package-set"):
+ return {}
+
+ packages = {}
+
+ for line in dsc["package-set"].split("\n"):
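+ # deb822 multi-line fields may not contain blank lines, so the first
+ # empty element marks the end of the field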
+ if not line:
+ break
+
+ (name, section, priority) = line.split()
+ (section, component) = extract_component_from_section(section)
+
+ package_type = "deb"
+ if name.find(":") != -1:
+ (package_type, name) = name.split(":", 1)
+ if package_type == "src":
+ package_type = "dsc"
+
+ # Validate type if we have a session
+ if session and get_override_type(package_type, session) is None:
+ # Maybe just warn and ignore? exit(1) might be a bit hard...
+ utils.fubar("invalid type (%s) in Package-Set." % (package_type))
+
+ if section == "":
+ section = "-"
+ if priority == "":
+ priority = "-"
+
+ if package_type == "dsc":
+ priority = "source"
+
+ if not packages.has_key(name) or packages[name]["type"] == "dsc":
+ packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
+
+ return packages
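+
+# A worked example (illustrative; it assumes extract_component_from_section()
+# maps a bare section such as "admin" to the default "main" component):
+# a dsc whose Package-Set field contains the lines
+#   src:hello admin optional
+#   hello admin optional
+# yields
+#   {'hello': {'priority': 'optional', 'section': 'admin', 'type': 'deb',
+#              'component': 'main', 'files': []}}
+# because the later binary entry replaces the earlier 'dsc' entry of the
+# same name.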
+
+################################################################################
+
def send_mail (message, filename=""):
"""sendmail wrapper, takes _either_ a message string or a file as arguments"""
return (None, rejects)
if not keyrings:
- keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
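+ # default to the keyrings marked active in the database instead of the
+ # static Dinstall::GPGKeyring configuration list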
+ keyrings = [ x.keyring_name for x in DBConn().session().query(Keyring).filter(Keyring.active == True).all() ]
# Autofetch the signing key if that's enabled
if autofetch == None:
from db_test import DBDakTestCase, fixture
from daklib.dbconn import *
-from daklib.contents import ContentsWriter, ContentsScanner, UnpackedSource
+from daklib.contents import BinaryContentsWriter, BinaryContentsScanner, \
+ UnpackedSource, SourceContentsScanner, SourceContentsWriter
from os.path import normpath
from sqlalchemy.exc import FlushError, IntegrityError
self.assertEqual(self.override['hello_sid_main_udeb'], \
self.otype['udeb'].overrides.one())
- def test_contentswriter(self):
+ def test_binarycontentswriter(self):
'''
- Test the ContentsWriter class.
+ Test the BinaryContentsWriter class.
'''
self.setup_suites()
self.setup_architectures()
self.setup_overrides()
self.binary['hello_2.2-1_i386'].contents.append(BinContents(file = '/usr/bin/hello'))
self.session.commit()
- cw = ContentsWriter(self.suite['squeeze'], self.arch['i386'], self.otype['deb'])
+ cw = BinaryContentsWriter(self.suite['squeeze'], self.arch['i386'], self.otype['deb'])
self.assertEqual(['/usr/bin/hello python/hello\n'], \
cw.get_list())
# test formatline and sort order
# test output_filename
self.assertEqual('tests/fixtures/ftp/dists/squeeze/Contents-i386.gz', \
normpath(cw.output_filename()))
- cw = ContentsWriter(self.suite['squeeze'], self.arch['i386'], \
+ cw = BinaryContentsWriter(self.suite['squeeze'], self.arch['i386'], \
self.otype['udeb'], self.comp['main'])
self.assertEqual('tests/fixtures/ftp/dists/squeeze/main/Contents-i386.gz', \
normpath(cw.output_filename()))
self.session.delete(self.binary['hello_2.2-1_i386'])
self.session.commit()
- def test_scan_contents(self):
+ def test_binary_scan_contents(self):
+ '''
+ Tests the BinaryContentsScanner.
+ '''
self.setup_binaries()
filelist = [f for f in self.binary['hello_2.2-1_i386'].scan_contents()]
self.assertEqual(['usr/bin/hello', 'usr/share/doc/hello/copyright'],
filelist)
self.session.commit()
- ContentsScanner(self.binary['hello_2.2-1_i386'].binary_id).scan()
+ BinaryContentsScanner(self.binary['hello_2.2-1_i386'].binary_id).scan()
bin_contents_list = self.binary['hello_2.2-1_i386'].contents.order_by('file').all()
self.assertEqual(2, len(bin_contents_list))
self.assertEqual('usr/bin/hello', bin_contents_list[0].file)
def test_unpack(self):
'''
- Tests the UnpackedSource class.
+ Tests the UnpackedSource class and the SourceContentsScanner.
'''
- self.setup_poolfiles()
- dscfilename = fixture('ftp/pool/' + self.file['hello_2.2-1.dsc'].filename)
+ self.setup_sources()
+ source = self.source['hello_2.2-1']
+ dscfilename = fixture('ftp/pool/' + source.poolfile.filename)
unpacked = UnpackedSource(dscfilename)
self.assertTrue(len(unpacked.get_root_directory()) > 0)
self.assertEqual('hello (2.2-1) unstable; urgency=low\n',
all_filenames = set(unpacked.get_all_filenames())
self.assertEqual(8, len(all_filenames))
self.assertTrue('debian/rules' in all_filenames)
+ # method scan_contents()
+ self.assertEqual(all_filenames, source.scan_contents())
+ # an invalid filename raises CalledProcessError
self.assertRaises(CalledProcessError, lambda: UnpackedSource('invalidname'))
+ # SourceContentsScanner
+ self.session.commit()
+ self.assertEqual(0, source.contents.count())
+ SourceContentsScanner(source.source_id).scan()
+ self.assertTrue(source.contents.count() > 0)
+
+ def test_sourcecontentswriter(self):
+ '''
+ Test the SourceContentsWriter class.
+ '''
+ self.setup_sources()
+ self.session.flush()
+ # remove the newer package from sid because it interferes with the test
+ self.source['hello_2.2-2'].suites = []
+ self.session.commit()
+ source = self.source['hello_2.2-1']
+ SourceContentsScanner(source.source_id).scan()
+ cw = SourceContentsWriter(source.suites[0], source.poolfile.location.component)
+ result = cw.get_list()
+ self.assertEqual(8, len(result))
+ self.assertTrue('debian/changelog\thello\n' in result)
def classes_to_clean(self):
return [Override, Suite, BinContents, DBBinary, DBSource, Architecture, Section, \