--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Allow us to mark keyrings as no longer in use
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2011 Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from socket import gethostname;
+
+################################################################################
+def do_update(self):
+    """
+    Allow us to mark keyrings as no longer in use
+
+    Database schema update 50: adds an 'active' boolean column to the
+    keyrings table (defaulting to TRUE) and bumps db_revision to 50.
+    Rolls back and raises DBUpdateError on failure.
+    """
+    print __doc__
+    try:
+        c = self.db.cursor()
+
+        # Existing keyrings default to active; retired keyrings can now be
+        # flagged inactive instead of being deleted from the table.
+        c.execute("ALTER TABLE keyrings ADD COLUMN active BOOL DEFAULT TRUE")
+        # Record that this schema revision has been applied.
+        c.execute("UPDATE config SET value = '50' WHERE name = 'db_revision'")
+        self.db.commit()
+
+    except psycopg2.ProgrammingError, msg:
+        # Undo any partially applied DDL/DML before reporting the failure.
+        self.db.rollback()
+        raise DBUpdateError, 'Unable to apply sick update 50, rollback issued. Error message : %s' % (str(msg))
from daklib import utils
import apt_pkg, os, stat, sys
-def fetch(query, args, session):
- return [path + filename for (path, filename) in \
- session.execute(query, args).fetchall()]
-
-def getSources(suite, component, session, timestamp):
- extra_cond = ""
- if timestamp:
- extra_cond = "AND extract(epoch from sa.created) > %d" % timestamp
- query = """
- SELECT l.path, f.filename
- FROM source s
- JOIN src_associations sa
- ON s.id = sa.source AND sa.suite = :suite %s
- JOIN files f
- ON s.file = f.id
- JOIN location l
- ON f.location = l.id AND l.component = :component
- ORDER BY filename
- """ % extra_cond
- args = { 'suite': suite.suite_id,
- 'component': component.component_id }
- return fetch(query, args, session)
-
-def getBinaries(suite, component, architecture, type, session, timestamp):
- extra_cond = ""
- if timestamp:
- extra_cond = "AND extract(epoch from ba.created) > %d" % timestamp
- query = """
-CREATE TEMP TABLE b_candidates (
- source integer,
- file integer,
- architecture integer);
-
-INSERT INTO b_candidates (source, file, architecture)
- SELECT b.source, b.file, b.architecture
- FROM binaries b
- JOIN bin_associations ba ON b.id = ba.bin
- WHERE b.type = :type AND ba.suite = :suite AND
- b.architecture IN (2, :architecture) %s;
-
-CREATE TEMP TABLE gf_candidates (
- filename text,
- path text,
- architecture integer,
- src integer,
- source text);
-
-INSERT INTO gf_candidates (filename, path, architecture, src, source)
- SELECT f.filename, l.path, bc.architecture, bc.source as src, s.source
- FROM b_candidates bc
- JOIN source s ON bc.source = s.id
- JOIN files f ON bc.file = f.id
- JOIN location l ON f.location = l.id
- WHERE l.component = :component;
-
-WITH arch_any AS
-
- (SELECT path, filename FROM gf_candidates
- WHERE architecture > 2),
-
- arch_all_with_any AS
- (SELECT path, filename FROM gf_candidates
- WHERE architecture = 2 AND
- src IN (SELECT src FROM gf_candidates WHERE architecture > 2)),
-
- arch_all_without_any AS
- (SELECT path, filename FROM gf_candidates
- WHERE architecture = 2 AND
- source NOT IN (SELECT DISTINCT source FROM gf_candidates WHERE architecture > 2)),
-
- filelist AS
- (SELECT * FROM arch_any
- UNION
- SELECT * FROM arch_all_with_any
- UNION
- SELECT * FROM arch_all_without_any)
-
- SELECT * FROM filelist ORDER BY filename
- """ % extra_cond
- args = { 'suite': suite.suite_id,
- 'component': component.component_id,
- 'architecture': architecture.arch_id,
- 'type': type }
- return fetch(query, args, session)
+from daklib.lists import getSources, getBinaries
def listPath(suite, component, architecture = None, type = None,
incremental_mode = False):
(file, timestamp) = listPath(suite, component,
incremental_mode = incremental_mode)
session = DBConn().session()
- for filename in getSources(suite, component, session, timestamp):
+ for _, filename in getSources(suite, component, session, timestamp):
file.write(filename + '\n')
session.close()
file.close()
(file, timestamp) = listPath(suite, component, architecture, type,
incremental_mode)
session = DBConn().session()
- for filename in getBinaries(suite, component, architecture, type,
+ for _, filename in getBinaries(suite, component, architecture, type,
session, timestamp):
file.write(filename + '\n')
session.close()
# Make a copy of distribution we can happily trample on
changes["suite"] = copy.copy(changes["distribution"])
+ # Try to get an included dsc
+ dsc = None
+ (status, _) = upload.load_dsc()
+ if status:
+ dsc = upload.pkg.dsc
+
# The main NEW processing loop
done = 0
+ new = {}
while not done:
# Find out what's new
- new, byhand = determine_new(upload.pkg.changes_file, changes, files, session=session)
+ new, byhand = determine_new(upload.pkg.changes_file, changes, files, dsc=dsc, session=session, new=new)
if not new:
break
u.logger = Logger
origchanges = os.path.abspath(u.pkg.changes_file)
+ # Try to get an included dsc
+ dsc = None
+ (status, _) = u.load_dsc()
+ if status:
+ dsc = u.pkg.dsc
+
cnf = Config()
bcc = "X-DAK: dak process-new"
if cnf.has_key("Dinstall::Bcc"):
if not recheck(u, session):
return
- new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, session=session)
+ new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, dsc=dsc, session=session)
if byhand:
do_byhand(u, session)
elif new:
u.check_source_against_db(deb_filename, session)
u.pkg.changes["suite"] = u.pkg.changes["distribution"]
- new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, session)
+ new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, dsc=u.pkg.dsc, session=session)
outfile = open(os.path.join(cnf["Show-New::HTMLPath"],htmlname),"w")
################################################################################
Cnf = None
-required_database_schema = 49
+required_database_schema = 50
################################################################################
--- /dev/null
+#!/usr/bin/python
+
+"""
+Helper functions for list generating commands (Packages, Sources).
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009-2011 Torsten Werner <twerner@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+def fetch(query, args, session):
+    """
+    Execute query with bound args and yield (id, full pathname) tuples.
+
+    Assumes the query selects exactly three columns in the order
+    (id, path, filename); path and filename are concatenated as-is,
+    so the path is expected to carry its trailing separator.
+    """
+    for (id, path, filename) in session.execute(query, args).fetchall():
+        yield (id, path + filename)
+
+def getSources(suite, component, session, timestamp = None):
+    '''
+    Calculates the sources in suite and component optionally limited by
+    sources newer than timestamp. Returns a generator that yields a
+    tuple of source id and full pathname to the dsc file. See function
+    writeSourceList() in dak/generate_filelist.py for an example that
+    uses this function.
+    '''
+    extra_cond = ""
+    if timestamp:
+        # timestamp is an epoch value interpolated via %d (an integer
+        # format), so only the :suite/:component values use bind params.
+        extra_cond = "AND extract(epoch from sa.created) > %d" % timestamp
+    query = """
+        SELECT s.id, l.path, f.filename
+            FROM source s
+            JOIN src_associations sa
+                ON s.id = sa.source AND sa.suite = :suite %s
+            JOIN files f
+                ON s.file = f.id
+            JOIN location l
+                ON f.location = l.id AND l.component = :component
+            ORDER BY filename
+    """ % extra_cond
+    args = { 'suite': suite.suite_id,
+             'component': component.component_id }
+    # fetch() turns each (id, path, filename) row into (id, path + filename).
+    return fetch(query, args, session)
+
+def getBinaries(suite, component, architecture, type, session, timestamp = None):
+    '''
+    Calculates the binaries in suite and component of architecture and
+    type 'deb' or 'udeb' optionally limited to binaries newer than
+    timestamp. Returns a generator that yields a tuple of binary id and
+    full pathname to the u(deb) file. See function writeBinaryList() in
+    dak/generate_filelist.py for an example that uses this function.
+    '''
+    extra_cond = ""
+    if timestamp:
+        # timestamp is an epoch value interpolated via %d (an integer
+        # format); the other values use bind parameters.
+        extra_cond = "AND extract(epoch from ba.created) > %d" % timestamp
+    # Multi-statement query: two temp tables narrow the candidate set,
+    # then a CTE merges arch-specific and arch-all packages.
+    # NOTE(review): the literal architecture id 2 appears to denote
+    # 'all' (see the arch_all_* CTE names below) — confirm against the
+    # architecture table before changing it.
+    query = """
+CREATE TEMP TABLE b_candidates (
+    id integer,
+    source integer,
+    file integer,
+    architecture integer);
+
+INSERT INTO b_candidates (id, source, file, architecture)
+    SELECT b.id, b.source, b.file, b.architecture
+        FROM binaries b
+        JOIN bin_associations ba ON b.id = ba.bin
+        WHERE b.type = :type AND ba.suite = :suite AND
+            b.architecture IN (2, :architecture) %s;
+
+CREATE TEMP TABLE gf_candidates (
+    id integer,
+    filename text,
+    path text,
+    architecture integer,
+    src integer,
+    source text);
+
+INSERT INTO gf_candidates (id, filename, path, architecture, src, source)
+    SELECT bc.id, f.filename, l.path, bc.architecture, bc.source as src, s.source
+        FROM b_candidates bc
+        JOIN source s ON bc.source = s.id
+        JOIN files f ON bc.file = f.id
+        JOIN location l ON f.location = l.id
+        WHERE l.component = :component;
+
+WITH arch_any AS
+
+    (SELECT id, path, filename FROM gf_candidates
+        WHERE architecture > 2),
+
+    arch_all_with_any AS
+    (SELECT id, path, filename FROM gf_candidates
+        WHERE architecture = 2 AND
+              src IN (SELECT src FROM gf_candidates WHERE architecture > 2)),
+
+    arch_all_without_any AS
+    (SELECT id, path, filename FROM gf_candidates
+        WHERE architecture = 2 AND
+              source NOT IN (SELECT DISTINCT source FROM gf_candidates WHERE architecture > 2)),
+
+    filelist AS
+    (SELECT * FROM arch_any
+    UNION
+    SELECT * FROM arch_all_with_any
+    UNION
+    SELECT * FROM arch_all_without_any)
+
+    SELECT * FROM filelist ORDER BY filename
+    """ % extra_cond
+    args = { 'suite': suite.suite_id,
+             'component': component.component_id,
+             'architecture': architecture.arch_id,
+             'type': type }
+    # fetch() turns each (id, path, filename) row into (id, path + filename).
+    return fetch(query, args, session)
+
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
-from utils import parse_changes, check_dsc_files
+from utils import parse_changes, check_dsc_files, build_package_set
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource
# Determine what parts in a .changes are NEW
-def determine_new(filename, changes, files, warn=1, session = None):
+def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = {}):
"""
Determine what parts in a C{changes} file are NEW.
@type warn: bool
@param warn: Warn if overrides are added for (old)stable
+ @type dsc: Upload.Pkg.dsc dict
+ @param dsc: (optional); Dsc dictionary
+
+ @type new: dict
+ @param new: new packages as returned by a previous call to this function, but override information may have changed
+
@rtype: dict
@return: dictionary of NEW components.
"""
# TODO: This should all use the database instead of parsing the changes
# file again
- new = {}
byhand = {}
dbchg = get_dbchange(filename, session)
if dbchg is None:
print "Warning: cannot find changes file in database; won't check byhand"
+ # Try to get the Package-Set field from an included .dsc file (if possible).
+ if dsc:
+ for package, entry in build_package_set(dsc, session).items():
+ if not new.has_key(package):
+ new[package] = entry
+
# Build up a list of potentially new things
for name, f in files.items():
# Keep a record of byhand elements
self.rejects.append("source only uploads are not supported.")
###########################################################################
- def check_dsc(self, action=True, session=None):
- """Returns bool indicating whether or not the source changes are valid"""
- # Ensure there is source to check
- if not self.pkg.changes["architecture"].has_key("source"):
- return True
- # Find the .dsc
+ def __dsc_filename(self):
+ """
+ Returns: (Status, Dsc_Filename)
+ where
+ Status: Boolean; True when there was no error, False otherwise
+ Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
+ """
dsc_filename = None
- for f, entry in self.pkg.files.items():
- if entry["type"] == "dsc":
+
+ # find the dsc
+ for name, entry in self.pkg.files.items():
+ if entry.has_key("type") and entry["type"] == "dsc":
if dsc_filename:
- self.rejects.append("can not process a .changes file with multiple .dsc's.")
- return False
+ return False, "cannot process a .changes file with multiple .dsc's."
else:
- dsc_filename = f
+ dsc_filename = name
- # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
if not dsc_filename:
- self.rejects.append("source uploads must contain a dsc file")
- return False
+ return False, "source uploads must contain a dsc file"
+
+ return True, dsc_filename
+
+ def load_dsc(self, action=True, signing_rules=1):
+ """
+ Find and load the dsc from self.pkg.files into self.dsc
+
+ Returns: (Status, Reason)
+ where
+ Status: Boolean; True when there was no error, False otherwise
+ Reason: String; When Status is False this describes the error
+ """
+
+ # find the dsc
+ (status, dsc_filename) = self.__dsc_filename()
+ if not status:
+ # If status is false, dsc_filename has the reason
+ return False, dsc_filename
- # Parse the .dsc file
try:
- self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
+ self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
except CantOpenError:
- # if not -n copy_to_holding() will have done this for us...
if not action:
- self.rejects.append("%s: can't read file." % (dsc_filename))
+ return False, "%s: can't read file." % (dsc_filename)
except ParseChangesError, line:
- self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
+ return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
except InvalidDscError, line:
- self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
+ return False, "%s: syntax error on line %s." % (dsc_filename, line)
except ChangesUnicodeError:
- self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
+ return False, "%s: dsc file not proper utf-8." % (dsc_filename)
+
+ return True, None
+
+ ###########################################################################
+
+ def check_dsc(self, action=True, session=None):
+ """Returns bool indicating whether or not the source changes are valid"""
+ # Ensure there is source to check
+ if not self.pkg.changes["architecture"].has_key("source"):
+ return True
+
+ (status, reason) = self.load_dsc(action=action)
+ if not status:
+ self.rejects.append(reason)
+ return False
+ (status, dsc_filename) = self.__dsc_filename()
+ if not status:
+ # If status is false, dsc_filename has the reason
+ self.rejects.append(dsc_filename)
+ return False
# Build up the file list of files mentioned by the .dsc
try:
# If we do not have a tagfile, don't do anything
tagfile = cnf.get("Dinstall::LintianTags")
- if tagfile is None:
+ if not tagfile:
return
# Parse the yaml file
# Check any one-off upload blocks
self.check_upload_blocks(fpr, session)
- # Start with DM as a special case
+ # If the source_acl is None, source is never allowed
+ if fpr.source_acl is None:
+ if self.pkg.changes["architecture"].has_key("source"):
+ rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+ rej += '\nPlease contact ftpmaster if you think this is incorrect'
+ self.rejects.append(rej)
+ return
+ # Do DM as a special case
# DM is a special case unfortunately, so we check it first
# (keys with no source access get more access than DMs in one
# way; DMs can only upload for their packages whether source
# or binary, whereas keys with no access might be able to
# upload some binaries)
- if fpr.source_acl.access_level == 'dm':
+ elif fpr.source_acl.access_level == 'dm':
self.check_dm_upload(fpr, session)
else:
- # Check source-based permissions for other types
- if self.pkg.changes["architecture"].has_key("source") and \
- fpr.source_acl.access_level is None:
- rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
- rej += '\nPlease contact ftpmaster if you think this is incorrect'
- self.rejects.append(rej)
- return
# If not a DM, we allow full upload rights
uid_email = "%s@debian.org" % (fpr.uid.uid)
self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
if len(tmparches.keys()) > 0:
if fpr.binary_reject:
- rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
- rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+ rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+ if len(tmparches.keys()) == 1:
+ rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
+ else:
+ rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
self.rejects.append(rej)
else:
# TODO: This is where we'll implement reject vs throw away binaries later
import email as modemail
import subprocess
-from dbconn import DBConn, get_architecture, get_component, get_suite
+from dbconn import DBConn, get_architecture, get_component, get_suite, get_override_type, Keyring
from dak_exceptions import *
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
################################################################################
+# see http://bugs.debian.org/619131
+# see http://bugs.debian.org/619131
+def build_package_set(dsc, session = None):
+    """
+    Parse the Package-Set field of a .dsc into a dict keyed by package
+    name, each value holding priority/section/type/component/files.
+    Returns {} when the dsc has no "package-set" key.
+    """
+    if not dsc.has_key("package-set"):
+        return {}
+
+    packages = {}
+
+    for line in dsc["package-set"].split("\n"):
+        # Stops at the first empty line — presumably the field body ends
+        # there; later non-empty lines would be ignored. TODO confirm.
+        if not line:
+            break
+
+        (name, section, priority) = line.split()
+        (section, component) = extract_component_from_section(section)
+
+        package_type = "deb"
+        # Names may be prefixed "type:name"; "src" is normalised to "dsc".
+        if name.find(":") != -1:
+            (package_type, name) = name.split(":", 1)
+            if package_type == "src":
+                package_type = "dsc"
+
+        # Validate type if we have a session
+        if session and get_override_type(package_type, session) is None:
+            # Maybe just warn and ignore? exit(1) might be a bit hard...
+            utils.fubar("invalid type (%s) in Package-Set." % (package_type))
+
+        if section == "":
+            section = "-"
+        if priority == "":
+            priority = "-"
+
+        if package_type == "dsc":
+            priority = "source"
+
+        # A later entry replaces an earlier one only when the earlier
+        # entry was the "dsc" placeholder; otherwise first entry wins.
+        if not packages.has_key(name) or packages[name]["type"] == "dsc":
+            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
+
+    return packages
+
+################################################################################
+
def send_mail (message, filename=""):
"""sendmail wrapper, takes _either_ a message string or a file as arguments"""
return (None, rejects)
if not keyrings:
- keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
+ keyrings = [ x.keyring_name for x in DBConn().session().query(Keyring).filter(Keyring.active == True).all() ]
# Autofetch the signing key if that's enabled
if autofetch == None: