--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding table to get rid of queue/done checks
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+import os
+import datetime
+from daklib.dak_exceptions import DBUpdateError, InvalidDscError, ChangesUnicodeError
+from daklib.config import Config
+from daklib.utils import parse_changes, warn, gpgv_get_status_output, process_gpgv_output
+
+################################################################################
+
+def check_signature (sig_filename, data_filename=""):
+ keyrings = [
+ "/home/joerg/keyring/keyrings/debian-keyring.gpg",
+ "/home/joerg/keyring/keyrings/debian-keyring.pgp",
+ "/home/joerg/keyring/keyrings/debian-maintainers.gpg",
+ "/home/joerg/keyring/keyrings/debian-role-keys.gpg",
+ "/home/joerg/keyring/keyrings/emeritus-keyring.pgp",
+ "/home/joerg/keyring/keyrings/emeritus-keyring.gpg",
+ "/home/joerg/keyring/keyrings/removed-keys.gpg",
+ "/home/joerg/keyring/keyrings/removed-keys.pgp"
+ ]
+
+ keyringargs = " ".join(["--keyring %s" % x for x in keyrings ])
+
+ # Build the command line
+ status_read, status_write = os.pipe()
+ cmd = "gpgv --status-fd %s %s %s" % (status_write, keyringargs, sig_filename)
+
+ # Invoke gpgv on the file
+ (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
+
+ # Process the status-fd output
+ (keywords, internal_error) = process_gpgv_output(status)
+
+ # If we failed to parse the status-fd output, let's just whine and bail now
+ if internal_error:
+ warn("Couldn't parse signature")
+ return None
+
+ # usually one would check for bad things here. We, however, do not care.
+
+ # Next check gpgv exited with a zero return code
+ if exit_status:
+ warn("Couldn't parse signature")
+ return None
+
+ # Sanity check the good stuff we expect
+ if not keywords.has_key("VALIDSIG"):
+ warn("Couldn't parse signature")
+ else:
+ args = keywords["VALIDSIG"]
+ if len(args) < 1:
+ warn("Couldn't parse signature")
+ else:
+ fingerprint = args[0]
+
+ return fingerprint
+
+################################################################################
+
+def do_update(self):
+ print "Adding known_changes table"
+
+ try:
+ c = self.db.cursor()
+ c.execute("""
+ CREATE TABLE known_changes (
+ id SERIAL PRIMARY KEY,
+ changesname TEXT NOT NULL,
+ seen TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(),
+ source TEXT NOT NULL,
+ binaries TEXT NOT NULL,
+ architecture TEXT NOT NULL,
+ version TEXT NOT NULL,
+ distribution TEXT NOT NULL,
+ urgency TEXT NOT NULL,
+ maintainer TEXT NOT NULL,
+ fingerprint TEXT NOT NULL,
+ changedby TEXT NOT NULL,
+ date TEXT NOT NULL,
+ UNIQUE (changesname)
+ )
+ """)
+ c.execute("CREATE INDEX changesname_ind ON known_changes(changesname)")
+ c.execute("CREATE INDEX changestimestamp_ind ON known_changes(seen)")
+ c.execute("CREATE INDEX changessource_ind ON known_changes(source)")
+ c.execute("CREATE INDEX changesdistribution_ind ON known_changes(distribution)")
+ c.execute("CREATE INDEX changesurgency_ind ON known_changes(urgency)")
+
+ print "Done. Now looking for old changes files"
+ count = 0
+ failure = 0
+ cnf = Config()
+ for directory in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
+ checkdir = cnf["Dir::Queue::%s" % (directory) ]
+ if os.path.exists(checkdir):
+ print "Looking into %s" % (checkdir)
+ for dirpath, dirnames, filenames in os.walk(checkdir, topdown=False):
+ if not filenames:
+ # Empty directory (or only subdirectories), next
+ continue
+ for changesfile in filenames:
+ if not changesfile.endswith(".changes"):
+ # Only interested in changes files.
+ continue
+ try:
+ count += 1
+ print "Directory %s, file %7d, failures %3d. (%s)" % (dirpath[-10:], count, failure, changesfile)
+ changes = Changes()
+ changes.changes_file = changesfile
+ changesfile = os.path.join(dirpath, changesfile)
+ changes.changes = parse_changes(changesfile, signing_rules=-1)
+ changes.changes["fingerprint"], = check_signature(changesfile)
+ changes.add_known_changes(directory)
+ except InvalidDscError, line:
+ warn("syntax error in .dsc file '%s', line %s." % (f, line))
+ failure += 1
+ except ChangesUnicodeError:
+ warn("found invalid changes file, not properly utf-8 encoded")
+ failure += 1
+
+
+ c.execute("GRANT ALL ON known_changes TO ftpmaster;")
+ c.execute("GRANT SELECT ON known_changes TO public;")
+
+ c.execute("UPDATE config SET value = '20' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply source format update 15, rollback issued. Error message : %s" % (str(msg))
elif answer == 'E' and not Options["Trainee"]:
new = edit_overrides (new, upload, session)
elif answer == 'M' and not Options["Trainee"]:
+ upload.pkg.remove_known_changes()
aborted = upload.do_reject(manual=1,
reject_message=Options["Manual-Reject"],
note=get_new_comments(changes.get("source", ""), session=session))
os.chdir(u.pkg.directory)
u.do_reject(0, pi)
elif answer == 'A':
+ u.pkg.add_known_changes( "Accepted" )
u.accept(summary, short_summary)
u.check_override()
u.remove()
elif answer == queuekey:
+ u.pkg.add_known_changes( qu )
queue_info[qu]["process"](u, summary, short_summary)
u.remove()
elif answer == 'Q':
################################################################################
Cnf = None
-required_database_schema = 17
+required_database_schema = 18
################################################################################
import os
import stat
+import time
+
+import datetime
from cPickle import Unpickler, Pickler
from errno import EPERM
from apt_pkg import ParseSection
from utils import open_file, fubar, poolify
+from config import *
+from dbconn import *
###############################################################################
return summary
+ def remove_known_changes(self, session=None):
+ if session is None:
+ session = DBConn().session()
+ privatetrans = True
+
+ session.delete(get_knownchange(self.changes_file, session))
+
+ if privatetrans:
+ session.commit()
+ session.close()
+
+ def add_known_changes(self, queue, session=None):
+ cnf = Config()
+
+ if session is None:
+ session = DBConn().session()
+ privatetrans = True
+
+ dirpath = cnf["Dir::Queue::%s" % (queue) ]
+ changesfile = os.path.join(dirpath, self.changes_file)
+ filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))
+
+ session.execute(
+ """INSERT INTO known_changes
+ (changesname, seen, source, binaries, architecture, version,
+ distribution, urgency, maintainer, fingerprint, changedby, date)
+ VALUES (:changesfile,:filetime,:source,:binary, :architecture,
+ :version,:distribution,:urgency,:maintainer,:fingerprint,:changedby,:date)""",
+ { 'changesfile':changesfile,
+ 'filetime':filetime,
+ 'source':self.changes["source"],
+ 'binary':self.changes["binary"],
+ 'architecture':self.changes["architecture"],
+ 'version':self.changes["version"],
+ 'distribution':self.changes["distribution"],
+ 'urgency':self.changes["urgency"],
+ 'maintainer':self.changes["maintainer"],
+ 'fingerprint':self.changes["fingerprint"],
+ 'changedby':self.changes["changed-by"],
+ 'date':self.changes["date"]} )
+
+ if privatetrans:
+ session.commit()
+ session.close()
def load_dot_dak(self, changesfile):
"""
################################################################################
+import os
import apt_pkg
import socket
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
-def which_conf_file(Cnf):
- res = socket.gethostbyaddr(socket.gethostname())
- if Cnf.get("Config::" + res[0] + "::DakConfig"):
- return Cnf["Config::" + res[0] + "::DakConfig"]
+def which_conf_file():
+ if os.getenv("DAK_CONFIG"):
+ return os.getenv("DAK_CONFIG")
else:
return default_config
self.Cnf = apt_pkg.newConfiguration()
- apt_pkg.ReadConfigFileISC(self.Cnf, default_config)
+ apt_pkg.ReadConfigFileISC(self.Cnf, which_conf_file())
# Check whether our dak.conf was the real one or
# just a pointer to our main one
################################################################################
+class KnownChange(object):
+    """ORM shell for the known_changes table; attributes (e.g. changesname)
+    are supplied by the SQLAlchemy mapper, not by __init__."""
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        # changesname is a mapped column attribute, set by SQLAlchemy.
+        return '<KnownChange %s>' % self.changesname
+
+__all__.append('KnownChange')
+
+@session_wrapper
+def get_knownchange(filename, session=None):
+    """
+    Returns the KnownChange object for the given .changes filename.
+
+    @type filename: string
+    @param filename: the name of the .changes file to look up
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: KnownChange
+    @return: KnownChange object for the given filename (None if not present)
+
+    """
+    q = session.query(KnownChange).filter_by(changesname=filename)
+
+    try:
+        return q.one()
+    except NoResultFound:
+        return None
+
+__all__.append('get_knownchange')
+
+################################################################################
class Location(object):
def __init__(self, *args, **kwargs):
pass
self.tbl_files = Table('files', self.db_meta, autoload=True)
self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
+ self.tbl_known_changes = Table('known_changes', self.db_meta, autoload=True)
self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True)
self.tbl_location = Table('location', self.db_meta, autoload=True)
self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
properties = dict(keyring_name = self.tbl_keyrings.c.name,
keyring_id = self.tbl_keyrings.c.id))
+ mapper(KnownChange, self.tbl_known_changes,
+ properties = dict(known_change_id = self.tbl_known_changes.c.id))
+
mapper(KeyringACLMap, self.tbl_keyring_acl_map,
properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
keyring = relation(Keyring, backref="keyring_acl_map"),
# Check there isn't already a changes file of the same name in one
# of the queue directories.
base_filename = os.path.basename(filename)
- for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
- if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
- self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
+ if get_knownchange(base_filename):
+ self.rejects.append("%s: a file with this name already exists." % (base_filename))
# Check the .changes is non-empty
if not self.pkg.files:
return res[0]
def which_conf_file ():
- res = socket.gethostbyaddr(socket.gethostname())
- # In case we allow local config files per user, try if one exists
- if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
- homedir = os.getenv("HOME")
- confpath = os.path.join(homedir, "/etc/dak.conf")
- if os.path.exists(confpath):
- apt_pkg.ReadConfigFileISC(Cnf,default_config)
-
- # We are still in here, so there is no local config file or we do
- # not allow local files. Do the normal stuff.
- if Cnf.get("Config::" + res[0] + "::DakConfig"):
- return Cnf["Config::" + res[0] + "::DakConfig"]
+ if os.getenv("DAK_CONFIG"):
+ print(os.getenv("DAK_CONFIG"))
+ return os.getenv("DAK_CONFIG")
else:
- return default_config
+ res = socket.gethostbyaddr(socket.gethostname())
+ # In case we allow local config files per user, try if one exists
+ if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
+ homedir = os.getenv("HOME")
+ confpath = os.path.join(homedir, "/etc/dak.conf")
+ if os.path.exists(confpath):
+ apt_pkg.ReadConfigFileISC(Cnf,default_config)
+
+ # We are still in here, so there is no local config file or we do
+ # not allow local files. Do the normal stuff.
+ if Cnf.get("Config::" + res[0] + "::DakConfig"):
+ return Cnf["Config::" + res[0] + "::DakConfig"]
+ else:
+ return default_config
def which_apt_conf_file ():
res = socket.gethostbyaddr(socket.gethostname())