BinOverride "override.sid.$(SECTION)";
SrcOverride "override.sid.$(SECTION).src";
};
-
-tree "dists/etch-m68k"
-{
- FakeDI "dists/unstable";
- FileList "/srv/ftp.debian.org/database/dists/etch-m68k_$(SECTION)_binary-$(ARCH).list";
- SourceFileList "/srv/ftp.debian.org/database/dists/etch-m68k_$(SECTION)_source.list";
- Sections "main contrib non-free";
- Architectures "m68k source";
- BinOverride "override.etch.$(SECTION)";
- ExtraOverride "override.etch.extra.$(SECTION)";
- SrcOverride "override.etch.$(SECTION).src";
-};
-
-tree "dists/etch-m68k/main"
-{
- FileList "/srv/ftp.debian.org/database/dists/etch-m68k_main_$(SECTION)_binary-$(ARCH).list";
- Sections "debian-installer";
- Architectures "m68k";
- BinOverride "override.etch.main.$(SECTION)";
- SrcOverride "override.etch.main.src";
- BinCacheDB "packages-debian-installer-$(ARCH).db";
- Packages::Extensions ".udeb";
- Contents "$(DIST)/../Contents-udeb";
-};
-
-tree "dists/etch-m68k/non-free"
-{
- FileList "/srv/ftp.debian.org/database/dists/etch-m68k_non-free_$(SECTION)_binary-$(ARCH).list";
- Sections "debian-installer";
- Architectures "m68k";
- BinOverride "override.etch.main.$(SECTION)";
- SrcOverride "override.etch.main.src";
- BinCacheDB "packages-debian-installer-$(ARCH).db";
- Packages::Extensions ".udeb";
- Contents "$(DIST)/../Contents-udeb-nf";
-};
function buildd_dir() {
# Rebuilt the buildd dir to avoid long times of 403
log "Regenerating the buildd incoming dir"
+ STAMP=$(date "+%Y%m%d%H%M")
make_buildd_dir
}
$scriptsdir/expire_dumps -d . -p -f "dump_*"
}
+function transitionsclean() {
+ log "Removing out of date transitions..."
+ cd $base
+ dak transitions -c -a
+}
+
function reports() {
# Send a report on NEW/BYHAND packages
log "Nagging ftpteam about NEW/BYHAND packages"
function merkel2() {
# Push dak@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
- log "Trigger merkels projectb sync"
+ log "Trigger merkel/flotows projectb sync"
ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
+ # Also trigger flotow, the ftpmaster test box
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_flotow_projectb dak@flotow.debconf.org sleep 1
}
function merkel3() {
cd $configdir
$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
R --slave --vanilla < $base/misc/ftpstats.R
+ dak stats arch-space > $webdir/arch-space
+ dak stats pkg-nums > $webdir/pkg-nums
}
function aptftpcleanup() {
)
stage $GO
+GO=(
+ FUNC="transitionsclean"
+ TIME="transitionsclean"
+ ARGS=""
+ ERR=""
+)
+stage $GO
+
GO=(
FUNC="reports"
TIME="reports"
FUNC="aptftpcleanup"
TIME="apt-ftparchive cleanup"
ARGS=""
- ERR=""
+ ERR="false"
)
stage $GO
Transitions
{
+ Notifications "team@release.debian.org";
TempPath "/srv/ftp.debian.org/tmp/";
};
Process-New
{
AcceptedLockFile "/srv/ftp.debian.org/lock/unchecked.lock";
+ LockDir "/srv/ftp.debian.org/lock/new/";
};
Check-Overrides
"map testing-security testing-proposed-updates";
"map-unreleased testing unstable";
"map-unreleased testing-proposed-updates unstable";
+ "reject etch-m68k";
};
AutomaticByHandPackages {
--- /dev/null
+Config
+{
+ ries.debian.org
+ {
+ AllowLocalConfig "false";
+ DatabaseHostname "ftp-master";
+ DakConfig "/srv/ftp.debian.org/dak/config/debian/dak.conf";
+ AptConfig "/srv/ftp.debian.org/dak/config/debian/apt.conf";
+ }
+}
+
before = time.time()
sys.stdout.write("[Deleting from source table... ")
projectB.query("DELETE FROM dsc_files WHERE EXISTS (SELECT 1 FROM source s, files f, dsc_files df WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = df.source AND df.id = dsc_files.id)" % (delete_date))
- projectB.query("DELETE FROM src_uploaders WHERE EXISTS (SELECT 1 FROM source s, files f WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = src_uploaders.source)" % (delete_date))
projectB.query("DELETE FROM source WHERE EXISTS (SELECT 1 FROM files WHERE source.file = files.id AND files.last_used <= '%s')" % (delete_date))
sys.stdout.write("done. (%d seconds)]\n" % (int(time.time()-before)))
import logging
import math
import gzip
+import threading
+import Queue
import apt_pkg
from daklib import utils
from daklib.binary import Binary
-s, --suite={stable,testing,unstable,...}
only operate on a single suite
-
- -a, --arch={i386,amd64}
- only operate on a single architecture
"""
sys.exit(exit_code)
LIMIT 1"""
# find me all of the contents for a given .deb
-contents_q = """PREPARE contents_q(int,int,int,int) as
+contents_q = """PREPARE contents_q(int,int) as
+ SELECT (p.path||'/'||n.file) AS fn,
+ s.section,
+ b.package,
+ b.architecture
+ FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
+ JOIN content_file_names n ON (c.filename=n.id)
+ JOIN binaries b ON (b.id=c.binary_pkg)
+ JOIN override o ON (o.package=b.package)
+ JOIN section s ON (s.id=o.section)
+ WHERE o.suite = $1 AND o.type = $2
+ AND b.type='deb'
+ ORDER BY fn"""
+
+# find me all of the contents for a given .udeb
+udeb_contents_q = """PREPARE udeb_contents_q(int,int,int) as
SELECT (p.path||'/'||n.file) AS fn,
- comma_separated_list(s.section||'/'||b.package)
- FROM content_associations c
- JOIN content_file_paths p ON (c.filepath=p.id)
- JOIN content_file_names n ON (c.filename=n.id)
- JOIN binaries b ON (b.id=c.binary_pkg)
- JOIN bin_associations ba ON (b.id=ba.bin)
- JOIN override o ON (o.package=b.package)
- JOIN section s ON (s.id=o.section)
- WHERE (b.architecture = $1 OR b.architecture = $2)
- AND ba.suite = $3
- AND o.suite = $3
- AND b.type = 'deb'
- AND o.type = $4
- GROUP BY fn
- ORDER BY fn"""
-
-udeb_contents_q = """PREPARE udeb_contents_q(int,int,int,int,int) as
- SELECT (p.path||'/'||n.file) as fn,
- comma_separated_list(s.section||'/'||b.package)
- FROM content_associations c
- JOIN content_file_paths p ON (c.filepath=p.id)
- JOIN content_file_names n ON (c.filename=n.id)
- JOIN binaries b ON (b.id=c.binary_pkg)
- JOIN bin_associations ba ON (b.id=ba.bin)
- JOIN override o ON (o.package=b.package)
- JOIN section s ON (s.id=o.section)
- WHERE (b.architecture = $1 OR b.architecture = $2)
- AND s.id = $3
- AND ba.suite = $4
- AND o.suite = $4
- AND b.type = 'udeb'
- AND o.type = $5
- GROUP BY fn
- ORDER BY fn"""
+ s.section,
+ b.package,
+ b.architecture
+ FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
+ JOIN content_file_names n ON (c.filename=n.id)
+ JOIN binaries b ON (b.id=c.binary_pkg)
+ JOIN override o ON (o.package=b.package)
+ JOIN section s ON (s.id=o.section)
+ WHERE o.suite = $1 AND o.type = $2
+ AND s.id = $3
+ AND b.type='udeb'
+ ORDER BY fn"""
+
+# FROM content_file_paths p join content_associations c ON (c.filepath=p.id)
+# JOIN content_file_names n ON (c.filename=n.id)
+# JOIN binaries b ON (b.id=c.binary_pkg)
+# JOIN override o ON (o.package=b.package)
+# JOIN section s ON (s.id=o.section)
+# WHERE o.suite = $1 AND o.type = $2
+# AND s.id = $3
+# AND b.id in (SELECT ba.bin from bin_associations ba join binaries b on b.id=ba.bin where (b.architecture=$3 or b.architecture=$4)and ba.suite=$1 and b.type='udeb')
+# GROUP BY fn
+# ORDER BY fn;"""
+
# clear out all of the temporarily stored content associations
LEFT JOIN content_associations ca
ON ca.filepath=cfn.id
WHERE ca.id IS NULL)"""
-class Contents(object):
+
+class EndOfContents(object):
"""
- Class capable of generating Contents-$arch.gz files
+    A sentinel object for the end of the filename stream
+ """
+ pass
- Usage GenerateContents().generateContents( ["main","contrib","non-free"] )
+class GzippedContentWriter(object):
+ """
+ An object which will write contents out to a Contents-$arch.gz
+ file on a separate thread
"""
- def __init__(self):
- self.header = None
+ header = None # a class object holding the header section of contents file
- def reject(self, message):
- log.error("E: %s" % message)
+ def __init__(self, filename):
+ """
+        @type filename: string
+ @param filename: the name of the file to write to
+ """
+ self.queue = Queue.Queue()
+ self.current_file = None
+ self.first_package = True
+ self.output = self.open_file(filename)
+ self.thread = threading.Thread(target=self.write_thread,
+ name='Contents writer')
+ self.thread.start()
+
+ def open_file(self, filename):
+ """
+ opens a gzip stream to the contents file
+ """
+ filepath = Config()["Contents::Root"] + filename
+ filedir = os.path.dirname(filepath)
+ if not os.path.isdir(filedir):
+ os.makedirs(filedir)
+ return gzip.open(filepath, "w")
+ def write(self, filename, section, package):
+ """
+ enqueue content to be written to the file on a separate thread
+ """
+ self.queue.put((filename,section,package))
+
+ def write_thread(self):
+ """
+ the target of a Thread which will do the actual writing
+ """
+ while True:
+ next = self.queue.get()
+ if isinstance(next, EndOfContents):
+ self.output.write('\n')
+ self.output.close()
+ break
+
+ (filename,section,package)=next
+ if next != self.current_file:
+ # this is the first file, so write the header first
+ if not self.current_file:
+ self.output.write(self._getHeader())
+
+ self.output.write('\n%s\t' % filename)
+ self.first_package = True
+
+ self.current_file=filename
+
+ if not self.first_package:
+ self.output.write(',')
+ else:
+ self.first_package=False
+ self.output.write('%s/%s' % (section,package))
+
+ def finish(self):
+ """
+        enqueue the sentinel object so that writers will know to terminate
+ """
+ self.queue.put(EndOfContents())
+
+ @classmethod
def _getHeader(self):
"""
Internal method to return the header for Contents.gz files
This is boilerplate which explains the contents of the file and how
it can be used.
"""
- if self.header == None:
+ if not GzippedContentWriter.header:
if Config().has_key("Contents::Header"):
try:
h = open(os.path.join( Config()["Dir::Templates"],
Config()["Contents::Header"] ), "r")
- self.header = h.read()
+ GzippedContentWriter.header = h.read()
h.close()
except:
log.error( "error opening header file: %d\n%s" % (Config()["Contents::Header"],
traceback.format_exc() ))
- self.header = False
+ GzippedContentWriter.header = None
else:
- self.header = False
+ GzippedContentWriter.header = None
- return self.header
+ return GzippedContentWriter.header
- # goal column for section column
- _goal_column = 54
- def _write_content_file(self, cursor, filename):
- """
- Internal method for writing all the results to a given file.
- The cursor should have a result set generated from a query already.
- """
- filepath = Config()["Contents::Root"] + filename
- filedir = os.path.dirname(filepath)
- if not os.path.isdir(filedir):
- os.makedirs(filedir)
- f = gzip.open(filepath, "w")
- try:
- header = self._getHeader()
+class Contents(object):
+ """
+ Class capable of generating Contents-$arch.gz files
- if header:
- f.write(header)
+ Usage GenerateContents().generateContents( ["main","contrib","non-free"] )
+ """
- while True:
- contents = cursor.fetchone()
- if not contents:
- return
+ def __init__(self):
+ self.header = None
- num_tabs = max(1,
- int(math.ceil((self._goal_column - len(contents[0])-1) / 8)))
- f.write(contents[0] + ( '\t' * num_tabs ) + contents[-1] + "\n")
+ def reject(self, message):
+ log.error("E: %s" % message)
- finally:
- f.close()
+ # goal column for section column
+ _goal_column = 54
def cruft(self):
"""
"""
Generate Contents-$arch.gz files for every available arch in each given suite.
"""
- cursor = DBConn().cursor();
+ cursor = DBConn().cursor()
- DBConn().prepare( "arches_q", arches_q )
- DBConn().prepare( "contents_q", contents_q )
- DBConn().prepare( "udeb_contents_q", udeb_contents_q )
+ DBConn().prepare("arches_q", arches_q)
+ DBConn().prepare("contents_q", contents_q)
+ DBConn().prepare("udeb_contents_q", udeb_contents_q)
debtype_id=DBConn().get_override_type_id("deb")
udebtype_id=DBConn().get_override_type_id("udeb")
+ arch_all_id = DBConn().get_architecture_id("all")
suites = self._suites()
+
# Get our suites, and the architectures
for suite in [i.lower() for i in suites]:
suite_id = DBConn().get_suite_id(suite)
arch_list = self._arches(cursor, suite_id)
- arch_all_id = DBConn().get_architecture_id("all")
+ file_writers = {}
+
+ try:
+ for arch_id in arch_list:
+ file_writers[arch_id[0]] = GzippedContentWriter("dists/%s/Contents-%s.gz" % (suite, arch_id[1]))
+
+ cursor.execute("EXECUTE contents_q(%d,%d);" % (suite_id, debtype_id))
+
+ while True:
+ r = cursor.fetchone()
+ if not r:
+ break
+
+ filename, section, package, arch = r
+
+ if not file_writers.has_key( arch ):
+ continue
+
+ if arch == arch_all_id:
+ ## its arch all, so all contents files get it
+ for writer in file_writers.values():
+ writer.write(filename, section, package)
+
+ else:
+ file_writers[arch].write(filename, section, package)
+
+ finally:
+ # close all the files
+ for writer in file_writers.values():
+ writer.finish()
- for arch_id in arch_list:
- cursor.execute("EXECUTE contents_q(%d,%d,%d,%d)" % (arch_id[0], arch_all_id, suite_id, debtype_id))
- self._write_content_file(cursor, "dists/%s/Contents-%s.gz" % (suite, arch_id[1]))
# The MORE fun part. Ok, udebs need their own contents files, udeb, and udeb-nf (not-free)
# This is HORRIBLY debian specific :-/
- for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s.gz"),
- ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
+ for section, fn_pattern in [("debian-installer","dists/%s/Contents-udeb-%s.gz"),
+ ("non-free/debian-installer", "dists/%s/Contents-udeb-nf-%s.gz")]:
- for arch_id in arch_list:
- section_id = DBConn().get_section_id(section) # all udebs should be here)
- if section_id != -1:
- cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d,%d,%d)" % (arch_id[0], arch_all_id, section_id, suite_id, udebtype_id))
+ section_id = DBConn().get_section_id(section) # all udebs should be here)
+ if section_id != -1:
+
+ # Get our suites, and the architectures
+ for suite in [i.lower() for i in suites]:
+ suite_id = DBConn().get_suite_id(suite)
+ arch_list = self._arches(cursor, suite_id)
+
+ file_writers = {}
+
+ try:
+ for arch_id in arch_list:
+ file_writers[arch_id[0]] = GzippedContentWriter(fn_pattern % (suite, arch_id[1]))
+
+ cursor.execute("EXECUTE udeb_contents_q(%d,%d,%d)" % (suite_id, udebtype_id, section_id))
+
+ while True:
+ r = cursor.fetchone()
+ if not r:
+ break
+
+ filename, section, package, arch = r
+
+ if not file_writers.has_key( arch ):
+ continue
+
+ if arch == arch_all_id:
+ ## its arch all, so all contents files get it
+ for writer in file_writers.values():
+ writer.write(filename, section, package)
+
+ else:
+ file_writers[arch].write(filename, section, package)
+ finally:
+ # close all the files
+ for writer in file_writers.values():
+ writer.finish()
- self._write_content_file(cursor, fn_pattern % (suite, arch_id[1]))
################################################################################
"""
return a list of archs to operate on
"""
- arch_list = [ ]
- if Config().has_key( "%s::%s" %(options_prefix,"Arch")):
- archs = utils.split_args(Config()[ "%s::%s" %(options_prefix,"Arch")])
- for arch_name in archs:
- arch_list.append((DBConn().get_architecture_id(arch_name), arch_name))
- else:
- cursor.execute("EXECUTE arches_q(%d)" % (suite))
- while True:
- r = cursor.fetchone()
- if not r:
- break
+ arch_list = []
+ cursor.execute("EXECUTE arches_q(%d)" % (suite))
+ while True:
+ r = cursor.fetchone()
+ if not r:
+ break
- if r[1] != "source" and r[1] != "all":
- arch_list.append((r[0], r[1]))
+ if r[1] != "source" and r[1] != "all":
+ arch_list.append((r[0], r[1]))
return arch_list
################################################################################
+
def main():
cnf = Config()
('s',"suite", "%s::%s" % (options_prefix,"Suite"),"HasArg"),
('q',"quiet", "%s::%s" % (options_prefix,"Quiet")),
('v',"verbose", "%s::%s" % (options_prefix,"Verbose")),
- ('a',"arch", "%s::%s" % (options_prefix,"Arch"),"HasArg"),
]
commands = {'generate' : Contents.generate,
f.close()
else:
- utils.warn("No wanna-build dump file for architecture %s", architecture)
+ utils.warn("No wanna-build dump file for architecture %s" % architecture)
return ret
################################################################################
output = "Not Built from Source\n"
output += "---------------------\n\n"
- nbs_to_remove = []
+ cmd_output = ""
nbs_keys = real_nbs.keys()
nbs_keys.sort()
for source in nbs_keys:
output += " but no longer builds:\n"
versions = real_nbs[source].keys()
versions.sort(apt_pkg.VersionCompare)
+ all_packages = []
for version in versions:
packages = real_nbs[source][version].keys()
packages.sort()
- for pkg in packages:
- nbs_to_remove.append(pkg)
+ all_packages.extend(packages)
output += " o %s: %s\n" % (version, ", ".join(packages))
+ if all_packages:
+ all_packages.sort()
+ cmd_output += " dak rm -m \"[auto-cruft] NBS (was built by %s)\" -s %s -b %s\n\n" % (source, suite, " ".join(all_packages))
output += "\n"
- if nbs_to_remove:
+ if len(cmd_output):
print output
-
- print "Suggested command:"
- print " dak rm -m \"[auto-cruft] NBS\" -s %s -b %s" % (suite, " ".join(nbs_to_remove))
- print
+ print "Suggested commands:\n"
+ print cmd_output
################################################################################
for component in check_components:
architectures = filter(utils.real_arch, database.get_suite_architectures(suite))
for architecture in architectures:
+ if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
+ continue
filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (Cnf["Dir::Root"], suite, component, architecture)
# apt_pkg.ParseTagFile needs a real file handle
(fd, temp_filename) = utils.temp_filename()
+"""
+Database update scripts for usage with B{dak update-db}
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@license: GNU General Public License version 2 or later
+
+Update scripts have to C{import psycopg2} and
+C{from daklib.dak_exceptions import DBUpdateError}.
+
+There has to be B{at least} the function C{do_update(self)} to be
+defined. It should take all necessary steps to update the
+database. If the update fails the changes have to be rolled back and the
+C{DBUpdateError} exception raised to properly halt the execution of any
+other update.
+
+Example::
+ def do_update(self):
+ print "Doing something"
+
+ try:
+ c = self.db.cursor()
+ c.execute("SOME SQL STATEMENT")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to do whatever, rollback issued. Error message : %s" % (str(msg))
+
+This function can do whatever it wants and use everything from dak and
+daklib.
+
+"""
#!/usr/bin/env python
-""" Database Update Script - Saner DM db schema """
-# Copyright (C) 2008 Michael Casadevall <mcasadevall@debian.org>
+"""
+Saner DM db schema
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Add constraints to src_uploaders
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+# <mhy> oh no, Ganneff has just corrected my english
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+from daklib.utils import get_conf
+
+################################################################################
+
+def do_update(self):
+ print "Add constraints to src_uploaders"
+ Cnf = get_conf()
+
+ try:
+ c = self.db.cursor()
+ # Deal with out-of-date src_uploaders entries
+ c.execute("DELETE FROM src_uploaders WHERE source NOT IN (SELECT id FROM source)")
+ c.execute("DELETE FROM src_uploaders WHERE maintainer NOT IN (SELECT id FROM maintainer)")
+ # Add constraints
+ c.execute("ALTER TABLE src_uploaders ADD CONSTRAINT src_uploaders_maintainer FOREIGN KEY (maintainer) REFERENCES maintainer(id) ON DELETE CASCADE")
+ c.execute("ALTER TABLE src_uploaders ADD CONSTRAINT src_uploaders_source FOREIGN KEY (source) REFERENCES source(id) ON DELETE CASCADE")
+ c.execute("UPDATE config SET value = '10' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply suite config updates, rollback issued. Error message : %s" % (str(msg))
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding process-new comments to the DB
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+
+################################################################################
+
+def do_update(self):
+ print "Adding process-new comments to the DB"
+
+ try:
+ c = self.db.cursor()
+ c.execute("""CREATE TABLE new_comments (
+ id SERIAL PRIMARY KEY NOT NULL,
+ package TEXT NOT NULL,
+ version TEXT NOT NULL,
+ comment TEXT NOT NULL,
+ author TEXT NOT NULL
+ )""")
+
+ c.execute("GRANT SELECT ON new_comments TO ftptrainee;")
+ c.execute("GRANT INSERT ON new_comments TO ftptrainee;")
+ c.execute("GRANT UPDATE ON new_comments TO ftptrainee;")
+ c.execute("GRANT SELECT ON new_comments TO ftpteam;")
+ c.execute("GRANT INSERT ON new_comments TO ftpteam;")
+ c.execute("GRANT UPDATE ON new_comments TO ftpteam;")
+ c.execute("GRANT ALL ON new_comments TO ftpmaster;")
+
+ c.execute("UPDATE config SET value = '11' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply process-new comments update, rollback issued. Error message : %s" % (str(msg))
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding a date field to the process-new notes
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+
+################################################################################
+
+def do_update(self):
+ print "Adding a date field to the process-new notes"
+
+ try:
+ c = self.db.cursor()
+ c.execute("ALTER TABLE new_comments ADD COLUMN notedate TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now()")
+
+ c.execute("UPDATE config SET value = '12' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply process-new update 12, rollback issued. Error message : %s" % (str(msg))
#!/usr/bin/env python
# coding=utf8
-""" Database Update Script - debversion """
-# Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-# Copyright © 2008 Roger Leigh <rleigh@debian.org>
+"""
+debversion
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2008 Roger Leigh <rleigh@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#!/usr/bin/env python
-""" Database Update Script - Remove unused versioncmp """
-# Copyright (C) 2008 Michael Casadevall <mcasadevall@debian.org>
-# Copyright (C) 2009 Joerg Jaspert <joerg@debian.org>
+"""
+Remove unused versioncmp
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#!/usr/bin/env python
"""
-Database Update Script - Get suite_architectures table use sane values
+Get suite_architectures table use sane values
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
#!/usr/bin/env python
"""
-Database Update Script - Fix bin_assoc_by_arch view
+Fix bin_assoc_by_arch view
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2008 Roger Leigh <rleigh@debian.org>
+Adding content fields
-Debian Archive Kit Database Update Script 2
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2008 Roger Leigh <rleigh@debian.org>
+@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2009 Joerg Jaspert <joerg@debian.org>
+Moving suite config into DB
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
-Debian Archive Kit Database Update Script 7
"""
# This program is free software; you can redistribute it and/or modify
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2009 Joerg Jaspert <joerg@debian.org>
+More suite config into the DB
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2009 Mike O'Connor <stew@debian.org>
+Pending contents distinguished by arch
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Mike O'Connor <stew@debian.org>
+@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
#!/usr/bin/env python
-""" Installs Debian packages from queue/accepted into the pool """
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+"""
+Installs Debian packages from queue/accepted into the pool
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
###############################################################################
-def action ():
+def action (queue=""):
(summary, short_summary) = Upload.build_summaries()
(prompt, answer) = ("", "XXX")
if not installing_to_stable:
install()
else:
- stable_install(summary, short_summary)
+ stable_install(summary, short_summary, queue)
elif answer == 'Q':
sys.exit(0)
################################################################################
-def stable_install (summary, short_summary):
+def stable_install (summary, short_summary, fromsuite="proposed-updates"):
global install_count
- print "Installing to stable."
+ fromsuite = fromsuite.lower()
+ tosuite = "Stable"
+ if fromsuite == "oldstable-proposed-updates":
+ tosuite = "OldStable"
+
+ print "Installing from %s to %s." % (fromsuite, tosuite)
# Begin a transaction; if we bomb out anywhere between here and
# the COMMIT WORK below, the DB won't be changed.
if not ql:
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
source_id = ql[0][0]
- suite_id = database.get_suite_id('proposed-updates')
+ suite_id = database.get_suite_id(fromsuite)
projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id))
- suite_id = database.get_suite_id('stable')
+ suite_id = database.get_suite_id(tosuite.lower())
projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id))
# Add the binaries to stable (and remove it/them from proposed-updates)
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
binary_id = ql[0][0]
- suite_id = database.get_suite_id('proposed-updates')
+ suite_id = database.get_suite_id(fromsuite)
projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
- suite_id = database.get_suite_id('stable')
+ suite_id = database.get_suite_id(tosuite.lower())
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id))
projectB.query("COMMIT WORK")
utils.move (pkg.changes_file, Cnf["Dir::Morgue"] + '/process-accepted/' + os.path.basename(pkg.changes_file))
## Update the Stable ChangeLog file
- new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + ".ChangeLog"
- changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + "ChangeLog"
+ new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + ".ChangeLog"
+ changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + "ChangeLog"
if os.path.exists(new_changelog_filename):
os.unlink (new_changelog_filename)
new_changelog = utils.open_file(new_changelog_filename, 'w')
for newfile in files.keys():
if files[newfile]["type"] == "deb":
- new_changelog.write("stable/%s/binary-%s/%s\n" % (files[newfile]["component"], files[newfile]["architecture"], newfile))
+ new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.lower(), files[newfile]["component"], files[newfile]["architecture"], newfile))
elif re_issource.match(newfile):
- new_changelog.write("stable/%s/source/%s\n" % (files[newfile]["component"], newfile))
+ new_changelog.write("%s/%s/source/%s\n" % (tosuite.lower(), files[newfile]["component"], newfile))
else:
new_changelog.write("%s\n" % (newfile))
chop_changes = re_fdnic.sub("\n", changes["changes"])
install_count += 1
if not Options["No-Mail"] and changes["architecture"].has_key("source"):
- Subst["__SUITE__"] = " into stable"
+ Subst["__SUITE__"] = " into %s" % (tosuite)
Subst["__SUMMARY__"] = summary
mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.install")
utils.send_mail(mail_message)
Upload.announce(short_summary, 1)
# Finally remove the .dak file
- dot_dak_file = os.path.join(Cnf["Suite::Proposed-Updates::CopyDotDak"], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
+ dot_dak_file = os.path.join(Cnf["Suite::%s::CopyDotDak" % (fromsuite)], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
os.unlink(dot_dak_file)
################################################################################
-def process_it (changes_file):
+def process_it (changes_file, queue=""):
global reject_message
reject_message = ""
if installing_to_stable:
old = Upload.pkg.changes_file
Upload.pkg.changes_file = os.path.basename(old)
- os.chdir(Cnf["Suite::Proposed-Updates::CopyDotDak"])
+ os.chdir(Cnf["Suite::%s::CopyDotDak" % (queue)])
Upload.init_vars()
Upload.update_vars()
Upload.pkg.changes_file = old
check()
- action()
+ action(queue)
# Restore CWD
os.chdir(pkg.directory)
utils.fubar("Archive maintenance in progress. Try again later.")
# If running from within proposed-updates; assume an install to stable
- if os.getcwd().find('proposed-updates') != -1:
+ queue = ""
+ if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
+ queue = "Oldstable-Proposed-Updates"
+ installing_to_stable = 1
+ elif os.getenv('PWD').find('proposed-updates') != -1:
+ queue = "Proposed-Updates"
installing_to_stable = 1
# Obtain lock if not in no-action mode and initialize the log
# Process the changes files
for changes_file in changes_files:
print "\n" + changes_file
- process_it (changes_file)
+ process_it (changes_file, queue)
if install_count:
sets = "set"
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
################################################################################
+from __future__ import with_statement
+
import copy
import errno
import os
import stat
import sys
import time
+import contextlib
+import pwd
import apt_pkg, apt_inst
import examine_package
from daklib import database
from daklib import queue
from daklib import utils
from daklib.regexes import re_no_epoch, re_default_answer, re_isanum
+from daklib.dak_exceptions import CantOpenError, AlreadyLockedError
# Globals
Cnf = None #: Configuration, apt_pkg.Configuration
if reject_message.find("Rejected") != -1:
answer = "XXX"
- if Options["No-Action"] or Options["Automatic"]:
+ if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
answer = 'S'
print "REJECT\n" + reject_message,
mtime = os.stat(d["filename"])[stat.ST_MTIME]
if mtime < oldest:
oldest = mtime
- have_note += (d.has_key("process-new note"))
+ have_note += (database.has_new_comment(d["source"], d["version"]))
per_source[source]["oldest"] = oldest
if not have_note:
per_source[source]["note_state"] = 0; # none
line = "%-20s %-20s %-20s" % (pkg, priority, section)
line = line.strip()+'\n'
file.write(line)
- note = Upload.pkg.changes.get("process-new note")
- if note:
- print "*"*75
- print note
- print "*"*75
+ note = database.get_new_comments(Upload.pkg.changes.get("source"))
+ if len(note) > 0:
+ for line in note:
+ print line
return broken, note
################################################################################
def edit_note(note):
# Write the current data to a temporary file
(fd, temp_filename) = utils.temp_filename()
- temp_file = os.fdopen(fd, 'w')
- temp_file.write(note)
- temp_file.close()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
os.system("%s %s" % (editor, temp_filename))
temp_file = utils.open_file(temp_filename)
- note = temp_file.read().rstrip()
+ newnote = temp_file.read().rstrip()
temp_file.close()
- print "Note:"
- print utils.prefix_multi_line_string(note," ")
+ print "New Note:"
+ print utils.prefix_multi_line_string(newnote," ")
prompt = "[D]one, Edit, Abandon, Quit ?"
answer = "XXX"
while prompt.find(answer) == -1:
elif answer == 'Q':
end()
sys.exit(0)
- Upload.pkg.changes["process-new note"] = note
- Upload.dump_vars(Cnf["Dir::Queue::New"])
+ database.add_new_comment(Upload.pkg.changes["source"], Upload.pkg.changes["version"], newnote, utils.whoami())
################################################################################
################################################################################
-def prod_maintainer ():
+def prod_maintainer (note):
# Here we prepare an editor and get them ready to prod...
(fd, temp_filename) = utils.temp_filename()
+ temp_file = os.fdopen(fd, 'w')
+ if len(note) > 0:
+ for line in note:
+ temp_file.write(line)
+ temp_file.close()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
os.system("%s %s" % (editor, temp_filename))
- f = os.fdopen(fd)
- prod_message = "".join(f.readlines())
- f.close()
+ temp_fh = utils.open_file(temp_filename)
+ prod_message = "".join(temp_fh.readlines())
+ temp_fh.close()
print "Prod message:"
print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
prompt = "[P]rod, Edit, Abandon, Quit ?"
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
- os.unlink(temp_filename)
- if answer == 'A':
- return
- elif answer == 'Q':
- end()
- sys.exit(0)
+ os.unlink(temp_filename)
+ if answer == 'A':
+ return
+ elif answer == 'Q':
+ end()
+ sys.exit(0)
# Otherwise, do the proding...
user_email_address = utils.whoami() + " <%s>" % (
Cnf["Dinstall::MyAdminAddress"])
answer = m.group(1)
answer = answer[:1].upper()
- if answer == 'A':
+ if answer == 'A' and not Options["Trainee"]:
done = add_overrides (new)
elif answer == 'C':
check_pkg()
- elif answer == 'E':
+ elif answer == 'E' and not Options["Trainee"]:
new = edit_overrides (new)
- elif answer == 'M':
- aborted = Upload.do_reject(1, Options["Manual-Reject"])
+ elif answer == 'M' and not Options["Trainee"]:
+ aborted = Upload.do_reject(manual=1,
+ reject_message=Options["Manual-Reject"],
+ note=database.get_new_comments(changes.get("source", "")))
if not aborted:
os.unlink(Upload.pkg.changes_file[:-8]+".dak")
done = 1
elif answer == 'N':
- edit_note(changes.get("process-new note", ""))
- elif answer == 'P':
- prod_maintainer()
- elif answer == 'R':
+ edit_note(database.get_new_comments(changes.get("source", "")))
+ elif answer == 'P' and not Options["Trainee"]:
+ prod_maintainer(database.get_new_comments(changes.get("source", "")))
+ elif answer == 'R' and not Options["Trainee"]:
confirm = utils.our_raw_input("Really clear note (y/N)? ").lower()
if confirm == "y":
- del changes["process-new note"]
+ database.delete_new_comments(changes.get("source"), changes.get("version"))
elif answer == 'S':
done = 1
elif answer == 'Q':
-C, --comments-dir=DIR use DIR as comments-dir, for [o-]p-u-new
-m, --manual-reject=MSG manual reject with `msg'
-n, --no-action don't do anything
+ -t, --trainee FTP Trainee mode
-V, --version display the version number and exit"""
sys.exit(exit_code)
('h',"help","Process-New::Options::Help"),
('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
+ ('t',"trainee","Process-New::Options::Trainee"),
('n',"no-action","Process-New::Options::No-Action")]
- for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir"]:
+ for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
if not Cnf.has_key("Process-New::Options::%s" % (i)):
Cnf["Process-New::Options::%s" % (i)] = ""
Upload = queue.Upload(Cnf)
if not Options["No-Action"]:
- Logger = Upload.Logger = logging.Logger(Cnf, "process-new")
+ try:
+ Logger = Upload.Logger = logging.Logger(Cnf, "process-new")
+ except CantOpenError, e:
+ Options["Trainee"] = "Oh yes"
projectB = Upload.projectB
else:
raise
+
+@contextlib.contextmanager
+def lock_package(package):
+ """
+ Lock C{package} so that no one else jumps in and processes it.
+
+ @type package: string
+ @param package: source package name to lock
+ """
+
+ path = os.path.join(Cnf["Process-New::LockDir"], package)
+ try:
+ fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDONLY)
+ except OSError, e:
+ if e.errno == errno.EEXIST or e.errno == errno.EACCES:
+ user = pwd.getpwuid(os.stat(path)[stat.ST_UID])[4].split(',')[0].replace('.', '')
+ raise AlreadyLockedError, user
+ # NOTE(review): any other OSError is silently swallowed here, leaving fd
+ # unbound so the "yield fd" below raises NameError — consider re-raising
+ # unexpected errors.
+
+ try:
+ yield fd
+ finally:
+ os.unlink(path)
+
def move_to_dir (dest, perms=0660, changesperms=0664):
utils.move (Upload.pkg.changes_file, dest, perms=changesperms)
file_keys = Upload.pkg.files.keys()
Upload.update_subst()
files = Upload.pkg.files
- if not recheck():
- return
-
- (new, byhand) = check_status(files)
- if new or byhand:
- if new:
- do_new()
- if byhand:
- do_byhand()
- (new, byhand) = check_status(files)
-
- if not new and not byhand:
- do_accept()
+ try:
+ with lock_package(Upload.pkg.changes["source"]):
+ if not recheck():
+ return
+
+ (new, byhand) = check_status(files)
+ if new or byhand:
+ if new:
+ do_new()
+ if byhand:
+ do_byhand()
+ (new, byhand) = check_status(files)
+
+ if not new and not byhand:
+ do_accept()
+ except AlreadyLockedError, e:
+ print "Seems to be locked by %s already, skipping..." % (e)
################################################################################
sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))))
Logger.log(["total",accept_count,accept_bytes])
- if not Options["No-Action"]:
+ if not Options["No-Action"] and not Options["Trainee"]:
Logger.close()
################################################################################
deb_file.close()
# Can't continue, none of the checks on control would work.
continue
+
+ # Check for mandatory "Description:"
+ deb_file.seek ( 0 )
+ try:
+ apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
+ except:
+ reject("%s: Missing Description in binary package" % (f))
+ continue
+
deb_file.close()
# Check for mandatory fields
"""
Return the uid,name,isdm for a given gpg fingerprint
- @ptype fpr: string
+ @type fpr: string
@param fpr: a 40 byte GPG fingerprint
- @return (uid, name, isdm)
+ @return: (uid, name, isdm)
"""
cursor = DBConn().cursor()
cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
if qs:
return qs
else:
- return (None, None, None)
+ return (None, None, False)
def check_signed_by_key():
"""Ensure the .changes is signed by an authorized uploader."""
uid_name = ""
# match claimed name with actual name:
- if uid == None:
+ if uid is None:
+ # This is fundamentally broken, but we need to refactor how we get
+ # the UIDs/fingerprints before we can fix it properly
uid, uid_email = changes["fingerprint"], uid
may_nmu, may_sponsor = 1, 1
# XXX by default new dds don't have a fingerprint/uid in the db atm,
# and can't get one in there if we don't allow nmu/sponsorship
- elif is_dm is "t":
- uid_email = uid
- may_nmu, may_sponsor = 0, 0
- else:
+ elif is_dm is False:
+ # If is_dm is False, we allow full upload rights
uid_email = "%s@debian.org" % (uid)
may_nmu, may_sponsor = 1, 1
+ else:
+ # Assume limited upload rights unless we've discovered otherwise
+ uid_email = uid
+ may_nmu, may_sponsor = 0, 0
+
if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
sponsored = 0
if sponsored and not may_sponsor:
reject("%s is not authorised to sponsor uploads" % (uid))
+ cursor = DBConn().cursor()
if not sponsored and not may_nmu:
source_ids = []
cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
Logger.log(["Moving to new", pkg.changes_file])
Upload.dump_vars(Cnf["Dir::Queue::New"])
- move_to_dir(Cnf["Dir::Queue::New"])
+ move_to_dir(Cnf["Dir::Queue::New"], perms=0640, changesperms=0644)
if not Options["No-Mail"]:
print "Sending new ack."
import apt_pkg
import cgi
from daklib import queue
+from daklib import database
from daklib import utils
from daklib.dak_exceptions import *
Upload = None
direction = []
row_number = 0
+projectB = None
################################################################################
else:
if mtime < oldest:
oldest = mtime
- have_note += (d.has_key("process-new note"))
+ have_note += (database.has_new_comment(d["source"], d["version"]))
per_source[source]["oldest"] = oldest
if not have_note:
per_source[source]["note_state"] = 0; # none
usage()
Upload = queue.Upload(Cnf)
+ projectB = Upload.projectB
if Cnf.has_key("Queue-Report::Options::New"):
header()
Cnf = utils.get_conf()
- Arguments = [('h',"help","Edit-Transitions::Options::Help"),
+ Arguments = [('a',"automatic","Edit-Transitions::Options::Automatic"),
+ ('h',"help","Edit-Transitions::Options::Help"),
('e',"edit","Edit-Transitions::Options::Edit"),
('i',"import","Edit-Transitions::Options::Import", "HasArg"),
('c',"check","Edit-Transitions::Options::Check"),
('s',"sudo","Edit-Transitions::Options::Sudo"),
('n',"no-action","Edit-Transitions::Options::No-Action")]
- for i in ["help", "no-action", "edit", "import", "check", "sudo"]:
+ for i in ["automatic", "help", "no-action", "edit", "import", "check", "sudo"]:
if not Cnf.has_key("Edit-Transitions::Options::%s" % (i)):
Cnf["Edit-Transitions::Options::%s" % (i)] = ""
-i, --import <file> check and import transitions from file
-c, --check check the transitions file, remove outdated entries
-S, --sudo use sudo to update transitions file
+ -a, --automatic don't prompt (only affects check).
-n, --no-action don't do anything (only affects check)"""
sys.exit(exit_code)
def check_transitions(transitions):
"""
Check if the defined transitions still apply and remove those that no longer do.
- @note: Asks the user for confirmation first.
+ @note: Asks the user for confirmation first unless -a has been set.
"""
+ global Cnf
+
to_dump = 0
to_remove = []
+ info = {}
# Now look through all defined transitions
for trans in transitions:
t = transitions[trans]
# Will be None if nothing is in testing.
current = database.get_suite_version(source, "testing")
- print_info(trans, source, expected, t["rm"], t["reason"], t["packages"])
+ info[trans] = get_info(trans, source, expected, t["rm"], t["reason"], t["packages"])
+ print info[trans]
if current == None:
# No package in testing
if Options["no-action"]:
answer="n"
+ elif Options["automatic"]:
+ answer="y"
else:
answer = utils.our_raw_input(prompt).lower()
sys.exit(0)
elif answer == 'y':
print "Committing"
+ subst = {}
+ subst['__TRANSITION_MESSAGE__'] = "The following transitions were removed:\n"
for remove in to_remove:
+ subst['__TRANSITION_MESSAGE__'] += info[remove] + '\n'
del transitions[remove]
+ # If we have a mail address configured for transitions,
+ # send a notification
+ subst['__TRANSITION_EMAIL__'] = Cnf.get("Transitions::Notifications", "")
+ if subst['__TRANSITION_EMAIL__'] != "":
+ print "Sending notification to %s" % subst['__TRANSITION_EMAIL__']
+ subst['__DAK_ADDRESS__'] = Cnf["Dinstall::MyEmailAddress"]
+ subst['__BCC__'] = 'X-DAK: dak transitions'
+ if Cnf.has_key("Dinstall::Bcc"):
+ subst["__BCC__"] += '\nBcc: %s' % Cnf["Dinstall::Bcc"]
+ message = utils.TemplateSubst(subst,
+ os.path.join(Cnf["Dir::Templates"], 'transition.removed'))
+ utils.send_mail(message)
+
edit_file = temp_transitions_file(transitions)
write_transitions_from_file(edit_file)
################################################################################
-def print_info(trans, source, expected, rm, reason, packages):
+def get_info(trans, source, expected, rm, reason, packages):
"""
Print information about a single transition.
@param packages: list of blocked packages
"""
- print """Looking at transition: %s
+ return """Looking at transition: %s
Source: %s
New Version: %s
Responsible: %s
Description: %s
Blocked Packages (total: %d): %s
""" % (trans, source, expected, rm, reason, len(packages), ", ".join(packages))
- return
################################################################################
def transition_info(transitions):
"""
Print information about all defined transitions.
- Calls L{print_info} for every transition and then tells user if the transition is
+ Calls L{get_info} for every transition and then tells user if the transition is
still ongoing or if the expected version already hit testing.
@type transitions: dict
# Will be None if nothing is in testing.
current = database.get_suite_version(source, "testing")
- print_info(trans, source, expected, t["rm"], t["reason"], t["packages"])
+ print get_info(trans, source, expected, t["rm"], t["reason"], t["packages"])
if current == None:
# No package in testing
Cnf = None
projectB = None
-required_database_schema = 9
+required_database_schema = 12
################################################################################
class Binary(object):
def __init__(self, filename, reject=None):
"""
- @ptype filename: string
+ @type filename: string
@param filename: path of a .deb
- @ptype reject: function
+ @type reject: function
@param reject: a function to log reject messages to
"""
self.filename = filename
the hopefully near future, it should also include gathering info from the
control file.
- @ptype bootstrap_id: int
+ @type bootstrap_id: int
@param bootstrap_id: the id of the binary these packages
should be associated or zero meaning we are not bootstrapping
so insert into a temporary table
- @return True if the deb is valid and contents were imported
+ @return: True if the deb is valid and contents were imported
"""
result = False
rejected = not self.valid_deb(relaxed)
the hopefully near future, it should also include gathering info from the
control file.
- @ptype bootstrap_id: int
- @param bootstrap_id: the id of the binary these packages
- should be associated or zero meaning we are not bootstrapping
- so insert into a temporary table
+ @type package: string
+ @param package: the name of the package to be checked
- @return True if the deb is valid and contents were imported
+ @rtype: boolean
+ @return: True if the deb is valid and contents were imported
"""
rejected = not self.valid_deb(True)
self.__unpack()
"NoSourceFieldError": """Exception raised - we cant find the source - wtf?""",
"MissingContents": """Exception raised - we could not determine contents for this deb""",
"DBUpdateError": """Exception raised - could not update the database""",
- "ChangesUnicodeError": """Exception raised - changes file not properly utf-8 encoded"""
+ "ChangesUnicodeError": """Exception raised - changes file not properly utf-8 encoded""",
+ "AlreadyLockedError": """Exception raised - package already locked by someone else"""
} #: All dak exceptions
def construct_dak_exception(name, description):
@group readonly: get_suite_id, get_section_id, get_priority_id, get_override_type_id,
get_architecture_id, get_archive_id, get_component_id, get_location_id,
get_source_id, get_suite_version, get_files_id, get_maintainer, get_suites,
- get_suite_architectures
+ get_suite_architectures, get_new_comments, has_new_comment
@group read/write: get_or_set*, set_files_id
+@group writeonly: add_new_comment, delete_new_comments
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
################################################################################
+def get_new_comments(package):
+ """
+ Returns all the possible comments attached to C{package} in NEW. All versions.
+
+ @type package: string
+ @param package: name of the package
+
+ @rtype: list
+ @return: list of strings containing comments for all versions from all authors for package
+ """
+
+ comments = []
+ query = projectB.query(""" SELECT version, comment, author, notedate
+ FROM new_comments
+ WHERE package = '%s'
+ ORDER BY notedate
+ """ % (package))
+
+ for row in query.getresult():
+ comments.append("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s\n" % (row[2], row[0], row[3], row[1]))
+ comments.append("-"*72)
+
+ return comments
+
+def has_new_comment(package, version):
+ """
+ Returns true if the given combination of C{package}, C{version} has a comment.
+
+ @type package: string
+ @param package: name of the package
+
+ @type version: string
+ @param version: package version
+
+ @rtype: boolean
+ @return: true/false
+ """
+
+ exists = projectB.query("""SELECT 1 FROM new_comments
+ WHERE package='%s'
+ AND version='%s'
+ LIMIT 1"""
+ % (package, version) ).getresult()
+
+ if not exists:
+ return False
+ else:
+ return True
+
+def add_new_comment(package, version, comment, author):
+ """
+ Add a new comment for C{package}, C{version} written by C{author}
+
+ @type package: string
+ @param package: name of the package
+
+ @type version: string
+ @param version: package version
+
+ @type comment: string
+ @param comment: the comment
+
+ @type author: string
+ @param author: the author's name
+ """
+
+ projectB.query(""" INSERT INTO new_comments (package, version, comment, author)
+ VALUES ('%s', '%s', '%s', '%s')
+ """ % (package, version, comment, author) )
+
+ return
+
+def delete_new_comments(package, version):
+ """
+ Delete all comments for C{package}, C{version}, if any exist
+ """
+
+ projectB.query(""" DELETE FROM new_comments
+ WHERE package = '%s' AND version = '%s'
+ """ % (package, version))
+ return
+
+################################################################################
def copy_temporary_contents(package, version, arch, deb, reject):
"""
copy the previously stored contents from the temp table to the permanant one
Returns database id for given override C{type}.
Results are kept in a cache during runtime to minimize database queries.
- @type type: string
- @param type: The name of the override type
+ @type override_type: string
+ @param override_type: The name of the override type
@rtype: int
@return: the database id for the given override type
@type bin_id: int
@param bin_id: the id of the binary
- @type fullpath: string
- @param fullpath: the path of the file being associated with the binary
+ @type fullpaths: list
+ @param fullpaths: the list of paths of the file being associated with the binary
- @return True upon success
+ @return: True upon success
"""
c = self.db_con.cursor()
for fullpath in fullpaths:
(path, file) = os.path.split(fullpath)
+ if path.startswith( "./" ):
+ path = path[2:]
# Get the necessary IDs ...
file_id = self.get_or_set_contents_file_id(file)
path_id = self.get_or_set_contents_path_id(path)
@type fullpaths: list
@param fullpaths: the list of paths of the file being associated with the binary
- @return True upon success
+ @return: True upon success
"""
c = self.db_con.cursor()
###########################################################################
- def do_reject (self, manual = 0, reject_message = ""):
+ def do_reject (self, manual = 0, reject_message = "", note = ""):
"""
Reject an upload. If called without a reject message or C{manual} is
true, spawn an editor so the user can write one.
# editor so the user can add one in...
if manual and not reject_message:
(fd, temp_filename) = utils.temp_filename()
+ temp_file = os.fdopen(fd, 'w')
+ if len(note) > 0:
+ for line in note:
+ temp_file.write(line)
+ temp_file.close()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
def which_conf_file ():
res = socket.gethostbyaddr(socket.gethostname())
+ # In case we allow local config files per user, try if one exists
+ if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
+ homedir = os.getenv("HOME")
+ confpath = os.path.join(homedir, "/etc/dak.conf")
+ if os.path.exists(confpath):
+ apt_pkg.ReadConfigFileISC(Cnf,default_config)
+ # NOTE(review): os.path.join discards homedir because "/etc/dak.conf" is
+ # absolute, and this call reads default_config rather than confpath — so
+ # the per-user file tested for above is likely never read. Confirm intent.
+
+ # We are still in here, so there is no local config file or we do
+ # not allow local files. Do the normal stuff.
if Cnf.get("Config::" + res[0] + "::DakConfig"):
return Cnf["Config::" + res[0] + "::DakConfig"]
else:
def which_apt_conf_file ():
res = socket.gethostbyaddr(socket.gethostname())
+ # In case we allow local config files per user, try if one exists
+ if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
+ homedir = os.getenv("HOME")
+ confpath = os.path.join(homedir, "/etc/dak.conf")
+ if os.path.exists(confpath):
+ apt_pkg.ReadConfigFileISC(Cnf,default_config)
+ # NOTE(review): os.path.join discards homedir because "/etc/dak.conf" is
+ # absolute, and this call reads default_config rather than confpath — so
+ # the per-user file tested for above is likely never read. Confirm intent.
+
if Cnf.get("Config::" + res[0] + "::AptConfig"):
return Cnf["Config::" + res[0] + "::AptConfig"]
else:
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-<mhy> oh no, Ganneff has just corrected my english
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
<mhy> I often wonder if we should use NSA bot or something instead and get dinstall to send emails telling us about its progress :-)
<mhy> dinstall: I'm processing openoffice
<mhy> dinstall: I'm choking, please help me
<Ganneff> and if not - we can make it go multi-network
<Ganneff> first oftc, then opn, then ircnet, then - we will find some. quakenet anyone?
<mhy> I should know better than to give you ideas
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+<mhy> !!!!11111iiiiiioneoneoneone
+<dak> mhy: Error: "!!!11111iiiiiioneoneoneone" is not a valid command.
+<mhy> dak: oh shut up
+<dak> mhy: Error: "oh" is not a valid command.
Various
-------
+
* Implement autosigning, see ftpmaster_autosigning on ftp-master host in text/.
* Check TODO.old and move still-valid/useful entries over here.
Package: ftpmaster-lenny
Version: 1.0
Maintainer: Debian FTP Master <ftpmaster@debian.org>
-Depends: apt-utils, bicyclerepair, binutils-multiarch, build-essential, bzip2, cron, curl, cvs, debian-el, debian-bug, dpkg-dev-el, easypg, devscripts, emacs-goodies-el, emacs22-nox, gnupg, gpgv, graphviz, ikiwiki, irb, libapt-pkg-dev, libdbd-pg-ruby, lintian, mc, mutt, postgresql-plperl-8.3, pychecker, pylint, pymacs, python, python-apt, python-btsutils, python-debian, python-epydoc, python-ldap, python-mode, python-numpy, python-psycopg2, python-pygresql, python-pyrss2gen, python-soappy, python-yaml, r-base, rsync, ruby, ruby-elisp, subversion, git-core, symlinks
+Depends: apt-utils, bicyclerepair, binutils-multiarch, build-essential, bzip2, cron, curl, cvs, debian-el, dpkg-dev-el, easypg, devscripts, emacs-goodies-el, emacs22-nox, gnupg, gpgv, graphviz, ikiwiki, irb, libapt-pkg-dev, libdbd-pg-ruby, lintian, mc, mutt, postgresql-plperl-8.3, pychecker, pylint, pymacs, python, python-apt, python-btsutils, python-debian, python-epydoc, python-ldap, python-mode, python-numpy, python-psycopg2, python-pygresql, python-pyrss2gen, python-soappy, python-yaml, r-base, rsync, ruby, ruby-elisp, subversion, git-core, symlinks
Architecture: all
Copyright: copyright
Changelog: changelog
dpkg: /usr/bin/dselect
-FILE LOCATION
+FILE LOCATION
\ No newline at end of file
--- /dev/null
+From: __DAK_ADDRESS__
+To: __TRANSITION_EMAIL__
+__BCC__
+X-Debian: DAK
+Precedence: bulk
+MIME-Version: 1.0
+Content-Type: text/plain; charset="utf-8"
+Content-Transfer-Encoding: 8bit
+Subject: Transitions Completed
+
+The following transitions are complete and have been removed
+from the transitions list:
+
+__TRANSITION_MESSAGE__
+