# I know what I say. I don't know python and I wrote it. So go and read some other stuff.
import commands
-import re
import sys
-import time
-import os
import apt_pkg
-from daklib import daklog
from daklib import utils
from daklib.dbconn import DBConn, add_database_user, get_or_set_uid
from daklib.regexes import re_gpg_fingerprint, re_user_address, re_user_mails, re_user_name
from daklib import utils
from daklib.dbconn import *
-from daklib.config import Config
################################################################################
from daklib.dbconn import *
from daklib import utils
-from daklib.regexes import re_issource
from daklib.config import Config
################################################################################
missing-overrides - check for missing overrides
source-in-one-dir - ensure the source for each package is in one directory
timestamps - check for future timestamps in .deb's
- tar-gz-in-dsc - ensure each .dsc lists a .tar.gz file
+ files-in-dsc - ensure each .dsc references appropriate Files
validate-indices - ensure files mentioned in Packages & Sources exist
files-not-symlinks - check files in the database aren't symlinks
validate-builddeps - validate build-dependencies of .dsc files in the archive
################################################################################
-def check_missing_tar_gz_in_dsc():
+def check_files_in_dsc():
"""
- Ensure each .dsc lists a .tar.gz file
+ Ensure each .dsc lists appropriate files in its Files field (according
+ to the format announced in its Format field).
"""
count = 0
except:
utils.fubar("error parsing .dsc file '%s'." % (filename))
- dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
- has_tar = 0
+ reasons = utils.check_dsc_files(filename, dsc)
+ for r in reasons:
+ utils.warn(r)
- for f in dsc_files.keys():
- m = re_issource.match(f)
- if not m:
- utils.fubar("%s not recognised as source." % (f))
- ftype = m.group(3)
- if ftype == "orig.tar.gz" or ftype == "tar.gz":
- has_tar = 1
-
- if not has_tar:
- utils.warn("%s has no .tar.gz in the .dsc file." % (f))
+ if len(reasons) > 0:
count += 1
if count:
check_source_in_one_dir()
elif mode == "timestamps":
check_timestamps()
- elif mode == "tar-gz-in-dsc":
- check_missing_tar_gz_in_dsc()
+ elif mode == "files-in-dsc":
+ check_files_in_dsc()
elif mode == "validate-indices":
check_indices_files_exist()
elif mode == "files-not-symlinks":
from daklib.config import Config
from daklib.dbconn import *
from daklib import utils
+from daklib import daklog
################################################################################
Options = None
+Logger = None
################################################################################
# Get the list of binary packages not in a suite and mark them for
# deletion.
- # TODO: This can be a single SQL UPDATE statement
q = session.execute("""
-SELECT b.file FROM binaries b, files f
+SELECT b.file, f.filename FROM binaries b, files f
WHERE f.last_used IS NULL AND b.file = f.id
AND NOT EXISTS (SELECT 1 FROM bin_associations ba WHERE ba.bin = b.id)""")
for i in q.fetchall():
+ Logger.log(["set lastused", i[1]])
session.execute("UPDATE files SET last_used = :lastused WHERE id = :fileid AND last_used IS NULL",
{'lastused': now_date, 'fileid': i[0]})
session.commit()
# Check for any binaries which are marked for eventual deletion
# but are now used again.
- # TODO: This can be a single SQL UPDATE statement
q = session.execute("""
-SELECT b.file FROM binaries b, files f
+SELECT b.file, f.filename FROM binaries b, files f
WHERE f.last_used IS NOT NULL AND f.id = b.file
AND EXISTS (SELECT 1 FROM bin_associations ba WHERE ba.bin = b.id)""")
for i in q.fetchall():
+ Logger.log(["unset lastused", i[1]])
session.execute("UPDATE files SET last_used = NULL WHERE id = :fileid", {'fileid': i[0]})
session.commit()
# Get the list of source packages not in a suite and not used by
# any binaries.
q = session.execute("""
-SELECT s.id, s.file FROM source s, files f
+SELECT s.id, s.file, f.filename FROM source s, files f
WHERE f.last_used IS NULL AND s.file = f.id
AND NOT EXISTS (SELECT 1 FROM src_associations sa WHERE sa.source = s.id)
AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.source = s.id)""")
for i in q.fetchall():
source_id = i[0]
dsc_file_id = i[1]
+ dsc_fname = i[2]
# Mark the .dsc file for deletion
+ Logger.log(["set lastused", dsc_fname])
session.execute("""UPDATE files SET last_used = :last_used
WHERE id = :dscfileid AND last_used IS NULL""",
{'last_used': now_date, 'dscfileid': dsc_file_id})
# Mark all other files references by .dsc too if they're not used by anyone else
- x = session.execute("""SELECT f.id FROM files f, dsc_files d
+ x = session.execute("""SELECT f.id, f.filename FROM files f, dsc_files d
WHERE d.source = :sourceid AND d.file = f.id""",
{'sourceid': source_id})
for j in x.fetchall():
file_id = j[0]
+ file_name = j[1]
y = session.execute("SELECT id FROM dsc_files d WHERE d.file = :fileid", {'fileid': file_id})
if len(y.fetchall()) == 1:
+ Logger.log(["set lastused", file_name])
session.execute("""UPDATE files SET last_used = :lastused
WHERE id = :fileid AND last_used IS NULL""",
{'lastused': now_date, 'fileid': file_id})
# are now used again.
q = session.execute("""
-SELECT f.id FROM source s, files f, dsc_files df
+SELECT f.id, f.filename FROM source s, files f, dsc_files df
WHERE f.last_used IS NOT NULL AND s.id = df.source AND df.file = f.id
AND ((EXISTS (SELECT 1 FROM src_associations sa WHERE sa.source = s.id))
OR (EXISTS (SELECT 1 FROM binaries b WHERE b.source = s.id)))""")
#### XXX: this should also handle deleted binaries specially (ie, not
#### reinstate sources because of them
- # Could be done in SQL; but left this way for hysterical raisins
- # [and freedom to innovate don'cha know?]
for i in q.fetchall():
+ Logger.log(["unset lastused", i[1]])
session.execute("UPDATE files SET last_used = NULL WHERE id = :fileid",
{'fileid': i[0]})
ql = q.fetchall()
if len(ql) > 0:
- print "WARNING: check_files found something it shouldn't"
+ utils.warn("check_files found something it shouldn't")
for x in ql:
- print x
+ utils.warn("orphaned file: %s" % x)
+ Logger.log(["set lastused", x[1], "ORPHANED FILE"])
session.execute("UPDATE files SET last_used = :lastused WHERE id = :fileid",
{'lastused': now_date, 'fileid': x[0]})
# XXX: why doesn't this remove the files here as well? I don't think it
# buys anything keeping this separate
print "Cleaning binaries from the DB..."
+ print "Deleting from binaries table... "
+ for bin in session.query(DBBinary).join(DBBinary.poolfile).filter(PoolFile.last_used <= delete_date):
+ Logger.log(["delete binary", bin.poolfile.filename])
+ if not Options["No-Action"]:
+ session.delete(bin)
if not Options["No-Action"]:
- print "Deleting from binaries table... "
- session.execute("""DELETE FROM binaries WHERE EXISTS
- (SELECT 1 FROM files WHERE binaries.file = files.id
- AND files.last_used <= :deldate)""",
- {'deldate': delete_date})
+ session.commit()
########################################
os.mkdir(dest)
# Delete from source
- if not Options["No-Action"]:
- print "Deleting from source table... "
- session.execute("""DELETE FROM dsc_files
- WHERE EXISTS
- (SELECT 1 FROM source s, files f, dsc_files df
- WHERE f.last_used <= :deletedate
- AND s.file = f.id AND s.id = df.source
- AND df.id = dsc_files.id)""", {'deletedate': delete_date})
- session.execute("""DELETE FROM source
- WHERE EXISTS
- (SELECT 1 FROM files
- WHERE source.file = files.id
- AND files.last_used <= :deletedate)""", {'deletedate': delete_date})
+ print "Deleting from source table... "
+ q = session.execute("""
+SELECT s.id, f.filename FROM source s, files f
+ WHERE f.last_used <= :deletedate
+ AND s.file = f.id""", {'deletedate': delete_date})
+ for s in q.fetchall():
+ Logger.log(["delete source", s[1], s[0]])
+ if not Options["No-Action"]:
+ session.execute("DELETE FROM dsc_files WHERE source = :s_id", {"s_id":s[0]})
+ session.execute("DELETE FROM source WHERE id = :s_id", {"s_id":s[0]})
+ if not Options["No-Action"]:
session.commit()
# Delete files from the pool
- query = """SELECT l.path, f.filename FROM location l, files f
- WHERE f.last_used <= :deletedate AND l.id = f.location"""
+ old_files = session.query(PoolFile).filter(PoolFile.last_used <= delete_date)
if max_delete is not None:
- query += " LIMIT %d" % max_delete
+ old_files = old_files.limit(max_delete)
print "Limiting removals to %d" % max_delete
- q = session.execute(query, {'deletedate': delete_date})
- for i in q.fetchall():
- filename = i[0] + i[1]
+ for pf in old_files:
+ filename = os.path.join(pf.location.path, pf.filename)
if not os.path.exists(filename):
utils.warn("can not find '%s'." % (filename))
continue
+ Logger.log(["delete pool file", filename])
if os.path.isfile(filename):
if os.path.islink(filename):
count += 1
- if Options["No-Action"]:
- print "Removing symlink %s..." % (filename)
- else:
+ Logger.log(["delete symlink", filename])
+ if not Options["No-Action"]:
os.unlink(filename)
else:
size += os.stat(filename)[stat.ST_SIZE]
if os.path.exists(dest_filename):
dest_filename = utils.find_next_free(dest_filename)
- if Options["No-Action"]:
- print "Cleaning %s -> %s ..." % (filename, dest_filename)
- else:
+ Logger.log(["move to morgue", filename, dest_filename])
+ if not Options["No-Action"]:
utils.move(filename, dest_filename)
+
+ if not Options["No-Action"]:
+ session.delete(pf)
+
else:
utils.fubar("%s is neither symlink nor file?!" % (filename))
- # Delete from the 'files' table
- # XXX: I've a horrible feeling that the max_delete stuff breaks here - mhy
- # TODO: Change it so we do the DELETEs as we go; it'll be slower but
- # more reliable
if not Options["No-Action"]:
- print "Deleting from files table... "
- session.execute("DELETE FROM files WHERE last_used <= :deletedate", {'deletedate': delete_date})
session.commit()
if count > 0:
# TODO Replace this whole thing with one SQL statement
q = session.execute("""
-SELECT m.id FROM maintainer m
+SELECT m.id, m.name FROM maintainer m
WHERE NOT EXISTS (SELECT 1 FROM binaries b WHERE b.maintainer = m.id)
AND NOT EXISTS (SELECT 1 FROM source s WHERE s.maintainer = m.id OR s.changedby = m.id)
AND NOT EXISTS (SELECT 1 FROM src_uploaders u WHERE u.maintainer = m.id)""")
for i in q.fetchall():
maintainer_id = i[0]
+ Logger.log(["delete maintainer", i[1]])
if not Options["No-Action"]:
session.execute("DELETE FROM maintainer WHERE id = :maint", {'maint': maintainer_id})
- count += 1
+ count += 1
if not Options["No-Action"]:
session.commit()
# TODO Replace this whole thing with one SQL statement
q = session.execute("""
-SELECT f.id FROM fingerprint f
+SELECT f.id, f.fingerprint FROM fingerprint f
WHERE f.keyring IS NULL
AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.sig_fpr = f.id)
AND NOT EXISTS (SELECT 1 FROM source s WHERE s.sig_fpr = f.id)""")
for i in q.fetchall():
fingerprint_id = i[0]
+ Logger.log(["delete fingerprint", i[1]])
if not Options["No-Action"]:
session.execute("DELETE FROM fingerprint WHERE id = :fpr", {'fpr': fingerprint_id})
- count += 1
+ count += 1
if not Options["No-Action"]:
session.commit()
our_delete_date = now_date - timedelta(seconds = int(cnf["Clean-Suites::QueueBuildStayOfExecution"]))
count = 0
- q = session.execute("SELECT filename FROM queue_build WHERE last_used <= :deletedate",
- {'deletedate': our_delete_date})
- for i in q.fetchall():
- filename = i[0]
- if not os.path.exists(filename):
- utils.warn("%s (from queue_build) doesn't exist." % (filename))
+ for qf in session.query(QueueBuild).filter(QueueBuild.last_used <= our_delete_date):
+ if not os.path.exists(qf.filename):
+ utils.warn("%s (from queue_build) doesn't exist." % (qf.filename))
continue
- if not cnf.FindB("Dinstall::SecurityQueueBuild") and not os.path.islink(filename):
- utils.fubar("%s (from queue_build) should be a symlink but isn't." % (filename))
+ if not cnf.FindB("Dinstall::SecurityQueueBuild") and not os.path.islink(qf.filename):
+ utils.fubar("%s (from queue_build) should be a symlink but isn't." % (qf.filename))
- os.unlink(filename)
+ Logger.log(["delete queue build", qf.filename])
+ if not Options["No-Action"]:
+ os.unlink(qf.filename)
+ session.delete(qf)
count += 1
- session.execute("DELETE FROM queue_build WHERE last_used <= :deletedate",
- {'deletedate': our_delete_date})
-
- session.commit()
+ if not Options["No-Action"]:
+ session.commit()
if count:
print "Cleaned %d queue_build files." % (count)
################################################################################
def main():
- global Options
+ global Options, Logger
cnf = Config()
if Options["Help"]:
usage()
+ Logger = daklog.Logger(cnf, "clean-suites", debug=Options["No-Action"])
+
session = DBConn().session()
now_date = datetime.now()
clean_fingerprints(now_date, delete_date, max_delete, session)
clean_queue_build(now_date, delete_date, max_delete, session)
+ Logger.close()
+
################################################################################
if __name__ == '__main__':
import sys
import os
import logging
-import math
import gzip
import threading
import Queue
################################################################################
-import commands, os, sys, time, re
+import commands, os, sys, re
import apt_pkg
from daklib.config import Config
################################################################################
import sys
-import imp
import daklib.utils
################################################################################
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding table for allowed source formats
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Raphael Hertzog <hertzog@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+ print "Adding tables listing allowed source formats"
+
+ try:
+ c = self.db.cursor()
+ c.execute("""
+ CREATE TABLE src_format (
+ id SERIAL PRIMARY KEY,
+ format_name TEXT NOT NULL,
+ UNIQUE (format_name)
+ )
+ """)
+ c.execute("INSERT INTO src_format (format_name) VALUES('1.0')")
+ c.execute("INSERT INTO src_format (format_name) VALUES('3.0 (quilt)')")
+ c.execute("INSERT INTO src_format (format_name) VALUES('3.0 (native)')")
+
+ c.execute("""
+ CREATE TABLE suite_src_formats (
+ suite INT4 NOT NULL REFERENCES suite(id),
+ src_format INT4 NOT NULL REFERENCES src_format(id),
+ PRIMARY KEY (suite, src_format)
+ )
+ """)
+
+ print "Authorize format 1.0 on all suites by default"
+ c.execute("SELECT id FROM suite")
+ suites = c.fetchall()
+ c.execute("SELECT id FROM src_format WHERE format_name = '1.0'")
+ formats = c.fetchall()
+ for s in suites:
+ for f in formats:
+ c.execute("INSERT INTO suite_src_formats (suite, src_format) VALUES(%s, %s)", (s[0], f[0]))
+
+ print "Authorize all other formats on tpu, unstable & experimental by default"
+ c.execute("SELECT id FROM suite WHERE suite_name IN ('testing-proposed-updates', 'unstable', 'experimental')")
+ suites = c.fetchall()
+ c.execute("SELECT id FROM src_format WHERE format_name != '1.0'")
+ formats = c.fetchall()
+ for s in suites:
+ for f in formats:
+ c.execute("INSERT INTO suite_src_formats (suite, src_format) VALUES(%s, %s)", (s[0], f[0]))
+
+ c.execute("UPDATE config SET value = '15' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply source format update 15, rollback issued. Error message : %s" % (str(msg))
import sys
import os
import tempfile
-import subprocess
import time
import apt_pkg
from daklib.config import Config
from daklib.dbconn import *
-from daklib import utils
# Globals
################################################################################
-import commands, ldap, re, sys
+import commands, ldap, sys
import apt_pkg
from daklib.config import Config
################################################################################
-import os
-import sys
-
from daklib.dbconn import *
################################################################################
import os
import sys
from datetime import datetime
-import re
-import apt_pkg, commands
+import apt_pkg
from daklib import daklog
from daklib.queue import *
from daklib import utils
from daklib.dbconn import *
-from daklib.binary import copy_temporary_contents
from daklib.dak_exceptions import *
from daklib.regexes import re_default_answer, re_issource, re_fdnic
from daklib.urgencylog import UrgencyLog
df = DSCFile()
df.source_id = source.source_id
- # If the .orig.tar.gz is already in the pool, it's
+ # If the .orig tarball is already in the pool, it's
# files id is stored in dsc_files by check_dsc().
files_id = dentry.get("files id", None)
add_deb_to_db(u, newfile, session)
# If this is a sourceful diff only upload that is moving
- # cross-component we need to copy the .orig.tar.gz into the new
+ # cross-component we need to copy the .orig files into the new
# component too for the same reasons as above.
- #
- if u.pkg.changes["architecture"].has_key("source") and u.pkg.orig_tar_id and \
- u.pkg.orig_tar_location != dsc_location_id:
-
- oldf = get_poolfile_by_id(u.pkg.orig_tar_id, session)
- old_filename = os.path.join(oldf.location.path, oldf.filename)
- old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
- 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
-
- new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
-
- # TODO: Care about size/md5sum collisions etc
- (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
-
- if newf is None:
- utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
- newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
-
- # TODO: Check that there's only 1 here
- source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
- dscf = get_dscfiles(source_id = source.source_id, poolfile_id=u.pkg.orig_tar_id, session=session)[0]
- dscf.poolfile_id = newf.file_id
- session.add(dscf)
- session.flush()
+ if u.pkg.changes["architecture"].has_key("source"):
+ for orig_file in u.pkg.orig_files.keys():
+ if not u.pkg.orig_files[orig_file].has_key("id"):
+ continue # Skip if it's not in the pool
+ orig_file_id = u.pkg.orig_files[orig_file]["id"]
+ if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
+ continue # Skip if the location didn't change
+
+ # Do the move
+ oldf = get_poolfile_by_id(orig_file_id, session)
+ old_filename = os.path.join(oldf.location.path, oldf.filename)
+ old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
+ 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
+
+ new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
+
+ # TODO: Care about size/md5sum collisions etc
+ (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
+
+ if newf is None:
+ utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
+ newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
+
+ # TODO: Check that there's only 1 here
+ source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
+ dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
+ dscf.poolfile_id = newf.file_id
+ session.add(dscf)
+ session.flush()
# Install the files into the pool
for newfile, entry in u.pkg.files.items():
os.unlink(dest)
os.symlink(src, dest)
- # Update last_used on any non-upload .orig.tar.gz symlink
- if u.pkg.orig_tar_id:
+ # Update last_used on any non-uploaded .orig symlink
+ for orig_file in u.pkg.orig_files.keys():
# Determine the .orig.tar.gz file name
- for dsc_file in u.pkg.dsc_files.keys():
- if dsc_file.endswith(".orig.tar.gz"):
- u.pkg.orig_tar_gz = os.path.join(dest_dir, dsc_file)
+ if not u.pkg.orig_files[orig_file].has_key("id"):
+ continue # Skip files not in the pool
+ # XXX: do we really want to update the orig_files dict here
+ # instead of using a temporary variable?
+ u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)
# Remove it from the list of packages for later processing by apt-ftparchive
- qb = get_queue_build(u.pkg.orig_tar_gz, suite.suite_id, session)
+ qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
if qb:
qb.in_queue = False
qb.last_used = now_date
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@copyright: 2009 Frank Lichtenheld <djpig@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
import apt_pkg, apt_inst
import examine_package
-from daklib import database
+from daklib.dbconn import *
+from daklib.queue import *
from daklib import daklog
-from daklib import queue
from daklib import utils
from daklib.regexes import re_no_epoch, re_default_answer, re_isanum
from daklib.dak_exceptions import CantOpenError, AlreadyLockedError, CantGetLockError
from daklib.summarystats import SummaryStats
+from daklib.config import Config
# Globals
-Cnf = None #: Configuration, apt_pkg.Configuration
Options = None
-Upload = None
-projectB = None #: database connection, pgobject
Logger = None
Priorities = None
Sections = None
-reject_message = ""
-
################################################################################
################################################################################
################################################################################
-def reject (str, prefix="Rejected: "):
- global reject_message
- if str:
- reject_message += prefix + str + "\n"
-
-def recheck():
- global reject_message
- files = Upload.pkg.files
- reject_message = ""
+def recheck(upload, session):
+ files = upload.pkg.files
+ cnf = Config()
for f in files.keys():
        # The .orig.tar.gz can disappear out from under us if it's a
# duplicate of one in the archive.
if files[f]["type"] == "deb":
source_version = files[f]["source version"]
source_package = files[f]["source package"]
- if not Upload.pkg.changes["architecture"].has_key("source") \
- and not Upload.source_exists(source_package, source_version, Upload.pkg.changes["distribution"].keys()):
+ if not upload.pkg.changes["architecture"].has_key("source") \
+ and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
source_epochless_version = re_no_epoch.sub('', source_version)
dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
found = 0
for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
- if Cnf.has_key("Dir::Queue::%s" % (q)):
- if os.path.exists(Cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+ if cnf.has_key("Dir::Queue::%s" % (q)):
+ if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
found = 1
if not found:
- reject("no source found for %s %s (%s)." % (source_package, source_version, f))
+ upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
# Version and file overwrite checks
if files[f]["type"] == "deb":
- reject(Upload.check_binary_against_db(f), "")
+ upload.check_binary_against_db(f, session)
elif files[f]["type"] == "dsc":
- reject(Upload.check_source_against_db(f), "")
- (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(f)
- reject(reject_msg, "")
+ upload.check_source_against_db(f, session)
+ upload.check_dsc_against_db(f, session)
- if reject_message.find("Rejected") != -1:
+ if len(upload.rejects) > 0:
answer = "XXX"
if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
answer = 'S'
- print "REJECT\n" + reject_message,
+        print "REJECT\n" + "\n".join(upload.rejects),
prompt = "[R]eject, Skip, Quit ?"
while prompt.find(answer) == -1:
answer = answer[:1].upper()
if answer == 'R':
- Upload.do_reject(0, reject_message)
- os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+            upload.do_reject(manual=0, reject_message="\n".join(upload.rejects))
+ os.unlink(upload.pkg.changes_file[:-8]+".dak")
return 0
elif answer == 'S':
return 0
# Sort by time of oldest upload
return cmp(a["oldest"], b["oldest"])
-def sort_changes(changes_files):
+def sort_changes(changes_files, session):
"""Sort into source groups, then sort each source group by version,
have source, filename. Finally, sort the source groups by have
note, time of oldest upload of each source upload."""
cache = {}
# Read in all the .changes files
for filename in changes_files:
+ u = Upload()
try:
- Upload.pkg.changes_file = filename
- Upload.init_vars()
- Upload.update_vars()
- cache[filename] = copy.copy(Upload.pkg.changes)
+ u.pkg.load_dot_dak(filename)
+ u.update_subst()
+ cache[filename] = copy.copy(u.pkg.changes)
cache[filename]["filename"] = filename
except:
sorted_list.append(filename)
per_source[source]["list"].append(cache[filename])
# Determine oldest time and have note status for each source group
for source in per_source.keys():
- q = projectB.query("SELECT 1 FROM source WHERE source = '%s'" % source)
- ql = q.getresult()
- per_source[source]["source_in_database"] = len(ql)>0
+ q = session.query(DBSource).filter_by(source = source).all()
+ per_source[source]["source_in_database"] = len(q)>0
source_list = per_source[source]["list"]
first = source_list[0]
oldest = os.stat(first["filename"])[stat.ST_MTIME]
mtime = os.stat(d["filename"])[stat.ST_MTIME]
if mtime < oldest:
oldest = mtime
- have_note += (database.has_new_comment(d["source"], d["version"], True))
+ have_note += has_new_comment(d["source"], d["version"], session)
per_source[source]["oldest"] = oldest
if not have_note:
per_source[source]["note_state"] = 0; # none
################################################################################
class Section_Completer:
- def __init__ (self):
+ def __init__ (self, session):
self.sections = []
self.matches = []
- q = projectB.query("SELECT section FROM section")
- for i in q.getresult():
- self.sections.append(i[0])
+ for s, in session.query(Section.section):
+ self.sections.append(s)
def complete(self, text, state):
if state == 0:
############################################################
class Priority_Completer:
- def __init__ (self):
+ def __init__ (self, session):
self.priorities = []
self.matches = []
- q = projectB.query("SELECT priority FROM priority")
- for i in q.getresult():
- self.priorities.append(i[0])
+ for p, in session.query(Priority.priority):
+ self.priorities.append(p)
def complete(self, text, state):
if state == 0:
################################################################################
-def print_new (new, indexed, file=sys.stdout):
- queue.check_valid(new)
- broken = 0
+def print_new (new, upload, indexed, file=sys.stdout):
+ check_valid(new)
+ broken = False
index = 0
for pkg in new.keys():
index += 1
priority = new[pkg]["priority"]
if new[pkg]["section id"] == -1:
section += "[!]"
- broken = 1
+ broken = True
if new[pkg]["priority id"] == -1:
priority += "[!]"
- broken = 1
+ broken = True
if indexed:
line = "(%s): %-20s %-20s %-20s" % (index, pkg, priority, section)
else:
line = "%-20s %-20s %-20s" % (pkg, priority, section)
line = line.strip()+'\n'
file.write(line)
- note = database.get_new_comments(Upload.pkg.changes.get("source"))
- if len(note) > 0:
- for line in note:
- print line
- return broken, note
+ notes = get_new_comments(upload.pkg.changes.get("source"))
+ for note in notes:
+ print "\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
+ % (note.author, note.version, note.notedate, note.comment)
+ print "-" * 72
+ return broken, len(notes) > 0
################################################################################
################################################################################
################################################################################
-def edit_new (new):
+def edit_new (new, upload):
# Write the current data to a temporary file
(fd, temp_filename) = utils.temp_filename()
temp_file = os.fdopen(fd, 'w')
- print_new (new, 0, temp_file)
+ print_new (new, upload, indexed=0, file=temp_file)
temp_file.close()
# Spawn an editor on that file
editor = os.environ.get("EDITOR","vi")
if priority.endswith("[!]"):
priority = priority[:-3]
for f in new[pkg]["files"]:
- Upload.pkg.files[f]["section"] = section
- Upload.pkg.files[f]["priority"] = priority
+ upload.pkg.files[f]["section"] = section
+ upload.pkg.files[f]["priority"] = priority
new[pkg]["section"] = section
new[pkg]["priority"] = priority
################################################################################
-def edit_index (new, index):
+def edit_index (new, upload, index):
priority = new[index]["priority"]
section = new[index]["section"]
ftype = new[index]["type"]
readline.set_completer(None)
for f in new[index]["files"]:
- Upload.pkg.files[f]["section"] = section
- Upload.pkg.files[f]["priority"] = priority
+ upload.pkg.files[f]["section"] = section
+ upload.pkg.files[f]["priority"] = priority
new[index]["priority"] = priority
new[index]["section"] = section
return new
################################################################################
-def edit_overrides (new):
+def edit_overrides (new, upload, session):
print
done = 0
while not done:
- print_new (new, 1)
+ print_new (new, upload, indexed=1)
new_index = {}
index = 0
for i in new.keys():
got_answer = 1
if answer == 'E':
- edit_new(new)
+ edit_new(new, upload)
elif answer == 'D':
done = 1
else:
- edit_index (new, new_index[answer])
+ edit_index (new, upload, new_index[answer])
return new
################################################################################
-def edit_note(note):
+def edit_note(note, upload, session):
# Write the current data to a temporary file
(fd, temp_filename) = utils.temp_filename()
editor = os.environ.get("EDITOR","vi")
end()
sys.exit(0)
- database.add_new_comment(Upload.pkg.changes["source"], Upload.pkg.changes["version"], newnote, utils.whoami(), bool(Options["Trainee"]))
+ comment = NewComment()
+ comment.package = upload.pkg.changes["source"]
+ comment.version = upload.pkg.changes["version"]
+ comment.comment = newnote
+ comment.author = utils.whoami()
+ comment.trainee = bool(Options["Trainee"])
+ session.add(comment)
+ session.commit()
################################################################################
-def check_pkg ():
+def check_pkg (upload):
try:
less_fd = os.popen("less -R -", 'w', 0)
stdout_fd = sys.stdout
try:
sys.stdout = less_fd
- changes = utils.parse_changes (Upload.pkg.changes_file)
- examine_package.display_changes(changes['distribution'], Upload.pkg.changes_file)
- files = Upload.pkg.files
+ changes = utils.parse_changes (upload.pkg.changes_file)
+ examine_package.display_changes(changes['distribution'], upload.pkg.changes_file)
+ files = upload.pkg.files
for f in files.keys():
if files[f].has_key("new"):
ftype = files[f]["type"]
## FIXME: horribly Debian specific
-def do_bxa_notification():
- files = Upload.pkg.files
+def do_bxa_notification(upload):
+ files = upload.pkg.files
summary = ""
for f in files.keys():
if files[f]["type"] == "deb":
summary += "\n"
summary += "Package: %s\n" % (control.Find("Package"))
summary += "Description: %s\n" % (control.Find("Description"))
- Upload.Subst["__BINARY_DESCRIPTIONS__"] = summary
- bxa_mail = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-new.bxa_notification")
+ upload.Subst["__BINARY_DESCRIPTIONS__"] = summary
+ bxa_mail = utils.TemplateSubst(upload.Subst,Config()["Dir::Templates"]+"/process-new.bxa_notification")
utils.send_mail(bxa_mail)
################################################################################
-def add_overrides (new):
- changes = Upload.pkg.changes
- files = Upload.pkg.files
+def add_overrides (new, upload, session):
+ changes = upload.pkg.changes
+ files = upload.pkg.files
srcpkg = changes.get("source")
- projectB.query("BEGIN WORK")
for suite in changes["suite"].keys():
- suite_id = database.get_suite_id(suite)
+ suite_id = get_suite(suite).suite_id
for pkg in new.keys():
- component_id = database.get_component_id(new[pkg]["component"])
- type_id = database.get_override_type_id(new[pkg]["type"])
+ component_id = get_component(new[pkg]["component"]).component_id
+ type_id = get_override_type(new[pkg]["type"]).overridetype_id
priority_id = new[pkg]["priority id"]
section_id = new[pkg]["section id"]
Logger.log(["%s overrides" % (srcpkg), suite, new[pkg]["component"], new[pkg]["type"], new[pkg]["priority"], new[pkg]["section"]])
- projectB.query("INSERT INTO override (suite, component, type, package, priority, section, maintainer) VALUES (%s, %s, %s, '%s', %s, %s, '')" % (suite_id, component_id, type_id, pkg, priority_id, section_id))
+ session.execute("INSERT INTO override (suite, component, type, package, priority, section, maintainer) VALUES (:sid, :cid, :tid, :pkg, :pid, :sectid, '')",
+ { 'sid': suite_id, 'cid': component_id, 'tid':type_id, 'pkg': pkg, 'pid': priority_id, 'sectid': section_id})
for f in new[pkg]["files"]:
if files[f].has_key("new"):
del files[f]["new"]
del new[pkg]
- projectB.query("COMMIT WORK")
+ session.commit()
- if Cnf.FindB("Dinstall::BXANotify"):
- do_bxa_notification()
+ if Config().FindB("Dinstall::BXANotify"):
+ do_bxa_notification(upload)
################################################################################
-def prod_maintainer (note):
+def prod_maintainer (note, upload):
+ cnf = Config()
# Here we prepare an editor and get them ready to prod...
(fd, temp_filename) = utils.temp_filename()
temp_file = os.fdopen(fd, 'w')
sys.exit(0)
# Otherwise, do the proding...
user_email_address = utils.whoami() + " <%s>" % (
- Cnf["Dinstall::MyAdminAddress"])
+ cnf["Dinstall::MyAdminAddress"])
- Subst = Upload.Subst
+ Subst = upload.Subst
Subst["__FROM_ADDRESS__"] = user_email_address
Subst["__PROD_MESSAGE__"] = prod_message
- Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+ Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
prod_mail_message = utils.TemplateSubst(
- Subst,Cnf["Dir::Templates"]+"/process-new.prod")
+ Subst,cnf["Dir::Templates"]+"/process-new.prod")
# Send the prod mail if appropriate
- if not Cnf["Dinstall::Options::No-Mail"]:
+ if not cnf["Dinstall::Options::No-Mail"]:
utils.send_mail(prod_mail_message)
print "Sent proding message"
################################################################################
-def do_new():
+def do_new(upload, session):
print "NEW\n"
- files = Upload.pkg.files
- changes = Upload.pkg.changes
+ files = upload.pkg.files
+ changes = upload.pkg.changes
+ cnf = Config()
# Make a copy of distribution we can happily trample on
changes["suite"] = copy.copy(changes["distribution"])
# Fix up the list of target suites
for suite in changes["suite"].keys():
- override = Cnf.Find("Suite::%s::OverrideSuite" % (suite))
+ override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
if override:
- (olderr, newerr) = (database.get_suite_id(suite) == -1,
- database.get_suite_id(override) == -1)
+ (olderr, newerr) = (get_suite(suite, session) == None,
+ get_suite(override, session) == None)
if olderr or newerr:
(oinv, newinv) = ("", "")
if olderr: oinv = "invalid "
changes["suite"][override] = 1
# Validate suites
for suite in changes["suite"].keys():
- suite_id = database.get_suite_id(suite)
- if suite_id == -1:
+ if get_suite(suite, session) is None:
utils.fubar("%s has invalid suite '%s' (possibly overriden). say wha?" % (changes, suite))
# The main NEW processing loop
done = 0
while not done:
# Find out what's new
- new = queue.determine_new(changes, files, projectB)
+ new = determine_new(changes, files)
if not new:
break
if Options["No-Action"] or Options["Automatic"]:
answer = 'S'
- (broken, note) = print_new(new, 0)
+ (broken, note) = print_new(new, upload, indexed=0)
prompt = ""
if not broken and not note:
answer = m.group(1)
answer = answer[:1].upper()
+ if answer in ( 'A', 'E', 'M', 'O', 'R' ) and Options["Trainee"]:
+ utils.warn("Trainees can't do that")
+ continue
+
if answer == 'A' and not Options["Trainee"]:
try:
check_daily_lock()
- done = add_overrides (new)
- Logger.log([utils.getusername(), "NEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+ done = add_overrides (new, upload, session)
+ Logger.log(["NEW ACCEPT: %s" % (upload.pkg.changes_file)])
except CantGetLockError:
print "Hello? Operator! Give me the number for 911!"
print "Dinstall in the locked area, cant process packages, come back later"
elif answer == 'C':
- check_pkg()
+ check_pkg(upload)
elif answer == 'E' and not Options["Trainee"]:
- new = edit_overrides (new)
+ new = edit_overrides (new, upload, session)
elif answer == 'M' and not Options["Trainee"]:
- aborted = Upload.do_reject(manual=1,
+ aborted = upload.do_reject(manual=1,
reject_message=Options["Manual-Reject"],
- note=database.get_new_comments(changes.get("source", "")))
+ note=get_new_comments(changes.get("source", ""), session=session))
if not aborted:
- Logger.log([utils.getusername(), "NEW REJECT: %s" % (Upload.pkg.changes_file)])
- os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+ Logger.log(["NEW REJECT: %s" % (upload.pkg.changes_file)])
+ os.unlink(upload.pkg.changes_file[:-8]+".dak")
done = 1
elif answer == 'N':
- edit_note(database.get_new_comments(changes.get("source", "")))
+ edit_note(get_new_comments(changes.get("source", ""), session=session),
+ upload, session)
elif answer == 'P' and not Options["Trainee"]:
- prod_maintainer(database.get_new_comments(changes.get("source", "")))
- Logger.log([utils.getusername(), "NEW PROD: %s" % (Upload.pkg.changes_file)])
+ prod_maintainer(get_new_comments(changes.get("source", ""), session=session),
+ upload)
+ Logger.log(["NEW PROD: %s" % (upload.pkg.changes_file)])
elif answer == 'R' and not Options["Trainee"]:
confirm = utils.our_raw_input("Really clear note (y/N)? ").lower()
if confirm == "y":
- database.delete_new_comments(changes.get("source"), changes.get("version"))
+ for c in get_new_comments(changes.get("source", ""), changes.get("version", ""), session=session):
+ session.delete(c)
+ session.commit()
elif answer == 'O' and not Options["Trainee"]:
confirm = utils.our_raw_input("Really clear all notes (y/N)? ").lower()
if confirm == "y":
- database.delete_all_new_comments(changes.get("source"))
+ for c in get_new_comments(changes.get("source", ""), session=session):
+ session.delete(c)
+ session.commit()
+
elif answer == 'S':
done = 1
elif answer == 'Q':
################################################################################
-def init():
- global Cnf, Options, Logger, Upload, projectB, Sections, Priorities
-
- Cnf = utils.get_conf()
-
- Arguments = [('a',"automatic","Process-New::Options::Automatic"),
- ('h',"help","Process-New::Options::Help"),
- ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
- ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
- ('t',"trainee","Process-New::Options::Trainee"),
- ('n',"no-action","Process-New::Options::No-Action")]
-
- for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
- if not Cnf.has_key("Process-New::Options::%s" % (i)):
- Cnf["Process-New::Options::%s" % (i)] = ""
-
- changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
- if len(changes_files) == 0 and not Cnf.get("Process-New::Options::Comments-Dir",""):
- changes_files = utils.get_changes_files(Cnf["Dir::Queue::New"])
-
- Options = Cnf.SubTree("Process-New::Options")
-
- if Options["Help"]:
- usage()
-
- Upload = queue.Upload(Cnf)
-
- if not Options["No-Action"]:
- try:
- Logger = Upload.Logger = daklog.Logger(Cnf, "process-new")
- except CantOpenError, e:
- Options["Trainee"] = "True"
-
- projectB = Upload.projectB
-
- Sections = Section_Completer()
- Priorities = Priority_Completer()
- readline.parse_and_bind("tab: complete")
-
- return changes_files
-
-################################################################################
-
-def do_byhand():
+def do_byhand(upload, session):
done = 0
while not done:
- files = Upload.pkg.files
+ files = upload.pkg.files
will_install = 1
byhand = []
done = 1
for f in byhand:
del files[f]
- Logger.log([utils.getusername(), "BYHAND ACCEPT: %s" % (Upload.pkg.changes_file)])
+ Logger.log(["BYHAND ACCEPT: %s" % (upload.pkg.changes_file)])
except CantGetLockError:
print "Hello? Operator! Give me the number for 911!"
print "Dinstall in the locked area, cant process packages, come back later"
elif answer == 'M':
- Logger.log([utils.getusername(), "BYHAND REJECT: %s" % (Upload.pkg.changes_file)])
- Upload.do_reject(1, Options["Manual-Reject"])
- os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+ Logger.log(["BYHAND REJECT: %s" % (upload.pkg.changes_file)])
+ upload.do_reject(manual=1, reject_message=Options["Manual-Reject"])
+ os.unlink(upload.pkg.changes_file[:-8]+".dak")
done = 1
elif answer == 'S':
done = 1
Raises CantGetLockError if the dinstall daily.lock exists.
"""
+ cnf = Config()
try:
- os.open(Cnf["Process-New::DinstallLockFile"], os.O_RDONLY | os.O_CREAT | os.O_EXCL)
+ os.open(cnf["Process-New::DinstallLockFile"],
+ os.O_RDONLY | os.O_CREAT | os.O_EXCL)
except OSError, e:
if e.errno == errno.EEXIST or e.errno == errno.EACCES:
raise CantGetLockError
- os.unlink(Cnf["Process-New::DinstallLockFile"])
+ os.unlink(cnf["Process-New::DinstallLockFile"])
@contextlib.contextmanager
@param package: source package name to lock
"""
- path = os.path.join(Cnf["Process-New::LockDir"], package)
+ path = os.path.join(Config()["Process-New::LockDir"], package)
try:
fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDONLY)
except OSError, e:
finally:
os.unlink(path)
-def move_to_dir (dest, perms=0660, changesperms=0664):
- utils.move (Upload.pkg.changes_file, dest, perms=changesperms)
- file_keys = Upload.pkg.files.keys()
- for f in file_keys:
- utils.move (f, dest, perms=perms)
-
-def is_source_in_queue_dir(qdir):
- entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
- and x.endswith(".changes") ]
- for entry in entries:
- # read the .dak
- u = queue.Upload(Cnf)
- u.pkg.changes_file = os.path.join(qdir, entry)
- u.update_vars()
- if not u.pkg.changes["architecture"].has_key("source"):
- # another binary upload, ignore
- continue
- if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
- # another version, ignore
- continue
- # found it!
- return True
- return False
-
-def move_to_holding(suite, queue_dir):
- print "Moving to %s holding area." % (suite.upper(),)
- if Options["No-Action"]:
- return
- Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
- Upload.dump_vars(queue_dir)
- move_to_dir(queue_dir, perms=0664)
- os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-def _accept():
+# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
+# utils.move (upload.pkg.changes_file, dest, perms=changesperms)
+# file_keys = upload.pkg.files.keys()
+# for f in file_keys:
+# utils.move (f, dest, perms=perms)
+
+# def is_source_in_queue_dir(qdir):
+# entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
+# and x.endswith(".changes") ]
+# for entry in entries:
+# # read the .dak
+# u = queue.Upload(Cnf)
+# u.pkg.changes_file = os.path.join(qdir, entry)
+# u.update_vars()
+# if not u.pkg.changes["architecture"].has_key("source"):
+# # another binary upload, ignore
+# continue
+# if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
+# # another version, ignore
+# continue
+# # found it!
+# return True
+# return False
+
+# def move_to_holding(suite, queue_dir):
+# print "Moving to %s holding area." % (suite.upper(),)
+# if Options["No-Action"]:
+# return
+# Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
+# Upload.dump_vars(queue_dir)
+# move_to_dir(queue_dir, perms=0664)
+# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+
+def _accept(upload):
if Options["No-Action"]:
return
- (summary, short_summary) = Upload.build_summaries()
- Upload.accept(summary, short_summary, targetdir=Cnf["Dir::Queue::Newstage"])
- os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-def do_accept_stableupdate(suite, q):
- queue_dir = Cnf["Dir::Queue::%s" % (q,)]
- if not Upload.pkg.changes["architecture"].has_key("source"):
- # It is not a sourceful upload. So its source may be either in p-u
- # holding, in new, in accepted or already installed.
- if is_source_in_queue_dir(queue_dir):
- # It's in p-u holding, so move it there.
- print "Binary-only upload, source in %s." % (q,)
- move_to_holding(suite, queue_dir)
- elif Upload.source_exists(Upload.pkg.changes["source"],
- Upload.pkg.changes["version"]):
- # dak tells us that there is source available. At time of
- # writing this means that it is installed, so put it into
- # accepted.
- print "Binary-only upload, source installed."
- Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
- _accept()
- elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
- # The source is in accepted, the binary cleared NEW: accept it.
- print "Binary-only upload, source in accepted."
- Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
- _accept()
- elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
- # It's in NEW. We expect the source to land in p-u holding
- # pretty soon.
- print "Binary-only upload, source in new."
- move_to_holding(suite, queue_dir)
- elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
- # It's in newstage. Accept into the holding area
- print "Binary-only upload, source in newstage."
- Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
- _accept()
- else:
- # No case applicable. Bail out. Return will cause the upload
- # to be skipped.
- print "ERROR"
- print "Stable update failed. Source not found."
- return
- else:
- # We are handling a sourceful upload. Move to accepted if currently
- # in p-u holding and to p-u holding otherwise.
- if is_source_in_queue_dir(queue_dir):
- print "Sourceful upload in %s, accepting." % (q,)
- _accept()
- else:
- move_to_holding(suite, queue_dir)
-
-def do_accept():
+ (summary, short_summary) = upload.build_summaries()
+ upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
+ os.unlink(upload.pkg.changes_file[:-8]+".dak")
+
+# def do_accept_stableupdate(upload,suite, q):
+# cnf = Config()
+# queue_dir = cnf["Dir::Queue::%s" % (q,)]
+# if not upload.pkg.changes["architecture"].has_key("source"):
+# # It is not a sourceful upload. So its source may be either in p-u
+# # holding, in new, in accepted or already installed.
+# if is_source_in_queue_dir(queue_dir):
+# # It's in p-u holding, so move it there.
+# print "Binary-only upload, source in %s." % (q,)
+# move_to_holding(suite, queue_dir)
+# elif Upload.source_exists(Upload.pkg.changes["source"],
+# Upload.pkg.changes["version"]):
+# # dak tells us that there is source available. At time of
+# # writing this means that it is installed, so put it into
+# # accepted.
+# print "Binary-only upload, source installed."
+# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+# _accept()
+# elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
+# # The source is in accepted, the binary cleared NEW: accept it.
+# print "Binary-only upload, source in accepted."
+# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+# _accept()
+# elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
+# # It's in NEW. We expect the source to land in p-u holding
+# # pretty soon.
+# print "Binary-only upload, source in new."
+# move_to_holding(suite, queue_dir)
+# elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
+# # It's in newstage. Accept into the holding area
+# print "Binary-only upload, source in newstage."
+# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
+# _accept()
+# else:
+# # No case applicable. Bail out. Return will cause the upload
+# # to be skipped.
+# print "ERROR"
+# print "Stable update failed. Source not found."
+# return
+# else:
+# # We are handling a sourceful upload. Move to accepted if currently
+# # in p-u holding and to p-u holding otherwise.
+# if is_source_in_queue_dir(queue_dir):
+# print "Sourceful upload in %s, accepting." % (q,)
+# _accept()
+# else:
+# move_to_holding(suite, queue_dir)
+
+def do_accept(upload):
print "ACCEPT"
+ cnf = Config()
if not Options["No-Action"]:
- (summary, short_summary) = Upload.build_summaries()
- if Cnf.FindB("Dinstall::SecurityQueueHandling"):
- Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
- move_to_dir(Cnf["Dir::Queue::Embargoed"])
- Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
- # Check for override disparities
- Upload.Subst["__SUMMARY__"] = summary
- else:
+ (summary, short_summary) = upload.build_summaries()
+# if cnf.FindB("Dinstall::SecurityQueueHandling"):
+# upload.dump_vars(cnf["Dir::Queue::Embargoed"])
+# move_to_dir(cnf["Dir::Queue::Embargoed"])
+# upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
+# # Check for override disparities
+# upload.Subst["__SUMMARY__"] = summary
+# else:
# Stable updates need to be copied to proposed-updates holding
# area instead of accepted. Sourceful uploads need to go
# to it directly, binaries only if the source has not yet been
# accepted into p-u.
for suite, q in [("proposed-updates", "ProposedUpdates"),
("oldstable-proposed-updates", "OldProposedUpdates")]:
- if not Upload.pkg.changes["distribution"].has_key(suite):
+ if not upload.pkg.changes["distribution"].has_key(suite):
continue
- return do_accept_stableupdate(suite, q)
+ utils.fubar("stable accept not supported yet")
+# return do_accept_stableupdate(suite, q)
# Just a normal upload, accept it...
- _accept()
+ _accept(upload)
def check_status(files):
new = byhand = 0
new = 1
return (new, byhand)
-def do_pkg(changes_file):
- Upload.pkg.changes_file = changes_file
- Upload.init_vars()
- Upload.update_vars()
- Upload.update_subst()
- files = Upload.pkg.files
+def do_pkg(changes_file, session):
+ u = Upload()
+ u.pkg.load_dot_dak(changes_file)
+ u.update_subst()
+
+ cnf = Config()
+ bcc = "X-DAK: dak process-new\nX-Katie: lisa $Revision: 1.31 $"
+ if cnf.has_key("Dinstall::Bcc"):
+ u.Subst["__BCC__"] = bcc + "\nBcc: %s" % (cnf["Dinstall::Bcc"])
+ else:
+ u.Subst["__BCC__"] = bcc
+
+ files = u.pkg.files
try:
- with lock_package(Upload.pkg.changes["source"]):
- if not recheck():
+ with lock_package(u.pkg.changes["source"]):
+ if not recheck(u, session):
return
(new, byhand) = check_status(files)
if new or byhand:
if new:
- do_new()
+ do_new(u, session)
if byhand:
- do_byhand()
+ do_byhand(u, session)
(new, byhand) = check_status(files)
if not new and not byhand:
try:
check_daily_lock()
- do_accept()
+ do_accept(u)
except CantGetLockError:
print "Hello? Operator! Give me the number for 911!"
print "Dinstall in the locked area, cant process packages, come back later"
if accept_count > 1:
sets = "sets"
sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))))
- Logger.log([utils.getusername(), "total",accept_count,accept_bytes])
+ Logger.log(["total",accept_count,accept_bytes])
if not Options["No-Action"] and not Options["Trainee"]:
Logger.close()
################################################################################
-def do_comments(dir, opref, npref, line, fn):
- for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
- lines = open("%s/%s" % (dir, comm)).readlines()
- if len(lines) == 0 or lines[0] != line + "\n": continue
- changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
- and x.endswith(".changes") ]
- changes_files = sort_changes(changes_files)
- for f in changes_files:
- f = utils.validate_changes_file_arg(f, 0)
- if not f: continue
- print "\n" + f
- fn(f, "".join(lines[1:]))
-
- if opref != npref and not Options["No-Action"]:
- newcomm = npref + comm[len(opref):]
- os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
+# def do_comments(dir, opref, npref, line, fn):
+# for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
+# lines = open("%s/%s" % (dir, comm)).readlines()
+# if len(lines) == 0 or lines[0] != line + "\n": continue
+# changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
+# and x.endswith(".changes") ]
+# changes_files = sort_changes(changes_files)
+# for f in changes_files:
+# f = utils.validate_changes_file_arg(f, 0)
+# if not f: continue
+# print "\n" + f
+# fn(f, "".join(lines[1:]))
+
+# if opref != npref and not Options["No-Action"]:
+# newcomm = npref + comm[len(opref):]
+# os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
+
+# ################################################################################
+
+# def comment_accept(changes_file, comments):
+# Upload.pkg.changes_file = changes_file
+# Upload.init_vars()
+# Upload.update_vars()
+# Upload.update_subst()
+# files = Upload.pkg.files
+
+# if not recheck():
+# return # dak wants to REJECT, crap
+
+# (new, byhand) = check_status(files)
+# if not new and not byhand:
+# do_accept()
+
+# ################################################################################
+
+# def comment_reject(changes_file, comments):
+# Upload.pkg.changes_file = changes_file
+# Upload.init_vars()
+# Upload.update_vars()
+# Upload.update_subst()
+
+# if not recheck():
+# pass # dak has its own reasons to reject as well, which is fine
+
+# reject(comments)
+# print "REJECT\n" + reject_message,
+# if not Options["No-Action"]:
+# Upload.do_reject(0, reject_message)
+# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
################################################################################
-def comment_accept(changes_file, comments):
- Upload.pkg.changes_file = changes_file
- Upload.init_vars()
- Upload.update_vars()
- Upload.update_subst()
- files = Upload.pkg.files
+def main():
+ global Options, Logger, Sections, Priorities
- if not recheck():
- return # dak wants to REJECT, crap
+ cnf = Config()
+ session = DBConn().session()
- (new, byhand) = check_status(files)
- if not new and not byhand:
- do_accept()
+ Arguments = [('a',"automatic","Process-New::Options::Automatic"),
+ ('h',"help","Process-New::Options::Help"),
+ ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
+ ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
+ ('t',"trainee","Process-New::Options::Trainee"),
+ ('n',"no-action","Process-New::Options::No-Action")]
-################################################################################
+ for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+ if not cnf.has_key("Process-New::Options::%s" % (i)):
+ cnf["Process-New::Options::%s" % (i)] = ""
-def comment_reject(changes_file, comments):
- Upload.pkg.changes_file = changes_file
- Upload.init_vars()
- Upload.update_vars()
- Upload.update_subst()
+ changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
+ if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+ changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
- if not recheck():
- pass # dak has its own reasons to reject as well, which is fine
+ Options = cnf.SubTree("Process-New::Options")
+
+ if Options["Help"]:
+ usage()
- reject(comments)
- print "REJECT\n" + reject_message,
if not Options["No-Action"]:
- Upload.do_reject(0, reject_message)
- os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+ try:
+ Logger = daklog.Logger(cnf, "process-new")
+ except CantOpenError, e:
+ Options["Trainee"] = "True"
-################################################################################
+ Sections = Section_Completer(session)
+ Priorities = Priority_Completer(session)
+ readline.parse_and_bind("tab: complete")
-def main():
- changes_files = init()
- if len(changes_files) > 50:
+ if len(changes_files) > 1:
sys.stderr.write("Sorting changes...\n")
- changes_files = sort_changes(changes_files)
+ changes_files = sort_changes(changes_files, session)
# Kill me now? **FIXME**
- Cnf["Dinstall::Options::No-Mail"] = ""
- bcc = "X-DAK: dak process-new\nX-Katie: lisa $Revision: 1.31 $"
- if Cnf.has_key("Dinstall::Bcc"):
- Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
- else:
- Upload.Subst["__BCC__"] = bcc
-
- commentsdir = Cnf.get("Process-New::Options::Comments-Dir","")
- if commentsdir:
- if changes_files != []:
- sys.stderr.write("Can't specify any changes files if working with comments-dir")
- sys.exit(1)
- do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
- do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
- else:
+ cnf["Dinstall::Options::No-Mail"] = ""
+
+# commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
+# if commentsdir:
+# if changes_files != []:
+# sys.stderr.write("Can't specify any changes files if working with comments-dir")
+# sys.exit(1)
+# do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
+# do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
+# else:
+ if True:
for changes_file in changes_files:
changes_file = utils.validate_changes_file_arg(changes_file, 0)
if not changes_file:
continue
print "\n" + changes_file
- do_pkg (changes_file)
+ do_pkg (changes_file, session)
end()
################################################################################
-import commands
import errno
import fcntl
import os
-import re
-import shutil
-import stat
import sys
-import time
import traceback
-import tarfile
-import apt_inst
import apt_pkg
-from debian_bundle import deb822
from daklib.dbconn import *
-from daklib.binary import Binary
from daklib import daklog
from daklib.queue import *
from daklib import utils
from copy import copy
import glob, os, stat, sys, time
import apt_pkg
-import cgi
from daklib import utils
from daklib.changes import Changes
import commands
import os
-import re
import sys
import apt_pkg
import apt_inst
import sys, os, re, time
import apt_pkg
-import tempfile
from debian_bundle import deb822
from daklib.dbconn import *
from daklib import utils
from daklib.queue import determine_new, check_valid
from daklib import utils
+from daklib.regexes import re_source_ext
# Globals
Cnf = None
filestoexamine = []
for pkg in new.keys():
for fn in new[pkg]["files"]:
- if ( c.files[fn].has_key("new") and not
- c.files[fn]["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2"] ):
+ if (c.files[fn].has_key("new") and
+ (c.files[fn]["type"] == "dsc" or
+ not re_source_ext.match(c.files[fn]["type"]))):
filestoexamine.append(fn)
html_header(c.changes["source"], filestoexamine)
import apt_pkg
from daklib import utils
-from daklib.dbconn import DBConn, get_suite_architectures, Suite, Architecture, \
- BinAssociation
+from daklib.dbconn import DBConn, get_suite_architectures, Suite, Architecture
################################################################################
import errno
import fcntl
import tempfile
-import pwd
import apt_pkg
from daklib.dbconn import *
################################################################################
Cnf = None
# Bumped to 15 for the new src_format / suite_src_formats tables.
required_database_schema = 15
################################################################################
__all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')

# Per-tarball fields persisted for orig files in .dak dumps; see
# Changes.sanitised_orig_files().
CHANGESFIELDS_ORIGFILES = [ "id", "location" ]

__all__.append('CHANGESFIELDS_ORIGFILES')
###############################################################################
class Changes(object):
self.dsc = {}
self.files = {}
self.dsc_files = {}
-
- self.orig_tar_id = None
- self.orig_tar_location = ""
- self.orig_tar_gz = None
+ self.orig_files = {}
def file_summary(self):
# changes["distribution"] may not exist in corner cases
self.files.update(p.load())
self.dsc_files.update(p.load())
- self.orig_tar_id = p.load()
- self.orig_tar_location = p.load()
+ next_obj = p.load()
+ if type(next_obj) is DictType:
+ self.pkg.orig_files.update(next_obj)
+ else:
+ # Auto-convert old dak files to new format supporting
+ # multiple tarballs
+ orig_tar_gz = None
+ for dsc_file in self.dsc_files.keys():
+ if dsc_file.endswith(".orig.tar.gz"):
+ orig_tar_gz = dsc_file
+ self.orig_files[orig_tar_gz] = {}
+ if next_obj != None:
+ self.orig_files[orig_tar_gz]["id"] = next_obj
+ next_obj = p.load()
+ if next_obj != None and next_obj != "":
+ self.orig_files[orig_tar_gz]["location"] = next_obj
+ if len(self.orig_files[orig_tar_gz]) == 0:
+ del self.orig_files[orig_tar_gz]
dump_file.close()
return ret
+ def sanitised_orig_files(self):
+ ret = {}
+ for name, entry in self.orig_files.items():
+ ret[name] = {}
+ # Optional orig_files fields
+ for i in CHANGESFIELDS_ORIGFILES:
+ if entry.has_key(i):
+ ret[name][i] = entry[i]
+
+ return ret
+
def write_dot_dak(self, dest_dir):
"""
Dump ourself into a cPickle file.
p.dump(self.sanitised_dsc())
p.dump(self.sanitised_files())
p.dump(self.sanitised_dsc_files())
- p.dump(self.orig_tar_id)
- p.dump(self.orig_tar_location)
+ p.dump(self.sanitised_orig_files())
dump_file.close()
logfile = utils.open_file(logfilename, 'a')
os.umask(umask)
self.logfile = logfile
- # Log the start of the program
- user = pwd.getpwuid(os.getuid())[0]
- self.log(["program start", user])
+ self.log(["program start"])
def log (self, details):
"Log an event"
- # Prepend the timestamp and program name
+ # Prepend timestamp, program name, and user name
+ details.insert(0, utils.getusername())
details.insert(0, self.program)
timestamp = time.strftime("%Y%m%d%H%M%S")
details.insert(0, timestamp)
################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    # BUGFIX: 'args' is a tuple, so item assignment
                    # (args[-1] = ...) raises TypeError; rebuild the tuple
                    # with the freshly created session instead.
                    session = DBConn().session()
                    args = args[:-1] + (session,)
                    private_transaction = True

        # A private session may be committed outright; a caller-supplied one
        # must only be flushed so the caller keeps transaction control.
        if private_transaction:
            session.commit_or_flush = session.commit
        else:
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name
@session_wrapper
def get_archive(archive, session=None):
"""
- returns database id for given c{archive}.
+ returns database id for given C{archive}.
@type archive: string
@param archive: the name of the arhive
__all__.append('ContentFilename')
+@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
"""
Returns database id for given filename.
@rtype: int
@return: the database id for the given component
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
q = session.query(ContentFilename).filter_by(filename=filename)
cf = ContentFilename()
cf.filename = filename
session.add(cf)
- if privatetrans:
- session.commit()
- else:
- session.flush()
+ session.commit_or_flush()
ret = cf.cafilename_id
- if privatetrans:
- session.close()
-
return ret
__all__.append('get_or_set_contents_file_id')
__all__.append('ContentFilepath')
+@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
"""
Returns database id for given path.
@rtype: int
@return: the database id for the given path
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
q = session.query(ContentFilepath).filter_by(filepath=filepath)
cf = ContentFilepath()
cf.filepath = filepath
session.add(cf)
- if privatetrans:
- session.commit()
- else:
- session.flush()
+ session.commit_or_flush()
ret = cf.cafilepath_id
- if privatetrans:
- session.close()
-
return ret
__all__.append('get_or_set_contents_path_id')
__all__.append('Fingerprint')
+@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
"""
Returns Fingerprint object for given fpr.
@rtype: Fingerprint
@return: the Fingerprint object for the given fpr
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
q = session.query(Fingerprint).filter_by(fingerprint=fpr)
fingerprint = Fingerprint()
fingerprint.fingerprint = fpr
session.add(fingerprint)
- if privatetrans:
- session.commit()
- else:
- session.flush()
+ session.commit_or_flush()
ret = fingerprint
- if privatetrans:
- session.close()
-
return ret
__all__.append('get_or_set_fingerprint')
__all__.append('Keyring')
+@session_wrapper
def get_or_set_keyring(keyring, session=None):
"""
If C{keyring} does not have an entry in the C{keyrings} table yet, create one
@rtype: Keyring
@return: the Keyring object for this keyring
-
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
- try:
- obj = session.query(Keyring).filter_by(keyring_name=keyring).first()
-
- if obj is None:
- obj = Keyring(keyring_name=keyring)
- session.add(obj)
- if privatetrans:
- session.commit()
- else:
- session.flush()
+ q = session.query(Keyring).filter_by(keyring_name=keyring)
+ try:
+ return q.one()
+ except NoResultFound:
+ obj = Keyring(keyring_name=keyring)
+ session.add(obj)
+ session.commit_or_flush()
return obj
- finally:
- if privatetrans:
- session.close()
__all__.append('get_or_set_keyring')
__all__.append('Maintainer')
+@session_wrapper
def get_or_set_maintainer(name, session=None):
"""
Returns Maintainer object for given maintainer name.
@rtype: Maintainer
@return: the Maintainer object for the given maintainer
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
q = session.query(Maintainer).filter_by(name=name)
try:
maintainer = Maintainer()
maintainer.name = name
session.add(maintainer)
- if privatetrans:
- session.commit()
- else:
- session.flush()
+ session.commit_or_flush()
ret = maintainer
- if privatetrans:
- session.close()
-
return ret
__all__.append('get_or_set_maintainer')
+@session_wrapper
def get_maintainer(maintainer_id, session=None):
"""
Return the name of the maintainer behind C{maintainer_id} or None if that
@return: the Maintainer with this C{maintainer_id}
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
-
- try:
- return session.query(Maintainer).get(maintainer_id)
- finally:
- if privatetrans:
- session.close()
+ return session.query(Maintainer).get(maintainer_id)
__all__.append('get_maintainer')
session.add(qb)
- # If the .orig.tar.gz is in the pool, create a symlink to
- # it (if one doesn't already exist)
- if changes.orig_tar_id:
- # Determine the .orig.tar.gz file name
- for dsc_file in changes.dsc_files.keys():
- if dsc_file.endswith(".orig.tar.gz"):
- filename = dsc_file
-
- dest = os.path.join(dest_dir, filename)
+ # If the .orig tarballs are in the pool, create a symlink to
+ # them (if one doesn't already exist)
+ for dsc_file in changes.dsc_files.keys():
+ # Skip all files except orig tarballs
+ if not re_is_orig_source.match(dsc_file):
+ continue
+ # Skip orig files not identified in the pool
+ if not (changes.orig_files.has_key(dsc_file) and
+ changes.orig_files[dsc_file].has_key("id")):
+ continue
+ orig_file_id = changes.orig_files[dsc_file]["id"]
+ dest = os.path.join(dest_dir, dsc_file)
# If it doesn't exist, create a symlink
if not os.path.exists(dest):
q = session.execute("SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id",
- {'id': changes.orig_tar_id})
+ {'id': orig_file_id})
res = q.fetchone()
if not res:
- return "[INTERNAL ERROR] Couldn't find id %s in files table." % (changes.orig_tar_id)
+ return "[INTERNAL ERROR] Couldn't find id %s in files table." % (orig_file_id)
src = os.path.join(res[0], res[1])
os.symlink(src, dest)
################################################################################
class SrcFormat(object):
    """
    ORM object for the src_format table; the src_format_id and format_name
    attributes are attached by the SQLAlchemy mapper in DBConn.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
+
+################################################################################
+
class SrcUploader(object):
def __init__(self, *args, **kwargs):
pass
generated if not supplied)
@rtype: Suite
- @return: Suite object for the requested suite name (None if not presenT)
+ @return: Suite object for the requested suite name (None if not present)
"""
q = session.query(Suite).filter_by(suite_name=suite)
################################################################################
class SuiteSrcFormat(object):
    """
    ORM object for the suite_src_formats association table; the suite_id and
    src_format_id attributes are attached by the SQLAlchemy mapper in DBConn.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')
+
@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    # Join through the suite_src_formats association table and order by
    # format name for a stable listing.
    query = session.query(SrcFormat) \
                   .join(SuiteSrcFormat) \
                   .join(Suite).filter_by(suite_name=suite) \
                   .order_by('format_name')

    return query.all()

__all__.append('get_suite_src_formats')
+
+################################################################################
+
class Uid(object):
def __init__(self, *args, **kwargs):
pass
__all__.append('Uid')
+@session_wrapper
def add_database_user(uidname, session=None):
"""
Adds a database user
@return: the uid object for the given uidname
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
-
session.execute("CREATE USER :uid", {'uid': uidname})
-
- if privatetrans:
- session.commit()
- session.close()
+ session.commit_or_flush()
__all__.append('add_database_user')
+@session_wrapper
def get_or_set_uid(uidname, session=None):
"""
Returns uid object for given uidname.
@return: the uid object for the given uidname
"""
- privatetrans = False
- if session is None:
- session = DBConn().session()
- privatetrans = True
-
q = session.query(Uid).filter_by(uid=uidname)
try:
uid = Uid()
uid.uid = uidname
session.add(uid)
- if privatetrans:
- session.commit()
- else:
- session.flush()
+ session.commit_or_flush()
ret = uid
- if privatetrans:
- session.close()
-
return ret
__all__.append('get_or_set_uid')
self.tbl_section = Table('section', self.db_meta, autoload=True)
self.tbl_source = Table('source', self.db_meta, autoload=True)
self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
+ self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
self.tbl_suite = Table('suite', self.db_meta, autoload=True)
self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
+ self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
self.tbl_uid = Table('uid', self.db_meta, autoload=True)
def __setupmappers(self):
source_id = self.tbl_src_associations.c.source,
source = relation(DBSource)))
+ mapper(SrcFormat, self.tbl_src_format,
+ properties = dict(src_format_id = self.tbl_src_format.c.id,
+ format_name = self.tbl_src_format.c.format_name))
+
mapper(SrcUploader, self.tbl_src_uploaders,
properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
source_id = self.tbl_src_uploaders.c.source,
arch_id = self.tbl_suite_architectures.c.architecture,
architecture = relation(Architecture)))
+ mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
+ properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
+ suite = relation(Suite, backref='suitesrcformats'),
+ src_format_id = self.tbl_suite_src_formats.c.src_format,
+ src_format = relation(SrcFormat)))
+
mapper(Uid, self.tbl_uid,
properties = dict(uid_id = self.tbl_uid.c.id,
fingerprint = relation(Fingerprint)))
from holding import Holding
from dbconn import *
from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
# Determine the type
if f.has_key("dbtype"):
file_type = file["dbtype"]
- elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+ elif re_source_ext.match(f["type"]):
file_type = "dsc"
else:
utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
###########################################################################
def load_changes(self, filename):
"""
- @rtype boolean
+ @rtype: boolean
@rvalue: whether the changes file was valid or not. We may want to
reject even if this is True (see what gets put in self.rejects).
This is simply to prevent us even trying things later which will
self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
# Ensure the source version matches the version in the .changes file
- if entry["type"] == "orig.tar.gz":
+ if re_is_orig_source.match(f):
changes_version = self.pkg.changes["chopversion2"]
else:
changes_version = self.pkg.changes["chopversion"]
self.rejects.append("source only uploads are not supported.")
###########################################################################
- def check_dsc(self, action=True):
+ def check_dsc(self, action=True, session=None):
"""Returns bool indicating whether or not the source changes are valid"""
# Ensure there is source to check
if not self.pkg.changes["architecture"].has_key("source"):
if not re_valid_version.match(self.pkg.dsc["version"]):
self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
- # Bumping the version number of the .dsc breaks extraction by stable's
- # dpkg-source. So let's not do that...
- if self.pkg.dsc["format"] != "1.0":
- self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+ # Only a limited list of source formats are allowed in each suite
+ for dist in self.pkg.changes["distribution"].keys():
+ allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+ if self.pkg.dsc["format"] not in allowed:
+ self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
# Validate the Maintainer field
try:
if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
- # Ensure there is a .tar.gz in the .dsc file
- has_tar = False
- for f in self.pkg.dsc_files.keys():
- m = re_issource.match(f)
- if not m:
- self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
- continue
- ftype = m.group(3)
- if ftype == "orig.tar.gz" or ftype == "tar.gz":
- has_tar = True
-
- if not has_tar:
- self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+ # Ensure the Files field contain only what's expected
+ self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
# Ensure source is newer than existing source in target suites
session = DBConn().session()
if not os.path.exists(src):
return
ftype = m.group(3)
- if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+ if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
+ pkg.orig_files[f].has_key("path"):
continue
dest = os.path.join(os.getcwd(), f)
os.symlink(src, dest)
- # If the orig.tar.gz is not a part of the upload, create a symlink to the
- # existing copy.
- if self.pkg.orig_tar_gz:
- dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
- os.symlink(self.pkg.orig_tar_gz, dest)
+ # If the orig files are not a part of the upload, create symlinks to the
+ # existing copies.
+ for orig_file in self.pkg.orig_files.keys():
+ if not self.pkg.orig_files[orig_file].has_key("path"):
+ continue
+ dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+ os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
# Extract the source
cmd = "dpkg-source -sn -x %s" % (dsc_filename)
# We should probably scrap or rethink the whole reprocess thing
# Bail out if:
# a) there's no source
- # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
- # or c) the orig.tar.gz is MIA
+ # or b) reprocess is 2 - we will do this check next time when orig
+ # tarball is in 'files'
+ # or c) the orig files are MIA
if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
- or self.pkg.orig_tar_gz == -1:
+ or len(self.pkg.orig_files) == 0:
return
tmpdir = utils.temp_dirname()
user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
- self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+ self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
# Write the rejection email out as the <foo>.reason file
os.write(reason_fd, reject_mail_message)
"""
@warning: NB: this function can remove entries from the 'files' index [if
- the .orig.tar.gz is a duplicate of the one in the archive]; if
+ the orig tarball is a duplicate of the one in the archive]; if
you're iterating over 'files' and call this function as part of
the loop, be sure to add a check to the top of the loop to
ensure you haven't just tried to dereference the deleted entry.
"""
Cnf = Config()
- self.pkg.orig_tar_gz = None
+ self.pkg.orig_files = {} # XXX: do we need to clear it?
+ orig_files = self.pkg.orig_files
# Try and find all files mentioned in the .dsc. This has
# to work harder to cope with the multiple possible
if len(ql) > 0:
# Ignore exact matches for .orig.tar.gz
match = 0
- if dsc_name.endswith(".orig.tar.gz"):
+ if re_is_orig_source.match(dsc_name):
for i in ql:
if self.pkg.files.has_key(dsc_name) and \
int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
# This would fix the stupidity of changing something we often iterate over
# whilst we're doing it
del self.pkg.files[dsc_name]
- self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
match = 1
if not match:
self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
- elif dsc_name.endswith(".orig.tar.gz"):
+ elif re_is_orig_source.match(dsc_name):
# Check in the pool
ql = get_poolfile_like_name(dsc_name, session)
# need this for updating dsc_files in install()
dsc_entry["files id"] = x.file_id
# See install() in process-accepted...
- self.pkg.orig_tar_id = x.file_id
- self.pkg.orig_tar_gz = old_file
- self.pkg.orig_tar_location = x.location.location_id
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["id"] = x.file_id
+ orig_files[dsc_name]["path"] = old_file
+ orig_files[dsc_name]["location"] = x.location.location_id
else:
# TODO: Record the queues and info in the DB so we don't hardcode all this crap
# Not there? Check the queue directories...
in_otherdir_fh.close()
actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
found = in_otherdir
- self.pkg.orig_tar_gz = in_otherdir
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["path"] = in_otherdir
if not found:
self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
- self.pkg.orig_tar_gz = -1
continue
else:
self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")

# Extension of an upstream ("orig") tarball, including component tarballs
# such as foo.orig-component.tar.gz (gzip or bzip2 compressed).
orig_source_ext_re = r"orig(?:-.+)?\.tar\.(?:gz|bz2)"
re_orig_source_ext = re.compile(orig_source_ext_re + "$")
# Any file extension that may legitimately appear in a .dsc Files field.
re_source_ext = re.compile("(" + orig_source_ext_re + r"|debian\.tar\.(?:gz|bz2)|diff\.gz|tar\.(?:gz|bz2)|dsc)$")
# Full source file name: <package>_<version>.<extension>; group 3 is the
# extension matched by re_source_ext.
re_issource = re.compile(r"(.+)_(.+?)\." + re_source_ext.pattern)
# Matches orig tarballs (main or component), capturing package and version.
re_is_orig_source = re.compile (r"(.+)_(.+?)\.orig(?:-.+)?\.tar\.(?:gz|bz2)$")
re_single_line_field = re.compile(r"^(\S*?)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
--- /dev/null
import re

# Registry of every source-format descriptor class; populated automatically
# by the SourceFormat metaclass as each format class below is defined.
srcformats = []

class SourceFormat(type):
    """
    Metaclass for source package format descriptors.

    Each class created through it is appended to the module-level
    C{srcformats} registry and must define: C{name} (display name),
    C{format} (regex for the .dsc Format field), C{requires} and
    C{disallowed} (file-type keys that must / must not be present).
    """
    def __new__(cls, name, bases, attrs):
        klass = super(SourceFormat, cls).__new__(cls, name, bases, attrs)
        srcformats.append(klass)

        # Sanity-check that the format class defines its contract.
        assert str(klass.name)
        assert iter(klass.requires)
        assert iter(klass.disallowed)

        # Pre-compile the Format field pattern.
        # NOTE(review): the pattern is not anchored with '$', so '1.0' also
        # matches e.g. '1.0x' -- confirm this is intended.
        klass.re_format = re.compile(klass.format)

        return klass

    @classmethod
    def reject_msgs(cls, has):
        """Yield generic rejection messages for the file-type counts in C{has}."""
        if any(not has[x] for x in cls.requires):
            yield "lack of required files for format %s" % cls.name

        for key in cls.disallowed:
            if has[key]:
                yield "contains source files not allowed in format %s" % cls.name
+
class FormatOne(SourceFormat):
    """Descriptor for the classic 1.0 source format."""
    __metaclass__ = SourceFormat

    name = '1.0'
    format = r'1.0'

    requires = ()
    disallowed = ('debian_tar', 'more_orig_tar')

    @classmethod
    def reject_msgs(cls, has):
        """Yield 1.0-specific rejection messages, then the generic ones."""
        native = has['native_tar_gz']
        upstream = has['orig_tar_gz']
        diff = has['debian_diff']

        # A 1.0 package is either native (.tar.gz) or orig + diff.
        if not (native or (upstream and diff)):
            yield "no .tar.gz or .orig.tar.gz+.diff.gz in 'Files' field."
        if native and diff:
            yield "native package with diff makes no sense"
        # 1.0 only permits gzip tarballs; a mismatch between the .gz counters
        # and the generic tarball counters means a bzip2 tarball slipped in.
        if upstream != has['orig_tar'] or native != has['native_tar']:
            yield "contains source files not allowed in format %s" % cls.name

        for msg in super(FormatOne, cls).reject_msgs(has):
            yield msg
+
class FormatThree(SourceFormat):
    """Descriptor for the 3.x (native) source format."""
    __metaclass__ = SourceFormat

    name = '3.x (native)'
    format = r'3\.\d+ \(native\)'

    # Exactly one native tarball; nothing else is permitted.
    requires = ('native_tar',)
    disallowed = ('orig_tar', 'debian_diff', 'debian_tar', 'more_orig_tar')
+
class FormatThreeQuilt(SourceFormat):
    """Descriptor for the 3.x (quilt) source format."""
    __metaclass__ = SourceFormat

    name = '3.x (quilt)'
    format = r'3\.\d+ \(quilt\)'

    # Needs an orig tarball plus a debian tarball; a 1.0-style diff or a
    # native tarball is not allowed.  Additional orig component tarballs
    # ('more_orig_tar') are intentionally permitted.
    requires = ('orig_tar', 'debian_tar')
    disallowed = ('debian_diff', 'native_tar')
import re
import string
import email as modemail
+import subprocess
from dbconn import DBConn, get_architecture, get_component, get_suite
from dak_exceptions import *
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_verwithext, \
- re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
- re_whitespace_comment
+ re_parse_maintainer, re_taint_free, re_gpg_uid, \
+ re_re_mark, re_whitespace_comment, re_issource
+
+from srcformats import srcformats
+from collections import defaultdict
################################################################################
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
+import commands
def dak_getstatusoutput(cmd):
    """
    Drop-in replacement for commands.getstatusoutput(), implemented on top
    of subprocess.

    @type cmd: string
    @param cmd: shell command to execute

    @rtype: tuple
    @return: (exit status, combined stdout+stderr output)
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # communicate() reads all output and waits for the process to exit,
    # avoiding any risk of deadlock on a full pipe buffer (stderr is
    # redirected into stdout, so the second element is always None).
    output, _ = pipe.communicate()

    ret = pipe.returncode
    if ret is None:
        ret = 0

    return ret, output

# Globally replace commands.getstatusoutput so every caller in dak gets the
# subprocess-based implementation.
commands.getstatusoutput = dak_getstatusoutput
+
################################################################################
def html_escape(s):
################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    rejmsg = []

    # Parse the file if needed
    if dsc is None:
        dsc = parse_changes(dsc_filename, signing_rules=1)

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    # Maps a regex over the extension matched by re_issource (group 3) to
    # the counters in 'has' that it increments.  Order matters: the first
    # matching entry wins, so the gzip-specific patterns come first.
    ftype_lookup = (
        (r'orig\.tar\.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff\.gz', ('debian_diff',)),
        (r'tar\.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
        (r'tar\.(gz|bz2)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
    )

    for f in dsc_files.keys():
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue

        # Populate 'has' dictionary by resolving keys in lookup table
        matched = False
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                matched = True
                for key in keys:
                    has[key] += 1
                break

        # File does not match anything in lookup table; reject.
        # BUGFIX: was a call to the undefined name 'reject', which raised
        # NameError instead of recording the rejection.
        if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests; only the first format whose pattern
    # matches the announced Format field is consulted.
    for fmt in srcformats:
        if fmt.re_format.match(dsc['format']):
            rejmsg.extend([
                '%s: %s' % (dsc_filename, x) for x in fmt.reject_msgs(has)
            ])
            break

    return rejmsg
+
+################################################################################
+
def check_hash_fields(what, manifest):
"""
check_hash_fields ensures that there are no checksum fields in the
format = format[:2]
if is_a_dsc:
- # format = (1,0) are the only formats we currently accept,
# format = (0,0) are missing format headers of which we still
# have some in the archive.
- if format != (1,0) and format != (0,0):
+ if format != (1,0) and format != (0,0) and \
+ format != (3,0,"quilt") and format != (3,0,"native"):
raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
else:
if (format < (1,5) or format > (1,8)):
+#!/usr/bin/env python
+
import unittest
import os, sys
self.MATCH('W: tzdata: binary-without-manpage usr/sbin/tzconfig').groups(),
('W', 'tzdata', 'binary-without-manpage', 'usr/sbin/tzconfig')
)
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null
+#!/usr/bin/env python
+
+import unittest
+
+import os, sys
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+from collections import defaultdict
+
+from daklib import srcformats
+
+class SourceFormatTestCase(unittest.TestCase):
+ def get_rejects(self, has_vars):
+ has = defaultdict(lambda: 0)
+ has.update(has_vars)
+ return list(self.fmt.reject_msgs(has))
+
+ def assertAccepted(self, has):
+ self.assertEqual(self.get_rejects(has), [])
+
+ def assertRejected(self, has):
+ self.assertNotEqual(self.get_rejects(has), [])
+
class FormatOneTestCase(SourceFormatTestCase):
    """Checks for the classic 1.0 source format."""
    fmt = srcformats.FormatOne

    def testEmpty(self):
        # A .dsc listing no source files at all must be rejected.
        self.assertRejected({})

    def testNative(self):
        # Native package: a single .tar.gz and nothing else.
        self.assertAccepted({'native_tar': 1, 'native_tar_gz': 1})

    def testStandard(self):
        # Non-native package: .orig.tar.gz plus .diff.gz.
        self.assertAccepted({'orig_tar': 1, 'orig_tar_gz': 1, 'debian_diff': 1})

    def testDisallowed(self):
        # A debian.tar is a 3.x-only artefact.
        self.assertRejected({'native_tar': 1, 'native_tar_gz': 1, 'debian_tar': 1})
        # An orig tarball without a diff is incomplete.
        self.assertRejected({'orig_tar': 1, 'orig_tar_gz': 1, 'debian_diff': 0})
        # Additional orig tarballs are 3.x (quilt) only.
        self.assertRejected({'native_tar': 1, 'native_tar_gz': 1, 'more_orig_tar': 1})
        # Native package plus a diff makes no sense.
        self.assertRejected({'native_tar': 1, 'native_tar_gz': 1, 'debian_diff': 1})
+
class FormatTreeTestCase(SourceFormatTestCase):
    """Checks for the 3.x (native) source format."""
    # NOTE(review): 'Tree' looks like a typo for 'Three'; the name is kept
    # so the test class name stays stable.
    fmt = srcformats.FormatThree

    def testEmpty(self):
        self.assertRejected({})

    def testSimple(self):
        # A lone native tarball is all that is required.
        self.assertAccepted({'native_tar': 1})

    def testDisallowed(self):
        # Anything beyond the native tarball must be rejected.
        for extra in ('orig_tar', 'debian_diff', 'debian_tar', 'more_orig_tar'):
            self.assertRejected({'native_tar': 1, extra: 1})
+
class FormatTreeQuiltTestCase(SourceFormatTestCase):
    """Checks for the 3.x (quilt) source format."""
    fmt = srcformats.FormatThreeQuilt

    def testEmpty(self):
        self.assertRejected({})

    def testSimple(self):
        self.assertAccepted({'orig_tar': 1, 'debian_tar': 1})

    def testMultipleTarballs(self):
        # Extra orig component tarballs are fine for quilt packages.
        self.assertAccepted({'orig_tar': 1, 'debian_tar': 1, 'more_orig_tar': 42})

    def testDisallowed(self):
        # A 1.0-style diff or a native tarball is forbidden.
        for extra in ('debian_diff', 'native_tar'):
            self.assertRejected({'orig_tar': 1, 'debian_tar': 1, extra: 1})

if __name__ == '__main__':
    unittest.main()