5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
49 from sqlalchemy.orm import sessionmaker, mapper, relation
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
60 from dak_exceptions import NoSourceFieldError
62 ################################################################################
64 # Patch in support for the debversion field type so that it works during
67 class DebVersion(sqltypes.TypeEngine):
68 def get_col_spec(self):
71 def bind_processor(self, dialect):
74 def result_processor(self, dialect):
77 sa_major_version = sqlalchemy.__version__[0:3]
78 if sa_major_version in ["0.5", "0.6"]:
79 from sqlalchemy.databases import postgres
80 postgres.ischema_names['debversion'] = DebVersion
82 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
84 ################################################################################
86 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
88 ################################################################################
90 def session_wrapper(fn):
92 Wrapper around common ".., session=None):" handling. If the wrapped
93 function is called without passing 'session', we create a local one
94 and destroy it when the function ends.
96 Also attaches a commit_or_flush method to the session; if we created a
97 local session, this is a synonym for session.commit(), otherwise it is a
98 synonym for session.flush().
101 def wrapped(*args, **kwargs):
102 private_transaction = False
104 # Find the session object
105 session = kwargs.get('session')
108 if len(args) <= len(getargspec(fn)[0]) - 1:
109 # No session specified as last argument or in kwargs
110 private_transaction = True
111 session = kwargs['session'] = DBConn().session()
113 # Session is last argument in args
117 session = args[-1] = DBConn().session()
118 private_transaction = True
120 if private_transaction:
121 session.commit_or_flush = session.commit
123 session.commit_or_flush = session.flush
126 return fn(*args, **kwargs)
128 if private_transaction:
129 # We created a session; close it.
132 wrapped.__doc__ = fn.__doc__
133 wrapped.func_name = fn.func_name
137 __all__.append('session_wrapper')
139 ################################################################################
141 class Architecture(object):
142 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    # Permit comparing an Architecture directly against a plain
    # architecture-name string (e.g. arch == "amd64").
    if not isinstance(val, str):
        # Anything else: defer to the default comparison machinery.
        return NotImplemented
    return self.arch_string == val
def __ne__(self, val):
    # Permit comparing an Architecture directly against a plain
    # architecture-name string (e.g. arch != "sparc").
    if not isinstance(val, str):
        # Anything else: defer to the default comparison machinery.
        return NotImplemented
    return self.arch_string != val
158 return '<Architecture %s>' % self.arch_string
160 __all__.append('Architecture')
163 def get_architecture(architecture, session=None):
165 Returns database id for given C{architecture}.
167 @type architecture: string
168 @param architecture: The name of the architecture
170 @type session: Session
171 @param session: Optional SQLA session object (a temporary one will be
172 generated if not supplied)
175 @return: Architecture object for the given arch (None if not present)
178 q = session.query(Architecture).filter_by(arch_string=architecture)
182 except NoResultFound:
185 __all__.append('get_architecture')
188 def get_architecture_suites(architecture, session=None):
190 Returns list of Suite objects for given C{architecture} name
192 @type architecture: str
193 @param architecture: Architecture name to search for
195 @type session: Session
196 @param session: Optional SQL session object (a temporary one will be
197 generated if not supplied)
200 @return: list of Suite objects for the given name (may be empty)
203 q = session.query(Suite)
204 q = q.join(SuiteArchitecture)
205 q = q.join(Architecture).filter_by(arch_string=architecture).order_by('suite_name')
211 __all__.append('get_architecture_suites')
213 ################################################################################
215 class Archive(object):
216 def __init__(self, *args, **kwargs):
220 return '<Archive %s>' % self.archive_name
222 __all__.append('Archive')
225 def get_archive(archive, session=None):
227 returns database id for given C{archive}.
229 @type archive: string
230 @param archive: the name of the archive
232 @type session: Session
233 @param session: Optional SQLA session object (a temporary one will be
234 generated if not supplied)
237 @return: Archive object for the given name (None if not present)
240 archive = archive.lower()
242 q = session.query(Archive).filter_by(archive_name=archive)
246 except NoResultFound:
249 __all__.append('get_archive')
251 ################################################################################
253 class BinAssociation(object):
254 def __init__(self, *args, **kwargs):
258 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
260 __all__.append('BinAssociation')
262 ################################################################################
264 class BinContents(object):
265 def __init__(self, *args, **kwargs):
269 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
271 __all__.append('BinContents')
273 ################################################################################
275 class DBBinary(object):
276 def __init__(self, *args, **kwargs):
280 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
282 __all__.append('DBBinary')
285 def get_suites_binary_in(package, session=None):
287 Returns list of Suite objects which given C{package} name is in
290 @param package: DBBinary package name to search for
293 @return: list of Suite objects for the given package
296 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
298 __all__.append('get_suites_binary_in')
301 def get_binary_from_id(binary_id, session=None):
303 Returns DBBinary object for given C{id}
306 @param binary_id: Id of the required binary
308 @type session: Session
309 @param session: Optional SQLA session object (a temporary one will be
310 generated if not supplied)
313 @return: DBBinary object for the given binary (None if not present)
316 q = session.query(DBBinary).filter_by(binary_id=binary_id)
320 except NoResultFound:
323 __all__.append('get_binary_from_id')
326 def get_binaries_from_name(package, version=None, architecture=None, session=None):
328 Returns list of DBBinary objects for given C{package} name
331 @param package: DBBinary package name to search for
333 @type version: str or None
334 @param version: Version to search for (or None)
336 @type architecture: str, list or None
337 @param architecture: Architectures to limit to (or None if no limit)
339 @type session: Session
340 @param session: Optional SQL session object (a temporary one will be
341 generated if not supplied)
344 @return: list of DBBinary objects for the given name (may be empty)
347 q = session.query(DBBinary).filter_by(package=package)
349 if version is not None:
350 q = q.filter_by(version=version)
352 if architecture is not None:
353 if not isinstance(architecture, list):
354 architecture = [architecture]
355 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
361 __all__.append('get_binaries_from_name')
364 def get_binaries_from_source_id(source_id, session=None):
366 Returns list of DBBinary objects for given C{source_id}
369 @param source_id: source_id to search for
371 @type session: Session
372 @param session: Optional SQL session object (a temporary one will be
373 generated if not supplied)
376 @return: list of DBBinary objects for the given name (may be empty)
379 return session.query(DBBinary).filter_by(source_id=source_id).all()
381 __all__.append('get_binaries_from_source_id')
384 def get_binary_from_name_suite(package, suitename, session=None):
385 ### For dak examine-package
386 ### XXX: Doesn't use object API yet
388 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
389 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
390 WHERE b.package='%(package)s'
392 AND fi.location = l.id
393 AND l.component = c.id
396 AND su.suite_name %(suitename)s
397 ORDER BY b.version DESC"""
399 return session.execute(sql % {'package': package, 'suitename': suitename})
401 __all__.append('get_binary_from_name_suite')
404 def get_binary_components(package, suitename, arch, session=None):
405 # Check for packages that have moved from one component to another
406 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
407 WHERE b.package=:package AND s.suite_name=:suitename
408 AND (a.arch_string = :arch OR a.arch_string = 'all')
409 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
410 AND f.location = l.id
411 AND l.component = c.id
414 vals = {'package': package, 'suitename': suitename, 'arch': arch}
416 return session.execute(query, vals)
418 __all__.append('get_binary_components')
420 ################################################################################
422 class BinaryACL(object):
423 def __init__(self, *args, **kwargs):
427 return '<BinaryACL %s>' % self.binary_acl_id
429 __all__.append('BinaryACL')
431 ################################################################################
433 class BinaryACLMap(object):
434 def __init__(self, *args, **kwargs):
438 return '<BinaryACLMap %s>' % self.binary_acl_map_id
440 __all__.append('BinaryACLMap')
442 ################################################################################
447 ArchiveDir "%(archivepath)s";
448 OverrideDir "%(overridedir)s";
449 CacheDir "%(cachedir)s";
454 Packages::Compress ". bzip2 gzip";
455 Sources::Compress ". bzip2 gzip";
460 bindirectory "incoming"
465 BinOverride "override.sid.all3";
466 BinCacheDB "packages-accepted.db";
468 FileList "%(filelist)s";
471 Packages::Extensions ".deb .udeb";
474 bindirectory "incoming/"
477 BinOverride "override.sid.all3";
478 SrcOverride "override.sid.all3.src";
479 FileList "%(filelist)s";
483 class BuildQueue(object):
484 def __init__(self, *args, **kwargs):
488 return '<BuildQueue %s>' % self.queue_name
490 def write_metadata(self, starttime, force=False):
491 # Do we write out metafiles?
492 if not (force or self.generate_metadata):
495 session = DBConn().session().object_session(self)
497 fl_fd = fl_name = ac_fd = ac_name = None
499 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
500 startdir = os.getcwd()
503 # Grab files we want to include
504 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
505 # Write file list with newer files
506 (fl_fd, fl_name) = mkstemp()
508 os.write(fl_fd, '%s\n' % n.fullpath)
513 # Write minimal apt.conf
514 # TODO: Remove hardcoding from template
515 (ac_fd, ac_name) = mkstemp()
516 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
518 'cachedir': cnf["Dir::Cache"],
519 'overridedir': cnf["Dir::Override"],
523 # Run apt-ftparchive generate
524 os.chdir(os.path.dirname(ac_name))
525 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
527 # Run apt-ftparchive release
528 # TODO: Eww - fix this
529 bname = os.path.basename(self.path)
533 # We have to remove the Release file otherwise it'll be included in the
536 os.unlink(os.path.join(bname, 'Release'))
540 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
542 # Crude hack with open and append, but this whole section is and should be redone.
543 if self.notautomatic:
544 release=open("Release", "a")
545 release.write("NotAutomatic: yes")
550 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
551 if cnf.has_key("Dinstall::SigningPubKeyring"):
552 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
554 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
556 # Move the files if we got this far
557 os.rename('Release', os.path.join(bname, 'Release'))
559 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
561 # Clean up any left behind files
588 def clean_and_update(self, starttime, Logger, dryrun=False):
589 """WARNING: This routine commits for you"""
590 session = DBConn().session().object_session(self)
592 if self.generate_metadata and not dryrun:
593 self.write_metadata(starttime)
595 # Grab files older than our execution time
596 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
602 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
604 Logger.log(["I: Removing %s from the queue" % o.fullpath])
605 os.unlink(o.fullpath)
608 # If it wasn't there, don't worry
609 if e.errno == ENOENT:
612 # TODO: Replace with proper logging call
613 Logger.log(["E: Could not remove %s" % o.fullpath])
620 for f in os.listdir(self.path):
621 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
625 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
626 except NoResultFound:
627 fp = os.path.join(self.path, f)
629 Logger.log(["I: Would remove unused link %s" % fp])
631 Logger.log(["I: Removing unused link %s" % fp])
635 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
637 def add_file_from_pool(self, poolfile):
638 """Copies a file into the pool. Assumes that the PoolFile object is
639 attached to the same SQLAlchemy session as the Queue object is.
641 The caller is responsible for committing after calling this function."""
642 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
644 # Check if we have a file of this name or this ID already
645 for f in self.queuefiles:
646 if f.fileid is not None and f.fileid == poolfile.file_id or \
647 f.poolfile.filename == poolfile_basename:
648 # In this case, update the BuildQueueFile entry so we
649 # don't remove it too early
650 f.lastused = datetime.now()
651 DBConn().session().object_session(poolfile).add(f)
654 # Prepare BuildQueueFile object
655 qf = BuildQueueFile()
656 qf.build_queue_id = self.queue_id
657 qf.lastused = datetime.now()
658 qf.filename = poolfile_basename
660 targetpath = poolfile.fullpath
661 queuepath = os.path.join(self.path, poolfile_basename)
665 # We need to copy instead of symlink
667 utils.copy(targetpath, queuepath)
668 # NULL in the fileid field implies a copy
671 os.symlink(targetpath, queuepath)
672 qf.fileid = poolfile.file_id
676 # Get the same session as the PoolFile is using and add the qf to it
677 DBConn().session().object_session(poolfile).add(qf)
682 __all__.append('BuildQueue')
685 def get_build_queue(queuename, session=None):
687 Returns BuildQueue object for given C{queue name}, creating it if it does not
690 @type queuename: string
691 @param queuename: The name of the queue
693 @type session: Session
694 @param session: Optional SQLA session object (a temporary one will be
695 generated if not supplied)
698 @return: BuildQueue object for the given queue
701 q = session.query(BuildQueue).filter_by(queue_name=queuename)
705 except NoResultFound:
708 __all__.append('get_build_queue')
710 ################################################################################
712 class BuildQueueFile(object):
713 def __init__(self, *args, **kwargs):
717 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
721 return os.path.join(self.buildqueue.path, self.filename)
724 __all__.append('BuildQueueFile')
726 ################################################################################
728 class ChangePendingBinary(object):
729 def __init__(self, *args, **kwargs):
733 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
735 __all__.append('ChangePendingBinary')
737 ################################################################################
739 class ChangePendingFile(object):
740 def __init__(self, *args, **kwargs):
744 return '<ChangePendingFile %s>' % self.change_pending_file_id
746 __all__.append('ChangePendingFile')
748 ################################################################################
750 class ChangePendingSource(object):
751 def __init__(self, *args, **kwargs):
755 return '<ChangePendingSource %s>' % self.change_pending_source_id
757 __all__.append('ChangePendingSource')
759 ################################################################################
761 class Component(object):
762 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    # Permit comparing a Component directly against a plain
    # component-name string (e.g. component == "main").
    if not isinstance(val, str):
        # Anything else: defer to the default comparison machinery.
        return NotImplemented
    return self.component_name == val
def __ne__(self, val):
    # Permit comparing a Component directly against a plain
    # component-name string (e.g. component != "contrib").
    if not isinstance(val, str):
        # Anything else: defer to the default comparison machinery.
        return NotImplemented
    return self.component_name != val
778 return '<Component %s>' % self.component_name
781 __all__.append('Component')
784 def get_component(component, session=None):
786 Returns database id for given C{component}.
788 @type component: string
789 @param component: The name of the override type
792 @return: the database id for the given component
795 component = component.lower()
797 q = session.query(Component).filter_by(component_name=component)
801 except NoResultFound:
804 __all__.append('get_component')
806 ################################################################################
808 class DBConfig(object):
809 def __init__(self, *args, **kwargs):
813 return '<DBConfig %s>' % self.name
815 __all__.append('DBConfig')
817 ################################################################################
820 def get_or_set_contents_file_id(filename, session=None):
822 Returns database id for given filename.
824 If no matching file is found, a row is inserted.
826 @type filename: string
827 @param filename: The filename
828 @type session: SQLAlchemy
829 @param session: Optional SQL session object (a temporary one will be
830 generated if not supplied). If not passed, a commit will be performed at
831 the end of the function, otherwise the caller is responsible for committing.
834 @return: the database id for the given component
837 q = session.query(ContentFilename).filter_by(filename=filename)
840 ret = q.one().cafilename_id
841 except NoResultFound:
842 cf = ContentFilename()
843 cf.filename = filename
845 session.commit_or_flush()
846 ret = cf.cafilename_id
850 __all__.append('get_or_set_contents_file_id')
853 def get_contents(suite, overridetype, section=None, session=None):
855 Returns contents for a suite / overridetype combination, limiting
856 to a section if not None.
859 @param suite: Suite object
861 @type overridetype: OverrideType
862 @param overridetype: OverrideType object
864 @type section: Section
865 @param section: Optional section object to limit results to
867 @type session: SQLAlchemy
868 @param session: Optional SQL session object (a temporary one will be
869 generated if not supplied)
872 @return: ResultsProxy object set up to return tuples of (filename, section,
876 # find me all of the contents for a given suite
877 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
881 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
882 JOIN content_file_names n ON (c.filename=n.id)
883 JOIN binaries b ON (b.id=c.binary_pkg)
884 JOIN override o ON (o.package=b.package)
885 JOIN section s ON (s.id=o.section)
886 WHERE o.suite = :suiteid AND o.type = :overridetypeid
887 AND b.type=:overridetypename"""
889 vals = {'suiteid': suite.suite_id,
890 'overridetypeid': overridetype.overridetype_id,
891 'overridetypename': overridetype.overridetype}
893 if section is not None:
894 contents_q += " AND s.id = :sectionid"
895 vals['sectionid'] = section.section_id
897 contents_q += " ORDER BY fn"
899 return session.execute(contents_q, vals)
901 __all__.append('get_contents')
903 ################################################################################
905 class ContentFilepath(object):
906 def __init__(self, *args, **kwargs):
910 return '<ContentFilepath %s>' % self.filepath
912 __all__.append('ContentFilepath')
915 def get_or_set_contents_path_id(filepath, session=None):
917 Returns database id for given path.
919 If no matching file is found, a row is inserted.
921 @type filepath: string
922 @param filepath: The filepath
924 @type session: SQLAlchemy
925 @param session: Optional SQL session object (a temporary one will be
926 generated if not supplied). If not passed, a commit will be performed at
927 the end of the function, otherwise the caller is responsible for committing.
930 @return: the database id for the given path
933 q = session.query(ContentFilepath).filter_by(filepath=filepath)
936 ret = q.one().cafilepath_id
937 except NoResultFound:
938 cf = ContentFilepath()
939 cf.filepath = filepath
941 session.commit_or_flush()
942 ret = cf.cafilepath_id
946 __all__.append('get_or_set_contents_path_id')
948 ################################################################################
950 class ContentAssociation(object):
951 def __init__(self, *args, **kwargs):
955 return '<ContentAssociation %s>' % self.ca_id
957 __all__.append('ContentAssociation')
959 def insert_content_paths(binary_id, fullpaths, session=None):
961 Make sure given path is associated with given binary id
964 @param binary_id: the id of the binary
965 @type fullpaths: list
966 @param fullpaths: the list of paths of the file being associated with the binary
967 @type session: SQLAlchemy session
968 @param session: Optional SQLAlchemy session. If this is passed, the caller
969 is responsible for ensuring a transaction has begun and committing the
970 results or rolling back based on the result code. If not passed, a commit
971 will be performed at the end of the function, otherwise the caller is
972 responsible for committing.
974 @return: True upon success
979 session = DBConn().session()
984 def generate_path_dicts():
985 for fullpath in fullpaths:
986 if fullpath.startswith( './' ):
987 fullpath = fullpath[2:]
989 yield {'filename':fullpath, 'id': binary_id }
991 for d in generate_path_dicts():
992 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1001 traceback.print_exc()
1003 # Only rollback if we set up the session ourself
1010 __all__.append('insert_content_paths')
1012 ################################################################################
1014 class DSCFile(object):
1015 def __init__(self, *args, **kwargs):
1019 return '<DSCFile %s>' % self.dscfile_id
1021 __all__.append('DSCFile')
1024 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1026 Returns a list of DSCFiles which may be empty
1028 @type dscfile_id: int (optional)
1029 @param dscfile_id: the dscfile_id of the DSCFiles to find
1031 @type source_id: int (optional)
1032 @param source_id: the source id related to the DSCFiles to find
1034 @type poolfile_id: int (optional)
1035 @param poolfile_id: the poolfile id related to the DSCFiles to find
1038 @return: Possibly empty list of DSCFiles
1041 q = session.query(DSCFile)
1043 if dscfile_id is not None:
1044 q = q.filter_by(dscfile_id=dscfile_id)
1046 if source_id is not None:
1047 q = q.filter_by(source_id=source_id)
1049 if poolfile_id is not None:
1050 q = q.filter_by(poolfile_id=poolfile_id)
1054 __all__.append('get_dscfiles')
1056 ################################################################################
1058 class PoolFile(object):
1059 def __init__(self, *args, **kwargs):
1063 return '<PoolFile %s>' % self.filename
1067 return os.path.join(self.location.path, self.filename)
1069 __all__.append('PoolFile')
1072 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1075 (ValidFileFound [boolean or None], PoolFile object or None)
1077 @type filename: string
1078 @param filename: the filename of the file to check against the DB
1081 @param filesize: the size of the file to check against the DB
1083 @type md5sum: string
1084 @param md5sum: the md5sum of the file to check against the DB
1086 @type location_id: int
1087 @param location_id: the id of the location to look in
1090 @return: Tuple of length 2.
1091 - If more than one file found with that name: (C{None}, C{None})
1092 - If valid pool file found: (C{True}, C{PoolFile object})
1093 - If valid pool file not found:
1094 - (C{False}, C{None}) if no file found
1095 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1098 q = session.query(PoolFile).filter_by(filename=filename)
1099 q = q.join(Location).filter_by(location_id=location_id)
1109 if obj.md5sum != md5sum or obj.filesize != int(filesize):
1117 __all__.append('check_poolfile')
1120 def get_poolfile_by_id(file_id, session=None):
1122 Returns a PoolFile objects or None for the given id
1125 @param file_id: the id of the file to look for
1127 @rtype: PoolFile or None
1128 @return: either the PoolFile object or None
1131 q = session.query(PoolFile).filter_by(file_id=file_id)
1135 except NoResultFound:
1138 __all__.append('get_poolfile_by_id')
1142 def get_poolfile_by_name(filename, location_id=None, session=None):
1144 Returns an array of PoolFile objects for the given filename and
1145 (optionally) location_id
1147 @type filename: string
1148 @param filename: the filename of the file to check against the DB
1150 @type location_id: int
1151 @param location_id: the id of the location to look in (optional)
1154 @return: array of PoolFile objects
1157 q = session.query(PoolFile).filter_by(filename=filename)
1159 if location_id is not None:
1160 q = q.join(Location).filter_by(location_id=location_id)
1164 __all__.append('get_poolfile_by_name')
1167 def get_poolfile_like_name(filename, session=None):
1169 Returns an array of PoolFile objects which are like the given name
1171 @type filename: string
1172 @param filename: the filename of the file to check against the DB
1175 @return: array of PoolFile objects
1178 # TODO: There must be a way of properly using bind parameters with %FOO%
1179 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1183 __all__.append('get_poolfile_like_name')
1186 def add_poolfile(filename, datadict, location_id, session=None):
1188 Add a new file to the pool
1190 @type filename: string
1191 @param filename: filename
1193 @type datadict: dict
1194 @param datadict: dict with needed data
1196 @type location_id: int
1197 @param location_id: database id of the location
1200 @return: the PoolFile object created
1202 poolfile = PoolFile()
1203 poolfile.filename = filename
1204 poolfile.filesize = datadict["size"]
1205 poolfile.md5sum = datadict["md5sum"]
1206 poolfile.sha1sum = datadict["sha1sum"]
1207 poolfile.sha256sum = datadict["sha256sum"]
1208 poolfile.location_id = location_id
1210 session.add(poolfile)
1211 # Flush to get a file id (NB: This is not a commit)
1216 __all__.append('add_poolfile')
1218 ################################################################################
1220 class Fingerprint(object):
1221 def __init__(self, *args, **kwargs):
1225 return '<Fingerprint %s>' % self.fingerprint
1227 __all__.append('Fingerprint')
1230 def get_fingerprint(fpr, session=None):
1232 Returns Fingerprint object for given fpr.
1235 @param fpr: The fpr to find / add
1237 @type session: SQLAlchemy
1238 @param session: Optional SQL session object (a temporary one will be
1239 generated if not supplied).
1242 @return: the Fingerprint object for the given fpr or None
1245 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1249 except NoResultFound:
1254 __all__.append('get_fingerprint')
1257 def get_or_set_fingerprint(fpr, session=None):
1259 Returns Fingerprint object for given fpr.
1261 If no matching fpr is found, a row is inserted.
1264 @param fpr: The fpr to find / add
1266 @type session: SQLAlchemy
1267 @param session: Optional SQL session object (a temporary one will be
1268 generated if not supplied). If not passed, a commit will be performed at
1269 the end of the function, otherwise the caller is responsible for commiting.
1270 A flush will be performed either way.
1273 @return: the Fingerprint object for the given fpr
1276 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1280 except NoResultFound:
1281 fingerprint = Fingerprint()
1282 fingerprint.fingerprint = fpr
1283 session.add(fingerprint)
1284 session.commit_or_flush()
1289 __all__.append('get_or_set_fingerprint')
1291 ################################################################################
1293 # Helper routine for Keyring class
1294 def get_ldap_name(entry):
1296 for k in ["cn", "mn", "sn"]:
1298 if ret and ret[0] != "" and ret[0] != "-":
1300 return " ".join(name)
1302 ################################################################################
1304 class Keyring(object):
1305 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1306 " --with-colons --fingerprint --fingerprint"
1311 def __init__(self, *args, **kwargs):
1315 return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, txt):
    # gpg emits non-ASCII bytes as \xNN escapes; decode each escape back
    # to its character while leaving the surrounding text untouched.
    pieces = re.split(r'(\\x..)', txt)
    # Splitting on a capturing group interleaves literal text and escape
    # tokens, so every odd-indexed element is a \xNN escape.
    decoded = [chr(int(piece[2:], 16)) if idx % 2 else piece
               for idx, piece in enumerate(pieces)]
    return "".join(decoded)
1323 def parse_address(self, uid):
1324 """parses uid and returns a tuple of real name and email address"""
1326 (name, address) = email.Utils.parseaddr(uid)
1327 name = re.sub(r"\s*[(].*[)]", "", name)
1328 name = self.de_escape_gpg_str(name)
1331 return (name, address)
1333 def load_keys(self, keyring):
1334 if not self.keyring_id:
1335 raise Exception('Must be initialized with database information')
1337 k = os.popen(self.gpg_invocation % keyring, "r")
1341 for line in k.xreadlines():
1342 field = line.split(":")
1343 if field[0] == "pub":
1346 (name, addr) = self.parse_address(field[9])
1348 self.keys[key]["email"] = addr
1349 self.keys[key]["name"] = name
1350 self.keys[key]["fingerprints"] = []
1352 elif key and field[0] == "sub" and len(field) >= 12:
1353 signingkey = ("s" in field[11])
1354 elif key and field[0] == "uid":
1355 (name, addr) = self.parse_address(field[9])
1356 if "email" not in self.keys[key] and "@" in addr:
1357 self.keys[key]["email"] = addr
1358 self.keys[key]["name"] = name
1359 elif signingkey and field[0] == "fpr":
1360 self.keys[key]["fingerprints"].append(field[9])
1361 self.fpr_lookup[field[9]] = key
1363 def import_users_from_ldap(self, session):
1367 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1368 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1370 l = ldap.open(LDAPServer)
1371 l.simple_bind_s("","")
1372 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1373 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1374 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1376 ldap_fin_uid_id = {}
1383 uid = entry["uid"][0]
1384 name = get_ldap_name(entry)
1385 fingerprints = entry["keyFingerPrint"]
1387 for f in fingerprints:
1388 key = self.fpr_lookup.get(f, None)
1389 if key not in self.keys:
1391 self.keys[key]["uid"] = uid
1395 keyid = get_or_set_uid(uid, session).uid_id
1396 byuid[keyid] = (uid, name)
1397 byname[uid] = (keyid, name)
1399 return (byname, byuid)
1401 def generate_users_from_keyring(self, format, session):
1405 for x in self.keys.keys():
1406 if "email" not in self.keys[x]:
1408 self.keys[x]["uid"] = format % "invalid-uid"
1410 uid = format % self.keys[x]["email"]
1411 keyid = get_or_set_uid(uid, session).uid_id
1412 byuid[keyid] = (uid, self.keys[x]["name"])
1413 byname[uid] = (keyid, self.keys[x]["name"])
1414 self.keys[x]["uid"] = uid
1417 uid = format % "invalid-uid"
1418 keyid = get_or_set_uid(uid, session).uid_id
1419 byuid[keyid] = (uid, "ungeneratable user id")
1420 byname[uid] = (keyid, "ungeneratable user id")
1422 return (byname, byuid)
1424 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    # Look up a Keyring row by name; None when absent (see NoResultFound).
    # NOTE(review): chunk looks truncated -- the docstring quotes and the
    # try/return around the query are not visible here.
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @return: the Keyring object for this keyring
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    except NoResultFound:

__all__.append('get_keyring')
1448 ################################################################################
class KeyringACLMap(object):
    """ORM object for the keyring_acl_map table; attributes are attached by
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
__all__.append('KeyringACLMap')  # export from this module
1459 ################################################################################
class DBChange(object):
    # ORM object for the changes table (one uploaded .changes file).
    # NOTE(review): lines appear to be missing from this chunk; __init__'s
    # visible body looks like it belongs to a dropped __repr__.
    def __init__(self, *args, **kwargs):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        # Detach this .changes from its policy queue and remove related
        # bookkeeping rows.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBChange object for the given filename (C{None} if not present)
    q = session.query(DBChange).filter_by(changesname=filename)
    except NoResultFound:

__all__.append('get_dbchange')
1508 ################################################################################
class Location(object):
    """ORM object for the location table (an on-disk archive path);
    attributes are attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Location %s (%s)>' % (self.path, self.location_id)
__all__.append('Location')

def get_location(location, component=None, archive=None, session=None):
    # Optional filters join against Archive / Component; returns None via
    # the NoResultFound handler.  NOTE(review): chunk looks truncated --
    # docstring quotes and the try/return are not visible here.
    Returns Location object for the given combination of location, component

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    except NoResultFound:

__all__.append('get_location')
1553 ################################################################################
class Maintainer(object):
    """ORM object for the maintainer table; attributes are attached by the
    SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)

    def get_split_maintainer(self):
        """Split self.name via fix_maintainer; returns four empty strings
        when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
__all__.append('Maintainer')

def get_or_set_maintainer(name, session=None):
    # Fetch-or-create helper for the maintainer table.
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query result are not visible here.
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Maintainer object for the given maintainer
    q = session.query(Maintainer).filter_by(name=name)
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    # Primary-key lookup; Query.get returns None for an unknown id.
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @return: the Maintainer with this C{maintainer_id}
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1621 ################################################################################
class NewComment(object):
    """ORM object for the new_comments table (reviewer comments on NEW
    queue packages); attributes are attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
__all__.append('NewComment')

def has_new_comment(package, version, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes are not
    # visible here.
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    # Each filter is applied only when the corresponding argument is given.
    # NOTE(review): chunk looks truncated -- docstring quotes and the final
    # q.all() return are not visible here.
    Returns (possibly empty) list of NewComment objects for the given

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of NewComment objects will be returned
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1691 ################################################################################
class Override(object):
    """ORM object for the override table; attributes are attached by the
    SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)
__all__.append('Override')

def get_override(package, suite=None, component=None, overridetype=None, session=None):
    # Each of suite/component/overridetype may be a scalar or a list; a
    # scalar is normalised to a one-element list before the IN filter.
    # NOTE(review): chunk looks truncated -- docstring quotes and the final
    # q.all() return are not visible here.
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
    None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of Override objects will be returned
    q = session.query(Override)
    q = q.filter_by(package=package)

    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1750 ################################################################################
class OverrideType(object):
    """ORM object for the override_type table; attributes are attached by
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<OverrideType %s>' % self.overridetype
__all__.append('OverrideType')

def get_override_type(override_type, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: the database id for the given override type
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    except NoResultFound:

__all__.append('get_override_type')
1786 ################################################################################
class DebContents(object):
    """ORM object for deb package contents entries; attributes are attached
    by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixes the 'DebConetnts' spelling in the original repr string.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
__all__.append('DebContents')  # export from this module
class UdebContents(object):
    """ORM object for udeb package contents entries; attributes are
    attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixes the 'UdebConetnts' spelling in the original repr string.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
__all__.append('UdebContents')  # export from this module
class PendingBinContents(object):
    """ORM object for not-yet-accepted binary contents entries; attributes
    are attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % self.contents_id
__all__.append('PendingBinContents')

def insert_pending_content_paths(package,
    # NOTE(review): this chunk appears heavily truncated (signature
    # continuation, docstring quotes, try:, session.add and commit/rollback
    # lines are not visible); code kept exactly as-is.  Python 2 syntax
    # (`except Exception, e`).
    Make sure given paths are temporarily associated with given

    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    # privatetrans tracks whether this function created its own session
    privatetrans = False
        session = DBConn().session()

        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingBinContents)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)

        for fullpath in fullpaths:
            if fullpath.startswith( "./" ):
                fullpath = fullpath[2:]

            pca = PendingBinContents()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.architecture = arch_id
                pca.type = 8 # gross
                pca.type = 7 # also gross
        # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
1891 ################################################################################
class PolicyQueue(object):
    """ORM object for the policy_queue table; attributes are attached by
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
__all__.append('PolicyQueue')

def get_policy_queue(queuename, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    q = session.query(PolicyQueue).filter_by(path=pathname)
    except NoResultFound:

__all__.append('get_policy_queue_from_path')
1952 ################################################################################
class Priority(object):
    """ORM object for the priority table; mapper-attached attributes.
    Compares equal to the bare priority name string."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
__all__.append('Priority')

def get_priority(priority, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Priority object for the given priority
    q = session.query(Priority).filter_by(priority=priority)
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes, the result
    # dict initialisation/loop header and the return are not visible here.
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of priority names -> id mappings
    q = session.query(Priority)
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2022 ################################################################################
class Section(object):
    """ORM object for the section table; mapper-attached attributes.
    Compares equal to the bare section name string."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Section %s>' % self.section
__all__.append('Section')

def get_section(section, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Section object for the given section name
    q = session.query(Section).filter_by(section=section)
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes, the result
    # dict initialisation/loop header and the return are not visible here.
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of section names -> id mappings
    q = session.query(Section)
        ret[x.section] = x.section_id

__all__.append('get_sections')
2092 ################################################################################
class DBSource(object):
    """ORM object for the source table (one source package version);
    attributes are attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBSource %s (%s)>' % (self.source, self.version)
__all__.append('DBSource')

def source_exists(source, source_version, suites = ["any"], session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes, suite-map
    # expansion loop and the return statements are not visible here.
    # NOTE(review): `suites = ["any"]` is a mutable default argument; it is
    # not mutated in the visible code, but worth confirming upstream.
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
    1. exact match => 1.0-3
    2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: returns 1 if a source with expected version is found, otherwise 0
    for suite in suites:
        q = session.query(DBSource).filter_by(source=source)
        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
            if x[1] in s and x[0] not in s:
        q = q.join(SrcAssociation).join(Suite)
        q = q.filter(Suite.suite_name.in_(s))

        # Reduce the query results to a list of version numbers
        ql = [ j.version for j in q.all() ]

        if source_version in ql:
            # strip the +bN binNMU suffix and retry with the base version
            from daklib.regexes import re_bin_only_nmu
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:
    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes are not
    # visible here.
    Returns list of Suite objects which given C{source} name is in

    @param source: DBSource package name to search for

    @return: list of Suite objects for the given source
    return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    # Optional version / dm_upload_allowed filters applied only when given.
    # NOTE(review): chunk looks truncated -- docstring quotes and the final
    # q.all() return are not visible here.
    Returns list of DBSource objects for given C{source} name and other parameters

    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBSource objects for the given name (may be empty)
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
def get_source_in_suite(source, suite, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the `try:`
    # line preceding the except handler are not visible here.
    Returns list of DBSource objects for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @param suite: the suite name

    @return: the version for I{source} in I{suite}

    q = session.query(SrcAssociation)
    q = q.join('source').filter_by(source=source)
    q = q.join('suite').filter_by(suite_name=suite)

        return q.one().source
    except NoResultFound:

__all__.append('get_source_in_suite')
2252 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Record an uploaded .dsc (source upload) in the DB: the source row,
    # its suite associations, pool files and uploaders.
    # NOTE(review): this chunk appears heavily truncated (DBSource()/
    # DSCFile()/SrcUploader() constructions, session.add/flush calls and
    # several loop bodies are not visible); code kept exactly as-is.
    entry = u.pkg.files[filename]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"]      # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
            su.maintainer_id = up_id
            su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    # NOTE(review): this chunk appears truncated (docstring quotes, the
    # DBBinary() construction, else: branches and session.add/flush calls
    # are not visible); code kept exactly as-is.  Python 2 raise syntax.
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    entry = u.pkg.files[filename]
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map onto exactly one source package
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2433 ################################################################################
class SourceACL(object):
    """ORM object for the source_acl table; attributes are attached by the
    SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
__all__.append('SourceACL')  # export from this module
2444 ################################################################################
class SrcAssociation(object):
    """ORM object for the src_associations table (source <-> suite link);
    attributes are attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
__all__.append('SrcAssociation')  # export from this module
2455 ################################################################################
class SrcFormat(object):
    """ORM object for the src_format table; attributes are attached by the
    SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
__all__.append('SrcFormat')  # export from this module
2466 ################################################################################
class SrcUploader(object):
    """ORM object for the src_uploaders table; attributes are attached by
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id
__all__.append('SrcUploader')  # export from this module
2477 ################################################################################
# (display name, Suite attribute) pairs used when rendering a suite's
# details.  NOTE(review): this chunk may omit entries present upstream.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
class Suite(object):
    """ORM object for the suite table; mapper-attached attributes.
    Compares equal to the bare suite name string."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """Return a printable "Field: value" listing of this suite's
        attributes, driven by SUITE_FIELDS; unset fields are skipped."""
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)
__all__.append('Suite')

def get_suite_architecture(suite, architecture, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    Returns a SuiteArchitecture object given C{suite} and ${arch} or None if it

    @param suite: Suite name to search for

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: SuiteArchitecture
    @return: the SuiteArchitecture object or None
    q = session.query(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture)
    q = q.join(Suite).filter_by(suite_name=suite)
    except NoResultFound:

__all__.append('get_suite_architecture')
def get_suite(suite, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes and the
    # try/return around the query are not visible here.
    Returns Suite object for given C{suite name}.

    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Suite object for the requested suite name (None if not present)
    q = session.query(Suite).filter_by(suite_name=suite)
    except NoResultFound:

__all__.append('get_suite')
2581 ################################################################################
class SuiteArchitecture(object):
    """ORM object for the suite_architectures table (suite <-> architecture
    link); attributes are attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteArchitecture (%s, %s)>' % (self.suite_id, self.arch_id)
__all__.append('SuiteArchitecture')

def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    # NOTE(review): chunk looks truncated -- docstring quotes, the skip
    # condition headers and the final return are not visible here.
    Returns list of Architecture objects for given C{suite} name

    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Architecture objects for the given name (may be empty)
    q = session.query(Architecture)
    q = q.join(SuiteArchitecture)
    q = q.join(Suite).filter_by(suite_name=suite)
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
    q = q.order_by('arch_string')

__all__.append('get_suite_architectures')
2632 ################################################################################
class SuiteSrcFormat(object):
    """ORM object for the suite_src_formats table (suite <-> source format
    link); attributes are attached by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2641 __all__.append('SuiteSrcFormat')
@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()
# Export get_suite_src_formats via the module's public API list.
__all__.append('get_suite_src_formats')
2668 ################################################################################
2671 def __init__(self, *args, **kwargs):
2674 def __eq__(self, val):
2675 if isinstance(val, str):
2676 return (self.uid == val)
2677 # This signals to use the normal comparison operator
2678 return NotImplemented
2680 def __ne__(self, val):
2681 if isinstance(val, str):
2682 return (self.uid != val)
2683 # This signals to use the normal comparison operator
2684 return NotImplemented
2687 return '<Uid %s (%s)>' % (self.uid, self.name)
# Export Uid via the module's public API list.
__all__.append('Uid')
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # No such uid yet: insert it.  commit_or_flush commits for a
        # private session and only flushes for a caller-supplied one.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
# Export get_or_set_uid via the module's public API list.
__all__.append('get_or_set_uid')
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid attached to the given fingerprint.

    @type fpr: string
    @param fpr: The fingerprint to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: Uid or None
    @return: the Uid object for the fingerprint (None if not found)
    """

    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
# Export get_uid_from_fingerprint via the module's public API list.
__all__.append('get_uid_from_fingerprint')
2737 ################################################################################
class UploadBlock(object):
    # ORM class for the upload_blocks table; attributes (source,
    # upload_block_id, ...) are attached by the SQLAlchemy mapper in DBConn.
    def __init__(self, *args, **kwargs):
        # Arguments are accepted and ignored; state is mapper-populated.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Export UploadBlock via the module's public API list.
__all__.append('UploadBlock')
2748 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # Borg pattern: all instances share this state, so constructing
    # DBConn() anywhere yields the same engine and session factory.
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        # Connect only once, on the first construction.
        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Pass debug=<anything> to enable SQLAlchemy engine echo.
            self.debug = 'debug' in kwargs
            self.__createconn()
    def __setuptables(self):
        """Reflect the archive database schema into SQLAlchemy Table objects.

        Tables are stored on self as tbl_<name>, views as view_<name>.
        """
        # NOTE(review): this region looks truncated -- several entries and
        # the closing parentheses of these tuples are not visible here;
        # confirm against version control before relying on the lists.
        tables_with_primary = (
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
            # The following tables have primary keys but sqlalchemy
            # version 0.5 fails to reflect them correctly with database
            # versions before upgrade #41.
            #'build_queue_files',

        # Tables reflected without forcing an 'id' primary key column.
        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # see the comment above
            'build_queue_files',

            # NOTE(review): the entries below appear to belong to the
            # 'views' tuple, whose opening line is not visible in this
            # chunk -- verify against the full file.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)
    def __setupmappers(self):
        """Attach the ORM classes to their reflected tables.

        Each 'properties' dict renames raw columns (e.g. id -> arch_id)
        and declares the relations between the mapped classes.
        """
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location)))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 srcassociations = relation(SrcAssociation,
                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcAssociation, self.tbl_src_associations,
               properties = dict(sa_id = self.tbl_src_associations.c.id,
                                 suite_id = self.tbl_src_associations.c.suite,
                                 suite = relation(Suite),
                                 source_id = self.tbl_src_associations.c.source,
                                 source = relation(DBSource)))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteArchitecture, self.tbl_suite_architectures,
               properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
                                 suite = relation(Suite, backref='suitearchitectures'),
                                 arch_id = self.tbl_suite_architectures.c.architecture,
                                 architecture = relation(Architecture)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))
3131 ## Connection functions
3132 def __createconn(self):
3133 from config import Config
3137 connstr = "postgres://%s" % cnf["DB::Host"]
3138 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3139 connstr += ":%s" % cnf["DB::Port"]
3140 connstr += "/%s" % cnf["DB::Name"]
3143 connstr = "postgres:///%s" % cnf["DB::Name"]
3144 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3145 connstr += "?port=%s" % cnf["DB::Port"]
3147 self.db_pg = create_engine(connstr, echo=self.debug)
3148 self.db_meta = MetaData()
3149 self.db_meta.bind = self.db_pg
3150 self.db_smaker = sessionmaker(bind=self.db_pg,
3154 self.__setuptables()
3155 self.__setupmappers()
3158 return self.db_smaker()
# Export DBConn via the module's public API list.
__all__.append('DBConn')