5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData
49 from sqlalchemy.orm import sessionmaker, mapper, relation
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
61 ################################################################################
63 # Patch in support for the debversion field type so that it works during
66 class DebVersion(sqltypes.Text):
68 Support the debversion type
71 def get_col_spec(self):
74 sa_major_version = sqlalchemy.__version__[0:3]
75 if sa_major_version == "0.5":
76 from sqlalchemy.databases import postgres
77 postgres.ischema_names['debversion'] = DebVersion
79 raise Exception("dak isn't ported to SQLA versions != 0.5 yet. See daklib/dbconn.py")
81 ################################################################################
83 __all__ = ['IntegrityError', 'SQLAlchemyError']
85 ################################################################################
87 def session_wrapper(fn):
89 Wrapper around common ".., session=None):" handling. If the wrapped
90 function is called without passing 'session', we create a local one
91 and destroy it when the function ends.
93 Also attaches a commit_or_flush method to the session; if we created a
94 local session, this is a synonym for session.commit(), otherwise it is a
95 synonym for session.flush().
98 def wrapped(*args, **kwargs):
99 private_transaction = False
101 # Find the session object
102 session = kwargs.get('session')
105 if len(args) <= len(getargspec(fn)[0]) - 1:
106 # No session specified as last argument or in kwargs
107 private_transaction = True
108 session = kwargs['session'] = DBConn().session()
110 # Session is last argument in args
114 session = args[-1] = DBConn().session()
115 private_transaction = True
117 if private_transaction:
118 session.commit_or_flush = session.commit
120 session.commit_or_flush = session.flush
123 return fn(*args, **kwargs)
125 if private_transaction:
126 # We created a session; close it.
129 wrapped.__doc__ = fn.__doc__
130 wrapped.func_name = fn.func_name
134 __all__.append('session_wrapper')
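# Illustrative sketch of how the decorator above is meant to be applied (this
# example is not part of the original module; 'Widget' and 'get_widget' are
# hypothetical names):
#
#   @session_wrapper
#   def get_widget(name, session=None):
#       q = session.query(Widget).filter_by(name=name)
#       try:
#           return q.one()
#       except NoResultFound:
#           return None
#
# Called as get_widget('foo'), the wrapper creates and later closes a private
# session and session.commit_or_flush() maps to commit(); called with an
# explicit session, that session is reused, commit_or_flush() only flushes and
# the caller keeps control of the transaction.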
136 ################################################################################
138 class Architecture(object):
139 def __init__(self, *args, **kwargs):
142 def __eq__(self, val):
143 if isinstance(val, str):
144 return (self.arch_string == val)
145 # This signals to use the normal comparison operator
146 return NotImplemented
148 def __ne__(self, val):
149 if isinstance(val, str):
150 return (self.arch_string != val)
151 # This signals to use the normal comparison operator
152 return NotImplemented
155 return '<Architecture %s>' % self.arch_string
157 __all__.append('Architecture')
160 def get_architecture(architecture, session=None):
162 Returns Architecture object for given C{architecture} name.
164 @type architecture: string
165 @param architecture: The name of the architecture
167 @type session: Session
168 @param session: Optional SQLA session object (a temporary one will be
169 generated if not supplied)
172 @return: Architecture object for the given arch (None if not present)
175 q = session.query(Architecture).filter_by(arch_string=architecture)
179 except NoResultFound:
182 __all__.append('get_architecture')
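# Illustrative usage (not part of the original module; 'amd64' is just an
# example architecture name):
#
#   arch = get_architecture('amd64')
#   if arch is not None:
#       print "architecture id: %s" % arch.arch_id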
185 def get_architecture_suites(architecture, session=None):
187 Returns list of Suite objects for given C{architecture} name
189 @type architecture: str
190 @param architecture: Architecture name to search for
192 @type session: Session
193 @param session: Optional SQL session object (a temporary one will be
194 generated if not supplied)
197 @return: list of Suite objects for the given name (may be empty)
200 q = session.query(Suite)
201 q = q.join(SuiteArchitecture)
202 q = q.join(Architecture).filter_by(arch_string=architecture).order_by('suite_name')
208 __all__.append('get_architecture_suites')
210 ################################################################################
212 class Archive(object):
213 def __init__(self, *args, **kwargs):
217 return '<Archive %s>' % self.archive_name
219 __all__.append('Archive')
222 def get_archive(archive, session=None):
224 Returns Archive object for given C{archive} name.
226 @type archive: string
227 @param archive: the name of the archive
229 @type session: Session
230 @param session: Optional SQLA session object (a temporary one will be
231 generated if not supplied)
234 @return: Archive object for the given name (None if not present)
237 archive = archive.lower()
239 q = session.query(Archive).filter_by(archive_name=archive)
243 except NoResultFound:
246 __all__.append('get_archive')
248 ################################################################################
250 class BinAssociation(object):
251 def __init__(self, *args, **kwargs):
255 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
257 __all__.append('BinAssociation')
259 ################################################################################
261 class BinContents(object):
262 def __init__(self, *args, **kwargs):
266 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
268 __all__.append('BinContents')
270 ################################################################################
272 class DBBinary(object):
273 def __init__(self, *args, **kwargs):
277 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
279 __all__.append('DBBinary')
282 def get_suites_binary_in(package, session=None):
284 Returns list of Suite objects which the given C{package} name is in
287 @param package: DBBinary package name to search for
290 @return: list of Suite objects for the given package
293 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
295 __all__.append('get_suites_binary_in')
298 def get_binary_from_id(binary_id, session=None):
300 Returns DBBinary object for given C{id}
303 @param binary_id: Id of the required binary
305 @type session: Session
306 @param session: Optional SQLA session object (a temporary one will be
307 generated if not supplied)
310 @return: DBBinary object for the given binary (None if not present)
313 q = session.query(DBBinary).filter_by(binary_id=binary_id)
317 except NoResultFound:
320 __all__.append('get_binary_from_id')
323 def get_binaries_from_name(package, version=None, architecture=None, session=None):
325 Returns list of DBBinary objects for given C{package} name
328 @param package: DBBinary package name to search for
330 @type version: str or None
331 @param version: Version to search for (or None)
333 @type architecture: str, list or None
334 @param architecture: Architectures to limit to (or None if no limit)
336 @type session: Session
337 @param session: Optional SQL session object (a temporary one will be
338 generated if not supplied)
341 @return: list of DBBinary objects for the given name (may be empty)
344 q = session.query(DBBinary).filter_by(package=package)
346 if version is not None:
347 q = q.filter_by(version=version)
349 if architecture is not None:
350 if not isinstance(architecture, list):
351 architecture = [architecture]
352 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
358 __all__.append('get_binaries_from_name')
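# Illustrative usage (not part of the original module; the package name,
# version and architecture list are made-up examples and 'session' is assumed
# to be an open DBConn session):
#
#   binaries = get_binaries_from_name('hello', version='2.4-3',
#                                     architecture=['amd64', 'all'],
#                                     session=session)
#   for b in binaries:
#       print '%s %s (%s)' % (b.package, b.version, b.architecture.arch_string)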
361 def get_binaries_from_source_id(source_id, session=None):
363 Returns list of DBBinary objects for given C{source_id}
366 @param source_id: source_id to search for
368 @type session: Session
369 @param session: Optional SQL session object (a temporary one will be
370 generated if not supplied)
373 @return: list of DBBinary objects for the given name (may be empty)
376 return session.query(DBBinary).filter_by(source_id=source_id).all()
378 __all__.append('get_binaries_from_source_id')
381 def get_binary_from_name_suite(package, suitename, session=None):
382 ### For dak examine-package
383 ### XXX: Doesn't use object API yet
385 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
386 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
387 WHERE b.package='%(package)s'
389 AND fi.location = l.id
390 AND l.component = c.id
393 AND su.suite_name %(suitename)s
394 ORDER BY b.version DESC"""
396 return session.execute(sql % {'package': package, 'suitename': suitename})
398 __all__.append('get_binary_from_name_suite')
401 def get_binary_components(package, suitename, arch, session=None):
402 # Check for packages that have moved from one component to another
403 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
404 WHERE b.package=:package AND s.suite_name=:suitename
405 AND (a.arch_string = :arch OR a.arch_string = 'all')
406 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
407 AND f.location = l.id
408 AND l.component = c.id
411 vals = {'package': package, 'suitename': suitename, 'arch': arch}
413 return session.execute(query, vals)
415 __all__.append('get_binary_components')
417 ################################################################################
419 class BinaryACL(object):
420 def __init__(self, *args, **kwargs):
424 return '<BinaryACL %s>' % self.binary_acl_id
426 __all__.append('BinaryACL')
428 ################################################################################
430 class BinaryACLMap(object):
431 def __init__(self, *args, **kwargs):
435 return '<BinaryACLMap %s>' % self.binary_acl_map_id
437 __all__.append('BinaryACLMap')
439 ################################################################################
444 ArchiveDir "%(archivepath)s";
445 OverrideDir "/srv/ftp-master.debian.org/scripts/override/";
446 CacheDir "/srv/ftp-master.debian.org/database/";
451 Packages::Compress ". bzip2 gzip";
452 Sources::Compress ". bzip2 gzip";
457 bindirectory "incoming"
462 BinOverride "override.sid.all3";
463 BinCacheDB "packages-accepted.db";
465 FileList "%(filelist)s";
468 Packages::Extensions ".deb .udeb";
471 bindirectory "incoming/"
474 BinOverride "override.sid.all3";
475 SrcOverride "override.sid.all3.src";
476 FileList "%(filelist)s";
480 class BuildQueue(object):
481 def __init__(self, *args, **kwargs):
485 return '<BuildQueue %s>' % self.queue_name
487 def write_metadata(self, starttime, force=False):
488 # Do we write out metafiles?
489 if not (force or self.generate_metadata):
492 session = DBConn().session().object_session(self)
494 fl_fd = fl_name = ac_fd = ac_name = None
496 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
497 startdir = os.getcwd()
500 # Grab files we want to include
501 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
502 # Write file list with newer files
503 (fl_fd, fl_name) = mkstemp()
505 os.write(fl_fd, '%s\n' % n.fullpath)
508 # Write minimal apt.conf
509 # TODO: Remove hardcoding from template
510 (ac_fd, ac_name) = mkstemp()
511 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
512 'filelist': fl_name})
515 # Run apt-ftparchive generate
516 os.chdir(os.path.dirname(ac_name))
517 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
519 # Run apt-ftparchive release
520 # TODO: Eww - fix this
521 bname = os.path.basename(self.path)
525 # We have to remove the Release file otherwise it'll be included in the
528 os.unlink(os.path.join(bname, 'Release'))
532 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
534 # Crude hack with open and append, but this whole section is crude and should be redone.
535 if self.notautomatic:
536 release = open("Release", "a")
537 release.write("NotAutomatic: yes")
543 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
544 if cnf.has_key("Dinstall::SigningPubKeyring"):
545 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
547 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
549 # Move the files if we got this far
550 os.rename('Release', os.path.join(bname, 'Release'))
552 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
554 # Clean up any left behind files
581 def clean_and_update(self, starttime, Logger, dryrun=False):
582 """WARNING: This routine commits for you"""
583 session = DBConn().session().object_session(self)
585 if self.generate_metadata and not dryrun:
586 self.write_metadata(starttime)
588 # Grab files older than our execution time
589 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
595 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
597 Logger.log(["I: Removing %s from the queue" % o.fullpath])
598 os.unlink(o.fullpath)
601 # If it wasn't there, don't worry
602 if e.errno == ENOENT:
605 # TODO: Replace with proper logging call
606 Logger.log(["E: Could not remove %s" % o.fullpath])
613 for f in os.listdir(self.path):
614 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
618 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
619 except NoResultFound:
620 fp = os.path.join(self.path, f)
622 Logger.log(["I: Would remove unused link %s" % fp])
624 Logger.log(["I: Removing unused link %s" % fp])
628 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
630 def add_file_from_pool(self, poolfile):
631 """Copies a file into the pool. Assumes that the PoolFile object is
632 attached to the same SQLAlchemy session as the Queue object is.
634 The caller is responsible for committing after calling this function."""
635 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
637 # Check if we have a file of this name or this ID already
638 for f in self.queuefiles:
639 if f.fileid is not None and f.fileid == poolfile.file_id or \
640 f.poolfile.filename == poolfile_basename:
641 # In this case, update the BuildQueueFile entry so we
642 # don't remove it too early
643 f.lastused = datetime.now()
644 DBConn().session().object_session(poolfile).add(f)
647 # Prepare BuildQueueFile object
648 qf = BuildQueueFile()
649 qf.build_queue_id = self.queue_id
650 qf.lastused = datetime.now()
651 qf.filename = poolfile_basename
653 targetpath = poolfile.fullpath
654 queuepath = os.path.join(self.path, poolfile_basename)
658 # We need to copy instead of symlink
660 utils.copy(targetpath, queuepath)
661 # NULL in the fileid field implies a copy
664 os.symlink(targetpath, queuepath)
665 qf.fileid = poolfile.file_id
669 # Get the same session as the PoolFile is using and add the qf to it
670 DBConn().session().object_session(poolfile).add(qf)
675 __all__.append('BuildQueue')
678 def get_build_queue(queuename, session=None):
680 Returns BuildQueue object for given C{queue name}, creating it if it does not
683 @type queuename: string
684 @param queuename: The name of the queue
686 @type session: Session
687 @param session: Optional SQLA session object (a temporary one will be
688 generated if not supplied)
691 @return: BuildQueue object for the given queue
694 q = session.query(BuildQueue).filter_by(queue_name=queuename)
698 except NoResultFound:
701 __all__.append('get_build_queue')
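# Illustrative maintenance sketch (not part of the original module; the queue
# name is hypothetical, and 'session' and 'Logger' are assumed to be an open
# DBConn session and a daklib logger instance):
#
#   starttime = datetime.now()
#   bq = get_build_queue('buildd-unstable', session)
#   if bq is not None:
#       bq.write_metadata(starttime, force=True)
#       bq.clean_and_update(starttime, Logger, dryrun=True)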
703 ################################################################################
705 class BuildQueueFile(object):
706 def __init__(self, *args, **kwargs):
710 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
714 return os.path.join(self.buildqueue.path, self.filename)
717 __all__.append('BuildQueueFile')
719 ################################################################################
721 class ChangePendingBinary(object):
722 def __init__(self, *args, **kwargs):
726 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
728 __all__.append('ChangePendingBinary')
730 ################################################################################
732 class ChangePendingFile(object):
733 def __init__(self, *args, **kwargs):
737 return '<ChangePendingFile %s>' % self.change_pending_file_id
739 __all__.append('ChangePendingFile')
741 ################################################################################
743 class ChangePendingSource(object):
744 def __init__(self, *args, **kwargs):
748 return '<ChangePendingSource %s>' % self.change_pending_source_id
750 __all__.append('ChangePendingSource')
752 ################################################################################
754 class Component(object):
755 def __init__(self, *args, **kwargs):
758 def __eq__(self, val):
759 if isinstance(val, str):
760 return (self.component_name == val)
761 # This signals to use the normal comparison operator
762 return NotImplemented
764 def __ne__(self, val):
765 if isinstance(val, str):
766 return (self.component_name != val)
767 # This signals to use the normal comparison operator
768 return NotImplemented
771 return '<Component %s>' % self.component_name
774 __all__.append('Component')
777 def get_component(component, session=None):
779 Returns Component object for given C{component} name.
781 @type component: string
782 @param component: The name of the component
785 @return: the Component object for the given component name (None if not present)
788 component = component.lower()
790 q = session.query(Component).filter_by(component_name=component)
794 except NoResultFound:
797 __all__.append('get_component')
799 ################################################################################
801 class DBConfig(object):
802 def __init__(self, *args, **kwargs):
806 return '<DBConfig %s>' % self.name
808 __all__.append('DBConfig')
810 ################################################################################
813 def get_or_set_contents_file_id(filename, session=None):
815 Returns database id for given filename.
817 If no matching file is found, a row is inserted.
819 @type filename: string
820 @param filename: The filename
821 @type session: SQLAlchemy
822 @param session: Optional SQL session object (a temporary one will be
823 generated if not supplied). If not passed, a commit will be performed at
824 the end of the function, otherwise the caller is responsible for committing.
827 @return: the database id for the given filename
830 q = session.query(ContentFilename).filter_by(filename=filename)
833 ret = q.one().cafilename_id
834 except NoResultFound:
835 cf = ContentFilename()
836 cf.filename = filename
838 session.commit_or_flush()
839 ret = cf.cafilename_id
843 __all__.append('get_or_set_contents_file_id')
846 def get_contents(suite, overridetype, section=None, session=None):
848 Returns contents for a suite / overridetype combination, limiting
849 to a section if not None.
852 @param suite: Suite object
854 @type overridetype: OverrideType
855 @param overridetype: OverrideType object
857 @type section: Section
858 @param section: Optional section object to limit results to
860 @type session: SQLAlchemy
861 @param session: Optional SQL session object (a temporary one will be
862 generated if not supplied)
865 @return: ResultsProxy object set up to return tuples of (filename, section,
869 # find me all of the contents for a given suite
870 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
874 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
875 JOIN content_file_names n ON (c.filename=n.id)
876 JOIN binaries b ON (b.id=c.binary_pkg)
877 JOIN override o ON (o.package=b.package)
878 JOIN section s ON (s.id=o.section)
879 WHERE o.suite = :suiteid AND o.type = :overridetypeid
880 AND b.type=:overridetypename"""
882 vals = {'suiteid': suite.suite_id,
883 'overridetypeid': overridetype.overridetype_id,
884 'overridetypename': overridetype.overridetype}
886 if section is not None:
887 contents_q += " AND s.id = :sectionid"
888 vals['sectionid'] = section.section_id
890 contents_q += " ORDER BY fn"
892 return session.execute(contents_q, vals)
894 __all__.append('get_contents')
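# Illustrative usage sketch (not part of the original module; assumes suite and
# override type rows named 'unstable' and 'deb' exist, that 'session' is an
# open DBConn session, and that get_suite() is available as used elsewhere in
# this module):
#
#   suite = get_suite('unstable', session)
#   overridetype = get_override_type('deb', session)
#   for row in get_contents(suite, overridetype, session=session):
#       print row[0]    # the 'fn' column, i.e. path/filename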
896 ################################################################################
898 class ContentFilepath(object):
899 def __init__(self, *args, **kwargs):
903 return '<ContentFilepath %s>' % self.filepath
905 __all__.append('ContentFilepath')
908 def get_or_set_contents_path_id(filepath, session=None):
910 Returns database id for given path.
912 If no matching file is found, a row is inserted.
914 @type filepath: string
915 @param filepath: The filepath
917 @type session: SQLAlchemy
918 @param session: Optional SQL session object (a temporary one will be
919 generated if not supplied). If not passed, a commit will be performed at
920 the end of the function, otherwise the caller is responsible for committing.
923 @return: the database id for the given path
926 q = session.query(ContentFilepath).filter_by(filepath=filepath)
929 ret = q.one().cafilepath_id
930 except NoResultFound:
931 cf = ContentFilepath()
932 cf.filepath = filepath
934 session.commit_or_flush()
935 ret = cf.cafilepath_id
939 __all__.append('get_or_set_contents_path_id')
941 ################################################################################
943 class ContentAssociation(object):
944 def __init__(self, *args, **kwargs):
948 return '<ContentAssociation %s>' % self.ca_id
950 __all__.append('ContentAssociation')
952 def insert_content_paths(binary_id, fullpaths, session=None):
954 Make sure given path is associated with given binary id
957 @param binary_id: the id of the binary
958 @type fullpaths: list
959 @param fullpaths: the list of paths of the file being associated with the binary
960 @type session: SQLAlchemy session
961 @param session: Optional SQLAlchemy session. If this is passed, the caller
962 is responsible for ensuring a transaction has begun and committing the
963 results or rolling back based on the result code. If not passed, a commit
964 will be performed at the end of the function, otherwise the caller is
965 responsible for committing.
967 @return: True upon success
972 session = DBConn().session()
977 def generate_path_dicts():
978 for fullpath in fullpaths:
979 if fullpath.startswith( './' ):
980 fullpath = fullpath[2:]
982 yield {'filename':fullpath, 'id': binary_id }
984 for d in generate_path_dicts():
985 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
994 traceback.print_exc()
996 # Only rollback if we set up the session ourself
1003 __all__.append('insert_content_paths')
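# Illustrative usage (not part of the original module; the binary id and the
# paths are made-up examples):
#
#   paths = ['./usr/bin/hello', './usr/share/doc/hello/copyright']
#   if insert_content_paths(12345, paths):
#       print "contents recorded for binary 12345"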
1005 ################################################################################
1007 class DSCFile(object):
1008 def __init__(self, *args, **kwargs):
1012 return '<DSCFile %s>' % self.dscfile_id
1014 __all__.append('DSCFile')
1017 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1019 Returns a list of DSCFiles which may be empty
1021 @type dscfile_id: int (optional)
1022 @param dscfile_id: the dscfile_id of the DSCFiles to find
1024 @type source_id: int (optional)
1025 @param source_id: the source id related to the DSCFiles to find
1027 @type poolfile_id: int (optional)
1028 @param poolfile_id: the poolfile id related to the DSCFiles to find
1031 @return: Possibly empty list of DSCFiles
1034 q = session.query(DSCFile)
1036 if dscfile_id is not None:
1037 q = q.filter_by(dscfile_id=dscfile_id)
1039 if source_id is not None:
1040 q = q.filter_by(source_id=source_id)
1042 if poolfile_id is not None:
1043 q = q.filter_by(poolfile_id=poolfile_id)
1047 __all__.append('get_dscfiles')
1049 ################################################################################
1051 class PoolFile(object):
1052 def __init__(self, *args, **kwargs):
1056 return '<PoolFile %s>' % self.filename
1060 return os.path.join(self.location.path, self.filename)
1062 __all__.append('PoolFile')
1065 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1068 (ValidFileFound [boolean or None], PoolFile object or None)
1070 @type filename: string
1071 @param filename: the filename of the file to check against the DB
1074 @param filesize: the size of the file to check against the DB
1076 @type md5sum: string
1077 @param md5sum: the md5sum of the file to check against the DB
1079 @type location_id: int
1080 @param location_id: the id of the location to look in
1083 @return: Tuple of length 2.
1084 - If more than one file found with that name: (C{None}, C{None})
1085 - If valid pool file found: (C{True}, C{PoolFile object})
1086 - If valid pool file not found:
1087 - (C{False}, C{None}) if no file found
1088 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1091 q = session.query(PoolFile).filter_by(filename=filename)
1092 q = q.join(Location).filter_by(location_id=location_id)
1102 if obj.md5sum != md5sum or obj.filesize != int(filesize):
1110 __all__.append('check_poolfile')
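# Illustrative usage of the return tuple documented above (not part of the
# original module; filename, size, md5sum and location id are made-up examples):
#
#   (found, pf) = check_poolfile('main/h/hello/hello_2.4-3.dsc', 1234,
#                                '0123456789abcdef0123456789abcdef', 1)
#   if found is None:
#       print "more than one file of that name"
#   elif found:
#       print "valid pool file, id %s" % pf.file_id
#   elif pf is None:
#       print "no such file in the pool"
#   else:
#       print "file exists but size/md5sum do not match"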
1113 def get_poolfile_by_id(file_id, session=None):
1115 Returns a PoolFile object or None for the given id
1118 @param file_id: the id of the file to look for
1120 @rtype: PoolFile or None
1121 @return: either the PoolFile object or None
1124 q = session.query(PoolFile).filter_by(file_id=file_id)
1128 except NoResultFound:
1131 __all__.append('get_poolfile_by_id')
1135 def get_poolfile_by_name(filename, location_id=None, session=None):
1137 Returns an array of PoolFile objects for the given filename and
1138 (optionally) location_id
1140 @type filename: string
1141 @param filename: the filename of the file to check against the DB
1143 @type location_id: int
1144 @param location_id: the id of the location to look in (optional)
1147 @return: array of PoolFile objects
1150 q = session.query(PoolFile).filter_by(filename=filename)
1152 if location_id is not None:
1153 q = q.join(Location).filter_by(location_id=location_id)
1157 __all__.append('get_poolfile_by_name')
1160 def get_poolfile_like_name(filename, session=None):
1162 Returns an array of PoolFile objects which are like the given name
1164 @type filename: string
1165 @param filename: the filename of the file to check against the DB
1168 @return: array of PoolFile objects
1171 # TODO: There must be a way of properly using bind parameters with %FOO%
1172 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1176 __all__.append('get_poolfile_like_name')
1179 def add_poolfile(filename, datadict, location_id, session=None):
1181 Add a new file to the pool
1183 @type filename: string
1184 @param filename: filename
1186 @type datadict: dict
1187 @param datadict: dict with needed data
1189 @type location_id: int
1190 @param location_id: database id of the location
1193 @return: the PoolFile object created
1195 poolfile = PoolFile()
1196 poolfile.filename = filename
1197 poolfile.filesize = datadict["size"]
1198 poolfile.md5sum = datadict["md5sum"]
1199 poolfile.sha1sum = datadict["sha1sum"]
1200 poolfile.sha256sum = datadict["sha256sum"]
1201 poolfile.location_id = location_id
1203 session.add(poolfile)
1204 # Flush to get a file id (NB: This is not a commit)
1209 __all__.append('add_poolfile')
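# Illustrative usage (not part of the original module; all values are made-up
# examples and, since a session is passed explicitly, committing is left to
# the caller):
#
#   datadict = {'size': 1234,
#               'md5sum': '0123456789abcdef0123456789abcdef',
#               'sha1sum': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
#               'sha256sum': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'}
#   pf = add_poolfile('pool/main/h/hello/hello_2.4-3.dsc', datadict, 1, session)
#   session.commit()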
1211 ################################################################################
1213 class Fingerprint(object):
1214 def __init__(self, *args, **kwargs):
1218 return '<Fingerprint %s>' % self.fingerprint
1220 __all__.append('Fingerprint')
1223 def get_fingerprint(fpr, session=None):
1225 Returns Fingerprint object for given fpr.
1228 @param fpr: The fpr to find / add
1230 @type session: SQLAlchemy
1231 @param session: Optional SQL session object (a temporary one will be
1232 generated if not supplied).
1235 @return: the Fingerprint object for the given fpr or None
1238 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1242 except NoResultFound:
1247 __all__.append('get_fingerprint')
1250 def get_or_set_fingerprint(fpr, session=None):
1252 Returns Fingerprint object for given fpr.
1254 If no matching fpr is found, a row is inserted.
1257 @param fpr: The fpr to find / add
1259 @type session: SQLAlchemy
1260 @param session: Optional SQL session object (a temporary one will be
1261 generated if not supplied). If not passed, a commit will be performed at
1262 the end of the function, otherwise the caller is responsible for committing.
1263 A flush will be performed either way.
1266 @return: the Fingerprint object for the given fpr
1269 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1273 except NoResultFound:
1274 fingerprint = Fingerprint()
1275 fingerprint.fingerprint = fpr
1276 session.add(fingerprint)
1277 session.commit_or_flush()
1282 __all__.append('get_or_set_fingerprint')
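# Illustrative usage (not part of the original module; the fingerprint string
# is a made-up example and 'session' is assumed to be an open DBConn session,
# so committing is left to the caller):
#
#   fpr = get_or_set_fingerprint('0123456789ABCDEF0123456789ABCDEF01234567',
#                                session=session)
#   print "fingerprint row id: %s" % fpr.fingerprint_id
#   session.commit()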
1284 ################################################################################
1286 # Helper routine for Keyring class
1287 def get_ldap_name(entry):
1289 for k in ["cn", "mn", "sn"]:
1291 if ret and ret[0] != "" and ret[0] != "-":
1293 return " ".join(name)
1295 ################################################################################
1297 class Keyring(object):
1298 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1299 " --with-colons --fingerprint --fingerprint"
1304 def __init__(self, *args, **kwargs):
1308 return '<Keyring %s>' % self.keyring_name
1310 def de_escape_gpg_str(self, txt):
1311 esclist = re.split(r'(\\x..)', txt)
1312 for x in range(1,len(esclist),2):
1313 esclist[x] = "%c" % (int(esclist[x][2:],16))
1314 return "".join(esclist)
1316 def load_keys(self, keyring):
1319 if not self.keyring_id:
1320 raise Exception('Must be initialized with database information')
1322 k = os.popen(self.gpg_invocation % keyring, "r")
1326 for line in k.xreadlines():
1327 field = line.split(":")
1328 if field[0] == "pub":
1330 (name, addr) = email.Utils.parseaddr(field[9])
1331 name = re.sub(r"\s*[(].*[)]", "", name)
1332 if name == "" or addr == "" or "@" not in addr:
1334 addr = "invalid-uid"
1335 name = self.de_escape_gpg_str(name)
1336 self.keys[key] = {"email": addr}
1338 self.keys[key]["name"] = name
1339 self.keys[key]["aliases"] = [name]
1340 self.keys[key]["fingerprints"] = []
1342 elif key and field[0] == "sub" and len(field) >= 12:
1343 signingkey = ("s" in field[11])
1344 elif key and field[0] == "uid":
1345 (name, addr) = email.Utils.parseaddr(field[9])
1346 if name and name not in self.keys[key]["aliases"]:
1347 self.keys[key]["aliases"].append(name)
1348 elif signingkey and field[0] == "fpr":
1349 self.keys[key]["fingerprints"].append(field[9])
1350 self.fpr_lookup[field[9]] = key
1352 def import_users_from_ldap(self, session):
1356 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1357 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1359 l = ldap.open(LDAPServer)
1360 l.simple_bind_s("","")
1361 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1362 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1363 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1365 ldap_fin_uid_id = {}
1372 uid = entry["uid"][0]
1373 name = get_ldap_name(entry)
1374 fingerprints = entry["keyFingerPrint"]
1376 for f in fingerprints:
1377 key = self.fpr_lookup.get(f, None)
1378 if key not in self.keys:
1380 self.keys[key]["uid"] = uid
1384 keyid = get_or_set_uid(uid, session).uid_id
1385 byuid[keyid] = (uid, name)
1386 byname[uid] = (keyid, name)
1388 return (byname, byuid)
1390 def generate_users_from_keyring(self, format, session):
1394 for x in self.keys.keys():
1395 if self.keys[x]["email"] == "invalid-uid":
1397 self.keys[x]["uid"] = format % "invalid-uid"
1399 uid = format % self.keys[x]["email"]
1400 keyid = get_or_set_uid(uid, session).uid_id
1401 byuid[keyid] = (uid, self.keys[x]["name"])
1402 byname[uid] = (keyid, self.keys[x]["name"])
1403 self.keys[x]["uid"] = uid
1406 uid = format % "invalid-uid"
1407 keyid = get_or_set_uid(uid, session).uid_id
1408 byuid[keyid] = (uid, "ungeneratable user id")
1409 byname[uid] = (keyid, "ungeneratable user id")
1411 return (byname, byuid)
1413 __all__.append('Keyring')
1416 def get_keyring(keyring, session=None):
1418 If C{keyring} does not have an entry in the C{keyrings} table yet, return None.
1419 If C{keyring} already has an entry, simply return the existing Keyring.
1421 @type keyring: string
1422 @param keyring: the keyring name
1425 @return: the Keyring object for this keyring
1428 q = session.query(Keyring).filter_by(keyring_name=keyring)
1432 except NoResultFound:
1435 __all__.append('get_keyring')
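# Illustrative usage sketch (not part of the original module; the keyring path
# and the uid format string are hypothetical, and 'session' is assumed to be
# an open DBConn session):
#
#   kr = get_keyring('/srv/keyring.debian.org/keyrings/debian-keyring.gpg')
#   if kr is not None:
#       kr.load_keys(kr.keyring_name)
#       (byname, byuid) = kr.generate_users_from_keyring('%s', session)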
1437 ################################################################################
1439 class KeyringACLMap(object):
1440 def __init__(self, *args, **kwargs):
1444 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1446 __all__.append('KeyringACLMap')
1448 ################################################################################
1450 class DBChange(object):
1451 def __init__(self, *args, **kwargs):
1455 return '<DBChange %s>' % self.changesname
1457 def clean_from_queue(self):
1458 session = DBConn().session().object_session(self)
1460 # Remove changes_pool_files entries
1463 # Remove changes_pending_files references
1466 # Clear out of queue
1467 self.in_queue = None
1468 self.approved_for_id = None
1470 __all__.append('DBChange')
1473 def get_dbchange(filename, session=None):
1475 returns DBChange object for given C{filename}.
1477 @type filename: string
1478 @param filename: the name of the file
1480 @type session: Session
1481 @param session: Optional SQLA session object (a temporary one will be
1482 generated if not supplied)
1485 @return: DBChange object for the given filename (C{None} if not present)
1488 q = session.query(DBChange).filter_by(changesname=filename)
1492 except NoResultFound:
1495 __all__.append('get_dbchange')
1497 ################################################################################
1499 class Location(object):
1500 def __init__(self, *args, **kwargs):
1504 return '<Location %s (%s)>' % (self.path, self.location_id)
1506 __all__.append('Location')
1509 def get_location(location, component=None, archive=None, session=None):
1511 Returns Location object for the given combination of location, component
1514 @type location: string
1515 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1517 @type component: string
1518 @param component: the component name (if None, no restriction applied)
1520 @type archive: string
1521 @param archive: the archive name (if None, no restriction applied)
1523 @rtype: Location / None
1524 @return: Either a Location object or None if one can't be found
1527 q = session.query(Location).filter_by(path=location)
1529 if archive is not None:
1530 q = q.join(Archive).filter_by(archive_name=archive)
1532 if component is not None:
1533 q = q.join(Component).filter_by(component_name=component)
1537 except NoResultFound:
1540 __all__.append('get_location')
1542 ################################################################################
1544 class Maintainer(object):
1545 def __init__(self, *args, **kwargs):
1549 return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
1551 def get_split_maintainer(self):
1552 if not hasattr(self, 'name') or self.name is None:
1553 return ('', '', '', '')
1555 return fix_maintainer(self.name.strip())
1557 __all__.append('Maintainer')
1560 def get_or_set_maintainer(name, session=None):
1562 Returns Maintainer object for given maintainer name.
1564 If no matching maintainer name is found, a row is inserted.
1567 @param name: The maintainer name to add
1569 @type session: SQLAlchemy
1570 @param session: Optional SQL session object (a temporary one will be
1571 generated if not supplied). If not passed, a commit will be performed at
1572 the end of the function, otherwise the caller is responsible for committing.
1573 A flush will be performed either way.
1576 @return: the Maintainer object for the given maintainer
1579 q = session.query(Maintainer).filter_by(name=name)
1582 except NoResultFound:
1583 maintainer = Maintainer()
1584 maintainer.name = name
1585 session.add(maintainer)
1586 session.commit_or_flush()
1591 __all__.append('get_or_set_maintainer')
1594 def get_maintainer(maintainer_id, session=None):
1596 Return the Maintainer object behind C{maintainer_id} or None if that
1597 maintainer_id is invalid.
1599 @type maintainer_id: int
1600 @param maintainer_id: the id of the maintainer
1603 @return: the Maintainer with this C{maintainer_id}
1606 return session.query(Maintainer).get(maintainer_id)
1608 __all__.append('get_maintainer')
1610 ################################################################################
1612 class NewComment(object):
1613 def __init__(self, *args, **kwargs):
1617 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1619 __all__.append('NewComment')
1622 def has_new_comment(package, version, session=None):
1624 Returns true if the given combination of C{package}, C{version} has a comment.
1626 @type package: string
1627 @param package: name of the package
1629 @type version: string
1630 @param version: package version
1632 @type session: Session
1633 @param session: Optional SQLA session object (a temporary one will be
1634 generated if not supplied)
1640 q = session.query(NewComment)
1641 q = q.filter_by(package=package)
1642 q = q.filter_by(version=version)
1644 return q.count() > 0
1646 __all__.append('has_new_comment')
1649 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1651 Returns (possibly empty) list of NewComment objects for the given
1654 @type package: string (optional)
1655 @param package: name of the package
1657 @type version: string (optional)
1658 @param version: package version
1660 @type comment_id: int (optional)
1661 @param comment_id: An id of a comment
1663 @type session: Session
1664 @param session: Optional SQLA session object (a temporary one will be
1665 generated if not supplied)
1668 @return: A (possibly empty) list of NewComment objects
1671 q = session.query(NewComment)
1672 if package is not None: q = q.filter_by(package=package)
1673 if version is not None: q = q.filter_by(version=version)
1674 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1678 __all__.append('get_new_comments')
1680 ################################################################################
1682 class Override(object):
1683 def __init__(self, *args, **kwargs):
1687 return '<Override %s (%s)>' % (self.package, self.suite_id)
1689 __all__.append('Override')
1692 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1694 Returns Override objects for the given parameters
1696 @type package: string
1697 @param package: The name of the package
1699 @type suite: string, list or None
1700 @param suite: The name of the suite (or suites if a list) to limit to. If
1701 None, don't limit. Defaults to None.
1703 @type component: string, list or None
1704 @param component: The name of the component (or components if a list) to
1705 limit to. If None, don't limit. Defaults to None.
1707 @type overridetype: string, list or None
1708 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1709 limit to. If None, don't limit. Defaults to None.
1711 @type session: Session
1712 @param session: Optional SQLA session object (a temporary one will be
1713 generated if not supplied)
1716 @return: A (possibly empty) list of Override objects
1719 q = session.query(Override)
1720 q = q.filter_by(package=package)
1722 if suite is not None:
1723 if not isinstance(suite, list): suite = [suite]
1724 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1726 if component is not None:
1727 if not isinstance(component, list): component = [component]
1728 q = q.join(Component).filter(Component.component_name.in_(component))
1730 if overridetype is not None:
1731 if not isinstance(overridetype, list): overridetype = [overridetype]
1732 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1736 __all__.append('get_override')
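# Illustrative usage (not part of the original module; package, suite and
# component names are made-up examples):
#
#   overrides = get_override('hello', suite='unstable',
#                            component=['main'], overridetype='deb')
#   for o in overrides:
#       print '%s (suite id %s)' % (o.package, o.suite_id)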
1739 ################################################################################
1741 class OverrideType(object):
1742 def __init__(self, *args, **kwargs):
1746 return '<OverrideType %s>' % self.overridetype
1748 __all__.append('OverrideType')
1751 def get_override_type(override_type, session=None):
1753 Returns OverrideType object for given C{override type}.
1755 @type override_type: string
1756 @param override_type: The name of the override type
1758 @type session: Session
1759 @param session: Optional SQLA session object (a temporary one will be
1760 generated if not supplied)
1763 @return: the OverrideType object for the given override type (None if not present)
1766 q = session.query(OverrideType).filter_by(overridetype=override_type)
1770 except NoResultFound:
1773 __all__.append('get_override_type')
1775 ################################################################################
1777 class DebContents(object):
1778 def __init__(self, *args, **kwargs):
1782 return '<DebContents %s: %s>' % (self.package.package, self.file)
1784 __all__.append('DebContents')
1787 class UdebContents(object):
1788 def __init__(self, *args, **kwargs):
1792 return '<UdebContents %s: %s>' % (self.package.package, self.file)
1794 __all__.append('UdebContents')
1796 class PendingBinContents(object):
1797 def __init__(self, *args, **kwargs):
1801 return '<PendingBinContents %s>' % self.contents_id
1803 __all__.append('PendingBinContents')
1805 def insert_pending_content_paths(package,
1810 Make sure given paths are temporarily associated with given
1814 @param package: the package to associate with; should have been read in from the binary control file
1815 @type fullpaths: list
1816 @param fullpaths: the list of paths of the file being associated with the binary
1817 @type session: SQLAlchemy session
1818 @param session: Optional SQLAlchemy session. If this is passed, the caller
1819 is responsible for ensuring a transaction has begun and committing the
1820 results or rolling back based on the result code. If not passed, a commit
1821 will be performed at the end of the function
1823 @return: True upon success, False if there is a problem
1826 privatetrans = False
1829 session = DBConn().session()
1833 arch = get_architecture(package['Architecture'], session)
1834 arch_id = arch.arch_id
1836 # Remove any already existing recorded files for this package
1837 q = session.query(PendingBinContents)
1838 q = q.filter_by(package=package['Package'])
1839 q = q.filter_by(version=package['Version'])
1840 q = q.filter_by(architecture=arch_id)
1843 for fullpath in fullpaths:
1845 if fullpath.startswith( "./" ):
1846 fullpath = fullpath[2:]
1848 pca = PendingBinContents()
1849 pca.package = package['Package']
1850 pca.version = package['Version']
1852 pca.architecture = arch_id
1855 pca.type = 8 # gross
1857 pca.type = 7 # also gross
1860 # Only commit if we set up the session ourself
1868 except Exception, e:
1869 traceback.print_exc()
1871 # Only rollback if we set up the session ourself
1878 __all__.append('insert_pending_content_paths')
1880 ################################################################################
1882 class PolicyQueue(object):
1883 def __init__(self, *args, **kwargs):
1887 return '<PolicyQueue %s>' % self.queue_name
1889 __all__.append('PolicyQueue')
1892 def get_policy_queue(queuename, session=None):
1894 Returns PolicyQueue object for given C{queue name}
1896 @type queuename: string
1897 @param queuename: The name of the queue
1899 @type session: Session
1900 @param session: Optional SQLA session object (a temporary one will be
1901 generated if not supplied)
1904 @return: PolicyQueue object for the given queue
1907 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1911 except NoResultFound:
1914 __all__.append('get_policy_queue')
1916 ################################################################################
1918 class Priority(object):
1919 def __init__(self, *args, **kwargs):
1922 def __eq__(self, val):
1923 if isinstance(val, str):
1924 return (self.priority == val)
1925 # This signals to use the normal comparison operator
1926 return NotImplemented
1928 def __ne__(self, val):
1929 if isinstance(val, str):
1930 return (self.priority != val)
1931 # This signals to use the normal comparison operator
1932 return NotImplemented
1935 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
1937 __all__.append('Priority')
1940 def get_priority(priority, session=None):
1942 Returns Priority object for given C{priority name}.
1944 @type priority: string
1945 @param priority: The name of the priority
1947 @type session: Session
1948 @param session: Optional SQLA session object (a temporary one will be
1949 generated if not supplied)
1952 @return: Priority object for the given priority
1955 q = session.query(Priority).filter_by(priority=priority)
1959 except NoResultFound:
1962 __all__.append('get_priority')
1965 def get_priorities(session=None):
1967 Returns dictionary of priority names -> id mappings
1969 @type session: Session
1970 @param session: Optional SQL session object (a temporary one will be
1971 generated if not supplied)
1974 @return: dictionary of priority names -> id mappings
1978 q = session.query(Priority)
1980 ret[x.priority] = x.priority_id
1984 __all__.append('get_priorities')
1986 ################################################################################
1988 class Section(object):
1989 def __init__(self, *args, **kwargs):
1992 def __eq__(self, val):
1993 if isinstance(val, str):
1994 return (self.section == val)
1995 # This signals to use the normal comparison operator
1996 return NotImplemented
1998 def __ne__(self, val):
1999 if isinstance(val, str):
2000 return (self.section != val)
2001 # This signals to use the normal comparison operator
2002 return NotImplemented
2005 return '<Section %s>' % self.section
2007 __all__.append('Section')
2010 def get_section(section, session=None):
2012 Returns Section object for given C{section name}.
2014 @type section: string
2015 @param section: The name of the section
2017 @type session: Session
2018 @param session: Optional SQLA session object (a temporary one will be
2019 generated if not supplied)
2022 @return: Section object for the given section name
2025 q = session.query(Section).filter_by(section=section)
2029 except NoResultFound:
2032 __all__.append('get_section')
2035 def get_sections(session=None):
2037 Returns dictionary of section names -> id mappings
2039 @type session: Session
2040 @param session: Optional SQL session object (a temporary one will be
2041 generated if not supplied)
2044 @return: dictionary of section names -> id mappings
2048 q = session.query(Section)
2050 ret[x.section] = x.section_id
2054 __all__.append('get_sections')
2056 ################################################################################
2058 class DBSource(object):
2059 def __init__(self, *args, **kwargs):
2063 return '<DBSource %s (%s)>' % (self.source, self.version)
2065 __all__.append('DBSource')
2068 def source_exists(source, source_version, suites = ["any"], session=None):
2070 Ensure that source exists somewhere in the archive for the binary
2071 upload being processed.
2072 1. exact match => 1.0-3
2073 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2075 @type source: string
2076 @param source: source name
2078 @type source_version: string
2079 @param source_version: expected source version
2082 @param suites: list of suites to check in, default I{any}
2084 @type session: Session
2085 @param session: Optional SQLA session object (a temporary one will be
2086 generated if not supplied)
2089 @return: 1 if a source with the expected version is found, otherwise 0
2096 for suite in suites:
2097 q = session.query(DBSource).filter_by(source=source)
2099 # source must exist in suite X, or in some other suite that's
2100 # mapped to X, recursively... silent-maps are counted too,
2101 # unreleased-maps aren't.
2102 maps = cnf.ValueList("SuiteMappings")[:]
2104 maps = [ m.split() for m in maps ]
2105 maps = [ (x[1], x[2]) for x in maps
2106 if x[0] == "map" or x[0] == "silent-map" ]
2109 if x[1] in s and x[0] not in s:
2112 q = q.join(SrcAssociation).join(Suite)
2113 q = q.filter(Suite.suite_name.in_(s))
2115 # Reduce the query results to a list of version numbers
2116 ql = [ j.version for j in q.all() ]
2119 if source_version in ql:
2123 from daklib.regexes import re_bin_only_nmu
2124 orig_source_version = re_bin_only_nmu.sub('', source_version)
2125 if orig_source_version in ql:
2128 # No source found so return not ok
2133 __all__.append('source_exists')
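# Illustrative usage (not part of the original module; the source name and
# version are made-up examples):
#
#   if source_exists('hello', '2.4-3', suites=['unstable']):
#       print "matching source found, binary upload may proceed"
#
# A binary-only NMU version such as '2.4-3+b1' also matches, since the +bN
# suffix is stripped before the second lookup.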
2136 def get_suites_source_in(source, session=None):
2138 Returns list of Suite objects which the given C{source} name is in
2141 @param source: DBSource package name to search for
2144 @return: list of Suite objects for the given source
2147 return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()
2149 __all__.append('get_suites_source_in')
2152 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2154 Returns list of DBSource objects for given C{source} name and other parameters
2157 @param source: DBSource package name to search for
2159 @type version: str or None
2160 @param version: DBSource version name to search for or None if not applicable
2162 @type dm_upload_allowed: bool
2163 @param dm_upload_allowed: If None, no effect. If True or False, only
2164 return packages with that dm_upload_allowed setting
2166 @type session: Session
2167 @param session: Optional SQL session object (a temporary one will be
2168 generated if not supplied)
2171 @return: list of DBSource objects for the given name (may be empty)
2174 q = session.query(DBSource).filter_by(source=source)
2176 if version is not None:
2177 q = q.filter_by(version=version)
2179 if dm_upload_allowed is not None:
2180 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2184 __all__.append('get_sources_from_name')
2187 def get_source_in_suite(source, suite, session=None):
2189 Returns the DBSource object for a combination of C{source} and C{suite}.
2191 - B{source} - source package name, e.g. I{mailfilter}, I{bbdb}, I{glibc}
2192 - B{suite} - a suite name, e.g. I{unstable}
2194 @type source: string
2195 @param source: source package name
2198 @param suite: the suite name
2201 @return: the DBSource object for I{source} in I{suite} (None if not present)
2205 q = session.query(SrcAssociation)
2206 q = q.join('source').filter_by(source=source)
2207 q = q.join('suite').filter_by(suite_name=suite)
2210 return q.one().source
2211 except NoResultFound:
2214 __all__.append('get_source_in_suite')
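# Illustrative usage (not part of the original module; the source and suite
# names are made-up examples):
#
#   src = get_source_in_suite('hello', 'unstable')
#   if src is not None:
#       print '%s %s' % (src.source, src.version)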
2216 ################################################################################
2219 def add_dsc_to_db(u, filename, session=None):
2220 entry = u.pkg.files[filename]
2224 source.source = u.pkg.dsc["source"]
2225 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2226 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2227 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2228 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2229 source.install_date = datetime.now().date()
2231 dsc_component = entry["component"]
2232 dsc_location_id = entry["location id"]
2234 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2236 # Set up a new poolfile if necessary
2237 if not entry.has_key("files id") or not entry["files id"]:
2238 filename = entry["pool name"] + filename
2239 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2241 pfs.append(poolfile)
2242 entry["files id"] = poolfile.file_id
2244 source.poolfile_id = entry["files id"]
2248 for suite_name in u.pkg.changes["distribution"].keys():
2249 sa = SrcAssociation()
2250 sa.source_id = source.source_id
2251 sa.suite_id = get_suite(suite_name).suite_id
2256 # Add the source files to the DB (files and dsc_files)
2258 dscfile.source_id = source.source_id
2259 dscfile.poolfile_id = entry["files id"]
2260 session.add(dscfile)
2262 for dsc_file, dentry in u.pkg.dsc_files.items():
2264 df.source_id = source.source_id
2266 # If the .orig tarball is already in the pool, its
2267 # files id is stored in dsc_files by check_dsc().
2268 files_id = dentry.get("files id", None)
2270 # Find the entry in the files hash
2271 # TODO: Bail out here properly
2273 for f, e in u.pkg.files.items():
2278 if files_id is None:
2279 filename = dfentry["pool name"] + dsc_file
2281 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2282 # FIXME: needs to check for -1/-2 and/or handle exception
2283 if found and obj is not None:
2284 files_id = obj.file_id
2287 # If still not found, add it
2288 if files_id is None:
2289 # HACK: Force sha1sum etc into dentry
2290 dentry["sha1sum"] = dfentry["sha1sum"]
2291 dentry["sha256sum"] = dfentry["sha256sum"]
2292 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2293 pfs.append(poolfile)
2294 files_id = poolfile.file_id
2296 poolfile = get_poolfile_by_id(files_id, session)
2297 if poolfile is None:
2298 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2299 pfs.append(poolfile)
2301 df.poolfile_id = files_id
2306 # Add the src_uploaders to the DB
2307 uploader_ids = [source.maintainer_id]
2308 if u.pkg.dsc.has_key("uploaders"):
2309 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2311 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
2314 for up_id in uploader_ids:
2315 if added_ids.has_key(up_id):
2317 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2323 su.maintainer_id = up_id
2324 su.source_id = source.source_id
2329 return source, dsc_component, dsc_location_id, pfs
2331 __all__.append('add_dsc_to_db')

@session_wrapper
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    cnf = Config()
    entry = u.pkg.files[filename]

    bin = DBBinary()
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find poolfile id
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(entry["files id"], session)
        bin.poolfile_id = entry["files id"]
    else:
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find source id
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, bin.architecture.arch_string,
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID
    session.add(bin)
    session.flush()

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id
        session.add(ba)

    session.flush()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #if not contents:
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
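
# Illustrative sketch (not part of the original module): add_deb_to_db() is
# meant to be driven by dak's upload-processing code with an already-checked
# upload object; the name 'upload' and the filename below are hypothetical:
#
#   session = DBConn().session()
#   add_deb_to_db(upload, "hello_2.8-1_amd64.deb", session)
#   session.commit()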

################################################################################

class SourceACL(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')

################################################################################

class SrcAssociation(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)

__all__.append('SrcAssociation')

################################################################################

class SrcFormat(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')

################################################################################

class SrcUploader(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')

################################################################################

SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Label', 'label'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('CopyDotDak', 'copydotdak'),
                 ('CommentsDir', 'commentsdir'),
                 ('OverrideSuite', 'overridesuite'),
                 ('ChangelogBase', 'changelogbase')]

class Suite(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

__all__.append('Suite')
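
# Illustrative sketch (not part of the original module): the __eq__/__ne__
# overrides above let a Suite object be compared directly against a suite name
# string, and details() renders whichever SUITE_FIELDS are set:
#
#   s = get_suite("unstable")
#   if s == "unstable":
#       print s.details()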

@session_wrapper
def get_suite_architecture(suite, architecture, session=None):
    """
    Returns a SuiteArchitecture object given C{suite} and C{architecture} or
    None if it doesn't exist

    @type suite: str
    @param suite: Suite name to search for

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: SuiteArchitecture
    @return: the SuiteArchitecture object or None
    """

    q = session.query(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture)
    q = q.join(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite_architecture')

@session_wrapper
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
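
# Illustrative sketch (not part of the original module): like most helpers in
# this module, get_suite() is wrapped by session_wrapper, so it can be called
# with or without an explicit session:
#
#   suite = get_suite("unstable")                   # private, short-lived session
#   suite = get_suite("unstable", session=session)  # reuse the caller's session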

################################################################################

class SuiteArchitecture(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteArchitecture (%s, %s)>' % (self.suite_id, self.arch_id)

__all__.append('SuiteArchitecture')

@session_wrapper
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    q = session.query(Architecture)
    q = q.join(SuiteArchitecture)
    q = q.join(Suite).filter_by(suite_name=suite)

    if skipsrc:
        q = q.filter(Architecture.arch_string != 'source')

    if skipall:
        q = q.filter(Architecture.arch_string != 'all')

    q = q.order_by('arch_string')

    return q.all()

__all__.append('get_suite_architectures')
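
# Illustrative sketch (not part of the original module): list the binary
# architectures configured for a suite, skipping the pseudo-architectures:
#
#   for arch in get_suite_architectures("unstable", skipsrc=True, skipall=True):
#       print arch.arch_string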

################################################################################

class SuiteSrcFormat(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')

@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()

__all__.append('get_suite_src_formats')
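
# Illustrative sketch (not part of the original module): check whether a given
# source format is allowed in a suite (the format name string below is
# illustrative):
#
#   allowed = [f.format_name for f in get_suite_src_formats("unstable")]
#   if "3.0 (quilt)" not in allowed:
#       print "3.0 (quilt) uploads are not permitted"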

################################################################################

class Uid(object):
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Uid %s (%s)>' % (self.uid, self.name)

__all__.append('Uid')

@session_wrapper
def add_database_user(uidname, session=None):
    """
    Adds a database user

    @type uidname: string
    @param uidname: The uid of the user to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @return: nothing
    """

    session.execute("CREATE USER :uid", {'uid': uidname})
    session.commit_or_flush()

__all__.append('add_database_user')

@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
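
# Illustrative sketch (not part of the original module): look up (or create) a
# Uid row on a caller-owned session; because commit_or_flush() only flushes in
# that case, the caller has to commit:
#
#   session = DBConn().session()
#   uid = get_or_set_uid("jdoe", session=session)
#   session.commit()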

@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')

################################################################################

class UploadBlock(object):
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)

__all__.append('UploadBlock')

################################################################################

class DBConn(object):
    """
    database module init.
    """
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            self.debug = kwargs.has_key('debug')
            self.__createconn()

    def __setuptables(self):
        # The table names here are reconstructed from the mappers configured in
        # __setupmappers() below; every self.tbl_* attribute referenced there
        # must be reflected from the database.
        tables = (
            'architecture', 'archive', 'bin_associations', 'binaries',
            'binary_acl', 'binary_acl_map', 'build_queue', 'build_queue_files',
            'changes', 'changes_pending_binaries', 'changes_pending_files',
            'changes_pending_files_map', 'changes_pending_source',
            'changes_pending_source_files', 'changes_pool_files', 'component',
            'config', 'deb_contents', 'dsc_files', 'files', 'fingerprint',
            'keyring_acl_map', 'keyrings', 'location', 'maintainer',
            'new_comments', 'override', 'override_type', 'pending_bin_contents',
            'policy_queue', 'priority', 'section', 'source', 'source_acl',
            'src_associations', 'src_format', 'src_uploaders', 'suite',
            'suite_architectures', 'suite_src_formats', 'suite_build_queue_copy',
            'udeb_contents', 'uid', 'upload_blocks',
        )

        for table_name in tables:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

    def __setupmappers(self):
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id = self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id = self.tbl_deb_contents.c.binary_id,
                                 package = self.tbl_deb_contents.c.package,
                                 suite = self.tbl_deb_contents.c.suite,
                                 arch = self.tbl_deb_contents.c.arch,
                                 section = self.tbl_deb_contents.c.section,
                                 filename = self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id = self.tbl_udeb_contents.c.binary_id,
                                 package = self.tbl_udeb_contents.c.package,
                                 suite = self.tbl_udeb_contents.c.suite,
                                 arch = self.tbl_udeb_contents.c.arch,
                                 section = self.tbl_udeb_contents.c.section,
                                 filename = self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location)))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section = self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 srcassociations = relation(SrcAssociation,
                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcAssociation, self.tbl_src_associations,
               properties = dict(sa_id = self.tbl_src_associations.c.id,
                                 suite_id = self.tbl_src_associations.c.suite,
                                 suite = relation(Suite),
                                 source_id = self.tbl_src_associations.c.source,
                                 source = relation(DBSource)))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteArchitecture, self.tbl_suite_architectures,
               properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
                                 suite = relation(Suite, backref='suitearchitectures'),
                                 arch_id = self.tbl_suite_architectures.c.architecture,
                                 architecture = relation(Architecture)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))
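
    # Illustrative sketch (not part of the original module): once the mappers
    # above are configured, the mapped properties and relations can be used in
    # ORM queries directly, e.g. (assuming an open session):
    #
    #   session.query(DBBinary).join(Architecture).\
    #       filter(Architecture.arch_string == 'amd64').\
    #       filter(DBBinary.package == 'dak')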

    ## Connection functions
    def __createconn(self):
        from config import Config
        cnf = Config()
        if cnf["DB::Host"]:
            # TCP/IP
            connstr = "postgres://%s" % cnf["DB::Host"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
        else:
            # Unix socket
            connstr = "postgres:///%s" % cnf["DB::Name"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        self.db_smaker = sessionmaker(bind=self.db_pg,
                                      autoflush=True,
                                      autocommit=False)

        self.__setuptables()
        self.__setupmappers()

    def session(self):
        return self.db_smaker()

__all__.append('DBConn')
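
# Illustrative sketch (not part of the original module): DBConn uses shared
# ("Borg") state, so every instantiation after the first reuses the same
# engine, metadata and mappers; sessions are cheap to create per task:
#
#   session = DBConn().session()
#   print get_suite("unstable", session=session)
#   session.close()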