5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
40 from datetime import datetime, timedelta
41 from errno import ENOENT
42 from tempfile import mkstemp, mkdtemp
44 from inspect import getargspec
47 from sqlalchemy import create_engine, Table, MetaData
48 from sqlalchemy.orm import sessionmaker, mapper, relation
49 from sqlalchemy import types as sqltypes
51 # Don't remove this, we re-export the exceptions to scripts which import us
52 from sqlalchemy.exc import *
53 from sqlalchemy.orm.exc import NoResultFound
55 # Only import Config until Queue stuff is changed to store its config
57 from config import Config
58 from textutils import fix_maintainer
60 ################################################################################
62 # Patch in support for the debversion field type so that it works during
class DebVersion(sqltypes.Text):
    """
    Support the debversion type

    SQLAlchemy column type mapped onto PostgreSQL's `debversion` domain so
    Debian version strings compare with dpkg semantics at the DB level.
    """
    def get_col_spec(self):
        # NOTE(review): method body elided in this view; presumably returns
        # the literal column type name "debversion" — TODO confirm.
# Register the debversion type with SQLAlchemy 0.5's PostgreSQL dialect;
# any other SQLA version is rejected outright.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version == "0.5":
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): an "else:" introducing the branch below is elided in this view.
raise Exception("dak isn't ported to SQLA versions != 0.5 yet.  See daklib/dbconn.py")

# Re-exported SQLAlchemy exception names; extended via __all__.append() below.
__all__ = ['IntegrityError', 'SQLAlchemyError']
84 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        # NOTE(review): an enclosing "if session is None:" test is elided
        # in this view — TODO confirm.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
        # NOTE(review): the alternative branch header is elided here.
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
        # NOTE(review): matching "else:" elided; flush is the non-private case.
            session.commit_or_flush = session.flush

        # NOTE(review): a try/finally around the call is presumably elided.
            return fn(*args, **kwargs)
        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's docstring and name for introspection.
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')
135 ################################################################################
class Architecture(object):
    """ORM class for the architecture table; attributes set by the mapper."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""

    def __eq__(self, val):
        # Allow direct comparison against a plain architecture-name string.
        if isinstance(val, str):
            return (self.arch_string== val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<Architecture %s>' % self.arch_string

__all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)
    # NOTE(review): the try/return lines are elided in this view; presumably
    # "ret = q.one()" guarded by this handler which sets the result to None.
    except NoResultFound:

__all__.append('get_architecture')
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Suite objects for the given name (may be empty)
    """
    # Join suite -> suite_architectures -> architecture, filter on the name.
    q = session.query(Suite)
    q = q.join(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture).order_by('suite_name')
    # NOTE(review): the "return q.all()" tail is elided in this view.

__all__.append('get_architecture_suites')
209 ################################################################################
class Archive(object):
    """ORM class for the archive table; attributes set by the mapper."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<Archive %s>' % self.archive_name

__all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the arhive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower-case.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)
    # NOTE(review): try/return lines elided; presumably "ret = q.one()" with
    # this handler returning None.
    except NoResultFound:

__all__.append('get_archive')
247 ################################################################################
class BinAssociation(object):
    """ORM class linking a binary package to a suite (bin_associations)."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)

__all__.append('BinAssociation')
258 ################################################################################
class BinContents(object):
    """ORM class for a (binary, filename) contents entry."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)

__all__.append('BinContents')
269 ################################################################################
class DBBinary(object):
    """ORM class for a binary package row (binaries table)."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)

__all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @param package: DBBinary package name to search for

    @return: list of Suite objects for the given package
    """
    # Walk suite -> bin_associations -> binaries and filter on the name.
    query = session.query(Suite)
    query = query.join(BinAssociation).join(DBBinary)
    return query.filter_by(package=package).all()

__all__.append('get_suites_binary_in')
def get_binary_from_id(binary_id, session=None):
    """
    Returns DBBinary object for given C{id}

    @param binary_id: Id of the required binary

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBBinary object for the given binary (None if not present)
    """
    q = session.query(DBBinary).filter_by(binary_id=binary_id)
    # NOTE(review): try/return lines elided; presumably "ret = q.one()" with
    # this handler returning None.
    except NoResultFound:

__all__.append('get_binary_from_id')
def get_binaries_from_name(package, version=None, architecture=None, session=None):
    """
    Returns list of DBBinary objects for given C{package} name

    @param package: DBBinary package name to search for

    @type version: str or None
    @param version: Version to search for (or None)

    @type architecture: str, list or None
    @param architecture: Architectures to limit to (or None if no limit)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBBinary objects for the given name (may be empty)
    """
    q = session.query(DBBinary).filter_by(package=package)

    if version is not None:
        q = q.filter_by(version=version)

    if architecture is not None:
        # Accept a single architecture name or a list of them.
        if not isinstance(architecture, list):
            architecture = [architecture]
        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
    # NOTE(review): the "return q.all()" tail is elided in this view.

__all__.append('get_binaries_from_name')
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBBinary objects for the given name (may be empty)
    """
    # All binaries built from the given source package row.
    query = session.query(DBBinary).filter_by(source_id=source_id)
    return query.all()

__all__.append('get_binaries_from_source_id')
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    # NOTE(review): raw SQL with string interpolation — the callers must pass
    # trusted values; interior WHERE clauses are elided in this view.
    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package='%(package)s'
               AND fi.location = l.id
               AND l.component = c.id
               AND su.suite_name %(suitename)s
          ORDER BY b.version DESC"""

    return session.execute(sql % {'package': package, 'suitename': suitename})

__all__.append('get_binary_from_name_suite')
def get_binary_components(package, suitename, arch, session=None):
    # Check for packages that have moved from one component to another
    # Returns the component name(s) a binary lives in for a suite/arch.
    # NOTE(review): trailing join condition(s) and closing of the query string
    # are elided in this view.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
    WHERE b.package=:package AND s.suite_name=:suitename
      AND (a.arch_string = :arch OR a.arch_string = 'all')
      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
      AND f.location = l.id
      AND l.component = c.id"""

    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)

__all__.append('get_binary_components')
416 ################################################################################
class BinaryACL(object):
    """ORM class for the binary_acl table."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')
427 ################################################################################
class BinaryACLMap(object):
    """ORM class for the binary_acl_map table."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')
438 ################################################################################
443 ArchiveDir "%(archivepath)s";
444 OverrideDir "/srv/ftp.debian.org/scripts/override/";
445 CacheDir "/srv/ftp.debian.org/database/";
450 Packages::Compress ". bzip2 gzip";
451 Sources::Compress ". bzip2 gzip";
456 bindirectory "incoming"
461 BinOverride "override.sid.all3";
462 BinCacheDB "packages-accepted.db";
464 FileList "%(filelist)s";
467 Packages::Extensions ".deb .udeb";
470 bindirectory "incoming/"
473 BinOverride "override.sid.all3";
474 SrcOverride "override.sid.all3.src";
475 FileList "%(filelist)s";
class BuildQueue(object):
    """ORM class for a build queue (e.g. buildd incoming); generates
    apt-ftparchive metadata for its directory and expires old files."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        """Regenerate Packages/Sources/Release for this queue directory.

        Skipped unless generate_metadata is set or force is True.  Shells out
        to apt-ftparchive and gpg; cwd is changed during the run."""
        # Do we write out metafiles?
        if not (force or self.generate_metadata):
            # NOTE(review): early "return" is elided in this view.

        session = DBConn().session().object_session(self)

        # Temp file descriptors/names; cleaned up in an elided finally block.
        fl_fd = fl_name = ac_fd = ac_name = None

        # All known non-source architectures, for the Release file.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        # ("newer" = still within this queue's stay_of_execution window).
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        # NOTE(review): the "for n in newer:" loop header and os.close(fl_fd)
        # are elided in this view.
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'filelist': fl_name})

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        # NOTE(review): rest of comment and the guarding try/except are elided.
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Sign the Release file if a signing key is configured.
        # NOTE(review): "cnf = Config()" and the signingkey guard are elided.
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files
        # NOTE(review): the finally block closing/unlinking the temp files and
        # restoring startdir is elided in this view.

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # NOTE(review): the "for o in older:" loop, dryrun branch and
        # try/except around the unlink are elided in this view.
        Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        Logger.log(["I: Removing %s from the queue" % o.fullpath])
        os.unlink(o.fullpath)
        # If it wasn't there, don't worry
        if e.errno == ENOENT:
        # TODO: Replace with proper logging call
        Logger.log(["E: Could not remove %s" % o.fullpath])

        # Remove apt-ftparchive output files no longer backed by a DB row.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
                # NOTE(review): "continue" and the try: header are elided.
                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool.  Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                   # In this case, update the BuildQueueFile entry so we
                   # don't remove it too early
                   f.lastused = datetime.now()
                   DBConn().session().object_session(poolfile).add(f)
                   # NOTE(review): an early "return f" is elided in this view.

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # NOTE(review): the try: and "if self.copy_files:" guards around the
        # copy/symlink alternatives are elided in this view.
        # We need to copy instead of symlink
        utils.copy(targetpath, queuepath)
        # NULL in the fileid field implies a copy
        os.symlink(targetpath, queuepath)
        qf.fileid = poolfile.file_id

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)
        # NOTE(review): "return qf" and the OSError handler are elided.

__all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not
    exist (NOTE(review): remainder of this sentence elided in this view).

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: BuildQueue object for the given queue
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)
    # NOTE(review): try/return lines elided; presumably "ret = q.one()" with
    # this handler returning None.
    except NoResultFound:

__all__.append('get_build_queue')
696 ################################################################################
class BuildQueueFile(object):
    """ORM class for a file held in a build queue directory."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
        # NOTE(review): the line below belongs to an elided fullpath property.
        return os.path.join(self.buildqueue.path, self.filename)

__all__.append('BuildQueueFile')
712 ################################################################################
class ChangePendingBinary(object):
    """ORM class for a pending binary attached to a .changes upload."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')
723 ################################################################################
class ChangePendingFile(object):
    """ORM class for a pending file attached to a .changes upload."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<ChangePendingFile %s>' % self.change_pending_file_id

__all__.append('ChangePendingFile')
734 ################################################################################
class ChangePendingSource(object):
    """ORM class for a pending source attached to a .changes upload."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')
745 ################################################################################
class Component(object):
    """ORM class for the component table (main/contrib/non-free)."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""

    def __eq__(self, val):
        # Allow direct comparison against a plain component-name string.
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<Component %s>' % self.component_name

__all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @return: the database id for the given component
    """
    # Component names are stored lower-case.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)
    # NOTE(review): try/return lines elided; presumably "ret = q.one()" with
    # this handler returning None.
    except NoResultFound:

__all__.append('get_component')
792 ################################################################################
class DBConfig(object):
    """ORM class for the config table (key/value settings in the DB)."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
803 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given component
    """
    q = session.query(ContentFilename).filter_by(filename=filename)
    # NOTE(review): the "try:" header is elided in this view.
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not present yet: insert a new row and return its id.
        cf = ContentFilename()
        cf.filename = filename
        # NOTE(review): "session.add(cf)" is elided in this view.
        session.commit_or_flush()
        ret = cf.cafilename_id
    # NOTE(review): "return ret" is elided in this view.

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: ResultsProxy object set up to return tuples of (filename, section,
    """

    # find me all of the contents for a given suite
    # NOTE(review): the selected-column list between SELECT and FROM is
    # partially elided in this view.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                    FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                    JOIN content_file_names n ON (c.filename=n.id)
                    JOIN binaries b ON (b.id=c.binary_pkg)
                    JOIN override o ON (o.package=b.package)
                    JOIN section s ON (s.id=o.section)
                    WHERE o.suite = :suiteid AND o.type = :overridetypeid
                    AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
889 ################################################################################
class ContentFilepath(object):
    """ORM class for a directory path row used by package contents."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)
    # NOTE(review): the "try:" header is elided in this view.
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not present yet: insert a new row and return its id.
        cf = ContentFilepath()
        cf.filepath = filepath
        # NOTE(review): "session.add(cf)" is elided in this view.
        session.commit_or_flush()
        ret = cf.cafilepath_id
    # NOTE(review): "return ret" is elided in this view.

__all__.append('get_or_set_contents_path_id')
934 ################################################################################
class ContentAssociation(object):
    """ORM class linking a binary to a contents filename/path pair."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """
    # Track whether we own the session; only then do we commit/rollback/close.
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        def generate_path_dicts():
            # Contents paths are stored relative: strip any leading './'.
            for fullpath in fullpaths:
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]
                # BUGFIX: key was misspelled 'fulename', so the :filename
                # bind parameter in the INSERT below was never populated.
                yield {'filename':fullpath, 'id': binary_id }

        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                         generate_path_dicts() )

        if privatetrans:
            session.commit()
            session.close()

        return True

    except Exception:
        # Report but never propagate; callers test the boolean result.
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
# Export the contents-insertion helper.
__all__.append('insert_content_paths')
999 ################################################################################
class DSCFile(object):
    """ORM class linking a source package to a file listed in its .dsc."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)
    # NOTE(review): the "return q.all()" tail is elided in this view.

__all__.append('get_dscfiles')
1043 ################################################################################
class PoolFile(object):
    """ORM class for a file stored in the archive pool (files table)."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<PoolFile %s>' % self.filename
        # NOTE(review): the line below belongs to an elided fullpath property.
        return os.path.join(self.location.path, self.filename)

__all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean or None], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @return: Tuple of length 2.
        - If more than one file found with that name: (C{None}, C{None})
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    q = session.query(PoolFile).filter_by(filename=filename)
    q = q.join(Location).filter_by(location_id=location_id)

    # NOTE(review): the try/except driving the outcomes above is elided in
    # this view; only the mismatch test survives.
        if obj.md5sum != md5sum or obj.filesize != int(filesize):

__all__.append('check_poolfile')
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    q = session.query(PoolFile).filter_by(file_id=file_id)
    # NOTE(review): try/return lines elided; presumably "ret = q.one()" with
    # this handler returning None.
    except NoResultFound:

__all__.append('get_poolfile_by_id')
def get_poolfile_by_name(filename, location_id=None, session=None):
    """
    Returns an array of PoolFile objects for the given filename and
    (optionally) location_id

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in (optional)

    @return: array of PoolFile objects
    """
    q = session.query(PoolFile).filter_by(filename=filename)

    if location_id is not None:
        q = q.join(Location).filter_by(location_id=location_id)
    # NOTE(review): the "return q.all()" tail is elided in this view.

__all__.append('get_poolfile_by_name')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
    # NOTE(review): the "return q.all()" tail is elided in this view.

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data
    (NOTE(review): requires keys "size", "md5sum", "sha1sum", "sha256sum")

    @type location_id: int
    @param location_id: database id of the location

    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    # NOTE(review): the flush call and "return poolfile" are elided.

__all__.append('add_poolfile')
1205 ################################################################################
class Fingerprint(object):
    """ORM class for a GPG key fingerprint row."""
    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<Fingerprint %s>' % self.fingerprint

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
    # NOTE(review): try/return lines elided; presumably "ret = q.one()" with
    # this handler returning None.
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
    # NOTE(review): the "try: ret = q.one()" lines are elided in this view.
    except NoResultFound:
        # Not present yet: insert and flush/commit so the row gets an id.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
    # NOTE(review): "return ret" is elided in this view.

__all__.append('get_or_set_fingerprint')
1278 ################################################################################
1280 # Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry's cn/mn/sn attributes,
    skipping empty values and placeholder dashes."""
    # NOTE(review): accumulator setup and per-key extraction lines are elided
    # in this view (presumably "ret = entry.get(k)" and "name.append(...)").
    for k in ["cn", "mn", "sn"]:
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1289 ################################################################################
class Keyring(object):
    """ORM class for a keyring; can parse gpg keyring output and sync
    uid information with LDAP or the uid table."""
    # Command template; '%s' is filled with the keyring path in load_keys().
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        """No-op constructor; SQLAlchemy's mapper populates attributes."""
        # NOTE(review): the line below belongs to an elided __repr__ method.
        return '<Keyring %s>' % self.keyring_name
1304 def de_escape_gpg_str(self, txt):
1305 esclist = re.split(r'(\\x..)', txt)
1306 for x in range(1,len(esclist),2):
1307 esclist[x] = "%c" % (int(esclist[x][2:],16))
1308 return "".join(esclist)
    def load_keys(self, keyring):
        """Parse gpg --with-colons output for C{keyring}, populating
        self.keys (per-key name/email/aliases/fingerprints) and
        self.fpr_lookup (fingerprint -> key id)."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        # NOTE(review): initialisation of "key"/"signingkey" and self.keys /
        # self.fpr_lookup is elided in this view.
        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # NOTE(review): "key = field[4]" (and similar) elided here.
                (name, addr) = email.Utils.parseaddr(field[9])
                name = re.sub(r"\s*[(].*[)]", "", name)
                if name == "" or addr == "" or "@" not in addr:
                    # No usable uid on this key.
                    addr = "invalid-uid"
                name = self.de_escape_gpg_str(name)
                self.keys[key] = {"email": addr}
                # NOTE(review): a guard like "if name != '':" is elided here.
                self.keys[key]["name"] = name
                self.keys[key]["aliases"] = [name]
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey capable of signing?
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = email.Utils.parseaddr(field[9])
                if name and name not in self.keys[key]["aliases"]:
                    self.keys[key]["aliases"].append(name)
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        # Query LDAP for accounts (valid gid + key fingerprint) and attach
        # their login/name to the matching keys loaded by load_keys().
        # Returns (byname, byuid) lookup dicts.
        # NOTE(review): the excerpt elides `import ldap`, the Config()
        # lookup, byuid/byname initialisation and the loop header over the
        # search results -- confirm against the full file.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")   # anonymous bind
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # (loop over the LDAP search results; header elided in excerpt)
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]

        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                # (body elided in excerpt -- presumably `continue`)
            self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        # Ensure a Uid row exists for every key in the keyring; the uid is
        # generated from *format* (a "%s" template) and the key's email.
        # Returns (byname, byuid) lookup dicts.
        # NOTE(review): the excerpt elides byuid/byname initialisation,
        # the any-invalid bookkeeping and the else-branch structure of the
        # loop -- confirm against the full file.
        for x in self.keys.keys():
            if self.keys[x]["email"] == "invalid-uid":
                # Key had no usable email address
                self.keys[x]["uid"] = format % "invalid-uid"
            # (else branch in the full source:)
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # Shared fallback entry for keys without a valid email address
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1407 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # One-or-None lookup pattern used throughout this module.
    # (Restores the try/return scaffolding missing around the orphaned
    # `except NoResultFound:` clause in this file.)
    try:
        return q.one()
    except NoResultFound:
        return None
1429 __all__.append('get_keyring')
1431 ################################################################################
class KeyringACLMap(object):
    """ORM-mapped row of the keyring ACL map table (mapped in DBConn)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1440 __all__.append('KeyringACLMap')
1442 ################################################################################
class DBChange(object):
    """ORM-mapped row of the changes table (one .changes upload)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the excerpt elides the `pass` body here and the
        # `def __repr__(self):` header that owns the return below.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this changes entry from any policy queue."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        # NOTE(review): the actual deletion statements are elided in this
        # excerpt -- confirm against the full file.

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1464 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return:  DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
1489 __all__.append('get_dbchange')
1491 ################################################################################
class Location(object):
    """ORM-mapped row of the location table (an archive pool path)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<Location %s (%s)>' % (self.path, self.location_id)
1500 __all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
1534 __all__.append('get_location')
1536 ################################################################################
class Maintainer(object):
    """ORM-mapped row of the maintainer table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)

    def get_split_maintainer(self):
        """Return (rfc822, rfc2047, name, email) for this maintainer,
        or four empty strings when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1551 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)
    # Get-or-create pattern (restores the try/ret/return scaffolding
    # missing around the orphaned `except NoResultFound:` in this file).
    try:
        ret = q.one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret
1585 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    # Query.get() returns None for an unknown primary key, matching the
    # documented None-on-invalid behaviour.
    return session.query(Maintainer).get(maintainer_id)
1602 __all__.append('get_maintainer')
1604 ################################################################################
class NewComment(object):
    """ORM-mapped row of the new_comments table (NEW-queue comments)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1613 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # An existence check only needs to see a single row.
    q = q.limit(1)
    return bool(q.count() > 0)
1640 __all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)
    # Each filter is optional; None means "no restriction".
    if package is not None:
        q = q.filter_by(package=package)
    if version is not None:
        q = q.filter_by(version=version)
    if comment_id is not None:
        q = q.filter_by(comment_id=comment_id)

    # (Restores the missing `return q.all()` -- the excerpt's query was
    # built but never returned.)
    return q.all()
1672 __all__.append('get_new_comments')
1674 ################################################################################
class Override(object):
    """ORM-mapped row of the override table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<Override %s (%s)>' % (self.package, self.suite_id)
1683 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each restriction accepts either a single name or a list of names.
    if suite is not None:
        if not isinstance(suite, list):
            suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list):
            component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list):
            overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # (Restores the missing `return q.all()` -- the excerpt's query was
    # built but never returned.)
    return q.all()
1730 __all__.append('get_override')
1733 ################################################################################
class OverrideType(object):
    """ORM-mapped row of the override_type table (deb/udeb/dsc)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<OverrideType %s>' % self.overridetype
1742 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
1767 __all__.append('get_override_type')
1769 ################################################################################
class DebContents(object):
    """ORM-mapped row of the deb_contents table: one (package, path) pair."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Typo fix: the repr string previously read '<DebConetnts ...>'.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1778 __all__.append('DebContents')
class UdebContents(object):
    """ORM-mapped row of the udeb_contents table: one (package, path) pair."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Typo fix: the repr string previously read '<UdebConetnts ...>'.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1788 __all__.append('UdebContents')
class PendingBinContents(object):
    """Temporary contents listing for a not-yet-accepted binary package."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<PendingBinContents %s>' % self.contents_id
1797 __all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    # NOTE(review): the rest of the parameter list (is_udeb, fullpaths,
    # session=None) is elided in this excerpt.
    """
    Make sure given paths are temporarily associated with given
    package

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    privatetrans = False
    # NOTE(review): in the full source this is presumably guarded by
    # `if session is None:` (which would also set privatetrans) -- the
    # guard and the surrounding try: are elided in this excerpt.
    session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)
    # (deletion call elided in excerpt)

    for fullpath in fullpaths:

        # Normalise "./path" entries to "path"
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id

        # deb/udeb distinction is encoded numerically
        pca.type = 8 # gross
        # (else branch in the full source:)
        pca.type = 7 # also gross

    # Only commit if we set up the session ourself

    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
1872 __all__.append('insert_pending_content_paths')
1874 ################################################################################
class PolicyQueue(object):
    """ORM-mapped row of the policy_queue table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<PolicyQueue %s>' % self.queue_name
1883 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
1908 __all__.append('get_policy_queue')
1910 ################################################################################
class Priority(object):
    """ORM-mapped row of the priority table."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Convenience: a Priority compares equal to its name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
1931 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
1956 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    # (Restores the dict initialisation, loop header and return that are
    # elided in this file around the visible query/assignment lines.)
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret
1978 __all__.append('get_priorities')
1980 ################################################################################
class Section(object):
    """ORM-mapped row of the section table."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Convenience: a Section compares equal to its name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<Section %s>' % self.section
2001 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
2026 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    # (Restores the dict initialisation, loop header and return that are
    # elided in this file around the visible query/assignment lines.)
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret
2048 __all__.append('get_sections')
2050 ################################################################################
class DBSource(object):
    """ORM-mapped row of the source table (one source package version)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<DBSource %s (%s)>' % (self.source, self.version)
2059 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument `suites=["any"]` is shared
    # across calls; safe only while no caller mutates it.
    # NOTE(review): several lines are elided in this excerpt (result
    # bookkeeping, the suite-expansion loop body and the final returns)
    # -- confirm control flow against the full file.

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source)

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # (loop expanding the suite set `s` via the mappings:)
        if x[1] in s and x[0] not in s:
            # (appends the mapped suite to `s` in the full source)

        q = q.join(SrcAssociation).join(Suite)
        q = q.filter(Suite.suite_name.in_(s))

        # Reduce the query results to a list of version numbers
        ql = [ j.version for j in q.all() ]

        # Try (1) exact match
        if source_version in ql:
            # (match found -- handled in elided lines)

        # Try (2) bin-only NMU: strip the binary-NMU suffix and re-check
        from daklib.regexes import re_bin_only_nmu
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:
            # (match found -- handled in elided lines)

    # No source found so return not ok
2127 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()
2143 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # (Restores the missing `return q.all()` -- the excerpt's query was
    # built but never returned.)
    return q.all()
2178 __all__.append('get_sources_from_name')
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """

    q = session.query(SrcAssociation)
    q = q.join('source').filter_by(source=source)
    q = q.join('suite').filter_by(suite_name=suite)

    # One-or-None lookup (restores the try scaffolding missing around the
    # orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one().source
    except NoResultFound:
        return None
2208 __all__.append('get_source_in_suite')
2210 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Record an accepted .dsc (source upload) in the database: the DBSource
    # row, its suite associations, pool files, dsc_files entries and the
    # src_uploaders list.
    # NOTE(review): several lines are elided in this excerpt (object
    # constructions such as the DBSource/DscFile/SrcUploader instances,
    # session.add()/flush() calls and some loop/branch scaffolding) --
    # confirm the exact control flow against the full file.
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite listed in the .changes
    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        # (else branch in the full source: known files id, look it up)
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].split(","):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            # Skip duplicate uploader entries
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs
2325 __all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    # NOTE(review): several lines are elided in this excerpt (the DBBinary
    # construction bound to `bin`, session.add()/flush() calls and the
    # final return/poolfile value) -- confirm against the full file.
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find poolfile id
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
    # (else branch in the full source: create a fresh poolfile)
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find source id: a binary must map to exactly one source
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, bin.architecture.arch_string,
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2389 __all__.append('add_deb_to_db')
2391 ################################################################################
class SourceACL(object):
    """ORM-mapped row of the source_acl table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<SourceACL %s>' % self.source_acl_id
2400 __all__.append('SourceACL')
2402 ################################################################################
class SrcAssociation(object):
    """ORM-mapped row of the src_associations table (source <-> suite)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
2411 __all__.append('SrcAssociation')
2413 ################################################################################
class SrcFormat(object):
    """ORM-mapped row of the src_format table (e.g. '1.0', '3.0 (quilt)')."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<SrcFormat %s>' % (self.format_name)
2422 __all__.append('SrcFormat')
2424 ################################################################################
class SrcUploader(object):
    """ORM-mapped row of the src_uploaders table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<SrcUploader %s>' % self.uploader_id
2433 __all__.append('SrcUploader')
2435 ################################################################################
# (display name, Suite attribute) pairs used by Suite.details().
# NOTE(review): verify this list is complete against the suite table
# columns -- the excerpt may elide an entry after 'Origin'.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('CopyDotDak', 'copydotdak'),
                 ('CommentsDir', 'commentsdir'),
                 ('OverrideSuite', 'overridesuite'),
                 ('ChangelogBase', 'changelogbase')]
class Suite(object):
    """ORM-mapped row of the suite table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        # Convenience: a Suite compares equal to its name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """Return a human-readable summary of this suite: one
        "Name: value" line per SUITE_FIELDS entry that is not None."""
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)
2485 __all__.append('Suite')
def get_suite_architecture(suite, architecture, session=None):
    """
    Returns a SuiteArchitecture object given C{suite} and C{architecture}
    or None if it doesn't exist

    @type suite: str
    @param suite: Suite name to search for

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: SuiteArchitecture
    @return: the SuiteArchitecture object or None
    """

    q = session.query(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture)
    q = q.join(Suite).filter_by(suite_name=suite)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
2516 __all__.append('get_suite_architecture')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    # One-or-None lookup (restores the try/return scaffolding missing
    # around the orphaned `except NoResultFound:` clause in this file).
    try:
        return q.one()
    except NoResultFound:
        return None
2541 __all__.append('get_suite')
2543 ################################################################################
class SuiteArchitecture(object):
    """ORM-mapped row of the suite_architectures table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<SuiteArchitecture (%s, %s)>' % (self.suite_id, self.arch_id)
2552 __all__.append('SuiteArchitecture')
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    q = session.query(Architecture)
    q = q.join(SuiteArchitecture)
    q = q.join(Suite).filter_by(suite_name=suite)

    # (Restores the skipsrc/skipall guards and the final return that are
    # missing in this file -- without the guards both filters would be
    # applied unconditionally, contradicting the documented parameters.)
    if skipsrc:
        q = q.filter(Architecture.arch_string != 'source')

    if skipall:
        q = q.filter(Architecture.arch_string != 'all')

    q = q.order_by('arch_string')

    return q.all()
2592 __all__.append('get_suite_architectures')
2594 ################################################################################
class SuiteSrcFormat(object):
    """ORM-mapped row of the suite_src_formats table."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # (Restores the missing `__repr__` header and `__init__` body
        # around the orphaned return statement in this file.)
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2603 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # (Restores the missing `return q.all()` -- the excerpt's query was
    # built but never returned.)
    return q.all()
2628 __all__.append('get_suite_src_formats')
2630 ################################################################################
2633 def __init__(self, *args, **kwargs):
2636 def __eq__(self, val):
2637 if isinstance(val, str):
2638 return (self.uid == val)
2639 # This signals to use the normal comparison operator
2640 return NotImplemented
2642 def __ne__(self, val):
2643 if isinstance(val, str):
2644 return (self.uid != val)
2645 # This signals to use the normal comparison operator
2646 return NotImplemented
2649 return '<Uid %s (%s)>' % (self.uid, self.name)
2651 __all__.append('Uid')
@session_wrapper
def add_database_user(uidname, session=None):
    """
    Adds a database user

    @type uidname: string
    @param uidname: The uid of the user to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: None
    @return: Nothing; the database user is created as a side effect.
    """

    # NOTE(review): CREATE USER is DDL; confirm the backend actually binds
    # the :uid parameter here rather than requiring string interpolation.
    session.execute("CREATE USER :uid", {'uid': uidname})
    session.commit_or_flush()
2673 __all__.append('add_database_user')
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # No such uid yet: insert a new row and return that instead.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
2707 __all__.append('get_or_set_uid')
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid attached to the given key fingerprint.

    @type fpr: string
    @param fpr: Fingerprint to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: Uid or None
    @return: the matching Uid object, or None if the fingerprint is unknown
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
2719 __all__.append('get_uid_from_fingerprint')
2721 ################################################################################
class UploadBlock(object):
    """
    A block on uploads of a given source package.

    Attributes (source, upload_block_id, fingerprint, uid) are attached by
    the SQLAlchemy mapper in DBConn.__setupmappers(); nothing is set here.
    """
    def __init__(self, *args, **kwargs):
        # Intentionally empty: rows are populated by the ORM, not by hand.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2730 __all__.append('UploadBlock')
2732 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # Borg pattern: every DBConn instance shares this state, so the engine,
    # metadata and mappers are only set up once per process.
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # 'in' instead of the deprecated dict.has_key() (gone in py3k);
            # behaviour is identical.
            self.debug = 'debug' in kwargs
            # Establish the connection and set up tables/mappers exactly once.
            self.__createconn()
2748 def __setuptables(self):
2758 'build_queue_files',
2761 'changes_pending_binaries',
2762 'changes_pending_files',
2763 'changes_pending_files_map',
2764 'changes_pending_source',
2765 'changes_pending_source_files',
2766 'changes_pool_files',
2779 'pending_bin_contents',
2789 'suite_architectures',
2790 'suite_src_formats',
2791 'suite_build_queue_copy',
2797 for table_name in tables:
2798 table = Table(table_name, self.db_meta, autoload=True)
2799 setattr(self, 'tbl_%s' % table_name, table)
    def __setupmappers(self):
        """
        Classical SQLAlchemy mapping: bind each ORM class defined in this
        module to its reflected table (set up in __setuptables), renaming
        raw columns (e.g. 'id' -> 'arch_id') and declaring relations.
        """
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 component=self.tbl_deb_contents.c.component,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 component=self.tbl_udeb_contents.c.component,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 # explicit join: 'bin' column name differs from the id attribute
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location)))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        # .changes files: pool file links, pending-file links and the policy
        # queue the upload is currently sitting in.
        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 # maintainer and changedby both point at the
                                 # maintainer table, hence explicit primaryjoins
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources"),
                                 files = relation(KnownChangePendingFile, backref="changesfile")))

        # NOTE(review): KnownChangePendingFile maps the same table as
        # ChangePendingFile above -- confirm this double mapping is intentional.
        mapper(KnownChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(known_change_pending_file_id = self.tbl_changes_pending_files.c.id))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 srcassociations = relation(SrcAssociation,
                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcAssociation, self.tbl_src_associations,
               properties = dict(sa_id = self.tbl_src_associations.c.id,
                                 suite_id = self.tbl_src_associations.c.suite,
                                 suite = relation(Suite),
                                 source_id = self.tbl_src_associations.c.source,
                                 source = relation(DBSource)))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteArchitecture, self.tbl_suite_architectures,
               properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
                                 suite = relation(Suite, backref='suitearchitectures'),
                                 arch_id = self.tbl_suite_architectures.c.architecture,
                                 architecture = relation(Architecture)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))
3064 ## Connection functions
3065 def __createconn(self):
3066 from config import Config
3070 connstr = "postgres://%s" % cnf["DB::Host"]
3071 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3072 connstr += ":%s" % cnf["DB::Port"]
3073 connstr += "/%s" % cnf["DB::Name"]
3076 connstr = "postgres:///%s" % cnf["DB::Name"]
3077 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3078 connstr += "?port=%s" % cnf["DB::Port"]
3080 self.db_pg = create_engine(connstr, echo=self.debug)
3081 self.db_meta = MetaData()
3082 self.db_meta.bind = self.db_pg
3083 self.db_smaker = sessionmaker(bind=self.db_pg,
3087 self.__setuptables()
3088 self.__setupmappers()
3091 return self.db_smaker()
3093 __all__.append('DBConn')