elif re_source_ext.match(f["type"]):
file_type = "dsc"
else:
+ file_type = f["type"]
utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
# Validate the override type
# Determine what parts in a .changes are NEW
-def determine_new(changes, files, warn=1):
+def determine_new(changes, files, warn=1, session = None):
"""
Determine what parts in a C{changes} file are NEW.
"""
new = {}
- session = DBConn().session()
-
# Build up a list of potentially new things
for name, f in files.items():
# Skip byhand elements
if new[pkg].has_key("othercomponents"):
print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
- session.close()
-
return new
################################################################################
-def check_valid(new):
+def check_valid(new, session = None):
"""
Check if section and priority for NEW packages exist in database.
Additionally does sanity checks:
priority_name = new[pkg]["priority"]
file_type = new[pkg]["type"]
- section = get_section(section_name)
+ section = get_section(section_name, session)
if section is None:
new[pkg]["section id"] = -1
else:
new[pkg]["section id"] = section.section_id
- priority = get_priority(priority_name)
+ priority = get_priority(priority_name, session)
if priority is None:
new[pkg]["priority id"] = -1
else:
# Check the .changes is non-empty
if not self.pkg.files:
- self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
+ self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
return False
# Changes was syntactically valid even if we'll reject
architecture = control.Find("Architecture")
upload_suite = self.pkg.changes["distribution"].keys()[0]
- if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
- and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
+ if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
+ and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
self.rejects.append("Unknown architecture '%s'." % (architecture))
# Ensure the architecture of the .deb is one of the ones
entry["new"] = 1
else:
dsc_file_exists = False
- for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
+ for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
if cnf.has_key("Dir::Queue::%s" % (myq)):
if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
dsc_file_exists = True
location = cnf["Dir::Pool"]
l = get_location(location, entry["component"], session=session)
if l is None:
- self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %)" % entry["component"])
+ self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
entry["location id"] = -1
else:
entry["location id"] = l.location_id
# Make sure that our source object is up-to-date
session.expire(source)
- # Add changelog information to the database
- self.store_changelog()
+ # Add changelog information to the database
+ self.store_changelog()
# Install the files into the pool
for newfile, entry in self.pkg.files.items():
return too_new
def store_changelog(self):
    """
    Store the changelog of this upload in the database.

    Binary-only uploads are skipped unless they are bin-NMUs (detected by
    matching the version against re_bin_only_nmu), because the changelog
    already exists from the corresponding source upload.  If the upload
    (identified by source, version and the joined architecture list)
    already has a non-zero changelog_id in the changes table, nothing is
    done.  Otherwise the changelog text is inserted into changelogs_text
    and the returned id is linked to the matching changes row.

    Commits the session it opens; returns None in all cases.
    """

    # Skip binary-only upload if it is not a bin-NMU: the source upload
    # already carried this changelog into the database.
    if 'source' not in self.pkg.changes['architecture']:
        from daklib.regexes import re_bin_only_nmu
        if not re_bin_only_nmu.search(self.pkg.changes['version']):
            return

    session = DBConn().session()

    # Check if this upload already has a changelog entry; changelog_id 0
    # is the "not yet stored" sentinel.
    query = """SELECT changelog_id FROM changes WHERE source = :source
               AND version = :version AND architecture = :architecture AND changelog_id != 0"""
    if session.execute(query, {'source': self.pkg.changes['source'], \
                               'version': self.pkg.changes['version'], \
                               'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
        session.commit()
        return

    # Add current changelog text into changelogs_text table, return created ID
    query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
    ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

    # Link ID to the upload available in changes table; match on the exact
    # architecture string so bin-NMU rows are not confused with source rows.
    query = """UPDATE changes SET changelog_id = :id WHERE source = :source
               AND version = :version AND architecture = :architecture"""
    session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                            'version': self.pkg.changes['version'], \
                            'architecture': " ".join(self.pkg.changes['architecture'].keys())})
    session.commit()