2 # Generate two rss feeds for a directory with .changes file
4 # License: GPL v2 or later
5 # Author: Filippo Giunchedi <filippo@debian.org>
15 from optparse import OptionParser
16 from datetime import datetime
21 # starting with squeeze
22 from debian.deb822 import Changes
25 from debian_bundle.deb822 import Changes
# Output feed filenames, written into the --outdir directory.
inrss_filename = "NEW_in.rss"
outrss_filename = "NEW_out.rss"
# Pickled Status object persisted across runs (lives in --datadir).
db_filename = "status.db"
parser = OptionParser()
# NOTE: set_defaults() bypasses optparse's type="int" conversion, so the
# default must already be an int.  The previous string "30" would survive
# unconverted when -m is not given and later be used as a list slice bound
# in purge_old_items(), raising TypeError.
parser.set_defaults(queuedir="queue", outdir="out", datadir="status",
        logdir="log", max_entries=30)

parser.add_option("-q", "--queuedir", dest="queuedir",
        help="The queue dir (%default)")
parser.add_option("-o", "--outdir", dest="outdir",
        help="The output directory (%default)")
parser.add_option("-d", "--datadir", dest="datadir",
        help="The data dir (%default)")
parser.add_option("-l", "--logdir", dest="logdir",
        help="The ACCEPT/REJECT dak log dir (%default)")
parser.add_option("-m", "--max-entries", dest="max_entries", type="int",
        help="Max number of entries to keep (%default)")
        # Feed of packages entering the NEW queue (written to NEW_in.rss).
        self.feed_in = PyRSS2Gen.RSS2(
            title = "Packages entering NEW",
            link = "http://ftp-master.debian.org/new.html",
            description = "Debian packages entering the NEW queue" )

        # Feed of packages leaving the NEW queue (written to NEW_out.rss).
        self.feed_out = PyRSS2Gen.RSS2(
            title = "Packages leaving NEW",
            link = "http://ftp-master.debian.org/new.html",
            description = "Debian packages leaving the NEW queue" )
def purge_old_items(feed, max):
    """ Purge RSSItem from feed, no more than max.

    Truncates feed.items in place, keeping the first max entries
    (newest first, since add_rss_item inserts at the head).
    Return False when the feed has no items, True after trimming."""
    if feed.items is None or len(feed.items) == 0:
        return False

    feed.items = feed.items[:max]
    return True
def parse_changes(fname):
    """ Parse a .changes file named fname.

    Return {fname: parsed} when all interesting fields are present,
    None otherwise."""
    # Close the file once parsed instead of leaking the descriptor.
    # NOTE(review): assumes deb822.Changes consumes the stream eagerly in
    # its constructor — confirm against the python-debian version in use.
    with open(fname) as changes_file:
        m = Changes(changes_file)

    wanted_fields = set(['Source', 'Version', 'Architecture', 'Distribution',
                         'Date', 'Maintainer', 'Description', 'Changes'])

    # Skip incomplete/unusual .changes files entirely.
    if not set(m.keys()).issuperset(wanted_fields):
        return None

    return {os.path.basename(fname): m}
def parse_queuedir(dir):
    """ Parse dir for .changes files.

    Return a dictionary {filename: parsed_file}, or None when dir
    does not exist."""
    if not os.path.exists(dir):
        return None

    res = {}
    for fname in os.listdir(dir):
        if not fname.endswith(".changes"):
            continue

        parsed = parse_changes(os.path.join(dir, fname))
        # parse_changes() returns None for incomplete files; skip those.
        if parsed:
            res.update(parsed)

    return res
def parse_leave_reason(fname):
    """ Parse a dak log file fname for ACCEPT/REJECT reason from process-new.

    Return a dictionary {filename: reason}, or None when the log
    cannot be read."""
    # Raw string so the backslash escapes reach the regex engine untouched
    # (previously relied on Python passing unknown escapes through).
    reason_re = re.compile(r".+\|process-new\|.+\|NEW (ACCEPT|REJECT): (\S+)")

    res = {}
    try:
        # 'with' guarantees the log is closed even if a read fails mid-way.
        with open(fname) as f:
            for line in f:
                m = reason_re.search(line)
                if m:
                    # group(2) is the .changes filename, group(1) the verdict.
                    res[m.group(2)] = m.group(1)
    except IOError as e:
        sys.stderr.write("Can't open %s: %s\n" % (fname, e))
        return None

    return res
def add_rss_item(status, msg, direction):
    """Prepend an RSSItem built from .changes data msg to one of status' feeds.

    direction is "in" (package entered NEW) or "out" (package left NEW);
    any other value is ignored and False is returned."""
    if direction == "in":
        feed = status.feed_in
        title = "%s %s entered NEW" % (msg['Source'], msg['Version'])
        pubdate = msg['Date']
    elif direction == "out":
        feed = status.feed_out
        # 'Leave-Reason' is attached by update_feeds() from the dak log,
        # when one could be found.  has_key() is deprecated: use 'in'.
        if 'Leave-Reason' in msg:
            title = "%s %s left NEW (%s)" % (msg['Source'], msg['Version'],
                                             msg['Leave-Reason'])
        else:
            title = "%s %s left NEW" % (msg['Source'], msg['Version'])
        # The .changes Date is when the upload entered NEW, so use "now"
        # as the publication date for the departure.
        pubdate = datetime.utcnow()
    else:
        # Unknown direction: previously 'feed' would be unbound below.
        return False

    description = "<pre>Description: %s\nChanges: %s\n</pre>" % \
            (cgi.escape(msg['Description']),
             cgi.escape(msg['Changes']))

    link = "http://ftp-master.debian.org/new/%s_%s.html" % \
            (msg['Source'], msg['Version'])

    # Newest entries first.
    feed.items.insert(0,
        PyRSS2Gen.RSSItem(
            title,
            pubDate = pubdate,
            description = description,
            author = cgi.escape(msg['Maintainer']),
            link = link))
def update_feeds(curqueue, status, settings):
    """Diff the current queue against the persisted one and update both feeds.

    curqueue is {filename: parsed_changes} for the queue as it is now;
    status.queue is the same mapping from the previous run."""
    # inrss -> append all items in curqueue not in status.queue
    # outrss -> append all items in status.queue not in curqueue

    # Lazily-loaded {filename: ACCEPT/REJECT} map; only parsed when a
    # package actually left the queue.
    leave_reason = None
    # logfile from dak's process-new, one file per month
    reason_log = os.path.join(settings.logdir, time.strftime("%Y-%m"))

    for (name, parsed) in curqueue.items():
        if name not in status.queue:
            # new package
            add_rss_item(status, parsed, "in")

    for (name, parsed) in status.queue.items():
        if name not in curqueue:
            # removed package, try to find out why
            if leave_reason is None:
                leave_reason = parse_leave_reason(reason_log)
            if leave_reason and name in leave_reason:
                parsed['Leave-Reason'] = leave_reason[name]
            add_rss_item(status, parsed, "out")
if __name__ == "__main__":
    (settings, args) = parser.parse_args()

    # Both the output and the data directories must already exist.
    if not os.path.exists(settings.outdir):
        sys.stderr.write("Outdir '%s' does not exists\n" % settings.outdir)
        parser.print_help()
        sys.exit(1)

    if not os.path.exists(settings.datadir):
        sys.stderr.write("Datadir '%s' does not exists\n" % settings.datadir)
        parser.print_help()
        sys.exit(1)

    status_db = os.path.join(settings.datadir, db_filename)
    try:
        # Load the state persisted by the previous run; start fresh when
        # there is none yet.
        status = cPickle.load(open(status_db))
    except IOError:
        status = Status()

    current_queue = parse_queuedir(settings.queuedir)
    if not current_queue:
        sys.stderr.write("Unable to scan queuedir '%s'\n" % settings.queuedir)
        parser.print_help()
        sys.exit(1)

    update_feeds(current_queue, status, settings)

    purge_old_items(status.feed_in, settings.max_entries)
    purge_old_items(status.feed_out, settings.max_entries)

    feed_in_file = os.path.join(settings.outdir, inrss_filename)
    feed_out_file = os.path.join(settings.outdir, outrss_filename)
    try:
        # open() instead of the py2-only file() builtin.
        status.feed_in.write_xml(open(feed_in_file, "w+"), "utf-8")
        status.feed_out.write_xml(open(feed_out_file, "w+"), "utf-8")
    except IOError as why:
        # BUG FIX: write() takes a single string argument; the exception
        # must be %-formatted into the message, not passed as a second
        # argument (that raised TypeError whenever this path fired).
        sys.stderr.write("Unable to write feeds: %s\n" % why)
        sys.exit(1)

    status.queue = current_queue

    try:
        cPickle.dump(status, open(status_db, "w+"))
    except IOError as why:
        sys.stderr.write("Unable to save status: %s\n" % why)
        sys.exit(1)