# Generate two RSS feeds for a directory with .changes files
#
# License: GPL v2 or later
# Author: Filippo Giunchedi <filippo@debian.org>

import html
import os
import pickle
import re
import sys
import time

from datetime import datetime
from optparse import OptionParser

import PyRSS2Gen

from debian.deb822 import Changes

inrss_filename = "NEW_in.rss"
outrss_filename = "NEW_out.rss"
db_filename = "status.db"
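
# status.db keeps a pickled Status object (defined below) between runs, so that
# each run can diff the current queue contents against what the previous run saw.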

parser = OptionParser()
parser.set_defaults(queuedir="queue", outdir="out", datadir="status",
                    logdir="log", max_entries=30)

parser.add_option("-q", "--queuedir", dest="queuedir",
                  help="The queue dir (%default)")
parser.add_option("-o", "--outdir", dest="outdir",
                  help="The output directory (%default)")
parser.add_option("-d", "--datadir", dest="datadir",
                  help="The data dir (%default)")
parser.add_option("-l", "--logdir", dest="logdir",
                  help="The ACCEPT/REJECT dak log dir (%default)")
parser.add_option("-m", "--max-entries", dest="max_entries", type="int",
                  help="Max number of entries to keep (%default)")

class Status:
    """ State carried between runs: the two feeds and the last seen queue. """
    def __init__(self):
        self.feed_in = PyRSS2Gen.RSS2(
                       title = "Packages entering NEW",
                       link = "http://ftp-master.debian.org/new.html",
                       description = "Debian packages entering the NEW queue" )

        self.feed_out = PyRSS2Gen.RSS2(
                       title = "Packages leaving NEW",
                       link = "http://ftp-master.debian.org/new.html",
                       description = "Debian packages leaving the NEW queue" )

        self.queue = {}

def purge_old_items(feed, max_entries):
    """ Trim feed to at most max_entries RSSItems. """
    if feed.items is None or len(feed.items) == 0:
        return False

    feed.items = feed.items[:max_entries]
    return True

def parse_changes(fname):
    """ Parse a .changes file named fname.

    Return {fname: parsed} """

    with open(fname) as f:
        m = Changes(f)

    wanted_fields = set(['Source', 'Version', 'Architecture', 'Distribution',
                         'Date', 'Maintainer', 'Description', 'Changes'])

    if not set(m.keys()).issuperset(wanted_fields):
        return None

    return {os.path.basename(fname): m}
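
# parse_changes() returns a single-entry mapping keyed by the file name, e.g.
# (hypothetical values): {"foo_1.0-1_amd64.changes": <Changes: Source=foo, ...>}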

def parse_queuedir(dir):
    """ Parse dir for .changes files.

    Return a dictionary {filename: parsed_file}"""
    if not os.path.exists(dir):
        return None

    res = {}
    for fname in os.listdir(dir):
        if not fname.endswith(".changes"):
            continue
        parsed = parse_changes(os.path.join(dir, fname))
        if parsed:
            res.update(parsed)
    return res

def parse_leave_reason(fname):
    """ Parse a dak log file fname for ACCEPT/REJECT reason from process-new.

    Return a dictionary {filename: reason}"""
    reason_re = re.compile(r".+\|process-new\|.+\|NEW (ACCEPT|REJECT): (\S+)")

    res = {}
    try:
        with open(fname) as f:
            for l in f:
                m = reason_re.search(l)
                if m:
                    res[m.group(2)] = m.group(1)
    except IOError as e:
        sys.stderr.write("Can't open %s: %s\n" % (fname, e))
    return res
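
# The process-new log lines this matches look roughly like (hypothetical
# example):
#   20240101120000|process-new|dak|NEW ACCEPT: foo_1.0-1_amd64.changes
# which would yield {"foo_1.0-1_amd64.changes": "ACCEPT"}.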

def add_rss_item(status, msg, direction):
    """ Prepend an RSSItem for msg to the 'in' or 'out' feed held by status. """
    if direction == "in":
        feed = status.feed_in
        title = "%s %s entered NEW" % (msg['Source'], msg['Version'])
        pubdate = msg['Date']
    elif direction == "out":
        feed = status.feed_out
        if 'Leave-Reason' in msg:
            title = "%s %s left NEW (%s)" % (msg['Source'], msg['Version'],
                                             msg['Leave-Reason'])
        else:
            title = "%s %s left NEW" % (msg['Source'], msg['Version'])
        pubdate = datetime.utcnow()
    else:
        return False

    description = "<pre>Description: %s\nChanges: %s\n</pre>" % \
            (html.escape(msg['Description']),
             html.escape(msg['Changes']))

    link = "http://ftp-master.debian.org/new/%s_%s.html" % \
            (msg['Source'], msg['Version'])

    feed.items.insert(0,
        PyRSS2Gen.RSSItem(
            title,
            pubDate = pubdate,
            description = description,
            author = html.escape(msg['Maintainer']),
            link = link
        )
    )

def update_feeds(curqueue, status, settings):
    # inrss -> append all items in curqueue not in status.queue
    # outrss -> append all items in status.queue not in curqueue
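    #
    # For example (hypothetical names): if the previous run saw {"a.changes",
    # "b.changes"} and the queue now holds {"b.changes", "c.changes"}, then
    # c.changes gets an "in" item and a.changes an "out" item, while b.changes
    # produces nothing.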

    leave_reason = None
    # logfile from dak's process-new
    reason_log = os.path.join(settings.logdir, time.strftime("%Y-%m"))

    for (name, parsed) in curqueue.items():
        if name not in status.queue:
            # new package
            add_rss_item(status, parsed, "in")

    for (name, parsed) in status.queue.items():
        if name not in curqueue:
            # removed package, try to find out why
            if leave_reason is None:
                leave_reason = parse_leave_reason(reason_log)
            if leave_reason and name in leave_reason:
                parsed['Leave-Reason'] = leave_reason[name]
            add_rss_item(status, parsed, "out")

if __name__ == "__main__":
    (settings, args) = parser.parse_args()

    if not os.path.exists(settings.outdir):
        sys.stderr.write("Outdir '%s' does not exist\n" % settings.outdir)
        sys.exit(1)

    if not os.path.exists(settings.datadir):
        sys.stderr.write("Datadir '%s' does not exist\n" % settings.datadir)
        sys.exit(1)

    status_db = os.path.join(settings.datadir, db_filename)
    try:
        with open(status_db, "rb") as f:
            status = pickle.load(f)
    except (IOError, EOFError, pickle.PickleError):
        status = Status()

    current_queue = parse_queuedir(settings.queuedir)
    if current_queue is None:
        sys.stderr.write("Queuedir '%s' does not exist\n" % settings.queuedir)
        sys.exit(1)

    update_feeds(current_queue, status, settings)

    purge_old_items(status.feed_in, settings.max_entries)
    purge_old_items(status.feed_out, settings.max_entries)

    feed_in_file = os.path.join(settings.outdir, inrss_filename)
    feed_out_file = os.path.join(settings.outdir, outrss_filename)
    try:
        status.feed_in.write_xml(open(feed_in_file, "w", encoding="utf-8"), "utf-8")
        status.feed_out.write_xml(open(feed_out_file, "w", encoding="utf-8"), "utf-8")
    except IOError as why:
        sys.stderr.write("Unable to write feeds: %s\n" % why)

    # remember the current queue contents for the next run's diff
    status.queue = current_queue

    try:
        with open(status_db, "wb") as f:
            pickle.dump(status, f)
    except IOError as why:
        sys.stderr.write("Unable to save status: %s\n" % why)