Let's make sure getUrls doesn't return duplicates.

This commit is contained in:
Jeremy Fincher 2004-07-28 06:02:09 +00:00
parent d594232c5c
commit 66bad99eaa

View File

@@ -41,6 +41,7 @@ import supybot.plugins as plugins
 import os
 import re
+import sets
 import time
 import getopt
 import urllib2
@@ -119,10 +120,12 @@ class URLDB(object):
                 self.filename, utils.exnToString(e))
             return []
         try:
+            urls = sets.Set()
             for line in fd:
                 line = line.strip()
                 (url, nick) = line.split()
-                if p(url, nick):
+                if url not in urls and p(url, nick):
+                    urls.add(url)
                     L.append(url)
             L.reverse()
             return L