import hashlib

import pymongo
from scrapy import signals
from scrapy.exceptions import IgnoreRequest


class ProjectnameDownloaderMiddleware(object):
    # Not all methods need to be defined. If a method is not defined,
    # Scrapy acts as if the downloader middleware does not modify the
    # passed objects.

    def __init__(self):
        # Open a MongoDB connection and select the database/collection
        # that records the URLs already seen.
        self.client = pymongo.MongoClient()
        self.db = self.client['jinrong']
        self.table = self.db['jinrog_table']
    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create the middleware instance.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s
    def process_request(self, request, spider):
        # Called for each request that goes through the downloader
        # middleware. Deduplicate by the SHA-1 hash of the request URL.
        sha1 = hashlib.sha1(request.url.encode('utf-8'))
        sha1_url = sha1.hexdigest()
        query = {'sha1_url': sha1_url}
        if self.table.count_documents(query) == 0:
            # First time this URL is seen: record it and return None so
            # the request continues through the download chain.
            self.table.insert_one({'url': request.url, 'sha1_url': sha1_url})
            return None
        # Duplicate URL: process_request must return None, a Response, a
        # Request, or raise IgnoreRequest; returning request.url (a plain
        # string) is not a valid return value, so drop the request instead.
        raise IgnoreRequest('duplicate URL: %s' % request.url)

    def spider_opened(self, spider):
        # Target of the signal connected in from_crawler.
        spider.logger.info('Spider opened: %s' % spider.name)