# -*- coding: utf-8 -*-
#
# Copyright Grégory Soutadé 2015

# This file is part of iwla

# iwla is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# iwla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with iwla. If not, see <http://www.gnu.org/licenses/>.
#

import re

from iwla import IWLA
from iplugin import IPlugin

"""
Post analysis hook

Find feeds parsers (first hit in feeds conf value and no viewed pages if
it's a robot).
If there is only one hit per day to a feed, merge feeds parsers with the
same user agent as it must be the same person with a different IP address.

Plugin requirements :
    None

Conf values needed :
    feeds
    merge_one_hit_only_feeds_parsers*

Output files :
    None

Statistics creation :
    remote_addr =>
        feed_parser

Statistics update :
    None

Statistics deletion :
    None
"""

class IWLAPostAnalysisFeeds(IPlugin):
    """Post-analysis plugin that tags each visit with a feed-parser status.

    The status is stored in hit['feed_parser'] and takes one of the four
    class constants below.
    """

    # Possible values of hit['feed_parser']
    NOT_A_FEED_PARSER = 0     # regular visitor
    FEED_PARSER = 1           # requested a configured feed URI (or feed-like UA)
    MERGED_FEED_PARSER = 2    # one-hit parser kept as representative of its UA
    BAD_FEED_PARSER = 3       # resolved DNS name matches a bad-feed pattern

    def __init__(self, iwla):
        super(IWLAPostAnalysisFeeds, self).__init__(iwla)
        self.API_VERSION = 1
        # 'feeds' conf value is mandatory for this plugin to load
        self.conf_requires = ['feeds']

    def load(self):
        """Compile the feed/bad-feed/user-agent regexps from configuration.

        Returns:
            bool: False when the mandatory 'feeds' conf value is missing,
            True otherwise.
        """
        feeds = self.iwla.getConfValue('feeds', None)
        # Optional behaviour switch, enabled by default
        self.merge_one_hit_only_feeds_parsers = self.iwla.getConfValue(
            'merge_one_hit_only_feeds_parsers', True)

        if feeds is None:
            return False

        # Substring match: each configured feed path is wrapped in '.*...*'
        self.feeds_re = []
        for f in feeds:
            self.feeds_re.append(re.compile(r'.*%s.*' % (f)))

        # DNS names containing 'crawl' are considered crawlers, not parsers
        self.bad_feeds_re = []
        self.bad_feeds_re.append(re.compile(r'.*crawl.*'))

        # Feed-reader hints looked up in the (lowercased) user agent
        self.user_agents_re = []
        self.user_agents_re.append(re.compile(r'.*rss.*'))
        self.user_agents_re.append(re.compile(r'.*atom.*'))
        self.user_agents_re.append(re.compile(r'.*feed.*'))

        return True

    def mergeOneHitOnlyFeedsParsers(self, isFeedParser, one_hit_only, hit):
        """Collapse one-hit feed parsers sharing the same user agent.

        A parser with exactly one (viewed + not viewed) hit is assumed to be
        the same client behind a changing IP address: the first visit seen
        for a given user agent is kept (MERGED_FEED_PARSER), subsequent ones
        are demoted to NOT_A_FEED_PARSER. The final status is written back
        into hit['feed_parser'].

        Args:
            isFeedParser: current status value for this hit.
            one_hit_only: dict mapping lowercased user agent -> kept hit.
            hit: the visit dictionary to update.
        """
        if isFeedParser and (hit['viewed_hits'][0] + hit['not_viewed_hits'][0]) == 1:
            user_agent = hit['requests'][0]['http_user_agent'].lower()
            if one_hit_only.get(user_agent, None) is None:
                # First one-hit parser with this user agent: keep it, merged
                isFeedParser = self.MERGED_FEED_PARSER
                one_hit_only[user_agent] = (hit)
            else:
                # Duplicate of an already-kept parser: drop it
                isFeedParser = self.NOT_A_FEED_PARSER
        hit['feed_parser'] = isFeedParser

    def hook(self):
        """Analyse current visits and set hit['feed_parser'] on each one."""
        hits = self.iwla.getCurrentVisits()
        one_hit_only = {}
        for hit in hits.values():
            isFeedParser = hit.get('feed_parser', None)

            if isFeedParser == self.FEED_PARSER and\
               self.merge_one_hit_only_feeds_parsers:
                self.mergeOneHitOnlyFeedsParsers(isFeedParser, one_hit_only, hit)

            if isFeedParser:
                # Already analysed in a previous run: only re-check the DNS
                # name (once) in case it was resolved since then.
                if hit['feed_parser'] == self.BAD_FEED_PARSER:
                    continue
                if not hit.get('feed_name_analysed', False) and\
                   hit.get('dns_name_replaced', False):
                    hit['feed_name_analysed'] = True
                    addr = hit.get('remote_addr', None)
                    for r in self.bad_feeds_re:
                        # Guard against a missing remote_addr (None would
                        # make re.match() raise TypeError)
                        if r.match(addr or ''):
                            hit['feed_parser'] = self.BAD_FEED_PARSER
                            # BUG FIX: was 'return', which aborted the whole
                            # hook on the first bad parser found, leaving all
                            # remaining visits unprocessed. Stop scanning the
                            # patterns for THIS hit only.
                            break
                # BUG FIX: was 'return' — move on to the next visit instead
                # of ending the analysis.
                continue

            isFeedParser = self.NOT_A_FEED_PARSER
            uri = hit['requests'][0]['extract_request']['extract_uri'].lower()
            for regexp in self.feeds_re:
                if regexp.match(uri):
                    isFeedParser = self.FEED_PARSER
                    # Robot that views pages -> bot
                    if hit['robot']:
                        if hit['not_viewed_pages'][0]:
                            isFeedParser = self.NOT_A_FEED_PARSER
                            break

            if isFeedParser == self.NOT_A_FEED_PARSER:
                # No feed URI matched: fall back to feed-reader hints in the
                # user agent string
                user_agent = hit['requests'][0]['http_user_agent'].lower()
                for regexp in self.user_agents_re:
                    if regexp.match(user_agent):
                        isFeedParser = self.FEED_PARSER
                        break

            if self.merge_one_hit_only_feeds_parsers:
                # Writes the final status into hit['feed_parser'] itself
                self.mergeOneHitOnlyFeedsParsers(isFeedParser, one_hit_only, hit)
            else:
                hit['feed_parser'] = isFeedParser