# iwla/plugins/pre_analysis/robots.py
# -*- coding: utf-8 -*-
#
# Copyright Grégory Soutadé 2015
# This file is part of iwla
# iwla is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# iwla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with iwla. If not, see <http://www.gnu.org/licenses/>.
#
import re
import logging
import inspect

from iwla import IWLA
from iplugin import IPlugin

import awstats_data
"""
Pre analysis hook
Filter robots
Plugin requirements :
None
Conf values needed :
None
Output files :
None
Statistics creation :
None
Statistics update :
visits :
remote_addr =>
robot
keep_requests
Statistics deletion :
None
"""

class IWLAPreAnalysisRobots(IPlugin):
    def __init__(self, iwla):
        super(IWLAPreAnalysisRobots, self).__init__(iwla)
        self.API_VERSION = 1

    def load(self):
        self.awstats_robots = list(map(lambda x: re.compile(('.*%s.*') % (x), re.IGNORECASE), awstats_data.robots))
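        # awstats_data.robots is a list of plain substrings (e.g. 'googlebot');
        # each one is wrapped into a '.*<name>.*' case-insensitive regex so a
        # simple match() covers the whole user-agent string.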
        self.robot_re = re.compile(r'.*bot.*', re.IGNORECASE)
        self.crawl_re = re.compile(r'.*crawl.*', re.IGNORECASE)
        self.compatible_re = re.compile(r'.*\(.*compatible; (.*); \+.*\)*')
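        # E.g. 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'
        # leaves 'Googlebot/2.1' in group 1 (an illustrative user agent, not a
        # string taken from these logs)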
        self.logger = logging.getLogger(self.__class__.__name__)
2014-11-24 17:13:59 +01:00
return True

    def _setRobot(self, k, super_hit):
        # Inspect the caller's frame so the debug log shows which rule fired
        callerframerecord = inspect.stack()[1]
        frame = callerframerecord[0]
        info = inspect.getframeinfo(frame)
        self.logger.debug('%s is a robot (caller %s:%d)' % (k, info.function, info.lineno))
        super_hit['robot'] = 1
        super_hit['keep_requests'] = False

        # Extract the robot name from a 'compatible; Name; +URL' user agent
        robot_name = self.compatible_re.match(super_hit['requests'][0]['http_user_agent'])
        if robot_name:
            super_hit['robot_name'] = robot_name[1]

    # Basic rules to detect robots
    def hook(self):
        hits = self.iwla.getCurrentVisits()

        for (k, super_hit) in hits.items():
            if super_hit['robot']:
                self.logger.debug('%s is a robot' % (k))
                continue

            if super_hit.get('feed_parser', False):
                self.logger.debug('%s is a feed parser' % (k))
                continue

            isRobot = False
            referers = 0

            first_page = super_hit['requests'][0]

            # A user agent containing 'bot' or 'crawl' is a robot
            if self.robot_re.match(first_page['http_user_agent']) or\
               self.crawl_re.match(first_page['http_user_agent']):
                self.logger.debug(first_page['http_user_agent'])
                self._setRobot(k, super_hit)
                continue

            # Check against the AWStats robot signatures
            for r in self.awstats_robots:
                if r.match(first_page['http_user_agent']):
                    isRobot = True
                    break

            if isRobot:
                self.logger.debug(first_page['http_user_agent'])
                self._setRobot(k, super_hit)
                continue

# 1) no pages view --> robot
#            if not super_hit['viewed_pages'][0]:
#                super_hit['robot'] = 1
#                continue

            # 2) Less than 1 hit per page (pages fetched without their
            # embedded resources)
            if super_hit['viewed_pages'][0] and (super_hit['viewed_hits'][0] < super_hit['viewed_pages'][0]):
                self._setRobot(k, super_hit)
                continue

            # 3) no pages and no hits --> robot
            if not super_hit['viewed_hits'][0] and not super_hit['viewed_pages'][0]:
                self._setRobot(k, super_hit)
                continue

            not_found_pages = 0
            not_modified_pages = 0

            for hit in super_hit['requests']:
                # 5) /robots.txt read (flag now, mark via the isRobot check below)
                if hit['extract_request']['http_uri'].endswith('/robots.txt'):
                    isRobot = True
                    break

                if int(hit['status']) in (404, 403):
                    not_found_pages += 1
                elif int(hit['status']) in (304,):
                    not_modified_pages += 1

                # 6) Any referer for hits (count non-page hits carrying one)
                if not hit['is_page'] and hit['http_referer']:
                    referers += 1

            if isRobot:
                self._setRobot(k, super_hit)
                continue

            # 7) more than 10 404/403 or 304 pages
            if not_found_pages > 10 or not_modified_pages > 10:
                self._setRobot(k, super_hit)
                continue

            # Hits but no page views and no referer --> robot
            if not super_hit['viewed_pages'][0] and \
               (super_hit['viewed_hits'][0] and not referers):
                self._setRobot(k, super_hit)
                continue
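
# A minimal sketch of enabling this plugin, assuming iwla's conf.py declares
# pre-analysis hooks by module name (check default_conf.py for the exact
# option name in your version):
#
#   pre_analysis_hooks = ['page_to_hit', 'robots']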