# -*- coding: utf-8 -*-
#
# Copyright Grégory Soutadé 2015
#
# This file is part of iwla
#
# iwla is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# iwla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with iwla. If not, see <http://www.gnu.org/licenses/>.
#

import re
import logging
import inspect

from iwla import IWLA
from iplugin import IPlugin

import awstats_data

"""
|
|
|
|
Pre analysis hook
|
|
|
|
|
|
|
|
Filter robots
|
|
|
|
|
|
|
|
Plugin requirements :
|
|
|
|
None
|
|
|
|
|
|
|
|
Conf values needed :
|
2022-06-23 21:16:30 +02:00
|
|
|
None
|
2014-12-19 11:34:25 +01:00
|
|
|
|
|
|
|
Output files :
|
|
|
|
None
|
|
|
|
|
|
|
|
Statistics creation :
|
|
|
|
None
|
|
|
|
|
|
|
|
Statistics update :
|
|
|
|
visits :
|
|
|
|
remote_addr =>
|
|
|
|
robot
|
2022-06-23 21:16:30 +02:00
|
|
|
keep_requests
|
2014-12-19 11:34:25 +01:00
|
|
|
|
|
|
|
Statistics deletion :
|
|
|
|
None
|
|
|
|
"""
|
2014-12-09 16:54:02 +01:00
|
|
|
|
2014-11-24 17:13:59 +01:00
|
|
|
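
# Illustrative sketch, not part of the original plugin: a minimal visit
# entry shaped like the ones hook() consumes below. The key names are
# taken from the code in this file; the sample values are assumptions.
_EXAMPLE_VISIT = {
    'robot': 0,                   # set to 1 by _setRobot()
    'keep_requests': True,        # set to False for robots
    'feed_parser': False,
    'viewed_pages': [1],          # index 0 holds the count for this run
    'viewed_hits': [1],
    'requests': [{
        'http_user_agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
        'http_referer': '',
        'status': '200',
        'is_page': True,
        'server_name': 'example.com',
        'extract_request': {'http_uri': '/index.html'},
    }],
}
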
class IWLAPreAnalysisRobots(IPlugin):
    def __init__(self, iwla):
        super(IWLAPreAnalysisRobots, self).__init__(iwla)
        self.API_VERSION = 1

    def load(self):
        self.awstats_robots = [re.compile('.*%s.*' % robot, re.IGNORECASE) for robot in awstats_data.robots]
        self.robot_re = re.compile(r'.*bot.*', re.IGNORECASE)
        self.crawl_re = re.compile(r'.*crawl.*', re.IGNORECASE)
        self.compatible_re = re.compile(r'.*\(.*compatible; (.*); \+.*\)*')
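        # The pattern above matches 'compatible' user agents such as
        # 'Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)'
        # and captures the robot name ('bingbot/2.0') in group 1.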
        self.logger = logging.getLogger(self.__class__.__name__)

        return True

    def _setRobot(self, k, super_hit):
        # Inspect the call stack so the log line names the rule
        # (caller function and line) that flagged this visit.
        callerframerecord = inspect.stack()[1]
        frame = callerframerecord[0]
        info = inspect.getframeinfo(frame)

        self.logger.debug('%s is a robot (caller %s:%d)' % (k, info.function, info.lineno))
        super_hit['robot'] = 1
        super_hit['keep_requests'] = False
        # Record the robot name if a request carries a 'compatible' user agent.
        for hit in super_hit['requests']:
            robot_name = self.compatible_re.match(hit['http_user_agent'])
            if robot_name:
                super_hit['robot_name'] = robot_name[1]
                break

    # Basic rules to detect robots
    def hook(self):
        hits = self.iwla.getCurrentVisits()
        for (k, super_hit) in hits.items():
            # Skip visits already flagged as robots or feed parsers.
            if super_hit['robot']:
                self.logger.debug('%s is a robot' % (k))
                continue

            if super_hit.get('feed_parser', False):
                self.logger.debug('%s is a feed parser' % (k))
                continue

            isRobot = False
            referers = 0

            first_page = super_hit['requests'][0]

            # Check the first request's user agent against the generic
            # robot/crawler patterns.
            if self.robot_re.match(first_page['http_user_agent']) or\
               self.crawl_re.match(first_page['http_user_agent']) or\
               self.compatible_re.match(first_page['http_user_agent']):
                self.logger.debug(first_page['http_user_agent'])
                self._setRobot(k, super_hit)
                continue

            # Check against the AWStats robot database.
            for r in self.awstats_robots:
                if r.match(first_page['http_user_agent']):
                    isRobot = True
                    break

            if isRobot:
                self.logger.debug(first_page['http_user_agent'])
                self._setRobot(k, super_hit)
                continue

            # 1) No pages viewed --> robot
            # if not super_hit['viewed_pages'][0]:
            #     super_hit['robot'] = 1
            #     continue

            # 2) Fewer hits than pages viewed (less than one hit per page)
            if super_hit['viewed_pages'][0] and (super_hit['viewed_hits'][0] < super_hit['viewed_pages'][0]):
                self._setRobot(k, super_hit)
                continue

            # 3) No pages and no hits --> robot
            if not super_hit['viewed_hits'][0] and not super_hit['viewed_pages'][0]:
                self._setRobot(k, super_hit)
                continue

            not_found_pages = 0
            not_modified_pages = 0
            for hit in super_hit['requests']:
                # 4) /robots.txt read
                if hit['extract_request']['http_uri'].endswith('/robots.txt'):
                    # Flag now; the check after this loop calls _setRobot()
                    isRobot = True
                    break

                # Count error and not-modified responses.
                if int(hit['status']) in (404, 403):
                    not_found_pages += 1
                elif int(hit['status']) in (304,):
                    not_modified_pages += 1

                # 5) Count referers seen on non-page hits
                if not hit['is_page'] and hit['http_referer']:
                    referers += 1

            if isRobot:
                self._setRobot(k, super_hit)
                continue

            # 6) More than 10 pages in 404/403 or 304
            if not_found_pages > 10 or not_modified_pages > 10:
                self._setRobot(k, super_hit)
                continue

            # 7) Hits but no pages viewed and no referers --> robot
            if not super_hit['viewed_pages'][0] and \
               (super_hit['viewed_hits'][0] and not referers):
                self._setRobot(k, super_hit)
                continue

            # 8) Special case: 1 page and 1 hit, but not from the same source
            if (super_hit['viewed_pages'][0] == 1 and super_hit['viewed_hits'][0] == 1 and len(super_hit['requests']) == 2) and\
               (super_hit['requests'][0]['server_name'] != super_hit['requests'][1]['server_name']):
                self._setRobot(k, super_hit)
                continue
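
# Minimal usage sketch (hypothetical, kept as a comment so the module is
# unchanged at import time): the iwla core instantiates the plugin, calls
# load() once, then hook() on each analysis pass. 'FakeIWLA' is a made-up
# stand-in exposing only getCurrentVisits(), the single method hook() uses.
#
#     class FakeIWLA:
#         def getCurrentVisits(self):
#             return {'203.0.113.1': _EXAMPLE_VISIT}
#
#     plugin = IWLAPreAnalysisRobots(FakeIWLA())
#     plugin.load()
#     plugin.hook()    # flags the visit: its user agent matches robot_re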