# -*- coding: utf-8 -*-
#
# Copyright Grégory Soutadé 2015
# This file is part of iwla
# iwla is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# iwla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with iwla. If not, see <http://www.gnu.org/licenses/>.
#
import re
from iwla import IWLA
from iplugin import IPlugin
import awstats_data
"""
Pre analysis hook
Filter robots
Plugin requirements :
None
Conf values needed :
page_to_hit_conf*
hit_to_page_conf*
Output files :
None
Statistics creation :
None
Statistics update :
visits :
remote_addr =>
robot
Statistics deletion :
None
"""
class IWLAPreAnalysisRobots(IPlugin):
    def __init__(self, iwla):
        super(IWLAPreAnalysisRobots, self).__init__(iwla)
        self.API_VERSION = 1

    def load(self):
        # Build one case-insensitive regexp per AWStats robot signature.
        # A list comprehension replaces the original Python 2 only
        # "map(lambda (x): ...)" form, which is a syntax error in Python 3
        # and would return a one-shot iterator there.
        self.awstats_robots = [re.compile('.*%s.*' % x, re.IGNORECASE)
                               for x in awstats_data.robots]
        return True
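
    # For example, assuming 'googlebot' is one of the signatures shipped in
    # awstats_data.robots, load() produces a pattern equivalent to
    # re.compile('.*googlebot.*', re.IGNORECASE), which matches a user agent
    # such as 'Mozilla/5.0 (compatible; Googlebot/2.1;
    # +http://www.google.com/bot.html)'.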
    # Basic rules to detect robots
    def hook(self):
        hits = self.iwla.getCurrentVisists()

        for (k, super_hit) in hits.items():
            if super_hit['robot']: continue

            isRobot = False
            referers = 0

            first_page = super_hit['requests'][0]
            if not self.iwla.isValidForCurrentAnalysis(first_page): continue

            # User agent matches a known AWStats robot signature
            for r in self.awstats_robots:
                if r.match(first_page['http_user_agent']):
                    isRobot = True
                    break
            if isRobot:
                super_hit['robot'] = 1
                continue
            # 1) no pages viewed --> robot
            # if not super_hit['viewed_pages']:
            #     super_hit['robot'] = 1
            #     continue

            # 2) pages but no hits (no embedded resources fetched) --> robot
            if not super_hit['viewed_hits']:
                super_hit['robot'] = 1
                continue
            for hit in super_hit['requests']:
                # 3) /robots.txt read
                if hit['extract_request']['http_uri'] == '/robots.txt':
                    isRobot = True
                    break

                # 4) count hits (non-page requests) that carry a referer
                if not hit['is_page'] and hit['http_referer']:
                    referers += 1

            if isRobot:
                super_hit['robot'] = 1
                continue

            # 5) no pages viewed, only hits, and none carried a referer --> robot
            if not super_hit['viewed_pages'] and \
               (super_hit['viewed_hits'] and not referers):
                super_hit['robot'] = 1
                continue
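
# To enable this plugin, add it to iwla's pre-analysis hook list (a sketch,
# assuming the hook list name used by iwla's default configuration):
#
#   pre_analysis_hooks = ['page_to_hit', 'robots']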