iwla/plugins/pre_analysis/robots.py

import re
from iwla import IWLA
from iplugin import IPlugin
import awstats_data
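
# Pre-analysis plugin that flags visits as robots: first by matching the
# User-Agent against the awstats robots database, then by behavioral
# heuristics (no hit fetched, /robots.txt requested, referer-less hits).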
class IWLAPreAnalysisRobots(IPlugin):
    def __init__(self, iwla):
        super(IWLAPreAnalysisRobots, self).__init__(iwla)
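        # Version of the plugin API this plugin was written against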
        self.API_VERSION = 1

    def load(self):
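        # Compile one case-insensitive regexp per awstats robot pattern.
        # A list (rather than a map() iterator) is needed because the
        # patterns are re-scanned for every visit in hook(); under
        # Python 3 a map object would be exhausted after the first visit.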
        self.awstats_robots = [re.compile('.*%s.*' % x, re.IGNORECASE)
                               for x in awstats_data.robots]
        return True

    # Basic rules to detect robots
    def hook(self):
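        # hits maps each visitor key to its visit summary (a 'super hit')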
        hits = self.iwla.getCurrentVisists()
        for (k, super_hit) in hits.items():
            if super_hit['robot']: continue

            isRobot = False
            referers = 0

            first_page = super_hit['requests'][0]
            if not self.iwla.isValidForCurrentAnalysis(first_page): continue
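            # Match the User-Agent of the visit's first request against
            # the compiled awstats robot signatures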
            for r in self.awstats_robots:
                if r.match(first_page['http_user_agent']):
                    isRobot = True
                    break
            if isRobot:
                super_hit['robot'] = 1
                continue
            # 1) no page viewed --> robot (rule currently disabled)
            # if not super_hit['viewed_pages']:
            #     super_hit['robot'] = 1
            #     continue
            # 2) pages viewed but no hit --> robot
            if not super_hit['viewed_hits']:
                super_hit['robot'] = 1
                continue
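            # Scan every request of the visit for behavioral clues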
            for hit in super_hit['requests']:
                # 3) /robots.txt read
                if hit['extract_request']['http_uri'] == '/robots.txt':
                    isRobot = True
                    break

                # 4) count hits that carry a referer
                if not hit['is_page'] and hit['http_referer']:
                    referers += 1
            if isRobot:
                super_hit['robot'] = 1
                continue
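            # 5) hit-only visit: no page viewed and no referer on any hit --> robot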
            if not super_hit['viewed_pages'] and \
               (super_hit['viewed_hits'] and not referers):
                super_hit['robot'] = 1
                continue