iwla/plugins/pre_analysis/H001_robot.py

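# Pre-analysis plugin H001: flag visits as robots by matching user agents
# against the AWStats robot list and applying simple behavioral heuristics.
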
import re
from iwla import IWLA
from awstats_robots_data import awstats_robots

PLUGIN_CLASS = 'HTTP'
API_VERSION = 1

def get_plugins_infos():
    infos = {'class' : PLUGIN_CLASS,
             'min_version' : API_VERSION,
             'max_version' : -1}
    return infos

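# Compile the AWStats user-agent patterns once at load time, so hook()
# only runs pre-built, case-insensitive regexes against each visit.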
def load():
    global awstats_robots
    print '==> Generating robot dictionary'
    awstats_robots = map(lambda x: re.compile(x, re.IGNORECASE), awstats_robots)
    return True

# Basic rules to detect robots
def hook(iwla):
    hits = iwla.getCurrentVisists()

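    # Each entry of hits describes one visit: its requests list, the
    # viewed_pages/viewed_hits counters and the robot flag set here.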
    for k in hits.keys():
        super_hit = hits[k]
        if super_hit['robot']: continue
        isRobot = False
        referers = 0

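        # If the visit is contained in a single day, match the first
        # request's user agent against the compiled AWStats patterns.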
        first_page = super_hit['requests'][0]
        if first_page['time_decoded'].tm_mday == super_hit['last_access'].tm_mday:
            for r in awstats_robots:
                if r.match(first_page['http_user_agent']):
                    super_hit['robot'] = 1
                    break

        # 1) no page viewed --> robot
        if not super_hit['viewed_pages']:
            super_hit['robot'] = 1
            continue

        # 2) pages but no hits --> robot
        if not super_hit['viewed_hits']:
            super_hit['robot'] = 1
            continue

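        # Scan individual requests for robots.txt reads and for referers
        # carried by non-page hits.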
        for hit in super_hit['requests']:
            # 3) /robots.txt read --> robot
            if hit['extract_request']['http_uri'] == '/robots.txt':
                isRobot = True
                break

            # 4) count referers on hits (checked after the loop)
            if not hit['is_page'] and hit['http_referer']:
                referers += 1

        if isRobot:
            super_hit['robot'] = 1
            continue

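        # 4) no referer on any hit --> robot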
        if super_hit['viewed_hits'] and not referers:
            super_hit['robot'] = 1
            continue