import re

from iwla import IWLA

from awstats_robots_data import awstats_robots

PLUGIN_CLASS = 'HTTP'
API_VERSION = 1

def get_plugins_infos():
    infos = {
        'class' : PLUGIN_CLASS,
        'min_version' : API_VERSION,
        'max_version' : -1
    }
    return infos

def load():
    global awstats_robots

    print('==> Generating robot dictionary')

    # Compile the AWStats user-agent patterns once, into a list so they
    # can be iterated again for every visit (a bare map() iterator would
    # be exhausted after the first visitor under Python 3).
    awstats_robots = [re.compile(pattern, re.IGNORECASE)
                      for pattern in awstats_robots]

    return True

# Basic rules to detect robots
def hook(iwla):
    hits = iwla.getCurrentVisists()
    for super_hit in hits.values():
        # Already flagged on a previous pass: nothing to do.
        if super_hit['robot']:
            continue

        isRobot = False
        referers = 0

        first_page = super_hit['requests'][0]

        # 0) User agent matches a known robot (only checked when the
        #    first and last accesses fall on the same day).
        if first_page['time_decoded'].tm_mday == super_hit['last_access'].tm_mday:
            for r in awstats_robots:
                if r.match(first_page['http_user_agent']):
                    super_hit['robot'] = 1
                    break
            if super_hit['robot']:
                continue

        # 1) No page viewed --> robot
        if not super_hit['viewed_pages']:
            super_hit['robot'] = 1
            continue

        # 2) Pages viewed but no hits (no images, CSS, ...) --> robot
        if not super_hit['viewed_hits']:
            super_hit['robot'] = 1
            continue

        for hit in super_hit['requests']:
            # 3) /robots.txt was requested --> robot
            if hit['extract_request']['http_uri'] == '/robots.txt':
                isRobot = True
                break

            # 4) Count hits (non-page requests) that carry a referer.
            if not hit['is_page'] and hit['http_referer']:
                referers += 1

        if isRobot:
            super_hit['robot'] = 1
            continue

        # 5) Hits present but none of them carried a referer --> robot
        if super_hit['viewed_hits'] and not referers:
            super_hit['robot'] = 1
            continue
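
# The sketch below is a minimal way to exercise hook() outside a full
# IWLA run. It is illustrative only: FakeIWLA is a hypothetical stub
# (not part of the IWLA API), and the visit dict holds made-up values
# that merely mirror the fields read above. It assumes the module
# awstats_robots_data is importable so load() can compile its patterns,
# and that the AWStats list contains a pattern matching this user agent.
if __name__ == '__main__':
    import time

    class FakeIWLA(object):
        """Hypothetical stand-in exposing only the method hook() calls."""
        def __init__(self, hits):
            self._hits = hits

        def getCurrentVisists(self):
            return self._hits

    now = time.localtime()
    visit = {
        'robot': False,
        'viewed_pages': 1,
        'viewed_hits': 0,
        'last_access': now,
        'requests': [{
            'time_decoded': now,
            'http_user_agent': 'Googlebot/2.1 (+http://www.google.com/bot.html)',
            'http_referer': '',
            'is_page': True,
            'extract_request': {'http_uri': '/'},
        }],
    }

    load()
    hook(FakeIWLA({'1.2.3.4': visit}))
    # If the AWStats patterns match this UA, rule 0 fires and the flag is 1.
    print('robot flag:', visit['robot'])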