iwla/iwla.py
2014-11-19 08:01:12 +01:00

214 lines
5.6 KiB
Python
Executable File

#!/usr/bin/env python
import os
import re
import time
import glob
import imp
from robots import awstats_robots

print('==> Start')

# Global state shared by the analysis functions below.
meta_visit = {}     # run-level metadata (e.g. timestamp of the last parsed line)
current_visit = {}  # per-remote-address accumulated statistics for the current day

# nginx/apache-style log format; every $name placeholder becomes a named regex group.
log_format = '$server_name:$server_port $remote_addr - $remote_user [$time_local] ' +\
    '"$request" $status $body_bytes_sent ' +\
    '"$http_referer" "$http_user_agent"'

# Escape each non-placeholder character, then turn $name into (?P<name>...).
log_format_extracted = re.sub(r'([^\$\w])', r'\\\g<1>', log_format)
log_format_extracted = re.sub(r'\$(\w+)', '(?P<\g<1>>.+)', log_format_extracted)
http_request_extracted = re.compile(r'(?P<http_method>\S+) (?P<http_uri>\S+) (?P<http_version>\S+)')

#09/Nov/2014:06:35:16 +0100
time_format = '%d/%b/%Y:%H:%M:%S +0100'

log_re = re.compile(log_format_extracted)

# Split a URI into its path and optional query string.
# BUGFIX: the original pattern wrapped the query part in [...] — a character
# class — so the 'extract_parameters' group was never actually created.
# An optional non-capturing group is what was intended.
uri_re = re.compile(r'(?P<extract_uri>[^\?]*)(?:\?(?P<extract_parameters>.*))?')

pages_extensions = ['/', 'html', 'xhtml', 'py', 'pl', 'rb', 'php']
viewed_http_codes = [200]

# struct_time of the first hit of the day; used to detect day boundaries.
cur_time = None

PRE_HOOK_DIRECTORY = './hooks_pre/*.py'
POST_HOOK_DIRECTORY = './hooks_post/*.py'

print('==> Generating robot dictionary')

# Pre-compile every robot user-agent pattern (case-insensitive).
awstats_robots = [re.compile(pattern, re.IGNORECASE) for pattern in awstats_robots]
def generate_day_stats():
    """Aggregate the per-visitor counters in current_visit into one summary dict."""
    stats = {
        'viewed_bandwidth': 0,
        'not_viewed_bandwidth': 0,
        'viewed_pages': 0,
        'viewed_hits': 0,
        'pages': set(),
    }
    for visitor in current_visit.values():
        # Robot traffic only contributes to the not-viewed bandwidth counter.
        if visitor['robot']:
            stats['not_viewed_bandwidth'] += visitor['bandwith']
            continue
        stats['viewed_bandwidth'] += visitor['bandwith']
        stats['viewed_pages'] += visitor['viewed_pages']
        stats['viewed_hits'] += visitor['viewed_hits']
        # Collect the distinct URIs of every real page this visitor requested.
        for hit in visitor['pages']:
            if not hit['is_page']:
                continue
            stats['pages'].add(hit['extract_request']['extract_uri'])
    return stats
def call_plugins(path, *plugin_args):
    """Load every plugin matching the glob `path` (in sorted order) and call its hook().

    `plugin_args` are forwarded positionally to each plugin's hook() function.
    """
    print('==> Call plugins (%s)' % path)
    for plugin_path in sorted(glob.glob(path)):
        print('\t%s' % (plugin_path))
        module = imp.load_source('hook', plugin_path)
        module.hook(*plugin_args)
def isPage(request):
    """Return True when the request URI ends like a page (vs. an image/asset hit)."""
    return any(request.endswith(extension) for extension in pages_extensions)
def appendHit(hit):
    """Attach `hit` to its visitor in current_visit and update the visitor's counters."""
    super_hit = current_visit[hit['remote_addr']]
    super_hit['pages'].append(hit)
    super_hit['bandwith'] += int(hit['body_bytes_sent'])

    request = hit['extract_request']
    # Prefer the query-stripped URI when extraction succeeded.
    uri = request['extract_uri'] if 'extract_uri' in request else request['http_uri']
    hit['is_page'] = isPage(uri)

    status = int(hit['status'])
    # Redirects are kept on the visit but never counted as viewed/not-viewed.
    if status == 302:
        return

    if super_hit['robot'] or status not in viewed_http_codes:
        counter = 'not_viewed_pages' if hit['is_page'] else 'not_viewed_hits'
    else:
        counter = 'viewed_pages' if hit['is_page'] else 'viewed_hits'
    super_hit[counter] += 1
def createUser(hit):
    """Create a fresh per-visitor record for this remote address, then record the hit."""
    visitor = {
        'viewed_pages': 0,
        'viewed_hits': 0,
        'not_viewed_pages': 0,
        'not_viewed_hits': 0,
        'bandwith': 0,        # NOTE: key spelling matches the rest of the file
        'pages': [],
        'robot': isRobot(hit),  # classified once, on first sight of this address
    }
    current_visit[hit['remote_addr']] = visitor
    appendHit(hit)
def isRobot(hit):
    """Return True when the hit's user agent matches any known robot pattern."""
    user_agent = hit['http_user_agent']
    return any(robot_re.match(user_agent) for robot_re in awstats_robots)
def decode_http_request(hit):
    """Split hit['request'] ("METHOD URI VERSION") and hit['http_referer'] into parts.

    On success fills hit['extract_request'] (with 'extract_uri' and, when a
    query string is present, 'extract_parameters') and hit['extract_referer'].
    Returns False when the request line is missing or malformed, True otherwise.
    """
    if not 'request' in hit.keys(): return False
    groups = http_request_extracted.match(hit['request'])
    if groups:
        hit['extract_request'] = groups.groupdict()
        uri_groups = uri_re.match(hit['extract_request']['http_uri'])
        if uri_groups:
            d = uri_groups.groupdict()
            hit['extract_request']['extract_uri'] = d['extract_uri']
            # Guard on the group's VALUE, not just its presence: an optional
            # query-string group can exist in the pattern yet match nothing,
            # in which case groupdict() maps it to None.
            if d.get('extract_parameters') is not None:
                hit['extract_request']['extract_parameters'] = d['extract_parameters']
    else:
        print('Bad request extraction ' + hit['request'])
        return False
    referer_groups = uri_re.match(hit['http_referer'])
    if referer_groups:
        hit['extract_referer'] = referer_groups.groupdict()
    return True
def decode_time(hit):
    """Parse hit['time_local'] with the module-level time_format into a struct_time."""
    hit['time_decoded'] = time.strptime(hit['time_local'], time_format)
def newHit(hit):
    """Record one parsed log line; return a falsy value when processing must stop.

    Returns None when the request line cannot be decoded, False when the hit
    belongs to a new day (day boundary reached), True otherwise.
    """
    global cur_time

    if not decode_http_request(hit): return

    # Normalise the "-" placeholders the log uses for absent fields.
    for field in hit.keys():
        if hit[field] == '-': hit[field] = ''

    decode_time(hit)
    hit_time = hit['time_decoded']
    meta_visit['last_time'] = hit_time

    if cur_time is None:
        cur_time = hit_time
    elif cur_time.tm_mday != hit_time.tm_mday:
        # The day changed: signal the caller to stop consuming this log.
        return False

    if hit['remote_addr'] in current_visit:
        appendHit(hit)
    else:
        createUser(hit)
    return True
print('==> Analysing log')

# Feed every well-formed line to newHit(); stop early when newHit() returns a
# falsy value (request extraction failed, or a new day started).
# `with` guarantees the handle is closed even if a line raises mid-loop.
with open('access.log') as log_file:
    for line in log_file:
        groups = log_re.match(line)
        if groups:
            if not newHit(groups.groupdict()):
                break
        else:
            print('No match ' + line)

# Pre-processing hooks see every visitor, robots included.
call_plugins(PRE_HOOK_DIRECTORY, current_visit)

stats = generate_day_stats()
print(stats)

# Post-processing hooks only see human visitors.
valid_visitors = {k: v for (k, v) in current_visit.items() if not current_visit[k]['robot']}
call_plugins(POST_HOOK_DIRECTORY, valid_visitors)