On r715
This commit is contained in:
parent
db965a8070
commit
888b481b1d
|
@ -1 +0,0 @@
|
||||||
../../plugins/pre_analysis/H001_robot.py
|
|
39
hooks/pre_analysis/H001_robot.py
Normal file
39
hooks/pre_analysis/H001_robot.py
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
|
||||||
|
# Heuristic robot detection, applied to every visitor that is not
# already flagged.  A visitor is marked as a robot when any rule fires:
#   1) it viewed no page at all
#   2) it viewed pages but generated no plain hits (css/img/...)
#   3) it fetched /robots.txt
#   4) none of its plain hits carried a Referer header


def hook(hits):
    """Flag robot visitors in *hits* by setting visitor['robot'] = 1.

    hits maps a visitor key to a dict with at least the keys 'robot',
    'viewed_pages', 'viewed_hits' and 'pages' (list of request dicts).
    The dict is mutated in place; nothing is returned.
    """
    for visitor in hits.values():
        # Already classified — nothing to do.
        if visitor['robot']:
            continue

        # Rules 1 and 2: no page views, or pages without any plain hit.
        if not visitor['viewed_pages'] or not visitor['viewed_hits']:
            visitor['robot'] = 1
            continue

        read_robots_txt = False
        referer_count = 0
        for request in visitor['pages']:
            # Rule 3: a /robots.txt fetch is decisive, stop scanning.
            if request['extract_request']['http_uri'] == '/robots.txt':
                read_robots_txt = True
                break
            # Rule 4 bookkeeping: count plain hits that carry a referer.
            if not request['is_page'] and request['http_referer']:
                referer_count += 1

        # Rule 3 fired, or rule 4: not a single referred hit.
        # (viewed_hits is known non-zero here — rule 2 already ran.)
        if read_robots_txt or not referer_count:
            visitor['robot'] = 1
|
|
@ -1 +0,0 @@
|
||||||
../../plugins/pre_analysis/H002_soutade.py
|
|
19
hooks/pre_analysis/H002_soutade.py
Normal file
19
hooks/pre_analysis/H002_soutade.py
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
import re

# Requests whose URI ends in ".../logo/" are the indefero project logo;
# they should count as plain hits, not as page views.
logo_re = re.compile(r'^.+/logo/$')


def hook(hits):
    """Reclassify indefero logo requests from page views to plain hits.

    For every non-robot visitor, any 'pages' entry whose extracted URI
    matches logo_re is demoted: its 'is_page' flag is cleared, the
    visitor's 'viewed_pages' counter is decremented and 'viewed_hits'
    incremented.  Mutates *hits* in place.
    """
    for visitor in hits.values():
        if visitor['robot']:
            continue
        for request in visitor['pages']:
            if not request['is_page']:
                continue
            if logo_re.match(request['extract_request']['extract_uri']):
                # Demote: one page view less, one plain hit more.
                request['is_page'] = False
                visitor['viewed_pages'] -= 1
                visitor['viewed_hits'] += 1
|
|
@ -1 +0,0 @@
|
||||||
../plugins/hooks_pre/H001_robot.py
|
|
|
@ -1 +0,0 @@
|
||||||
../plugins/hooks_pre/H002_soutade.py
|
|
149
iwla.py
149
iwla.py
|
@ -5,11 +5,14 @@ import re
|
||||||
import time
|
import time
|
||||||
import glob
|
import glob
|
||||||
import imp
|
import imp
|
||||||
|
import pickle
|
||||||
|
import gzip
|
||||||
from robots import awstats_robots;
|
from robots import awstats_robots;
|
||||||
|
|
||||||
print '==> Start'
|
print '==> Start'
|
||||||
|
|
||||||
meta_visit = {}
|
meta_visit = {'last_time':None}
|
||||||
|
analyse_started = False
|
||||||
current_visit = {}
|
current_visit = {}
|
||||||
|
|
||||||
log_format = '$server_name:$server_port $remote_addr - $remote_user [$time_local] ' +\
|
log_format = '$server_name:$server_port $remote_addr - $remote_user [$time_local] ' +\
|
||||||
|
@ -28,39 +31,38 @@ uri_re = re.compile(r'(?P<extract_uri>[^\?]*)[\?(?P<extract_parameters>.*)]?')
|
||||||
pages_extensions = ['/', 'html', 'xhtml', 'py', 'pl', 'rb', 'php']
|
pages_extensions = ['/', 'html', 'xhtml', 'py', 'pl', 'rb', 'php']
|
||||||
viewed_http_codes = [200]
|
viewed_http_codes = [200]
|
||||||
|
|
||||||
cur_time = None
|
PRE_HOOK_DIRECTORY = './hooks/pre_analysis/*.py'
|
||||||
|
POST_HOOK_DIRECTORY = './hooks/post_analysis/*.py'
|
||||||
PRE_HOOK_DIRECTORY = './hooks_pre/*.py'
|
DB_ROOT = './output/'
|
||||||
POST_HOOK_DIRECTORY = './hooks_post/*.py'
|
META_PATH = DB_ROOT + 'meta.db'
|
||||||
|
DB_FILENAME = 'iwla.db'
|
||||||
|
|
||||||
print '==> Generating robot dictionary'
|
print '==> Generating robot dictionary'
|
||||||
|
|
||||||
awstats_robots = map(lambda (x) : re.compile(x, re.IGNORECASE), awstats_robots)
|
awstats_robots = map(lambda (x) : re.compile(x, re.IGNORECASE), awstats_robots)
|
||||||
|
|
||||||
def generate_day_stats():
|
def get_db_filename(time):
|
||||||
days_stats = {}
|
return (DB_ROOT + '%d/%d_%s') % (time.tm_year, time.tm_mon, DB_FILENAME)
|
||||||
days_stats['viewed_bandwidth'] = 0
|
|
||||||
days_stats['not_viewed_bandwidth'] = 0
|
|
||||||
days_stats['viewed_pages'] = 0
|
|
||||||
days_stats['viewed_hits'] = 0
|
|
||||||
days_stats['pages'] = set()
|
|
||||||
|
|
||||||
for k in current_visit.keys():
|
def serialize(obj, filename):
|
||||||
super_hit = current_visit[k]
|
base = os.path.dirname(filename)
|
||||||
if super_hit['robot']:
|
if not os.path.exists(base):
|
||||||
days_stats['not_viewed_bandwidth'] += super_hit['bandwith']
|
os.makedirs(base)
|
||||||
continue
|
|
||||||
|
|
||||||
days_stats['viewed_bandwidth'] += super_hit['bandwith']
|
with open(filename + '.tmp', 'wb+') as f:
|
||||||
days_stats['viewed_pages'] += super_hit['viewed_pages']
|
pickle.dump(obj, f)
|
||||||
days_stats['viewed_hits'] += super_hit['viewed_hits']
|
f.seek(0)
|
||||||
|
with gzip.open(filename, 'w') as fzip:
|
||||||
|
fzip.write(f.read())
|
||||||
|
os.remove(filename + '.tmp')
|
||||||
|
|
||||||
for p in super_hit['pages']:
|
def deserialize(filename):
|
||||||
if not p['is_page']: continue
|
if not os.path.exists(filename):
|
||||||
req = p['extract_request']
|
return None
|
||||||
days_stats['pages'].add(req['extract_uri'])
|
|
||||||
|
|
||||||
return days_stats
|
with gzip.open(filename, 'r') as f:
|
||||||
|
return pickle.load(f)
|
||||||
|
return None
|
||||||
|
|
||||||
def call_plugins(path, *kwargs):
|
def call_plugins(path, *kwargs):
|
||||||
print '==> Call plugins (%s)' % path
|
print '==> Call plugins (%s)' % path
|
||||||
|
@ -153,25 +155,79 @@ def decode_time(hit):
|
||||||
hit['time_decoded'] = time.strptime(t, time_format)
|
hit['time_decoded'] = time.strptime(t, time_format)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_month_stats():
|
||||||
|
call_plugins(PRE_HOOK_DIRECTORY, current_visit)
|
||||||
|
|
||||||
|
valid_visitors = {k: v for (k,v) in current_visit.items() if not current_visit[k]['robot']}
|
||||||
|
|
||||||
|
call_plugins(POST_HOOK_DIRECTORY, valid_visitors)
|
||||||
|
|
||||||
|
stats = {}
|
||||||
|
stats['viewed_bandwidth'] = 0
|
||||||
|
stats['not_viewed_bandwidth'] = 0
|
||||||
|
stats['viewed_pages'] = 0
|
||||||
|
stats['viewed_hits'] = 0
|
||||||
|
stats['pages'] = set()
|
||||||
|
|
||||||
|
for k in current_visit.keys():
|
||||||
|
super_hit = current_visit[k]
|
||||||
|
if super_hit['robot']:
|
||||||
|
stats['not_viewed_bandwidth'] += super_hit['bandwith']
|
||||||
|
continue
|
||||||
|
|
||||||
|
stats['viewed_bandwidth'] += super_hit['bandwith']
|
||||||
|
stats['viewed_pages'] += super_hit['viewed_pages']
|
||||||
|
stats['viewed_hits'] += super_hit['viewed_hits']
|
||||||
|
|
||||||
|
for p in super_hit['pages']:
|
||||||
|
if not p['is_page']: continue
|
||||||
|
req = p['extract_request']
|
||||||
|
stats['pages'].add(req['extract_uri'])
|
||||||
|
|
||||||
|
cur_time = meta_visit['last_time']
|
||||||
|
|
||||||
|
print "== Stats for %d/%d ==" % (cur_time.tm_year, cur_time.tm_mon)
|
||||||
|
print stats
|
||||||
|
|
||||||
|
path = get_db_filename(cur_time)
|
||||||
|
if os.path.exists(path):
|
||||||
|
os.remove(path)
|
||||||
|
|
||||||
|
print "==> Serialize to %s" % path
|
||||||
|
|
||||||
|
serialize(current_visit, path)
|
||||||
|
|
||||||
def newHit(hit):
|
def newHit(hit):
|
||||||
global cur_time
|
global current_visit
|
||||||
|
global analyse_started
|
||||||
if not decode_http_request(hit): return
|
|
||||||
|
|
||||||
for k in hit.keys():
|
|
||||||
if hit[k] == '-': hit[k] = ''
|
|
||||||
|
|
||||||
decode_time(hit)
|
decode_time(hit)
|
||||||
|
|
||||||
t = hit['time_decoded']
|
t = hit['time_decoded']
|
||||||
|
|
||||||
meta_visit['last_time'] = t
|
cur_time = meta_visit['last_time']
|
||||||
|
|
||||||
if cur_time == None:
|
if cur_time == None:
|
||||||
cur_time = t
|
current_visit = deserialize(get_db_filename(t))
|
||||||
|
if not current_visit: current_visit = {}
|
||||||
|
analyse_started = True
|
||||||
else:
|
else:
|
||||||
if cur_time.tm_mday != t.tm_mday:
|
if not analyse_started:
|
||||||
return False
|
if time.mktime(cur_time) >= time.mktime(t):
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
analyse_started = True
|
||||||
|
if cur_time.tm_mon != t.tm_mon:
|
||||||
|
generate_month_stats()
|
||||||
|
current_visit = deserialize(get_db_filename(t))
|
||||||
|
if not current_visit: current_visit = {}
|
||||||
|
|
||||||
|
meta_visit['last_time'] = t
|
||||||
|
|
||||||
|
if not decode_http_request(hit): return False
|
||||||
|
|
||||||
|
for k in hit.keys():
|
||||||
|
if hit[k] == '-': hit[k] = ''
|
||||||
|
|
||||||
remote_addr = hit['remote_addr']
|
remote_addr = hit['remote_addr']
|
||||||
if remote_addr in current_visit.keys():
|
if remote_addr in current_visit.keys():
|
||||||
|
@ -182,6 +238,11 @@ def newHit(hit):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
print '==> Analysing log'
|
print '==> Analysing log'
|
||||||
|
|
||||||
|
meta_visit = deserialize(META_PATH)
|
||||||
|
if not meta_visit:
|
||||||
|
meta_visit = {'last_time':None}
|
||||||
|
|
||||||
f = open("access.log")
|
f = open("access.log")
|
||||||
for l in f:
|
for l in f:
|
||||||
# print "line " + l;
|
# print "line " + l;
|
||||||
|
@ -195,19 +256,5 @@ for l in f:
|
||||||
print "No match " + l
|
print "No match " + l
|
||||||
f.close();
|
f.close();
|
||||||
|
|
||||||
call_plugins(PRE_HOOK_DIRECTORY, current_visit)
|
generate_month_stats()
|
||||||
|
serialize(meta_visit, META_PATH)
|
||||||
stats = generate_day_stats()
|
|
||||||
|
|
||||||
print stats
|
|
||||||
valid_visitors = {k: v for (k,v) in current_visit.items() if not current_visit[k]['robot']}
|
|
||||||
#print valid_visitors
|
|
||||||
# for ip in current_visit.keys():
|
|
||||||
# hit = current_visit[ip]
|
|
||||||
# if hit['robot']: continue
|
|
||||||
# print "%s =>" % (ip)
|
|
||||||
# for k in hit.keys():
|
|
||||||
# if k != 'pages':
|
|
||||||
# print "\t%s : %s" % (k, current_visit[ip][k])
|
|
||||||
|
|
||||||
call_plugins(POST_HOOK_DIRECTORY, valid_visitors)
|
|
||||||
|
|
Loading…
Reference in New Issue
Block a user