Update code for Python3

parent 6f9622bb91
commit f457f4e390
@@ -437,7 +437,7 @@ def _toStr(v):
 if type(v) != unicode: return unicode(v)
 else: return v

-def listToStr(l): return map(lambda(v) : _toStr(v), l)
+def listToStr(l): return map(lambda v : _toStr(v), l)

 def generateHTMLLink(url, name=None, max_length=100, prefix=u'http'):
 url = unicode(url)
@@ -59,7 +59,7 @@ class IPlugin(object):
 def validConfRequirements(conf_requirements, iwla, plugin_path):
 for r in conf_requirements:
 if iwla.getConfValue(r, None) is None:
-print '\'%s\' conf value required for %s' % (r, plugin_path)
+print('\'%s\' conf value required for %s' % (r, plugin_path))
 return False

 return True
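Aside, not part of the commit: the change above is the usual Python 3 move from the print statement to the print() function; a minimal illustration with placeholder names:

    value_name = 'some_conf_value'   # placeholder values for illustration
    plugin_path = 'plugins/example'
    print('\'%s\' conf value required for %s' % (value_name, plugin_path))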
@@ -89,7 +89,7 @@ def preloadPlugins(plugins, iwla):

 if len(classes) > 1:
 logger.warning('More than one class found in %s, loading may fail. Selecting %s' % (plugin_path, classes[0]))
-print classes
+print(classes)
 continue

 plugin = classes[0](iwla)
iwla.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 #
 # Copyright Grégory Soutadé 2015
@@ -24,7 +24,7 @@ import shutil
 import sys
 import re
 import time
-import cPickle
+import pickle
 import gzip
 import importlib
 import argparse
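Aside, not part of the commit: Python 3 merged cPickle into pickle (the C accelerator is chosen automatically), so the import simply changes name. A minimal sketch of a dual-interpreter import, should both need to be supported:

    try:
        import cPickle as pickle   # Python 2: explicit C implementation
    except ImportError:
        import pickle              # Python 3: accelerated implementation used when available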
@@ -165,12 +165,12 @@ class IWLA(object):
 else:
 self.logger.info('==> Start')
 try:
-t = gettext.translation('iwla', localedir=conf.locales_path, languages=[conf.locale], codeset='utf8')
+t = gettext.translation('iwla', localedir=conf.locales_path, languages=[conf.locale])
 self.logger.info('\tUsing locale %s' % (conf.locale))
 except IOError:
 t = gettext.NullTranslations()
 self.logger.info('\tUsing default locale en_EN')
-self._ = t.ugettext
+self._ = t.gettext

 def getVersion(self):
 return IWLA.IWLA_VERSION
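Aside, not part of the commit: Python 3 translation objects return str directly, so t.ugettext no longer exists and the codeset argument is not needed; gettext is the method to bind. A minimal sketch of the same pattern, with 'locales' and 'fr' as placeholders for conf.locales_path and conf.locale:

    import gettext
    try:
        t = gettext.translation('iwla', localedir='locales', languages=['fr'])
    except IOError:
        t = gettext.NullTranslations()   # fall back to untranslated strings
    _ = t.gettext                        # returns str under Python 3
    print(_('Average'))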
@@ -239,7 +239,7 @@ class IWLA(object):

 def _clearDisplay(self):
 self.display.clear()
-return self.display
+return self.display

 def getDBFilename(self, time):
 return os.path.join(conf.DB_ROOT, str(time.tm_year), '%02d' % (time.tm_mon), conf.DB_FILENAME)
@@ -261,7 +261,7 @@ class IWLA(object):
 shutil.copy(filename, filename + '.bak')

 with open(filename + '.tmp', 'wb+') as f, self._openDB(filename, 'w') as fzip:
-cPickle.dump(obj, f)
+pickle.dump(obj, f)
 f.seek(0)
 fzip.write(f.read())
 os.fsync(fzip)
@@ -275,7 +275,7 @@ class IWLA(object):

 res = None
 with self._openDB(filename) as f:
-res = cPickle.load(f)
+res = pickle.load(f)
 return res

 def _callPlugins(self, target_root, *args):
@@ -326,7 +326,7 @@ class IWLA(object):

 if not remote_addr in self.current_analysis['visits'].keys():
 self._createVisitor(hit)

 super_hit = self.current_analysis['visits'][remote_addr]
 # Don't keep all requests for robots
 if not super_hit['robot']:
@@ -336,7 +336,7 @@ class IWLA(object):
 if self.hasBeenViewed(hit):
 super_hit['bandwidth'][day] = super_hit['bandwidth'].get(day, 0) + int(hit['body_bytes_sent'])
 super_hit['bandwidth'][0] += int(hit['body_bytes_sent'])
-super_hit['last_access'] = self.meta_infos['last_time']
+super_hit['last_access'] = self.meta_infos['last_time']

 request = hit['extract_request']
@@ -345,7 +345,7 @@ class IWLA(object):
 hit['is_page'] = self.isPage(uri)

 if super_hit['robot'] or\
-not self.hasBeenViewed(hit):
+not self.hasBeenViewed(hit):
 page_key = 'not_viewed_pages'
 hit_key = 'not_viewed_hits'
 else:
@@ -417,9 +417,9 @@ class IWLA(object):
 def _decodeTime(self, hit):
 try:
 hit['time_decoded'] = time.strptime(hit['time_local'], conf.time_format)
-except ValueError, e:
+except ValueError as e:
 if sys.version_info < (3, 2):
-# Try without UTC value at the end (%z not recognized)
+# Try without UTC value at the end (%z not recognized)
 gmt_offset_str = hit['time_local'][-5:]
 gmt_offset_hours = int(gmt_offset_str[1:3])*60*60
 gmt_offset_minutes = int(gmt_offset_str[3:5])*60
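Aside, not part of the commit: the 'except SomeError, e' spelling was removed in Python 3; only the 'as' form is accepted (and it already works on Python 2.6+). A minimal illustration:

    import time
    try:
        time.strptime('not a date', '%d/%b/%Y:%H:%M:%S')
    except ValueError as e:   # the only form Python 3 accepts
        print(e)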
@@ -482,9 +482,9 @@ class IWLA(object):

 row = [0, nb_visits, stats['viewed_pages'], stats['viewed_hits'], stats['viewed_bandwidth'], stats['not_viewed_bandwidth']]
 if nb_days:
-average_row = map(lambda(v): int(v/nb_days), row)
+average_row = map(lambda v: int(v/nb_days), row)
 else:
-average_row = map(lambda(v): 0, row)
+average_row = map(lambda v: 0, row)

 average_row[0] = self._('Average')
 average_row[4] = bytesToStr(average_row[4])
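Aside, not part of the commit: dropping the parentheses fixes the lambda syntax, but under Python 3 map() also returns a lazy iterator rather than a list, so a result that is indexed afterwards (as average_row is just below) generally needs to be wrapped in list(). A small illustration with placeholder numbers:

    row = [0, 10, 20]
    nb_days = 2
    average_row = list(map(lambda v: int(v/nb_days), row))  # list() makes it indexable
    average_row[0] = 'Average'
    print(average_row)   # ['Average', 5, 10]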
@@ -592,7 +592,7 @@ class IWLA(object):
 if f.endswith(ext):
 self._compressFile(rootdir, f)
 break

 def _generateDisplay(self):
 self._generateDisplayDaysStats()
 self._callPlugins(conf.DISPLAY_HOOK_DIRECTORY)
@@ -658,7 +658,7 @@ class IWLA(object):
 if not year in self.meta_infos['stats'].keys():
 self.meta_infos['stats'][year] = {}
 self.meta_infos['stats'][year][month] = duplicated_stats

 self.logger.info("==> Serialize to %s" % (conf.META_PATH))
 self._serialize(self.meta_infos, conf.META_PATH)
@@ -708,7 +708,7 @@ class IWLA(object):
 self.analyse_started = True
 else:
 if not self.analyse_started and\
-time.mktime(t) <= time.mktime(cur_time):
+time.mktime(t) <= time.mktime(cur_time):
 self.logger.debug("Not in time")
 return False
 self.analyse_started = True
@@ -809,7 +809,7 @@ class FileIter(object):
 self.filenames = [f for f in filenames.split(',') if f]
 for f in self.filenames:
 if not os.path.exists(f):
-print 'No such file \'%s\'' % (f)
+print('No such file \'%s\'' % (f))
 sys.exit(-1)
 self.cur_file = None
 self._openNextFile()
@@ -19,7 +19,7 @@
 #

 from iwla import IWLA
-from istats_diff import IWLADisplayStatsDiff
+from .istats_diff import IWLADisplayStatsDiff
 from display import *

 """
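Aside, not part of the commit: Python 3 removed implicit relative imports, so a module must name a sibling with the explicit dotted form. A sketch, assuming a package layout roughly like this (file names illustrative):

    # plugins/display/__init__.py
    # plugins/display/istats_diff.py        <- defines IWLADisplayStatsDiff
    # plugins/display/some_stats_plugin.py  <- a sibling module in the same package
    from .istats_diff import IWLADisplayStatsDiff   # explicit relative import (Python 3)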
@@ -19,7 +19,7 @@
 #

 from iwla import IWLA
-from istats_diff import IWLADisplayStatsDiff
+from .istats_diff import IWLADisplayStatsDiff
 from display import *

 """
@@ -19,7 +19,7 @@
 #

 from iwla import IWLA
-from istats_diff import IWLADisplayStatsDiff
+from .istats_diff import IWLADisplayStatsDiff
 from display import *

 """
@@ -21,7 +21,7 @@
 from iwla import IWLA
 from iplugin import IPlugin

-from iptogeo import IPToGeo
+from .iptogeo import IPToGeo

 """
 Post analysis hook
@@ -88,7 +88,7 @@ class IWLAPostAnalysisIPToGeo(IPlugin):
 geo[cc] += 1
 else:
 geo[cc] = 1
-except Exception, e:
-print e
+except Exception as e:
+print(e)

 month_stats['geo'] = geo
@@ -81,7 +81,7 @@ class IPToGeo(object):
 return res

 def _create_request(self, ip, ip_type):
-packet = ''
+packet = b''
 packet += struct.pack('<IBBBBI', IPToGeo.MAGIC, IPToGeo.VERSION, IPToGeo.REQ,
 0, #err
 ip_type, # ip type
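Aside, not part of the commit: struct.pack() returns bytes under Python 3, so the accumulator has to start as b'' rather than the text string ''. A minimal illustration with placeholder field values:

    import struct
    packet = b''
    packet += struct.pack('<IBBBBI', 0x1234, 1, 0, 0, 4, 0)  # bytes += bytes works
    print(len(packet))   # 12 bytes: 4 + 1 + 1 + 1 + 1 + 4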
@@ -124,7 +124,7 @@ class IPToGeo(object):
 if not packet:
 raise socket.timeout
 return packet
-except socket.timeout, e:
+except socket.timeout as e:
 if second_chance:
 self._nb_requests_sent = self.MAX_REQUESTS
 return self._send_request(packet, False)
@@ -78,7 +78,7 @@ class IWLAPostAnalysisReferers(IPlugin):
 domain_name = self.iwla.getConfValue('domain_name', '')

 if not domain_name:
-print 'domain_name must not be empty !'
+print('domain_name must not be empty !')
 return False

 self.own_domain_re = re.compile(r'.*%s.*' % (domain_name))
@@ -114,7 +114,7 @@ class IWLAPostAnalysisReferers(IPlugin):
 key_phrase = groups.groupdict()['key_phrase']
 try:
 key_phrase = urllib.unquote_plus(key_phrase).decode('utf8')
-except Exception, e:
+except Exception as e:
 print(e)
 continue
 if not key_phrase in key_phrases.keys():
@@ -59,11 +59,11 @@ class IWLAPreAnalysisPageToHit(IPlugin):
 def load(self):
 # Page to hit
 self.ph_regexps = self.iwla.getConfValue('page_to_hit_conf', [])
-self.ph_regexps = map(lambda(r): re.compile(r), self.ph_regexps)
+self.ph_regexps = map(lambda r: re.compile(r), self.ph_regexps)

 # Hit to page
 self.hp_regexps = self.iwla.getConfValue('hit_to_page_conf', [])
-self.hp_regexps = map(lambda(r): re.compile(r), self.hp_regexps)
+self.hp_regexps = map(lambda r: re.compile(r), self.hp_regexps)

 self.logger = logging.getLogger(self.__class__.__name__)
 return True
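Aside, not part of the commit: besides the lambda fix, map() in Python 3 yields a one-shot iterator; if a compiled-regexp collection like this one is scanned more than once, wrapping it in list() preserves the Python 2 behaviour. A small sketch with placeholder patterns:

    import re
    patterns = [r'^/robots\.txt$', r'^/favicon\.ico$']       # placeholder patterns
    regexps = list(map(lambda r: re.compile(r), patterns))   # reusable list, not an iterator
    for uri in ('/robots.txt', '/index.html'):
        print(any(rx.match(uri) for rx in regexps))          # True, then False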
@@ -60,7 +60,7 @@ class IWLAPreAnalysisRobots(IPlugin):
 self.API_VERSION = 1

 def load(self):
-self.awstats_robots = map(lambda (x) : re.compile(('.*%s.*') % (x), re.IGNORECASE), awstats_data.robots)
+self.awstats_robots = map(lambda x : re.compile(('.*%s.*') % (x), re.IGNORECASE), awstats_data.robots)
 self.robot_re = re.compile(r'.*bot.*', re.IGNORECASE)
 self.crawl_re = re.compile(r'.*crawl.*', re.IGNORECASE)
 self.logger = logging.getLogger(self.__class__.__name__)