Feeds: Add domain and number of subscribers for feed parsers.

Set the correct last access date for merged feed parsers
Remove the BAD_FEED_PARSER state (bad feeds are now simply tagged NOT_A_FEED_PARSER)
Gregory Soutade 2024-07-28 09:25:06 +02:00
parent 122ee875fa
commit 46c9ae4f15
2 changed files with 87 additions and 30 deletions
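
The subscriber count is taken from the parser's User-Agent header: many aggregators advertise how many readers they fetch for. A minimal sketch of the extraction, using the same regular expression this commit adds as self.subscribers_re; the sample user-agent string is an illustrative assumption, not taken from the code:

    import re

    # Same pattern as the new self.subscribers_re in IWLAPostAnalysisFeeds
    subscribers_re = re.compile(r'.* ([0-9]+) subscriber.*')

    # Hypothetical aggregator user-agent, lowercased as the plugin does
    user_agent = 'feedly/1.0 (+http://www.feedly.com/fetcher.html; 12 subscribers)'.lower()

    match = subscribers_re.match(user_agent)
    subscribers = int(match.groups()[0]) if match else 0
    print(subscribers)  # -> 12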


@@ -72,11 +72,13 @@ class IWLADisplayFeeds(IPlugin):
         path = self.iwla.getCurDisplayPath(filename)
         page = display.createPage(title, path, self.iwla.getConfValue('css_path', []))
-        table = display.createBlock(DisplayHTMLBlockTable, self.iwla._(u'All feeds parsers'), [self.iwla._(u'Host'), self.iwla._(u'Pages'), self.iwla._(u'Hits'), self.iwla._(u'Last Access')])
-        table.setColsCSSClass(['', 'iwla_page', 'iwla_hit', ''])
+        table = display.createBlock(DisplayHTMLBlockTable, self.iwla._(u'All feeds parsers'), [self.iwla._(u'Host'), self.iwla._(u'Pages'), self.iwla._(u'Hits')
+                                                                                               , self.iwla._(u'Domain'), self.iwla._(u'Subscribers'), self.iwla._(u'Last Access')])
+        table.setColsCSSClass(['', 'iwla_page', 'iwla_hit', '', '', ''])
+        rows = []
         for super_hit in hits.values():
-            if not super_hit.get('feed_parser', False): continue
-            if super_hit['feed_parser'] == IWLAPostAnalysisFeeds.BAD_FEED_PARSER:
+            if super_hit.get('feed_parser', None) not in (IWLAPostAnalysisFeeds.FEED_PARSER,\
+                                                          IWLAPostAnalysisFeeds.MERGED_FEED_PARSER):
                 continue
             nb_feeds_parsers += 1
             address = super_hit['remote_addr']
@@ -84,11 +86,21 @@ class IWLADisplayFeeds(IPlugin):
                 address += ' *'
             pages = super_hit['not_viewed_pages'][0] + super_hit['viewed_pages'][0]
             hits = super_hit['not_viewed_hits'][0] + super_hit['viewed_hits'][0]
-            last_access = super_hit.get('feed_parser_last_access', None)
-            if not last_access:
-                last_access = super_hit['last_access']
-            row = [address, pages, hits, time.asctime(last_access)]
-            table.appendRow(row, super_hit['remote_ip'])
+            last_access = super_hit.get('feed_parser_last_access', super_hit['last_access'])
+            feed_domain = super_hit.get('feed_domain', '')
+            if feed_domain:
+                link = '<a href=\'https://%s/%s\'>%s</a>' % (feed_domain, super_hit.get('feed_uri', ''), feed_domain)
+            else:
+                link = ''
+            subscribers = super_hit.get('feed_subscribers', 0)
+            # Don't overload interface
+            if subscribers <= 1: subscribers = ''
+            row = [address, pages, hits, link, subscribers, time.asctime(last_access),
+                   super_hit['remote_ip'], last_access]
+            rows.append(row)
+        rows = sorted(rows, key=lambda t: t[7], reverse=True)
+        for row in rows:
+            table.appendRow(row[:6], row[6])
         page.appendBlock(table)
         note = DisplayHTMLRaw(self.iwla, ('<small>*%s</small>' % (self.iwla._(u'Merged feeds parsers'))))
         page.appendBlock(note)
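
Each row now carries two trailing fields that never reach the page: the remote IP used as the row key and the raw last-access time used as the sort key. Sorting happens on the raw struct_time at index 7, not on the asctime() string, so ordering stays chronological. A minimal standalone sketch of this pattern, with assumed sample data:

    import time

    # Displayed columns first, then hidden fields: row key (index 6), sort key (index 7)
    now = time.localtime()
    earlier = time.localtime(time.mktime(now) - 3600)
    rows = [
        ['feed.example.org', 10, 42, '', '', time.asctime(earlier), '203.0.113.7', earlier],
        ['news.example.com', 5, 12, '', '', time.asctime(now), '198.51.100.2', now],
    ]

    # Newest parsers first: struct_time compares like a tuple
    rows = sorted(rows, key=lambda t: t[7], reverse=True)
    for row in rows:
        display_columns, row_key = row[:6], row[6]
        print(row_key, display_columns)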


@@ -19,6 +19,7 @@
 #
 import re
+import time

 from iwla import IWLA
 from iplugin import IPlugin
@@ -47,8 +48,11 @@ Output files :
 Statistics creation :
 remote_ip =>
     feed_parser
-    feed_name_analysed
+    feed_name_analyzed
     feed_parser_last_access (for merged parser)
+    feed_domain
+    feed_uri
+    feed_subscribers

 Statistics update :
 None
@@ -91,6 +95,11 @@ class IWLAPostAnalysisFeeds(IPlugin):
         for f in feeds_agents:
             self.user_agents_re.append(re.compile(f))

+        self.bad_user_agents_re = []
+        self.bad_user_agents_re.append(re.compile(r'.*feedback.*'))
+
+        self.subscribers_re = re.compile(r'.* ([0-9]+) subscriber.*')
+
         self.merge_feeds_parsers_list = []
         for f in _merge_feeds_parsers_list:
             self.merge_feeds_parsers_list.append(re.compile(f))
@@ -100,6 +109,7 @@ class IWLAPostAnalysisFeeds(IPlugin):
         return True

     def _appendToMergeCache(self, isFeedParser, key, hit):
+        hit['feed_parser'] = isFeedParser
         # First time, register into dict
         if self.merged_feeds.get(key, None) is None:
             # Merged
@@ -108,21 +118,27 @@ class IWLAPostAnalysisFeeds(IPlugin):
             # Next time
             # Current must be ignored
             hit['feed_parser'] = self.NOT_A_FEED_PARSER
+            merged_hit = hit
             last_access = hit['last_access']
             # Previous matched hit must be set as merged
-            isFeedParser = self.MERGED_FEED_PARSER
             hit = self.merged_feeds[key]
-            if hit['last_access'] < last_access:
-                hit['feed_parser_last_access'] = last_access
+            hit['feed_parser'] = self.MERGED_FEED_PARSER
+            hit['viewed_pages'][0] += merged_hit['viewed_pages'][0]
+            hit['viewed_hits'][0] += merged_hit['viewed_hits'][0]
+            hit['not_viewed_pages'][0] += merged_hit['not_viewed_pages'][0]
+            hit['not_viewed_hits'][0] += merged_hit['not_viewed_hits'][0]
+            if hit['last_access'] < merged_hit['last_access']:
+                hit['feed_parser_last_access'] = merged_hit['last_access']
             else:
                 hit['feed_parser_last_access'] = hit['last_access']
-            hit['feed_parser'] = isFeedParser

     def mergeFeedsParsers(self, isFeedParser, hit):
-        if isFeedParser:
+        if isFeedParser in (self.FEED_PARSER, self.MERGED_FEED_PARSER):
             for r in self.merge_feeds_parsers_list:
                 if r.match(hit['remote_addr']) or r.match(hit['remote_ip']):
-                    self._appendToMergeCache(isFeedParser, r, hit)
+                    # One group can view multiple different feeds
+                    key = r.pattern + hit.get('feed_domain', '') + hit.get('feed_uri', '')
+                    self._appendToMergeCache(isFeedParser, key, hit)
                     return
         #print("No match for %s : %d" % (hit['remote_addr'], hit['viewed_hits'][0] + hit['not_viewed_hits'][0]))
         # Other cases, look for user agent
@@ -134,22 +150,27 @@ class IWLAPostAnalysisFeeds(IPlugin):
         for hit in hits.values():
             isFeedParser = hit.get('feed_parser', None)
-            # Register already tagged feed parser in merged_feeds
-            if self.merge_feeds_parsers and\
-               not isFeedParser in (None, self.BAD_FEED_PARSER):
-                self.mergeFeedsParsers(isFeedParser, hit)
+            if isFeedParser == self.NOT_A_FEED_PARSER:
                 continue

+            # Second time
             if isFeedParser:
-                if hit['feed_parser'] == self.BAD_FEED_PARSER: continue
-                if not hit.get('feed_name_analysed', False) and\
+                # Update last access time
+                if hit['last_access'] > hit.get('feed_parser_last_access', time.gmtime(0)):
+                    hit['feed_parser_last_access'] = hit['last_access']
+                if not hit.get('feed_name_analyzed', False) and\
                    hit.get('dns_name_replaced', False):
-                    hit['feed_name_analysed'] = True
+                    hit['feed_name_analyzed'] = True
                     addr = hit.get('remote_addr', None)
                     for r in self.bad_feeds_re:
                         if r.match(addr):
-                            hit['feed_parser'] = self.BAD_FEED_PARSER
+                            hit['feed_parser'] = self.NOT_A_FEED_PARSER
                             break
+                # Register already tagged feed parser in merged_feeds
+                if self.merge_feeds_parsers:
+                    self.mergeFeedsParsers(isFeedParser, hit)
                 continue

             request = hit['requests'][0]
@@ -164,14 +185,38 @@ class IWLAPostAnalysisFeeds(IPlugin):
                     isFeedParser = self.NOT_A_FEED_PARSER
                     break

+            user_agent = request['http_user_agent'].lower()
+
             if isFeedParser == self.NOT_A_FEED_PARSER:
-                user_agent = request['http_user_agent'].lower()
                 for regexp in self.user_agents_re:
                     if regexp.match(user_agent):
                         isFeedParser = self.FEED_PARSER
                         break

+            if isFeedParser == self.FEED_PARSER:
+                for regexp in self.bad_user_agents_re:
+                    if regexp.match(user_agent):
+                        isFeedParser = self.NOT_A_FEED_PARSER
+                        break
+
+            if not hit.get('feed_name_analyzed', False) and\
+               hit.get('dns_name_replaced', False):
+                hit['feed_name_analyzed'] = True
+                addr = hit.get('remote_addr', None)
+                for r in self.bad_feeds_re:
+                    if r.match(addr):
+                        isFeedParser = hit['feed_parser'] = self.NOT_A_FEED_PARSER
+                        break
+
+            if isFeedParser == self.FEED_PARSER:
+                hit['feed_domain'] = request['server_name']
+                hit['feed_uri'] = uri
+                hit['feed_subscribers'] = 0
+                subscribers = self.subscribers_re.match(user_agent)
+                if subscribers:
+                    hit['feed_subscribers'] = int(subscribers.groups()[0])
+
+            hit['feed_parser'] = isFeedParser
             if self.merge_feeds_parsers:
                 self.mergeFeedsParsers(isFeedParser, hit)
-            else:
-                hit['feed_parser'] = isFeedParser
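
After this change, merging is cumulative rather than last-writer-wins: the first hit registered under a key keeps its identity, every later hit is flagged NOT_A_FEED_PARSER and its page/hit counters are folded into the stored entry, and feed_parser_last_access tracks the newest access seen. A reduced sketch of that accumulation logic, with simplified dict-based hits (field names follow the plugin; the scaffolding and sample values are assumed):

    NOT_A_FEED_PARSER = 0
    FEED_PARSER = 1
    MERGED_FEED_PARSER = 2

    merged_feeds = {}

    def append_to_merge_cache(key, hit):
        # Mirrors _appendToMergeCache: first hit is stored, later ones are folded in
        if key not in merged_feeds:
            hit['feed_parser'] = FEED_PARSER
            hit['feed_parser_last_access'] = hit['last_access']
            merged_feeds[key] = hit
            return
        hit['feed_parser'] = NOT_A_FEED_PARSER      # current entry is ignored
        merged = merged_feeds[key]
        merged['feed_parser'] = MERGED_FEED_PARSER  # stored entry becomes the merged one
        merged['viewed_hits'][0] += hit['viewed_hits'][0]
        merged['not_viewed_hits'][0] += hit['not_viewed_hits'][0]
        # Keep the most recent access date (the "correct date" part of this commit)
        if merged['feed_parser_last_access'] < hit['last_access']:
            merged['feed_parser_last_access'] = hit['last_access']

    a = {'last_access': 100, 'viewed_hits': [3], 'not_viewed_hits': [1]}
    b = {'last_access': 250, 'viewed_hits': [2], 'not_viewed_hits': [4]}
    append_to_merge_cache('groupA', a)
    append_to_merge_cache('groupA', b)
    print(a['viewed_hits'][0], a['feed_parser_last_access'])  # -> 5 250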