# iwla (web-log analyzer) configuration for soutade.fr.
# NOTE(review): this file had lost all its newlines; with everything after the
# first '#' parsed as a single comment, no setting was ever applied. Line
# structure restored below — all values are unchanged.

# Web server log(s) to analyze (comma-separated, oldest first)
analyzed_filename = '/var/log/apache2/access.log.1,/var/log/apache2/access.log'

# Domain name to analyze
domain_name = 'soutade.fr'

# Display visitor IP in addition to resolved names
display_visitor_ip = True

# Hooks used
pre_analysis_hooks = ['page_to_hit', 'robots']
post_analysis_hooks = ['referers', 'top_pages', 'top_downloads',
                       'operating_systems', 'browsers', 'feeds',
                       'hours_stats', 'reverse_dns', 'ip_to_geo']
display_hooks = ['filter_users', 'top_visitors', 'all_visits', 'referers',
                 'top_pages', 'top_downloads', 'referers_diff', 'ip_to_geo',
                 'operating_systems', 'browsers', 'feeds', 'hours_stats',
                 'top_downloads_diff', 'robot_bandwidth', 'top_pages_diff']

# Reverse DNS timeout (seconds)
reverse_dns_timeout = 0.2

# Count these addresses as hits
page_to_hit_conf = [r'^.+/logo[/]?$']

# Count these addresses as pages
hit_to_page_conf = [r'^.+/category/.+$',
                    r'^.+/tag/.+$',
                    r'^.+/archive/.+$',
                    r'^.+/ljdc[/]?$',
                    r'^.+/source/tree/.*$',
                    r'^.+/source/file/.*$',
                    r'^.+/search/.+$']

# Cap list sizes: building the HTML takes too long with too many entries
max_hits_displayed = 100
max_downloads_displayed = 100

# Compressed output files
compress_output_files = ['html', 'css', 'js']

# Locale in French
#locale = 'fr'

# Tracked IP
tracked_ip = ['192.168.1.1']

# Filtered IP
filtered_ip = [
    # r'192.168.*', # Local
]

filtered_users = [
    # [['country_code', '=', 'cn'], ['viewed_pages', '>=', '100']],
]

# Excluded IP
excluded_ip = [
    r'192.168.*',    # Local
    r'117.78.58.*',  # China ecs-117-78-58-25.compute.hwclouds-dns.com
]

# Feeds URLs
feeds = [r'/atom.xml', r'/rss.xml']

# Feeds referers URLs
feeds_referers = ['https://feedly.com']

# Consider xml files as multimedia (append to current list)
multimedia_files_append = ['xml']

# Don't count visitors that only do one hit (for a picture, ...)
count_hit_only_visitors = False

# Don't generate the per-robot bandwidth page (too big)
create_all_robot_bandwidth_page = False