# Dynastie blog engine — per-year archive page generator.
import os
from datetime import datetime
from xml.dom.minidom import parse, parseString
from xml.parsers.expat import ExpatError

from django.db import models

from dynastie.generators.generator import DynastieGenerator
from dynastie.generators.index import Index
class Archive(Index):
    """Generates per-year, paginated archive pages for blog articles.

    Extends Index; walks articles grouped by creation year and renders
    one archive page set per year under <output>/archive/<year>/.
    """

    # Pagination state shared across a generation pass.
    cur_page = 0           # page currently being rendered
    nb_pages = 0           # number of extra pages beyond the first
    cur_article = 0        # index of the next article to render
    articles_per_page = 0  # page size, read from the dyn:articles 'limit' attribute
    filename = 'index'     # base name of the generated HTML files
    cur_year = 0           # year of the archive currently being built
def createArchive(self, articles, dom, root, node):
|
|
if node.hasAttribute('year'):
|
|
self.replaceByText(dom, root, node, str(self.cur_year))
|
|
|
|
def createArchives(self, src, output, dom, hooks, articles):
|
|
filename = self.filename + '.html'
|
|
self.nb_pages = 0
|
|
self.cur_page = 0
|
|
self.cur_article = 0
|
|
|
|
if len(articles) > self.articles_per_page:
|
|
self.nb_pages = len(articles) / self.articles_per_page
|
|
|
|
if not os.path.exists(output + '/archive/' + str(self.cur_year)):
|
|
os.mkdir(output + '/archive/' + str(self.cur_year))
|
|
|
|
while self.cur_page <= self.nb_pages:
|
|
#print 'Generate ' + filename
|
|
nodes = dom.getElementsByTagName("*")
|
|
self.parse(hooks, articles, dom, nodes[0])
|
|
self.writeIfNotTheSame(output + '/archive/' + str(self.cur_year) + '/' + filename, nodes[0].toxml('utf8'))
|
|
self.cur_page = self.cur_page + 1
|
|
filename = self.filename + str(self.cur_page) + '.html'
|
|
dom = parse(src + '/_archive.html')
|
|
|
|
while os.path.exists(filename):
|
|
self.addReport('Removing unused ' + filename)
|
|
os.unlink(filename)
|
|
filename = filename + '.gz'
|
|
if os.path.exists(filename):
|
|
self.addReport('Removing unused ' + filename)
|
|
os.unlink(filename)
|
|
self.cur_page = self.cur_page + 1
|
|
filename = output + '/archive/' + str(self.cur_year) + '/' + self.filename + str(self.cur_page) + '.html'
|
|
|
|
def generate(self, blog, src, output):
|
|
from dynastie.models import Article, Blog
|
|
|
|
hooks = {'articles' : self.createArticles,
|
|
'navigation' : self.createNavigation,
|
|
'archive' : self.createArchive}
|
|
|
|
if not os.path.exists(src + '/_archive.html'):
|
|
self.addError('No _archive.html found, exiting')
|
|
return self.report
|
|
|
|
try:
|
|
dom = parse(src + '/_archive.html')
|
|
except xml.dom.DOMException as e:
|
|
self.addError('Error parsing _archive.html : ' + e)
|
|
return self.report
|
|
|
|
if not os.path.exists(output + '/archive'):
|
|
os.mkdir(output + '/archive')
|
|
|
|
article_nodes = dom.getElementsByTagNameNS(self.URI, "articles")
|
|
if not article_nodes is None:
|
|
if article_nodes[0].hasAttribute("limit"):
|
|
self.articles_per_page = int(article_nodes[0].getAttribute("limit"))
|
|
else:
|
|
self.articles_per_page = 5
|
|
else:
|
|
self.addError('No tag dyn:articles found')
|
|
|
|
articles = Article.objects.all().order_by('creation_date')
|
|
|
|
if articles.count() != 0:
|
|
self.cur_year = int(articles[0].creation_date.year)
|
|
|
|
my_articles = []
|
|
now = datetime.now()
|
|
nb_articles = len(articles)
|
|
for i in range(0, nb_articles):
|
|
if self.cur_year == now.year:
|
|
break
|
|
|
|
if i < nb_articles-1:
|
|
if articles[i].creation_date.year != articles[i+1].creation_date.year:
|
|
dom = parse(src + '/_archive.html')
|
|
self.createArchives(src, output, dom, hooks, my_articles)
|
|
self.cur_year = int(articles[i+1].creation_date.year)
|
|
#print 'New year ' + str(self.cur_year)
|
|
my_articles = []
|
|
if self.cur_year == int(now.year):
|
|
break
|
|
else:
|
|
my_articles.append(articles[i])
|
|
else:
|
|
# Last article
|
|
my_articles.append(article)
|
|
if nb_articles != 1 and articles[i].creation_date.year != articles[i-1].creation_date.year:
|
|
self.cur_year = int(articles[i].creation_date.year)
|
|
|
|
if len(my_articles) != 0:
|
|
self.createArchives(src, output, dom, hooks, my_articles)
|
|
|
|
if not self.somethingWrote:
|
|
self.addReport('Nothing changed')
|
|
|
|
return self.report
|
|
|