import os
import xml.dom

from datetime import datetime
from xml.dom.minidom import parse, parseString
from xml.parsers.expat import ExpatError

from dynastie.generators.generator import DynastieGenerator
from dynastie.generators.index import Index

from django.db import models


class Archive(Index):

    # Pagination state
    cur_page = 0
    nb_pages = 0
    cur_article = 0
    articles_per_page = 0

    # Output naming: pages are written as <dirname>/<filename>[N].html
    filename = 'index'
    dirname = ''

    # Year currently being archived
    cur_year = 0

    def createArchive(self, articles, dom, root, node):
        # Hook for the <dyn:archive year=""/> tag: replace it by the year
        # currently being generated.
        if node.hasAttribute('year'):
            self.replaceByText(dom, root, node, str(self.cur_year))

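    # Generate the paginated archive pages for self.cur_year under
    # output/archive/<year>/: index.html, index1.html, ... with
    # articles_per_page articles per page, then drop pages (and their
    # .gz copies) left over from a previous, larger generation.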
    def createArchives(self, src, output, dom, hooks, articles):
        filename = self.filename + '.html'
        self.nb_pages = 0
        self.cur_page = 0
        self.cur_article = 0

        if len(articles) > self.articles_per_page:
            self.nb_pages = self.computeNbPages(len(articles), self.articles_per_page)

        self.dirname = '/archive/' + str(self.cur_year)

        if not os.path.exists(output + self.dirname):
            os.mkdir(output + self.dirname)

        while self.cur_page <= self.nb_pages:
            #print 'Generate ' + filename
            nodes = dom.getElementsByTagName("*")
            nodes[0] = self.parse(src, hooks, articles, dom, nodes[0])
            self.writeIfNotTheSame(output + self.dirname + '/' + filename, nodes[0].toxml('utf8'))

            self.cur_page = self.cur_page + 1
            filename = self.filename + str(self.cur_page) + '.html'
            # Reload a pristine template for the next page
            dom = parse(src + '/_archive.html')

        # Remove stale pages from a previous generation that produced more
        # pages than this one. The paths must be rooted in the output
        # directory for the existence checks to work.
        filename = output + self.dirname + '/' + self.filename + str(self.cur_page) + '.html'
        while os.path.exists(filename):
            self.addReport('Removing unused ' + filename)
            os.unlink(filename)

            filename = filename + '.gz'
            if os.path.exists(filename):
                self.addReport('Removing unused ' + filename)
                os.unlink(filename)

            self.cur_page = self.cur_page + 1
            filename = output + self.dirname + '/' + self.filename + str(self.cur_page) + '.html'

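    # Entry point: load the _archive.html template, read the per-page limit
    # from the dyn:articles tag, then walk published front-page articles in
    # chronological order and emit one archive per past year (the current
    # year is skipped).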
    def generate(self, blog, src, output):
        from dynastie.models import Article, Blog

        # Tag handlers used by the template parser
        hooks = {'articles' : self.createArticles,
                 'navigation' : self.createNavigation,
                 'archive' : self.createArchive}

        if not os.path.exists(src + '/_archive.html'):
            self.addError('No _archive.html found, exiting')
            return self.report

        try:
            dom = parse(src + '/_archive.html')
        except (ExpatError, xml.dom.DOMException) as e:
            # minidom raises ExpatError on malformed XML
            self.addError('Error parsing _archive.html : ' + str(e))
            return self.report

        if not os.path.exists(output + '/archive'):
            os.mkdir(output + '/archive')

        # Read the per-page article limit from the <dyn:articles limit=""/> tag
        article_nodes = dom.getElementsByTagNameNS(self.URI, "articles")

        if len(article_nodes) != 0:
            if article_nodes[0].hasAttribute("limit"):
                self.articles_per_page = int(article_nodes[0].getAttribute("limit"))
            else:
                self.articles_per_page = 5
        else:
            self.addError('No tag dyn:articles found')
            return self.report

        # Oldest articles first, so years are encountered in ascending order
        articles = Article.objects.filter(published=True, front_page=True).order_by('creation_date')

        if articles.count() != 0:
            self.cur_year = int(articles[0].creation_date.year)

        my_articles = []
        now = datetime.now()

        nb_articles = len(articles)

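        # Walk the articles oldest to newest, batching them by year; when the
        # year changes, generate the archive for the completed year and start
        # a new batch. Stop once the current year is reached.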
        for i in range(0, nb_articles):
            # Nothing to archive once the current year is reached
            if self.cur_year == now.year:
                break

            if i < nb_articles-1:
                if articles[i].creation_date.year != articles[i+1].creation_date.year:
                    # Year boundary: generate the archive for the batch
                    # collected so far, newest article first, on a fresh
                    # copy of the template.
                    dom = parse(src + '/_archive.html')
                    my_articles.reverse()
                    self.createArchives(src, output, dom, hooks, my_articles)

                    self.cur_year = int(articles[i+1].creation_date.year)
                    #print 'New year ' + str(self.cur_year)
                    my_articles = []
                    if self.cur_year == int(now.year):
                        break
                else:
                    my_articles.append(articles[i])
            else:
                # Last article
                my_articles.append(articles[i])
                if nb_articles != 1 and articles[i].creation_date.year != articles[i-1].creation_date.year:
                    self.cur_year = int(articles[i].creation_date.year)

        # Generate the last batch (the most recent archived year), mirroring
        # the in-loop behaviour: newest article first, fresh template.
        if len(my_articles) != 0:
            my_articles.reverse()
            dom = parse(src + '/_archive.html')
            self.createArchives(src, output, dom, hooks, my_articles)

        if not self.somethingWrote:
            self.addReport('Nothing changed')

        return self.report