#!/usr/bin/env python3
# Note : Python 3 is required (FileNotFoundError is used below).

# TODO : What happens when a file is moved with git ?
# TODO : Test the whole thing
# TODO : What happens when I run it as a hook ?
# TODO : What happens when I commit with -a option ?
# TODO : git ls-files

import sys
import getopt
import shutil
import os
import datetime
import subprocess
import re

from time import gmtime, strftime, mktime


# Test if a variable exists (== isset function in PHP)
def isset(variable):
    return variable in locals() or variable in globals()
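
# Usage note (an observation about the code above, not a behaviour change):
# inside isset(), locals() only contains "variable", so in practice the call
# tests module-level globals by name, e.g.:
#   tags = "linux"
#   isset("tags")   # True -- "tags" is a key of globals()
#   isset("nope")   # False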


# List all files in path directory
# Works recursively
# Return files list with path relative to current dir
def list_directory(path):
    fichier = []
    for root, dirs, files in os.walk(path):
        for i in files:
            fichier.append(os.path.join(root, i))
    return fichier


# Return a list with the tags of a given article (fh)
def get_tags(fh):
    tag_line = ''
    for line in fh.readlines():
        if "@tags=" in line:
            tag_line = line
            break

    if not tag_line:
        return []

    tags = [x.strip()
            for x in tag_line[tag_line.find("@tags=")+6:].split(",")]
    return tags
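
# For instance, an article containing the line (illustrative values):
#   @tags=linux, python
# makes get_tags() return ["linux", "python"].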


# Return the "number" latest articles found in "directory"
def latest_articles(directory, number):
    now = datetime.datetime.now()
    counter = 0
    latest_articles = []

    for i in range(int(now.strftime('%Y')), 0, -1):
        if counter >= number:
            break

        if os.path.isdir(directory+"/"+str(i)):
            for j in range(12, 0, -1):
                if j < 10:
                    j = "0"+str(j)

                if os.path.isdir(directory+"/"+str(i)+"/"+str(j)):
                    articles_list = list_directory(directory+"/"+str(i) +
                                                   "/"+str(j))
                    # Sort the articles by date, newest first
                    articles_list.sort(key=lambda x: os.stat(x).st_mtime,
                                       reverse=True)

                    latest_articles += articles_list[:number-counter]
                    if len(articles_list) < number-counter:
                        counter += len(articles_list)
                    else:
                        counter = number
    return latest_articles
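
# Articles are expected under <directory>/<year>/<month>/ (inferred from the
# isdir checks above), e.g. an illustrative tree:
#   raw/2013/07/my-article.html
# latest_articles("raw", 5) then returns the five newest such files.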


# Auto create necessary directories to write a file
def auto_dir(path):
    directory = os.path.dirname(path)
    try:
        if not os.path.exists(directory):
            os.makedirs(directory)
    except OSError:
        sys.exit("[ERROR] An error occurred while creating "+path+" file "
                 "and parent dirs.")


# Replace some user-specific syntax tags (to replace smileys for example)
def replace_tags(article, search_list, replace_list):
    return_string = article
    for search, replace in zip(search_list, replace_list):
        return_string = re.sub(search, replace, return_string)
    return return_string
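
# A minimal sketch of how the SEARCH/REPLACE pairs read from raw/params feed
# this function (the regex and the smiley image are made up):
#   search_list = [r":\)"]
#   replace_list = ["<img src=\"smile.png\"/>"]
#   replace_tags("Hello :)", search_list, replace_list)
#   # -> 'Hello <img src="smile.png"/>'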


try:
    opts, args = getopt.gnu_getopt(sys.argv[1:], "hf", ["help", "force-regen"])
except getopt.GetoptError:
    sys.exit("[ERROR] Unable to parse command line arguments. "
             "See pre-commit -h for more info on how to use.")

force_regen = False
for opt, arg in opts:
    if opt in ("-h", "--help"):
        print("This should be called automatically as a pre-commit git hook. "
              "You can also launch it manually right before committing.\n")
        print("This script generates static pages ready to be served behind "
              "your webserver.\n")
        print("Usage :")
        print("-h \t --help \t displays this help message.")
        print("-f \t --force-regen \t force complete rebuild of all pages.")
        sys.exit(0)
    elif opt in ("-f", "--force-regen"):
        force_regen = True


# Set parameters with params file
search_list = []
replace_list = []
try:
    with open("raw/params", "r") as params_fh:
        params = {}
        for line in params_fh.readlines():
            if line.strip() == "" or line.strip().startswith("#"):
                continue
            option, value = line.split("=", 1)
            if option == "SEARCH":
                search_list = value.strip().split(",")
            elif option == "REPLACE":
                replace_list = value.strip().split(",")
            else:
                params[option.strip()] = value.strip()

    print("[INFO] Parameters set from raw/params file.")
except IOError:
    sys.exit("[ERROR] Unable to load raw/params file which defines important "
             "parameters. Does such a file exist ? See doc for more info "
             "on this file.")


# Fill lists for modified, deleted and added files
modified_files = []
deleted_files = []
added_files = []

# Lists of years and months with modified files
years_list = []
months_list = []


if not force_regen:
    # Find the changes to be committed
    try:
        changes = subprocess.check_output(["git",
                                           "diff",
                                           "--cached",
                                           "--name-status"],
                                          universal_newlines=True)
    except subprocess.CalledProcessError:
        sys.exit("[ERROR] An error occurred when fetching file changes "
                 "from git.")

    changes = changes.strip().split("\n")
    if changes == [""]:
        sys.exit("[ERROR] Nothing to do... Did you add new files with "
                 "\"git add\" before?")

    for changed_file in changes:
        if changed_file.startswith("A"):
            added_files.append(changed_file[changed_file.index("\t")+1:])
        elif changed_file.startswith("M"):
            modified_files.append(changed_file[changed_file.index("\t")+1:])
        elif changed_file.startswith("D"):
            deleted_files.append(changed_file[changed_file.index("\t")+1:])
        else:
            sys.exit("[ERROR] An error occurred when running git diff.")
else:
    shutil.rmtree("blog/")
    shutil.rmtree("gen/")
    added_files = list_directory("raw")

if not added_files and not modified_files and not deleted_files:
    sys.exit("[ERROR] Nothing to do... Did you add new files with "
             "\"git add\" before?")


# Only keep modified raw articles files
for filename in list(added_files):
    direct_copy = False

    if not filename.startswith("raw/"):
        added_files.remove(filename)
        continue

    try:
        int(filename[4:8])
        years_list.append(filename[4:8])
    except ValueError:
        direct_copy = True

    try:
        # Month part of raw/YYYY/MM/...
        int(filename[9:11])
        months_list.append(filename[9:11])
    except ValueError:
        pass

    if ((not filename.endswith(".html") and not filename.endswith(".ignore"))
            or direct_copy):
        # Note : this deals with CSS, images or footer file
        print("[INFO] (Direct copy) Copying directly the file "
              + filename[4:]+" to blog dir.")

        auto_dir("blog/"+filename[4:])
        shutil.copy(filename, "blog/"+filename[4:])
        added_files.remove(filename)
        continue

    if filename.endswith(".ignore"):
        print("[INFO] (Not published) Found not published article "
              + filename[4:-7]+".")
        added_files.remove(filename)
        continue


for filename in list(modified_files):
    direct_copy = False

    if not filename.startswith("raw/"):
        modified_files.remove(filename)
        continue

    try:
        int(filename[4:8])
        years_list.append(filename[4:8])
    except ValueError:
        direct_copy = True

    try:
        # Month part of raw/YYYY/MM/...
        int(filename[9:11])
        months_list.append(filename[9:11])
    except ValueError:
        pass

    if ((not filename.endswith(".html") and not filename.endswith(".ignore"))
            or direct_copy):
        print("[INFO] (Direct copy) Updating directly the file "
              + filename[4:]+" in blog dir.")
        auto_dir("blog/"+filename[4:])
        shutil.copy(filename, "blog/"+filename[4:])
        modified_files.remove(filename)
        continue

    if filename.endswith(".ignore"):
        print("[INFO] (Not published) Found not published article "
              + filename[4:-7]+".")
        modified_files.remove(filename)
        continue


for filename in list(deleted_files):
    direct_delete = False

    if not filename.startswith("raw/"):
        deleted_files.remove(filename)
        continue

    try:
        int(filename[4:8])
        years_list.append(filename[4:8])
    except ValueError:
        direct_delete = True

    try:
        # Month part of raw/YYYY/MM/...
        int(filename[9:11])
        months_list.append(filename[9:11])
    except ValueError:
        pass

    if ((not filename.endswith(".html") and not filename.endswith(".ignore"))
            or direct_delete):
        print("[INFO] (Deleted file) Delete directly copied file "
              + filename[4:]+" in blog dir.")
        os.unlink("blog/"+filename[4:])
        deleted_files.remove(filename)
        continue


print("[INFO] Added files : "+", ".join(added_files))
print("[INFO] Modified files : "+", ".join(modified_files))
print("[INFO] Deleted files : "+", ".join(deleted_files))

print("[INFO] Updating tags for added and modified files.")

for filename in added_files:
    try:
        with open(filename, 'r') as fh:
            tags = get_tags(fh)
    except IOError:
        sys.exit("[ERROR] Unable to open file "+filename+".")

    if not tags:
        sys.exit("[ERROR] (TAGS) In added article "+filename[4:]+" : "
                 "No tags found !")

    for tag in tags:
        try:
            auto_dir("gen/tags/"+tag+".tmp")
            with open("gen/tags/"+tag+".tmp", 'a+') as tag_file:
                tag_file.seek(0)
                if filename[4:] not in tag_file.read():
                    tag_file.write(filename[4:]+"\n")
                    print("[INFO] (TAGS) Found tag "+tag+" in article "
                          + filename[4:])
        except IOError:
            sys.exit("[ERROR] (TAGS) New tag found but an error "
                     "occurred in article "+filename[4:]+": "+tag+".")


for filename in modified_files:
    try:
        with open(filename, 'r') as fh:
            tags = get_tags(fh)
    except IOError:
        sys.exit("[ERROR] Unable to open file "+filename[4:]+".")

    if not tags:
        sys.exit("[ERROR] (TAGS) In modified article "+filename[4:]+" : "
                 "No tags found !")

    for tag in list_directory("gen/tags/"):
        tag_name = tag[tag.index("tags/")+5:tag.index(".tmp")]
        tag_file_write = True  # Replaced by the new content on rewrite
        try:
            with open(tag, 'r+') as tag_file:
                tag_old = tag_file.read()
                if tag_name in tags and filename[4:] not in tag_old:
                    tag_file.seek(0, 2)  # Append to end of file
                    tag_file.write(filename[4:]+"\n")
                    print("[INFO] (TAGS) Found new tag "+tag_name +
                          " for modified article "+filename[4:]+".")
                if tag_name not in tags and filename[4:] in tag_old:
                    # Delete the article from the tag file
                    tag_file.seek(0)
                    tag_file.truncate()
                    tag_file_write = tag_old.replace(filename[4:]+"\n", "")
                    if tag_file_write:
                        tag_file.write(tag_file_write)
                        print("[INFO] (TAGS) Deleted tag "+tag_name +
                              " in modified article "+filename[4:]+".")
            if tag_name in tags:
                # This tag already has a file: it is not a new tag
                tags.remove(tag_name)
        except IOError:
            sys.exit("[ERROR] (TAGS) An error occurred when parsing tags "
                     "of article "+filename[4:]+".")

        if not tag_file_write:
            try:
                os.unlink(tag)
                print("[INFO] (TAGS) No more article with tag " +
                      tag_name+", deleting it.")
            except FileNotFoundError:
                print("[INFO] (TAGS) "+tag+" was found to be empty "
                      "but there was an error during deletion. "
                      "You should check manually.")

    for tag in tags:  # New tags created
        try:
            auto_dir("gen/tags/"+tag+".tmp")
            with open("gen/tags/"+tag+".tmp", "a+") as tag_file:
                tag_file.write(filename[4:]+"\n")
                print("[INFO] (TAGS) Found new tag "+tag+" for "
                      "modified article "+filename[4:]+".")
        except IOError:
            sys.exit("[ERROR] (TAGS) An error occurred when parsing tags "
                     "of article "+filename[4:]+".")


# Delete tags for deleted files and delete all generated files
for filename in deleted_files:
    # NB : the deleted file must still be readable to recover its tags,
    # which may not hold when running as a real hook (cf. TODOs above).
    try:
        with open(filename, 'r') as fh:
            tags = get_tags(fh)
    except IOError:
        sys.exit("[ERROR] Unable to open file "+filename+".")

    if not tags:
        sys.exit("[ERROR] In deleted article "+filename[4:]+" : "
                 "No tags found !")

    for tag in tags:
        tag_file_write = True  # Replaced by the new content on rewrite
        try:
            with open("gen/tags/"+tag+".tmp", 'r+') as tag_file:
                tag_old = tag_file.read()
                tag_file.seek(0)
                tag_file.truncate()
                # Delete the article from the tag file
                tag_file_write = tag_old.replace(filename[4:]+"\n", "")
                if tag_file_write:
                    tag_file.write(tag_file_write)
                    print("[INFO] (TAGS) Deleted tag "+tag +
                          " in deleted article "+filename[4:]+".")
        except IOError:
            sys.exit("[ERROR] An error occurred while deleting article " +
                     filename[4:]+" from tags files.")

        if not tag_file_write:
            try:
                os.unlink("gen/tags/"+tag+".tmp")
                print("[INFO] (TAGS) No more article with tag " +
                      tag+", deleting it.")
            except FileNotFoundError:
                print("[INFO] (TAGS) "+tag+" was found to be empty "
                      "but there was an error during deletion. "
                      "You should check manually.")

    # Delete generated files
    try:
        os.unlink("gen/"+filename[4:-5]+".gen")
        os.unlink("blog/"+filename[4:])
    except FileNotFoundError:
        print("[INFO] (DELETION) Article "+filename[4:]+" seems "
              "to not have already been generated. "
              "You should check manually.")

    print("[INFO] (DELETION) Deleted article "+filename[4:] +
          " in both gen and blog directories")


# Common lists that are used multiple times
last_articles = latest_articles("raw", int(params["NB_ARTICLES_INDEX"]))
tags_full_list = list_directory("gen/tags")


# Generate html for each article
for filename in added_files+modified_files:
    article, title, date, author, tags = "", "", "", "", ""
    try:
        with open(filename, 'r') as fh:
            for line in fh.readlines():
                article += line
                if "@title=" in line:
                    title = line[line.find("@title=")+7:].strip()
                    continue
                if "@date=" in line:
                    date = line[line.find("@date=")+6:].strip()
                    continue
                if "@author=" in line:
                    author = line[line.find("@author=")+8:].strip()
                    continue
                if "@tags=" in line:
                    tags = line[line.find("@tags=")+6:].strip()
                    continue
    except IOError:
        print("[ERROR] An error occurred while generating article " +
              filename[4:]+".")

    if not tags or not title or not author or not date:
        sys.exit("[ERROR] Missing parameters (title, author, date, tags) "
                 "in article "+filename[4:]+".")

    date_readable = ("Le "+date[0:2]+"/"+date[2:4]+"/"+date[4:8] +
                     " à "+date[9:11]+":"+date[11:13])

    # Write generated HTML for this article in gen/
    article = replace_tags(article, search_list, replace_list)
    try:
        auto_dir("gen/"+filename[4:-5]+".gen")
        with open("gen/"+filename[4:-5]+".gen", 'w') as article_file:
            article_file.write("<article>\n"
                               "\t<nav class=\"aside_article\"></nav>\n"
                               "\t<div class=\"article\">\n"
                               "\t\t<h1>"+title+"</h1>\n"
                               "\t\t"+article+"\n"
                               "\t\t<p class=\"date\">"+date_readable+"</p>\n"
                               "\t</div>\n"
                               "</article>\n")
        print("[INFO] (GEN ARTICLES) Article "+filename[4:]+" generated")
    except IOError:
        sys.exit("[ERROR] An error occurred when writing generated HTML for "
                 "article "+filename[4:]+".")


# Starting to generate header file (except title)
tags_header = "<ul>"
for tag in tags_full_list:
    tags_header += "<li>"
    tags_header += ("<a href=\""+params["BLOG_URL"]+tag[4:-4]+".html\">" +
                    tag[9:-4]+"</a>")
    tags_header += "</li>"
tags_header += "</ul>"

try:
    with open("raw/header.html", "r") as header_fh:
        header = header_fh.read()
except IOError:
    sys.exit("[ERROR] Unable to open raw/header.html file.")

header = header.replace("@tags", tags_header, 1)
header = header.replace("@blog_url", params["BLOG_URL"], 1)
articles_header = "<ul>"
articles_index = "<ul>"

rss = ("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
       "<rss version=\"2.0\" xmlns:atom=\"http://www.w3.org/2005/Atom\" "
       "xmlns:content=\"http://purl.org/rss/1.0/modules/content/\">\n")
rss += ("\t<channel>\n"
        "\t\t<atom:link href=\""+params["BLOG_URL"]+"rss.xml\" "
        "rel=\"self\" type=\"application/rss+xml\"/>\n"
        "\t\t<title>"+params["BLOG_TITLE"]+"</title>\n"
        "\t\t<link>"+params["BLOG_URL"]+"</link>\n"
        "\t\t<description>"+params["DESCRIPTION"]+"</description>\n"
        "\t\t<language>"+params["LANGUAGE"]+"</language>\n"
        "\t\t<copyright>"+params["COPYRIGHT"]+"</copyright>\n"
        "\t\t<webMaster>"+params["WEBMASTER"]+"</webMaster>\n"
        "\t\t<lastBuildDate>" +
        strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime())+"</lastBuildDate>\n")


# Generate header (except title) + index file + rss file
for i, article in enumerate(["gen/"+x[4:-5]+".gen" for x in last_articles]):
    content, title, tags, date, author = "", "", "", "", ""
    try:
        with open(article, "r") as fh:
            for line in fh.readlines():
                content += line
                if "@title=" in line:
                    title = line[line.find("@title=")+7:].strip()
                    continue
                if "@date=" in line:
                    date = line[line.find("@date=")+6:].strip()
                    continue
                if "@author=" in line:
                    author = line[line.find("@author=")+8:].strip()
                    continue
                if "@tags=" in line:
                    tags = line[line.find("@tags=")+6:].strip()
                    continue
    except IOError:
        sys.exit("[ERROR] Unable to open "+article+" file.")

    if not title:
        sys.exit("[ERROR] No title found in article "+article[4:]+".")

    if i < 5:
        articles_header += "<li>"
        articles_header += ("<a href=\""+params["BLOG_URL"] +
                            article[4:-4]+".html\">"+title+"</a>")
        articles_header += "</li>"

    articles_index += content
    date_rss = strftime("%a, %d %b %Y %H:%M:%S +0000",
                        gmtime(mktime(datetime.datetime.strptime(date,
                                                                 "%d%m%Y-%H%M")
                                      .timetuple())))

    rss += ("\t\t<item>\n"
            "\t\t\t<title>"+title+"</title>\n"
            "\t\t\t<link>"+params["BLOG_URL"]+article[4:-4]+".html</link>\n"
            "\t\t\t<guid isPermaLink=\"false\">" +
            params["BLOG_URL"]+article[4:-4]+".html</guid>\n"
            "\t\t\t<description><![CDATA[" +
            replace_tags(content, search_list, replace_list) +
            "]]></description>\n"
            "\t\t\t<pubDate>"+date_rss+"</pubDate>\n"
            "\t\t\t<category>"+tags+"</category>\n"
            "\t\t\t<author>"+params["WEBMASTER"]+"</author>\n"
            "\t\t</item>\n")


# Finishing header gen
articles_header += "</ul>"
header = header.replace("@articles", articles_header, 1)

try:
    auto_dir("gen/header.gen")
    with open("gen/header.gen", "w") as header_gen_fh:
        header_gen_fh.write(header)
        print("[INFO] (HEADER) Header has been generated successfully.")
except FileNotFoundError:
    sys.exit("[ERROR] An error occurred while writing header file.")
except IOError:
    sys.exit("[ERROR] Unable to open gen/header.gen for writing.")

# Getting content from footer file
try:
    with open("raw/footer.html", "r") as footer_fh:
        footer = footer_fh.read()
except IOError:
    sys.exit("[ERROR] An error occurred while parsing footer "
             "file raw/footer.html.")


# Finishing index gen
articles_index += "</ul>"
index = (header.replace("@title", params["BLOG_TITLE"], 1) +
         articles_index + footer)

try:
    with open("blog/index.html", "w") as index_fh:
        index_fh.write(index)
        print("[INFO] (INDEX) Index page has been generated successfully.")
except IOError:
    sys.exit("[ERROR] Unable to open index.html file for writing.")

# Finishing rss gen
rss += "\t</channel>\n</rss>"

try:
    with open("blog/rss.xml", "w") as rss_fh:
        rss_fh.write(rss)
except IOError:
    sys.exit("[ERROR] An error occurred while writing RSS file.")


# Regenerate tags pages
for tag in tags_full_list:
    tag_content = header.replace("@title", params["BLOG_TITLE"] +
                                 " - "+tag[9:-4], 1)

    with open(tag, "r") as tag_gen_fh:
        for line in tag_gen_fh.readlines():
            line = line.replace(".html", ".gen")
            with open("gen/"+line.strip(), "r") as article_fh:
                tag_content += article_fh.read()

    tag_content += footer
    try:
        auto_dir(tag.replace("gen/", "blog/"))
        with open(tag.replace("gen/", "blog/")[:-4]+".html", "w") as tag_fh:
            tag_fh.write(tag_content)
            print("[INFO] (TAGS) Tag page for "+tag[9:-4] +
                  " has been generated successfully.")
    except IOError:
        sys.exit("[ERROR] An error occurred while generating tag page \"" +
                 tag[9:-4]+"\"")


# Regenerate page for years / months
years_list.sort(reverse=True)
for i in years_list:
    try:
        int(i)
    except ValueError:
        continue

    # Generate page per year
    page_year = header.replace("@title", params["BLOG_TITLE"]+" - "+i, 1)

    months_list.sort(reverse=True)
    for j in months_list:
        if not os.path.isdir("blog/"+i+"/"+j):
            continue

        # Generate pages per month
        page_month = header.replace("@title",
                                    params["BLOG_TITLE"]+" - "+i+"/"+j, 1)

        articles_list = list_directory("gen/"+i+"/"+j)
        articles_list.sort(key=lambda x: os.stat(x).st_mtime, reverse=True)
        for article in articles_list:
            try:
                with open(article, "r") as article_fh:
                    article_content = replace_tags(article_fh.read(),
                                                   search_list, replace_list)
                    page_month += article_content
                    page_year += article_content
            except IOError:
                sys.exit("[ERROR] Error while generating years and "
                         "months pages. Check your gen folder, you "
                         "may need to regenerate some articles. The "
                         "error was due to "+article+".")

        page_month += footer
        try:
            with open("blog/"+i+"/"+j+"/index.html", "w") as page_month_fh:
                page_month_fh.write(page_month)
        except IOError:
            sys.exit("[ERROR] Unable to write index file for "+i+"/"+j+".")

    page_year += footer
    try:
        with open("blog/"+i+"/index.html", "w") as page_year_fh:
            page_year_fh.write(page_year)
    except IOError:
        sys.exit("[ERROR] Unable to write index file for "+i+".")