Compare commits
No commits in common. "13d3d653de0b0961f4258c8698328ef991cf10ea" and "9853ed432774ef19b5f1427b6ebea6909c43491e" have entirely different histories.
13d3d653de ... 9853ed4327
3 changed files with 30 additions and 138 deletions
makesite.py (147 lines changed)
@@ -28,18 +28,7 @@ FRENCH_MONTHS = ['janv.', 'févr.', 'mars', 'avr.', 'mai', 'juin',


 class HighlightRenderer(mistune.HTMLRenderer):
-    """Custom Mistune renderer that adds syntax highlighting to code blocks using Pygments."""
-
     def block_code(self, code, info=None):
-        """Render code blocks with syntax highlighting.
-
-        Args:
-            code: The code content to render
-            info: Optional language identifier for syntax highlighting
-
-        Returns:
-            str: HTML with syntax-highlighted code or plain pre/code tags
-        """
         if info:
             lexer = get_lexer_by_name(info, stripall=True)
             formatter = html.HtmlFormatter()
@@ -71,20 +60,20 @@ def log(msg, *args):
     sys.stderr.write(msg.format(*args) + "\n")


-def _strip_tags_and_truncate(text, words=25):
-    """Remove HTML tags and truncate text to the specified number of words."""
+def truncate(text, words=25):
+    """Remove tags and truncate text to the specified number of words."""
     return " ".join(re.sub(r"(?s)<.*?>", " ", text).split()[:words])


-def _parse_headers(text):
-    """Parse HTML comment headers and yield (key, value, end-index) tuples."""
+def read_headers(text):
+    """Parse headers in text and yield (key, value, end-index) tuples."""
     for match in re.finditer(r"\s*<!--\s*(.+?)\s*:\s*(.+?)\s*-->\s*|.+", text):
         if not match.group(1):
             break
         yield match.group(1), match.group(2), match.end()


-def _rfc_2822_format(date_str):
+def rfc_2822_format(date_str):
     """Convert yyyy-mm-dd date string to RFC 2822 format date string."""
     d = datetime.datetime.strptime(date_str, "%Y-%m-%d")
     return d \
@@ -107,8 +96,8 @@ def slugify(value):
     return re.sub(r"[-\s]+", "-", value)


-def parse_post_file(filename, params):
-    """Parse post file: read, extract metadata, convert markdown, and generate summary."""
+def read_content(filename, params):
+    """Read content and metadata from file into a dictionary."""
     # Read file content.
     text = fread(filename)

@@ -119,7 +108,7 @@ def parse_post_file(filename, params):

     # Read headers.
     end = 0
-    for key, val, end in _parse_headers(text):
+    for key, val, end in read_headers(text):
         content[key] = val

     # slugify post title
@@ -132,20 +121,20 @@ def parse_post_file(filename, params):
     if filename.endswith((".md", ".mkd", ".mkdn", ".mdown", ".markdown")):
         summary_index = text.find("<!-- more")
         if summary_index > 0:
-            summary = markdown(_strip_html_tags(text[:summary_index]))
+            summary = markdown(clean_html_tag(text[:summary_index]))
         else:
-            summary = _strip_tags_and_truncate(markdown(_strip_html_tags(text)))
+            summary = truncate(markdown(clean_html_tag(text)))
         clean_text = text.replace("<!-- more -->", "")
         text = markdown(clean_text)
     else:
-        summary = _strip_tags_and_truncate(text)
+        summary = truncate(text)

     # Update the dictionary with content and RFC 2822 date.
     content.update(
         {
             "content": text,
-            "content_rss": _make_links_absolute(params["site_url"], text),
-            "rfc_2822_date": _rfc_2822_format(content["date"]),
+            "content_rss": fix_relative_links(params["site_url"], text),
+            "rfc_2822_date": rfc_2822_format(content["date"]),
             "summary": summary,
         }
     )
@@ -153,16 +142,16 @@ def parse_post_file(filename, params):
     return content


-def _make_links_absolute(site_url, text):
-    """Convert relative links to absolute URLs for RSS feed."""
+def fix_relative_links(site_url, text):
+    """Absolute links needed in RSS feed"""
     # TODO externalize links replacement configuration
     return text \
         .replace("src=\"/images/20", "src=\"" + site_url + "/images/20") \
         .replace("href=\"/20", "href=\"" + site_url + "/20")


-def _strip_html_tags(text):
-    """Remove HTML tags from text."""
+def clean_html_tag(text):
+    """Remove HTML tags."""
     while True:
         original_text = text
         text = re.sub(r"<\w+.*?>", "", text)
@@ -182,15 +171,6 @@ def render(template, **params):


 def get_header_list_value(header_name, page_params):
-    """Extract and parse a space-separated list from a header value.
-
-    Args:
-        header_name: Name of the header to extract (e.g., 'category', 'tag')
-        page_params: Dict containing page parameters
-
-    Returns:
-        list: List of stripped string values from the header
-    """
     header_list = []
     if header_name in page_params:
         for s in page_params[header_name].split(" "):
@@ -285,15 +265,7 @@ def _process_comments(page_params, stacosys_url, comment_layout,
     return len(comments), comments_html, comment_section_html


-def _get_friendly_date(date_str):
-    """Convert date string to French-formatted readable date.
-
-    Args:
-        date_str: Date string in YYYY-MM-DD format
-
-    Returns:
-        str: French-formatted date (e.g., "15 janv. 2024")
-    """
+def get_friendly_date(date_str):
     dt = datetime.datetime.strptime(date_str, "%Y-%m-%d")
     french_month = FRENCH_MONTHS[dt.month - 1]
     return f"{dt.day:02d} {french_month} {dt.year}"
@@ -334,7 +306,7 @@ def _setup_page_params(content, params):
     page_params["header"] = ""
     page_params["footer"] = ""
     page_params["date_path"] = page_params["date"].replace("-", "/")
-    page_params["friendly_date"] = _get_friendly_date(page_params["date"])
+    page_params["friendly_date"] = get_friendly_date(page_params["date"])
     page_params["year"] = page_params["date"].split("-")[0]
     page_params["post_url"] = (
         page_params["year"] + "/" + page_params["slug"] + "/"
@@ -351,7 +323,7 @@ def make_posts(

     for posix_path in Path(src).glob(src_pattern):
         src_path = str(posix_path)
-        content = parse_post_file(src_path, params)
+        content = read_content(src_path, params)

         # render text / summary for basic fields
         content["content"] = render(content["content"], **params)
@@ -404,7 +376,7 @@ def make_notes(

     for posix_path in Path(src).glob(src_pattern):
         src_path = str(posix_path)
-        content = parse_post_file(src_path, params)
+        content = read_content(src_path, params)

         # render text / summary for basic fields
         content["content"] = render(content["content"], **params)
@@ -435,17 +407,7 @@ def make_list(
     posts, dst, list_layout, item_layout,
     header_layout, footer_layout, **params
 ):
-    """Generate list page for a blog.
-
-    Args:
-        posts: List of post dictionaries to include in the list
-        dst: Destination path for the generated HTML file
-        list_layout: Template for the overall list page
-        item_layout: Template for individual list items
-        header_layout: Template for page header (None to skip)
-        footer_layout: Template for page footer (None to skip)
-        **params: Additional parameters for template rendering
-    """
+    """Generate list page for a blog."""

     # header
     if header_layout is None:
@@ -485,16 +447,6 @@ def make_list(


 def create_blog(page_layout, list_in_page_layout, params):
-    """Create blog posts and paginated index pages.
-
-    Args:
-        page_layout: Template for individual pages
-        list_in_page_layout: Template for list pages wrapped in page layout
-        params: Global site parameters
-
-    Returns:
-        list: Sorted list of all post dictionaries (newest first)
-    """
     banner_layout = fread("layout/banner.html")
     paging_layout = fread("layout/paging.html")
     post_layout = fread("layout/post.html")
@@ -557,14 +509,6 @@ def create_blog(page_layout, list_in_page_layout, params):

 def generate_categories(list_in_page_layout, item_nosummary_layout,
                         posts, params):
-    """Generate category pages grouping posts by category.
-
-    Args:
-        list_in_page_layout: Template for list pages
-        item_nosummary_layout: Template for list items without summaries
-        posts: List of all blog posts
-        params: Global site parameters
-    """
     category_title_layout = fread("layout/category_title.html")
     cat_post = {}
     for post in posts:
@@ -588,15 +532,6 @@ def generate_categories(list_in_page_layout, item_nosummary_layout,

 def generate_archives(blog_posts, list_in_page_layout, item_nosummary_layout,
                       archive_title_layout, params):
-    """Generate archives page with all blog posts.
-
-    Args:
-        blog_posts: List of all blog posts
-        list_in_page_layout: Template for list pages
-        item_nosummary_layout: Template for list items without summaries
-        archive_title_layout: Template for archive page header
-        params: Global site parameters
-    """
     make_list(
         blog_posts,
         "_site/archives/index.html",
@@ -610,14 +545,6 @@ def generate_archives(blog_posts, list_in_page_layout, item_nosummary_layout,

 def generate_notes(page_layout, archive_title_layout,
                    list_in_page_layout, params):
-    """Generate notes pages and notes index.
-
-    Args:
-        page_layout: Template for individual pages
-        archive_title_layout: Template for notes index header
-        list_in_page_layout: Template for list pages
-        params: Global site parameters
-    """
     note_layout = fread("layout/note.html")
     item_note_layout = fread("layout/item_note.html")
     note_layout = render(page_layout, content=note_layout)
@@ -642,12 +569,6 @@ def generate_notes(page_layout, archive_title_layout,


 def generate_rss_feeds(posts, params):
-    """Generate RSS feeds: main feed and per-tag feeds.
-
-    Args:
-        posts: List of all blog posts
-        params: Global site parameters
-    """
     rss_xml = fread("layout/rss.xml")
     rss_item_xml = fread("layout/rss_item.xml")

@@ -685,12 +606,6 @@ def generate_rss_feeds(posts, params):


 def generate_sitemap(posts, params):
-    """Generate XML sitemap for all posts.
-
-    Args:
-        posts: List of all blog posts
-        params: Global site parameters
-    """
     sitemap_xml = fread("layout/sitemap.xml")
     sitemap_item_xml = fread("layout/sitemap_item.xml")
     make_list(
@@ -705,14 +620,6 @@ def generate_sitemap(posts, params):


 def get_params(param_file):
-    """Load site parameters from JSON file with defaults.
-
-    Args:
-        param_file: Path to JSON parameters file
-
-    Returns:
-        dict: Site parameters with defaults and loaded values
-    """
     # Default parameters.
     params = {
         "title": "Blog",
@@ -729,24 +636,18 @@ def get_params(param_file):
     return params


-def rebuild_site_directory():
-    """Remove existing _site directory and recreate from static files."""
+def clean_site():
     if os.path.isdir("_site"):
         shutil.rmtree("_site")
     shutil.copytree("static", "_site")


 def main(param_file):
-    """Main entry point for static site generation.
-
-    Args:
-        param_file: Path to JSON parameters file
-    """

     params = get_params(param_file)

     # Create a new _site directory from scratch.
-    rebuild_site_directory()
+    clean_site()

     # Load layouts.
     page_layout = fread("layout/page.html")
monitor.py (18 lines changed)
@@ -14,12 +14,7 @@ def fread(filename):
         return f.read()


-def get_comment_count():
-    """Fetch the total number of comments from Stacosys API.
-
-    Returns:
-        int: Total comment count, or 0 if request fails
-    """
+def get_nb_of_comments():
     req_url = params["stacosys_url"] + "/comments/count"
     query_params = dict(
         token=params["stacosys_token"]
@@ -28,8 +23,7 @@ def get_comment_count():
     return 0 if not resp.ok else int(resp.json()["count"])


-def _exit_program():
-    """Exit the program with status code 0."""
+def exit_program():
     sys.exit(0)


@@ -45,14 +39,14 @@ if os.path.isfile("params.json"):
     params.update(json.loads(fread("params.json")))

 external_check_cmd = params["external_check"]
-initial_count = get_comment_count()
+initial_count = get_nb_of_comments()
 print(f"Comments = {initial_count}")
 while True:
     # check number of comments every 60 seconds
     for _ in range(15):
         time.sleep(60)
-        if initial_count != get_comment_count():
-            _exit_program()
+        if initial_count != get_nb_of_comments():
+            exit_program()
     # check if git repo changed every 15 minutes
     if external_check_cmd and os.system(external_check_cmd):
-        _exit_program()
+        exit_program()
@@ -16,7 +16,4 @@ dependencies = [
 [dependency-groups]
 dev = [
     "black>=24.10.0",
-    "mypy>=1.19.1",
-    "types-pygments>=2.19.0.20251121",
-    "types-requests>=2.32.4.20260107",
 ]