|
@ -10,7 +10,10 @@ |
|
|
|
|
|
|
|
|
import argparse |
|
|
import argparse |
|
|
import glob |
|
|
import glob |
|
|
|
|
|
import html |
|
|
|
|
|
from io import StringIO |
|
|
import markdown |
|
|
import markdown |
|
|
|
|
|
import operator |
|
|
import os |
|
|
import os |
|
|
import re |
|
|
import re |
|
|
import shutil |
|
|
import shutil |
|
@ -24,6 +27,8 @@ deploy_directory = '~/src/www-home' |
|
|
md_extensions = [ |
|
|
md_extensions = [ |
|
|
'fenced_code', 'codehilite', 'nl2br', 'toc', 'smarty', 'tables', 'linkify'] |
|
|
'fenced_code', 'codehilite', 'nl2br', 'toc', 'smarty', 'tables', 'linkify'] |
|
|
|
|
|
|
|
|
|
|
|
blog_entries = [] |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def print_file(in_file, out_file):
    """Print a column-aligned 'input -> output' line for a file operation."""
    message = '%-62s -> %s' % (in_file, out_file)
    print(message)
|
@ -42,6 +47,13 @@ def copy_static_files(): |
|
|
shutil.copy2(source, dest) |
|
|
shutil.copy2(source, dest) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def find_update_date(text):
    """Return the YYYY-MM-DD date from a 'Published:' line in *text*.

    Scans for a line beginning 'Published' (colon optional) followed by an
    ISO-style date; returns the date string, or None when no such line exists.
    """
    match = re.search(r'^Published:? (\d{4}-\d{2}-\d{2})', text, re.MULTILINE)
    return match.group(1) if match else None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def process_markdown_files(): |
|
|
def process_markdown_files(): |
|
|
template = open('template.html').read() |
|
|
template = open('template.html').read() |
|
|
for (dirpath, _, filenames) in os.walk(input_directory): |
|
|
for (dirpath, _, filenames) in os.walk(input_directory): |
|
@ -50,6 +62,8 @@ def process_markdown_files(): |
|
|
if not markdown_filename.endswith('.md'): |
|
|
if not markdown_filename.endswith('.md'): |
|
|
continue |
|
|
continue |
|
|
|
|
|
|
|
|
|
|
|
blog_entry = {} |
|
|
|
|
|
|
|
|
markdown_file = open(markdown_filename) |
|
|
markdown_file = open(markdown_filename) |
|
|
text = markdown_file.read() |
|
|
text = markdown_file.read() |
|
|
markdown_file.close() |
|
|
markdown_file.close() |
|
@ -62,6 +76,9 @@ def process_markdown_files(): |
|
|
title = match.group(1).lstrip('# ') |
|
|
title = match.group(1).lstrip('# ') |
|
|
else: |
|
|
else: |
|
|
title = text |
|
|
title = text |
|
|
|
|
|
|
|
|
|
|
|
blog_entry['title'] = html.escape(title) |
|
|
|
|
|
|
|
|
title += ' | Colin McMillen' |
|
|
title += ' | Colin McMillen' |
|
|
if markdown_filename == os.path.join(input_directory, 'index.md'): |
|
|
if markdown_filename == os.path.join(input_directory, 'index.md'): |
|
|
title = 'Colin McMillen' |
|
|
title = 'Colin McMillen' |
|
@ -75,9 +92,16 @@ def process_markdown_files(): |
|
|
if page_url.endswith('index.html'): # strip off index.html |
|
|
if page_url.endswith('index.html'): # strip off index.html |
|
|
page_url = page_url[:-len('index.html')] |
|
|
page_url = page_url[:-len('index.html')] |
|
|
|
|
|
|
|
|
html = markdown.markdown( |
|
|
|
|
|
|
|
|
update_date = find_update_date(text) |
|
|
|
|
|
if update_date: |
|
|
|
|
|
blog_entry['url'] = 'https://www.mcmillen.dev/' + page_url |
|
|
|
|
|
blog_entry['date'] = update_date |
|
|
|
|
|
blog_entries.append(blog_entry) |
|
|
|
|
|
|
|
|
|
|
|
html_content = markdown.markdown( |
|
|
text, extensions=md_extensions, output_format='html5') |
|
|
text, extensions=md_extensions, output_format='html5') |
|
|
output = template.format(title=title, content=html, page_url=page_url) |
|
|
|
|
|
|
|
|
output = template.format( |
|
|
|
|
|
title=title, content=html_content, page_url=page_url) |
|
|
|
|
|
|
|
|
os.makedirs(out_dirpath, exist_ok=True) |
|
|
os.makedirs(out_dirpath, exist_ok=True) |
|
|
print_file(markdown_filename, out_fullpath) |
|
|
print_file(markdown_filename, out_fullpath) |
|
@ -93,15 +117,71 @@ grep -v ^output/google | |
|
|
grep -v ^output/drafts | |
|
|
grep -v ^output/drafts | |
|
|
perl -pe 's|output|https://www.mcmillen.dev|' |
|
|
perl -pe 's|output|https://www.mcmillen.dev|' |
|
|
> output/sitemap.txt""".split('\n')) |
|
|
> output/sitemap.txt""".split('\n')) |
|
|
|
|
|
print_file('', 'output/sitemap.txt') |
|
|
os.system(sitemap_command) |
|
|
os.system(sitemap_command) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def make_rss():  # TODO: implement.
    """Placeholder for RSS feed generation; currently a no-op."""
    pass
|
|
|
|
|
|
|
|
def make_atom_feed():
    """Build an Atom feed from blog_entries and write it to output/atom.xml.

    Entries are sorted by date, oldest first; the feed-level <updated>
    timestamp ends up as the newest entry's date. Only the date of each
    post is known, so every timestamp pretends the post was written at
    noon UTC.

    Reads the module-level blog_entries list (populated by
    process_markdown_files) and output_directory; writes atom.xml as a
    side effect.
    """
    atom_template = '''<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">

<title>Colin McMillen's Blog</title>
<link href="https://www.mcmillen.dev"/>
<link rel="self" href="https://www.mcmillen.dev/atom.xml"/>
<updated>{last_update}</updated>
<author>
<name>Colin McMillen</name>
</author>
<id>https://www.mcmillen.dev/</id>

{entries}
</feed>
'''

    entry_template = '''
<entry>
<title>{title}</title>
<id>{url}</id>
<link rel="alternate" href="{url}"/>
<updated>{updated}</updated>
<summary>{summary}</summary>
</entry>
'''

    # Oldest first: the final loop iteration leaves last_update holding the
    # most recent entry's timestamp.
    blog_entries.sort(key=operator.itemgetter('date'))

    entries_io = StringIO()
    last_update = None
    for entry in blog_entries:
        # We lie and pretend that all entries were written at noon UTC.
        update_date = entry['date'] + 'T12:00:00+00:00'
        last_update = update_date
        entries_io.write(entry_template.format(
            url=entry['url'],
            title=entry['title'],
            updated=update_date,
            summary='TODO: fill this out.'))

    # Fix: the original called getvalue() twice and never used the first
    # snapshot (entries_text); take it once and close the buffer before
    # formatting.
    entries_text = entries_io.getvalue()
    entries_io.close()

    atom_feed = atom_template.format(
        last_update=last_update,
        entries=entries_text)

    atom_filename = os.path.join(output_directory, 'atom.xml')
    print_file('', atom_filename)
    # Fix: use a context manager so the file handle is closed even if the
    # write raises (original used bare open/write/close).
    with open(atom_filename, 'w') as atom_file:
        atom_file.write(atom_feed)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def copy_site():
    # Copy the built site into the local www-home git checkout.
    # NOTE(review): relies on the shell expanding '~' inside
    # deploy_directory; output/ is assumed to exist (built beforehand).
    os.system('cp -r output/* %s' % deploy_directory)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def deploy_site(): |
|
|
def deploy_site(): |
|
|
os.system('cp -r output/* %s' % deploy_directory) |
|
|
|
|
|
|
|
|
copy_site() |
|
|
os.chdir(os.path.expanduser(deploy_directory)) |
|
|
os.chdir(os.path.expanduser(deploy_directory)) |
|
|
os.system('git add .') |
|
|
os.system('git add .') |
|
|
os.system('git commit -m "automated update from build.py"') |
|
|
os.system('git commit -m "automated update from build.py"') |
|
@ -116,9 +196,12 @@ def main(): |
|
|
parser.add_argument( |
|
|
parser.add_argument( |
|
|
'--fast', action='store_true', |
|
|
'--fast', action='store_true', |
|
|
help='only rebuild content files') |
|
|
help='only rebuild content files') |
|
|
|
|
|
parser.add_argument( |
|
|
|
|
|
'--copy', action='store_true', |
|
|
|
|
|
help='copy output files to www-home git repo') |
|
|
parser.add_argument( |
|
|
parser.add_argument( |
|
|
'--deploy', action='store_true', |
|
|
'--deploy', action='store_true', |
|
|
help='deploy the site by pushing to the www-home git repo') |
|
|
|
|
|
|
|
|
help='deploy the site by pushing the www-home git repo to production') |
|
|
args = parser.parse_args() |
|
|
args = parser.parse_args() |
|
|
|
|
|
|
|
|
if args.clean: |
|
|
if args.clean: |
|
@ -128,7 +211,10 @@ def main(): |
|
|
copy_static_files() |
|
|
copy_static_files() |
|
|
process_markdown_files() |
|
|
process_markdown_files() |
|
|
make_sitemap() |
|
|
make_sitemap() |
|
|
make_rss() |
|
|
|
|
|
|
|
|
make_atom_feed() |
|
|
|
|
|
|
|
|
|
|
|
if args.copy and not args.deploy: |
|
|
|
|
|
copy_site() |
|
|
|
|
|
|
|
|
if args.deploy: |
|
|
if args.deploy: |
|
|
if args.fast: |
|
|
if args.fast: |
|
|