aboutsummaryrefslogtreecommitdiffstats
path: root/Makefile
blob: f13298c22e8880b5cb8613928d43dfab660c49d8 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
# Copyright (C) 2023 Denis 'GNUtoo' Carikli <GNUtoo@cyberdimension.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

# Tools and tunables — every ?= value can be overridden on the command line.
CURL ?= curl
BLOG_URL ?= https://blog.replicant.us/page

# Patch Haunt's Guile so SRFI-19 parses ISO 8601 zones containing a colon.
GUILE_PATCH := v1-0001-SRFI-19-Add-support-for-ISO-8601-zones-with-a-col.patch
# Guix is pinned to a fixed commit via time-machine for reproducibility.
GUIX_COMMIT ?= b25b94335a3ee8d68d2145da8e5ea0325ecea451
GUIX_SHELL ?= guix time-machine --commit=$(GUIX_COMMIT) -- shell -C
GUIX_PYTHON_PACKAGES ?= python python-beautifulsoup4 python-html2text python-sh

CONVERT ?= $(GUIX_SHELL) $(GUIX_PYTHON_PACKAGES) -- python3 convert.py
HAUNT ?= $(GUIX_SHELL) --with-patch=guile=patches/$(GUILE_PATCH) haunt -- haunt

# Port for 'make serve'.  Previously this variable was used but never
# defined, so haunt received an empty -p argument; 8080 is haunt's default.
HAUNT_PORT ?= 8080

VENDOR_REPLICANT_ARTWORK_REV ?= fc213d2ca94cef9047d1e4a71c21c4c4c87f349d
VENDOR_REPLICANT_ARTWORK_PATH ?= external/vendor_replicant_artwork

# Empty marker so list definitions can end every entry with a backslash.
SENTINEL :=

.PHONY: all build deploy generate-wordpress-articles help serve

# Wordpress pagination pages to mirror; fetched by the pattern rule below.
PAGES := \
	wordpress/pages/replicant_blog_page_8.html \
	wordpress/pages/replicant_blog_page_7.html \
	wordpress/pages/replicant_blog_page_6.html \
	wordpress/pages/replicant_blog_page_5.html \
	wordpress/pages/replicant_blog_page_4.html \
	wordpress/pages/replicant_blog_page_3.html \
	wordpress/pages/replicant_blog_page_2.html \
	wordpress/pages/replicant_blog_page_1.html \
	wordpress/pages/replicant_blog_page_0.html \
	$(SENTINEL)

all: help

# Directory caching the downloaded Wordpress article pages.
wordpress/articles:
	mkdir -p $@

# Directory caching the downloaded Wordpress pagination pages.
wordpress/pages:
	mkdir -p $@

# Download one Wordpress pagination page.  The stem ($*) carries the
# "replicant_blog_page_" prefix, which is stripped to get the page number
# appended to $(BLOG_URL).
# The directory is an order-only prerequisite (after |): its mtime changes
# every time a file lands in it, which would otherwise re-trigger downloads
# of pages that are already present.
wordpress/pages/replicant_blog_page_%.html: | wordpress/pages
	$(CURL) $(BLOG_URL)/$(subst replicant_blog_page_,,$*)/ -o $@

# Convert one Haunt-generated HTML article back into Wordpress-ready HTML.
# NOTE(review): the input file is hard-coded to a single 2024 article;
# presumably it is re-pointed by hand for each new post — confirm.
generate-wordpress-articles: build
	$(GUIX_SHELL) $(GUIX_PYTHON_PACKAGES) \
	-- \
	python3 haunthtml2wordpress.py \
	./site/replicant-status-and-report-of-the-37c3-and-fosdem-2024-conferences.html

# Extract article links from the mirrored pagination pages.  The resulting
# file must be reviewed by a human before the html/markdown targets use it.
# Uses the pinned $(GUIX_SHELL) (like every other rule) instead of an
# unpinned 'guix shell', so results are reproducible; ': >' truncates the
# file portably ('echo -n' is not POSIX) and '>&2' replaces /dev/stderr.
links.txt: $(PAGES)
	@: > $@
	@for f in $(PAGES) ; do \
		echo "Processing $$f" >&2 ; \
		$(GUIX_SHELL) $(GUIX_PYTHON_PACKAGES) -- \
		python3 get_links.py $$f >> $@ ; \
	done

# do not depend on links.txt as a human is supposed to review it manually
# Download every reviewed article URL into wordpress/articles/, naming each
# file after the URL path with '/' replaced by '_'.  The slug is computed
# once per URL (the original duplicated the sed pipeline), and $(CURL) is
# used instead of a hard-coded curl.  The cache directory is an order-only
# prerequisite so its changing mtime does not matter.
html: | wordpress/articles
	mkdir -p $@ && \
	for url in `cat links.txt` ; do \
		slug=`echo "$${url}" | \
			sed 's#https://blog.replicant.us/##' | \
			sed 's#/$$##'| \
			sed 's#/#_#g'` ; \
		if [ ! -f wordpress/articles/$${slug}.html ] ; then \
			$(CURL) "$${url}" -o wordpress/articles/$${slug}.html ; \
		fi \
	done
	# TODO: erase wordpress/articles directory if that fails for
	# some reason.

# Convert each cached article to markdown via $(CONVERT).  For every URL the
# path (prefix and trailing slash stripped) is echoed as progress, then the
# flattened slug is derived once — the original repeated the same sed
# pipeline three times — and used for both input and output file names.
markdown: html
	mkdir -p $@ && \
	for url in `cat links.txt` ; do \
		path=`echo "$${url}" | \
			sed 's#https://blog.replicant.us/##' | \
			sed 's#/$$##'` ; \
		echo "$${path}" ; \
		slug=`echo "$${path}" | sed 's#/#_#g'` ; \
		$(CONVERT) \
			wordpress/articles/$${slug}.html \
			> \
			markdown/$${slug}.md ; \
	done

# Initial clone of the Replicant artwork repository (pinned to a specific
# revision later by the vendor_replicant_artwork_revision.txt rule).
$(VENDOR_REPLICANT_ARTWORK_PATH):
	git clone \
		https://git.replicant.us/replicant/vendor_replicant_artwork \
		$@

# Install the banner image from the artwork checkout.  Depending on the
# revision stamp file (rather than the checkout directory) means the image
# is refreshed exactly when VENDOR_REPLICANT_ARTWORK_REV changes.
static/replicant_banner_white.png: vendor_replicant_artwork_revision.txt
	install -m 644 \
		$(VENDOR_REPLICANT_ARTWORK_PATH)/replicant_banner_white.png \
		$@

# Stamp file recording which artwork revision is checked out.  Depends on
# the Makefile so editing VENDOR_REPLICANT_ARTWORK_REV re-runs the check.
# Commands are chained with && — the original used ';', so a failed fetch
# or checkout still wrote the stamp and make reported success.  Now any
# failure leaves the stamp absent and the rule exits non-zero.
vendor_replicant_artwork_revision.txt: $(VENDOR_REPLICANT_ARTWORK_PATH) Makefile
	if [ ! -f $@ ] || [ "`cat $@`" != "$(VENDOR_REPLICANT_ARTWORK_REV)" ] ; \
	then \
		rm -f $@ &&                                                     \
		git -C $(VENDOR_REPLICANT_ARTWORK_PATH) fetch origin &&         \
		git -C $(VENDOR_REPLICANT_ARTWORK_PATH)                         \
			checkout -f $(VENDOR_REPLICANT_ARTWORK_REV) &&          \
		git -C $(VENDOR_REPLICANT_ARTWORK_PATH) reset --hard &&         \
		git -C $(VENDOR_REPLICANT_ARTWORK_PATH) clean -dfx &&           \
		echo $(VENDOR_REPLICANT_ARTWORK_REV) > $@ ;                     \
	fi

# Generate the web site with Haunt (banner image must be installed first).
build: static/replicant_banner_white.png
	$(HAUNT) build

# Upload the generated site/ tree to the test blog host over rsync/ssh.
deploy: build
	rsync -av site/ root@git.replicant.us:/var/www/blog.test.replicant.us/

# Print the supported manual workflow.  Fixes from review: the duplicated
# "manually ... manually" wording, a mid-sentence capital, and a dangling
# trailing backslash that continued the command into the blank line below.
help:
	@printf "%s\n\t%s\n\t%s\n\t%s\n\t%s\n" \
	"Available commands:" \
	"links.txt # Create links.txt file with blog post links." \
	"          # For security reasons this file needs to be" \
	"          # reviewed manually before usage." \
	"markdown  # Create markdown pages. To be done after reviewing links.txt."

# Serve the site locally with Haunt, rebuilding on changes (-w); the port
# comes from $(HAUNT_PORT), which can be set on the command line.
serve:
	$(HAUNT) serve -w -p $(HAUNT_PORT)

# Deterministic tarball of the built site: GNU format, fixed numeric
# ownership and name-sorted members.  The --transform options rewrite the
# archive so site/ and pages/img/ both appear under web/.
# Uses $@ instead of repeating the target name in the recipe.
website.tar.gz: build
	tar \
		--exclude-vcs \
		--format=gnu \
		--owner=0 --group=0 --numeric-owner \
		--sort=name \
		-czf \
		$@ \
		site \
		pages/img \
		index.html \
		--transform="s#^site#web#" \
		--transform="s#^pages/img/#web/img/#"