#!/usr/bin/env python3
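"""Build the static site: load pages, articles (grouped into series) and feeds,
write them to the ./_site folder, and copy over the static/ files."""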

import os
import sys
import distutils.dir_util
import locale
import pickle
import time

import boop.environment as environment
import boop.configuration as config
import boop.utils as utils
from boop.feed import Feed


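# Use the system's default locale (affects locale-aware formatting such as dates).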
locale.setlocale(locale.LC_ALL, "")


def main(environment):
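    # Time the whole generation so we can report its duration at the end.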
    start = time.perf_counter()

    # Make sure ./_site folder is empty
    output_path = os.path.join(os.curdir, "_site")
    utils.init_output_folder(output_path)

    # Init cache dir
    cache_path = os.path.join(os.curdir, "_cache")
    if not os.path.exists(cache_path):
        os.mkdir(cache_path)

    # Load the configuration (it can be overridden by a configuration.yml file)
    configuration_path = os.path.join(os.curdir, "configuration.yml")
    configuration = config.load(configuration_path, environment, output_path)

    # STEP 1: we load all the Articles, Pages and Feeds
    pages = []
    series = []
    articles = {}
    feeds = []

    # Check that index file exists
    index_filepath = os.path.join(os.curdir, "index.html")
    if os.path.exists(index_filepath):
        index_page = utils.build_page_from_filepath(
            index_filepath, configuration, basename=os.curdir
        )
        pages.append(index_page)

    # Check that pages directory exists and load Pages from it
    pages_path = os.path.join(os.curdir, "pages")
    if os.path.isdir(pages_path):
        for filename in utils.dir_tree(pages_path, only=["html"]):
            page_filepath = os.path.join(pages_path, filename)
            page = utils.build_page_from_filepath(
                page_filepath, configuration, basename=pages_path
            )
            pages.append(page)

    # Load articles from the cache
    articles_cache_path = os.path.join(cache_path, "articles.pickle")
    if os.path.exists(articles_cache_path):
        with open(articles_cache_path, "rb") as cache_file:
            articles = pickle.load(cache_file)

    # Check that articles directory exists and load Articles from it
    articles_path = os.path.join(os.curdir, "articles")
    if os.path.isdir(articles_path):
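        # Articles may live in subdirectories of articles/ (one folder per
        # potential serie) or directly at its root ("." below).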
        dirs_with_articles = list(utils.list_dirs(articles_path)) + ["."]
        for dirname in dirs_with_articles:
            dirpath = os.path.realpath(os.path.join(articles_path, dirname))

            serie_filepath = os.path.join(dirpath, "serie.html")
            serie = None

            # If the directory contains a `serie.html` file, it means it
            # contains a serie so we load a page from it.
            if dirname != "." and os.path.exists(serie_filepath):
                slug = f"serie/{dirname}"
                serie = utils.build_page_from_filepath(
                    serie_filepath, configuration, slug=slug
                )
                series.append(serie)

            # Let's collect all the articles from the directory
            serie_articles = []
            for filename in utils.list_files(dirpath, only=["md"]):
                article_filepath = os.path.join(dirpath, filename)

                reload_article = True
                if article_filepath in articles:
                    # the article is in cache
                    article = articles[article_filepath]
                    last_modification = os.path.getmtime(article_filepath)
                    # if the cached and current modification times differ, the
                    # file has changed and we need to reload the article from disk.
                    reload_article = article.last_modification() != last_modification

                if reload_article:
                    article = utils.build_article_from_filepath(
                        article_filepath, configuration, serie
                    )
                    articles[article_filepath] = article

                if not article.is_private():
                    serie_articles.append(article)

            if serie:
                # If the directory contains a serie, we load a feed for the serie
                # and we let the serie know about its articles and feed.
                serie.set_articles(serie_articles)

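                # Only build a feed if the serie has public articles, unless
                # SITE_FORCE_FEED forces one anyway.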
                if serie_articles or configuration.get("SITE_FORCE_FEED", False):
                    feed_slug = f"feeds/{dirname}"
                    feed_title = f"{serie.title()} - {configuration['SITE_TITLE']}"
                    serie_feed = Feed(
                        feed_slug,
                        serie_articles,
                        {"title": feed_title, "site_link": serie.url()},
                        configuration=configuration,
                    )
                    feeds.append(serie_feed)
                    serie.set_feed(serie_feed)

    # Load a "blog" page which has access to the list of articles
    blog_template_filepath = os.path.join("templates", "blog.html")
    blog_articles = [
        article
        for article in articles.values()
        if not article.is_private() and article.on_blog()
    ]
    if os.path.exists(blog_template_filepath):
        blog_page = utils.build_blog_page(blog_articles, series, configuration)
        pages.append(blog_page)

    # And load the main feed
    if len(blog_articles) > 0 or configuration.get("SITE_FORCE_FEED", False):
        feed = Feed("feeds/all", blog_articles, configuration=configuration)
        feeds.append(feed)

    # STEP 2: we write all the pages, articles and feeds
    for page in pages + series:
        utils.write_content(page, output_path)
        print(f"Written page: {page.url()}")

    for feed in feeds:
        utils.write_feed(feed, output_path)
        print(f"Written feed: {feed.url()}")

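    # Write the articles, sorted by date.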
    articles_sorted = list(articles.values())
    articles_sorted.sort(key=lambda article: article.date())
    for article in articles_sorted:
        utils.write_content(article, output_path)
        print(f"Written article: {article.url()}")

    # STEP 3: and we finish by copying the static folder.
    static_dirpath = os.path.join(os.curdir, "static")
    if os.path.isdir(static_dirpath):
        distutils.dir_util.copy_tree(static_dirpath, output_path)
        print(f"Static files copied")

    # STEP 4: save cache!
    with open(articles_cache_path, "wb") as cache_file:
        pickle.dump(articles, cache_file)

    elapsed = time.perf_counter()
    print(f"Generated in {elapsed - start:.2f}s")


if __name__ == "__main__":
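    # Load the environment from the command-line arguments.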
    environment = environment.load_from_argv(sys.argv)
    main(environment)
    print("Boop!")