From 9a091cb2ff2b83f45aefdcf754dce14c9bc284e2 Mon Sep 17 00:00:00 2001
From: alex
Date: Mon, 15 Mar 2021 12:07:27 +0000
Subject: Add robots.txt to prevent scraping of full Geminispace

---
 build.py | 4 ++++
 1 file changed, 4 insertions(+)

(limited to 'build.py')

diff --git a/build.py b/build.py
index 27166721..4d3ea887 100755
--- a/build.py
+++ b/build.py
@@ -108,6 +108,10 @@ def build():
     posts = load_posts()
     create_index(sorted(posts, key=lambda p: p.posted, reverse=True))
     create_individual_posts(posts)
+    for directory, _, files in os.walk("static/gmi"):
+        new_dir = directory.replace("static/gmi", "build/gmi")
+        for file in files:
+            shutil.copy(f"{directory}/{file}", f"{new_dir}/{file}")

 if __name__ == "__main__":
     build()
--
cgit v1.2.3
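
Note: the subject mentions robots.txt, but this cgit view is limited to build.py, so only the build-step change is shown. The added loop walks static/gmi and mirrors each file into build/gmi. As a point of reference, here is a self-contained Python sketch of that copy step. The function name copy_gmi_tree is hypothetical, and the os.makedirs call is an added robustness assumption: the patch as committed relies on the destination directories under build/gmi already existing.

import os
import shutil

def copy_gmi_tree(src_root="static/gmi", dst_root="build/gmi"):
    # Walk the source tree; os.walk yields nothing if src_root is absent.
    for directory, _, files in os.walk(src_root):
        # Map each source directory to its mirror under the build output.
        new_dir = directory.replace(src_root, dst_root)
        # Not in the original patch: ensure the destination directory exists.
        os.makedirs(new_dir, exist_ok=True)
        for file in files:
            shutil.copy(os.path.join(directory, file),
                        os.path.join(new_dir, file))

if __name__ == "__main__":
    copy_gmi_tree()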