From: alex
Date: Mon, 15 Mar 2021 12:07:27 +0000 (+0000)
Subject: Add robots.txt to prevent scraping of full Geminispace
X-Git-Tag: 20240214-emacs~226^2~110
X-Git-Url: https://xn--ix-yja.es/gitweb/?a=commitdiff_plain;h=9a091cb2ff2b83f45aefdcf754dce14c9bc284e2;p=alex.git

Add robots.txt to prevent scraping of full Geminispace
---

diff --git a/build.py b/build.py
index 2716672..4d3ea88 100755
--- a/build.py
+++ b/build.py
@@ -108,6 +108,10 @@ def build():
     posts = load_posts()
     create_index(sorted(posts, key=lambda p: p.posted, reverse=True))
     create_individual_posts(posts)
+    for directory, _, files in os.walk("static/gmi"):
+        new_dir = directory.replace("static/gmi", "build/gmi")
+        for file in files:
+            shutil.copy(f"{directory}/{file}", f"{new_dir}/{file}")
 
 if __name__ == "__main__":
     build()
diff --git a/static/gmi/robots.txt b/static/gmi/robots.txt
new file mode 100644
index 0000000..da2aac5
--- /dev/null
+++ b/static/gmi/robots.txt
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /x/
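
Note (commentary, not part of the patch): the copy loop above calls shutil.copy
into "build/gmi" without creating the destination directories, so it presumably
relies on the build tree already existing from an earlier step. Below is a
minimal standalone sketch of the same static-file copy, with an os.makedirs
guard added so it also works on a fresh checkout. The helper name
copy_static_gmi and the makedirs guard are illustrative additions, not taken
from the commit.

    import os
    import shutil

    def copy_static_gmi(src_root="static/gmi", dst_root="build/gmi"):
        """Mirror static Gemini files (including robots.txt) into the build tree."""
        for directory, _, files in os.walk(src_root):
            # Map static/gmi/<subdir> -> build/gmi/<subdir> (replace prefix only).
            new_dir = directory.replace(src_root, dst_root, 1)
            # Assumption not in the patch: create the target directory if missing.
            os.makedirs(new_dir, exist_ok=True)
            for file in files:
                shutil.copy(os.path.join(directory, file),
                            os.path.join(new_dir, file))

    if __name__ == "__main__":
        copy_static_gmi()

Placing robots.txt at the capsule root works because Gemini crawlers follow a
robots.txt companion convention borrowed from the web's User-agent/Disallow
format, so "Disallow: /x/" asks compliant crawlers to skip everything under
the /x/ path.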