xn--ix-yja.es Git - alex.git/commitdiff
Add robots.txt to prevent scraping of full Geminispace
author	alex <alex@pdp7.net>
Mon, 15 Mar 2021 12:07:27 +0000 (12:07 +0000)
committer	alex <alex@pdp7.net>
Mon, 15 Mar 2021 12:07:27 +0000 (12:07 +0000)
build.py
static/gmi/robots.txt [new file with mode: 0644]

index 27166721816c13edd300d446d6da882e4dce32e5..4d3ea88772f91a30680b8a9f6bd15ef82b9cf969 100755 (executable)
--- a/build.py
+++ b/build.py
@@ -108,6 +108,10 @@ def build():
     posts = load_posts()
     create_index(sorted(posts, key=lambda p: p.posted, reverse=True))
     create_individual_posts(posts)
+    for directory, _, files in os.walk("static/gmi"):
+        new_dir = directory.replace("static/gmi", "build/gmi")
+        for file in files:
+            shutil.copy(f"{directory}/{file}", f"{new_dir}/{file}")
 
 if __name__ == "__main__":
     build()
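
The new loop mirrors every file under static/gmi into the matching path under build/gmi. A minimal standalone sketch of the same step follows; it assumes `os` and `shutil` are imported at the top of build.py (not visible in this hunk), and it adds an `os.makedirs` guard, since the committed version relies on the destination directories already existing:

    import os
    import shutil

    def copy_static_gmi(src="static/gmi", dst="build/gmi"):
        """Mirror every file under src into the corresponding path under dst."""
        for directory, _, files in os.walk(src):
            new_dir = directory.replace(src, dst)
            # Assumption: the commit's loop does not create new_dir itself.
            os.makedirs(new_dir, exist_ok=True)
            for file in files:
                shutil.copy(os.path.join(directory, file),
                            os.path.join(new_dir, file))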
diff --git a/static/gmi/robots.txt b/static/gmi/robots.txt
new file mode 100644 (file)
index 0000000..da2aac5
--- /dev/null
@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /x/
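
The rule asks every crawler (User-agent: *) not to fetch anything under /x/, which, per the commit message, presumably serves the rest of Geminispace through this capsule. Gemini's robots.txt companion convention mirrors the web one, so as an illustrative check only (not part of the commit), Python's standard urllib.robotparser applies the same matching rules:

    import urllib.robotparser

    rp = urllib.robotparser.RobotFileParser()
    rp.parse(["User-agent: *", "Disallow: /x/"])
    print(rp.can_fetch("*", "/x/example.site/page.gmi"))  # False: proxied content is off limits
    print(rp.can_fetch("*", "/index.gmi"))                # True: the capsule's own pages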