author    alex <alex@pdp7.net>  2023-02-01 21:30:55 +0100
committer alex <alex@pdp7.net>  2023-02-01 21:30:55 +0100
commit    e2b879caff26e6dcb81412640f40767863831f66
tree      6b24f29a32083d1118252401346e830d8bdda382
parent    bc7cba745cd10c4a23ad89d49e0c0e66d05086b9
Document ugly things
-rwxr-xr-x  personal_infra/pseudo_resource_exporter.py  26
-rwxr-xr-x  personal_infra/up.py                        31
2 files changed, 53 insertions, 4 deletions
diff --git a/personal_infra/pseudo_resource_exporter.py b/personal_infra/pseudo_resource_exporter.py
index 0753a0d7..793f2b77 100755
--- a/personal_infra/pseudo_resource_exporter.py
+++ b/personal_infra/pseudo_resource_exporter.py
@@ -1,17 +1,37 @@
#!/usr/bin/env python3
-
import json
import pathlib
+"""
+This is an ugly hack.
+
+Puppet exported resources are a nice way to generate monitoring configuration
+along with your Puppet resources. When you define something like an Apache
+virtual host, you can also create a Nagios service check for it.
+
+But this requires a PuppetDB and does not play nicely when you have no
+central Puppet infra.
+
+This script takes the catalog JSON files generated by its sibling script
+up.py and manipulates them: it moves Nagios resources to a specific host
+and does ugly trickery to fool Puppet into accepting that.
+
+This is like exported resources, but you don't need to declare a resource as
+exported.
+"""
+
+
def load_json(path):
    with open(path) as f:
        return json.load(f)
+
def save_json(r, path):
    with open(path, "w") as f:
        json.dump(r, f)
+
nagios_catalog_file = pathlib.Path("build/puppet/build/output/nagios.h1.int.pdp7.net/catalog.json")
if nagios_catalog_file.exists():
@@ -32,6 +52,7 @@ if nagios_catalog_file.exists():
nagios_resources = []
nagios_edge_targets = []
+
def is_nagios_resource(r):
    return r["type"].startswith("Nagios")
@@ -39,6 +60,7 @@ def is_nagios_resource(r):
def is_nagios_edge(e):
    return e["target"].startswith("Nagios")
+
for catalog_file in catalog_files:
    if catalog_file == nagios_catalog_file:
        continue
@@ -48,7 +70,7 @@ for catalog_file in catalog_files:
    nagios_edge_targets += [e["target"] for e in catalog["edges"] if is_nagios_edge(e)]
    catalog["edges"] = [e for e in catalog["edges"] if not is_nagios_edge(e)]
    save_json(catalog, catalog_file)
-
+
if nagios_catalog_file.exists():
    nagios_contact_position = nagios_catalog["resources"].index(nagios_contact)
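
The hack above boils down to splicing resources between compiled catalog JSON
files. Below is a minimal, hypothetical sketch of the same idea; the
move_nagios_resources helper is illustrative only and skips the ugly trickery
the real script needs (re-adding edges and fooling Puppet into accepting the
spliced catalog).

#!/usr/bin/env python3
# Hypothetical sketch: move Nagios* resources from one compiled catalog
# into the designated Nagios host's catalog. Not the actual script.
import json

def move_nagios_resources(host_catalog_path, nagios_catalog_path):
    with open(host_catalog_path) as f:
        host_catalog = json.load(f)
    with open(nagios_catalog_path) as f:
        nagios_catalog = json.load(f)

    # Pull the Nagios resources out of the source host's catalog...
    moved = [r for r in host_catalog["resources"]
             if r["type"].startswith("Nagios")]
    host_catalog["resources"] = [r for r in host_catalog["resources"]
                                 if not r["type"].startswith("Nagios")]
    # ...drop the edges that pointed at them, so the source catalog stays
    # internally consistent...
    host_catalog["edges"] = [e for e in host_catalog["edges"]
                             if not e["target"].startswith("Nagios")]
    # ...and append them to the Nagios host's catalog.
    nagios_catalog["resources"] += moved

    with open(host_catalog_path, "w") as f:
        json.dump(host_catalog, f)
    with open(nagios_catalog_path, "w") as f:
        json.dump(nagios_catalog, f)

# Example (paths are illustrative):
# move_nagios_resources("output/somehost/catalog.json",
#                       "output/nagios.h1.int.pdp7.net/catalog.json")
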
diff --git a/personal_infra/up.py b/personal_infra/up.py
index 55d0f9bf..131fc9e6 100755
--- a/personal_infra/up.py
+++ b/personal_infra/up.py
@@ -8,17 +8,44 @@ import subprocess
import textwrap
import yaml
+
"""
+This script performs Puppet catalog compilation without a central server.
+
+It receives the following arguments:
+
+* directory: a working directory. The script expects to find input data
+  there (variables to use in the compilation process, facts, etc.) and also
+  writes intermediate files and output to it.
+
+* modulepath: path to your modules directory
+* manifest: path to your site directory
+* host: the host(s) to compile catalogs for
+
+The script expects the following content in the working directory:
+
directory/
    global_vars/*.json: these JSON files will be available to all hosts
    host_vars/{host}/*.json: these JSON files will be available to each host
    facts/{host}.json: output from "facter -y" for each host
+It produces the following files:
+
directory/
    output/
        {host}/
-            catalog.json
-            modules/
+            catalog.json: the compiled catalog for the host
+            modules: a copy of the modules directory
+
+Just ship the {host} directory to each host and run:
+
+$ puppet apply --catalog .../catalog.json --modulepath=.../modules/
+
+Check the apply_catalog Ansible role for example usage.
+
+Since we have the catalogs at hand, we can manipulate them. See
+pseudo_resource_exporter.py for an example hack that simulates exported
+resources without PuppetDB.
"""