Merge branch 'main' of ssh://github.com/sunet/multiverse

Commit fe9fb32cd6

Makefile (6 lines changed)
@@ -6,11 +6,7 @@ cosmos:
 upgrade:
 	fab upgrade
 
-db:
-	@python ./fabfile/db.py > global/overlay/etc/puppet/cosmos-db.yaml
-	@git add global/overlay/etc/puppet/cosmos-db.yaml && git commit -m "update db" global/overlay/etc/puppet/cosmos-db.yaml
-
-tag: db
+tag:
 	./bump-tag
 
 test_in_docker:
@@ -222,7 +222,7 @@ you'll try to push to the multiverse remote!
 Finally create a branch for the 'multiverse' upstream so you can merge changes to multiverse:
 
 ```
-# git checkout -b multiverse --track multiverse/master
+# git checkout -b multiverse --track multiverse/main
 ```
 
 Note that you can maintain your repo on just about any git hosting platform, including
docs/setup_cosmos_modules.eduid.example (new executable file, 300 lines)
@@ -0,0 +1,300 @@
#!/usr/bin/env python3
#
# This script is responsible for creating/updating /etc/puppet/cosmos-modules.conf.
#
# If this script exits without creating that file, a default list of modules will be
# selected (by post-tasks.d/010cosmos-modules, the script that invokes this script).
#
# NOTES ABOUT THE IMPLEMENTATION:
#
#   - Avoid any third party modules. We want this script to be re-usable in all ops-repos.
#   - To make merging easier, try to keep all local alterations in the local_* functions.
#   - Format with black and isort. Line width 120.
#   - You probably ONLY want to change things in the local_get_modules_hook() function.
#

import argparse
import csv
import json
import logging
import logging.handlers
import os
import re
import socket
import sys
from pathlib import Path
from typing import Dict, NewType, Optional, cast

from pkg_resources import parse_version

logger = logging.getLogger(__name__)  # will be overwritten by _setup_logging()

# Set up types for data that is passed around in functions in this script.
# Need to use Dict (not dict) here since these aren't stripped by strip-hints, and doesn't work on Ubuntu <= 20.04.
Arguments = NewType("Arguments", argparse.Namespace)
OSInfo = Dict[str, str]
HostInfo = Dict[str, Optional[str]]
Modules = Dict[str, Dict[str, str]]


def parse_args() -> Arguments:
    """
    Parse the command line arguments
    """
    parser = argparse.ArgumentParser(
        description="Setup cosmos-modules.conf",
        add_help=True,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    parser.add_argument("--debug", dest="debug", action="store_true", default=False, help="Enable debug operation")
    parser.add_argument(
        "--filename", dest="filename", type=str, default="/etc/puppet/cosmos-modules.conf", help="Filename to write to"
    )

    return cast(Arguments, parser.parse_args())


def get_os_info() -> OSInfo:
    """Load info about the current OS (distro, release etc.)"""
    os_info: OSInfo = {}
    if Path("/etc/os-release").exists():
        os_info.update({k.lower(): v for k, v in _parse_bash_vars("/etc/os-release").items()})
    res = local_os_info_hook(os_info)
    logger.debug(f"OS info:\n{json.dumps(res, sort_keys=True, indent=4)}")
    return res


def get_host_info() -> HostInfo:
    """Load info about the current host (hostname, fqdn, domain name etc.)"""
    try:
        fqdn = socket.getfqdn()
        hostname = socket.gethostname()
    except OSError:
        host_info = {}
    else:
        _domainname = fqdn[len(hostname + ".") :]

        host_info: HostInfo = {
            "domainname": _domainname,
            "fqdn": fqdn,
            "hostname": hostname,
        }
    res = local_host_info_hook(host_info)
    logger.debug(f"Host info: {json.dumps(res, sort_keys=True, indent=4)}")
    return res


def _parse_bash_vars(path: str) -> dict[str, str]:
    """
    Parses a bash script and returns a dictionary representing the
    variables declared in that script.

    Source: https://dev.to/htv2012/how-to-parse-bash-variables-b4f

    :param path: The path to the bash script
    :return: Variables as a dictionary
    """
    with open(path) as stream:
        contents = stream.read().strip()

    var_declarations = re.findall(r"^[a-zA-Z0-9_]+=.*$", contents, flags=re.MULTILINE)
    reader = csv.reader(var_declarations, delimiter="=")
    bash_vars = dict(reader)
    return bash_vars


def get_modules(os_info: OSInfo, host_info: HostInfo) -> Modules:
    """Load the list of default modules.

    This is more or less an inventory of all the modules we have. If you don't want
    to use all modules in your OPS repo, you can filter them in the local hook.

    If you want to use a different tag for a module on a specific host/os, you can
    do that in the local hook as well.
    """
    default_modules = """
      # name     repo                                                  upgrade  tag
      apparmor   https://github.com/SUNET/puppet-apparmor.git          yes      sunet-2*
      apt        https://github.com/SUNET/puppetlabs-apt.git           yes      sunet-2*
      augeas     https://github.com/SUNET/puppet-augeas.git            yes      sunet-2*
      bastion    https://github.com/SUNET/puppet-bastion.git           yes      sunet-2*
      concat     https://github.com/SUNET/puppetlabs-concat.git        yes      sunet-2*
      cosmos     https://github.com/SUNET/puppet-cosmos.git            yes      sunet-2*
      dhcp       https://github.com/SUNET/puppetlabs-dhcp.git          yes      sunet_dev-2*
      docker     https://github.com/SUNET/garethr-docker.git           yes      sunet-2*
      hiera-gpg  https://github.com/SUNET/hiera-gpg.git                yes      sunet-2*
      munin      https://github.com/SUNET/ssm-munin.git                yes      sunet-2*
      nagioscfg  https://github.com/SUNET/puppet-nagioscfg.git         yes      sunet-2*
      network    https://github.com/SUNET/attachmentgenie-network.git  yes      sunet-2*
      pound      https://github.com/SUNET/puppet-pound.git             yes      sunet-2*
      pyff       https://github.com/samlbits/puppet-pyff.git           yes      puppet-pyff-*
      python     https://github.com/SUNET/puppet-python.git            yes      sunet-2*
      stdlib     https://github.com/SUNET/puppetlabs-stdlib.git        yes      sunet-2*
      sunet      https://github.com/SUNET/puppet-sunet.git             yes      sunet-2*
      sysctl     https://github.com/SUNET/puppet-sysctl.git            yes      sunet-2*
      ufw        https://github.com/SUNET/puppet-module-ufw.git        yes      sunet-2*
      varnish    https://github.com/samlbits/puppet-varnish.git        yes      puppet-varnish-*
      vcsrepo    https://github.com/SUNET/puppetlabs-vcsrepo.git       yes      sunet-2*
      xinetd     https://github.com/SUNET/puppetlabs-xinetd.git        yes      sunet-2*
    """
    modules: Modules = {}
    for line in default_modules.splitlines():
        try:
            if not line.strip() or line.strip().startswith("#"):
                continue
            _name, _url, _upgrade, _tag = line.split()
            modules[_name] = {
                "repo": _url,
                "upgrade": _upgrade,
                "tag": _tag,
            }
        except ValueError:
            logger.error(f"Failed to parse line: {repr(line)}")
            raise

    # Remove the UFW module on Ubuntu >= 22.04 (nftables is used there instead)
    if os_info.get("name") == "Ubuntu":
        ver = os_info.get("version_id")
        if ver:
            if parse_version(ver) >= parse_version("22.04"):
                logger.debug("Removing UFW module for Ubuntu >= 22.04")
                del modules["ufw"]
            else:
                logger.debug("Keeping UFW module for Ubuntu < 22.04")
        else:
            logger.debug("Unknown Ubuntu module version, keeping UFW module")

    return local_get_modules_hook(os_info, host_info, modules)


def local_os_info_hook(os_info: OSInfo) -> OSInfo:
    """Local hook to modify os_info in an OPS repo."""
    # Start local changes in this repository
    # End local changes
    return os_info


def local_host_info_hook(host_info: HostInfo) -> HostInfo:
    """Local hook to modify host_info in an OPS repo."""
    # Start local changes in this repository

    # Regular expression to tease apart an eduID hostname
    hostname_re = re.compile(
        r"""^
        (\w+)  # function ('idp', 'apps', ...)
        -
        (\w+)  # site ('tug', 'sthb', ...)
        -
        (\d+)  # 1 for staging, 3 for production
        """,
        re.VERBOSE,
    )
    _hostname = host_info.get("hostname")
    if _hostname:
        m = hostname_re.match(_hostname)
        if m:
            _function, _site, _num = m.groups()
            host_info["function"] = _function
            host_info["site"] = _site
            if _num == "1":
                host_info["environment"] = "staging"

    # End local changes
    return host_info


def local_get_modules_hook(os_info: OSInfo, host_info: HostInfo, modules: Modules) -> Modules:
    """Local hook to modify default set of modules in an OPS repo."""
    # Start local changes in this repository

    _eduid_modules = {
        "apparmor",
        "apt",
        "augeas",
        "bastion",
        "concat",
        "docker",
        "munin",
        "stdlib",
        "sunet",
        "ufw",
    }
    # Only keep the modules eduID actually uses
    modules = {k: v for k, v in modules.items() if k in _eduid_modules}
    logger.debug(f"Adding modules: {json.dumps(modules, sort_keys=True, indent=4)}")

    # Use eduID tag for puppet-sunet
    modules["sunet"]["tag"] = "eduid-stable-2*"
    if host_info.get("environment") == "staging":
        modules["sunet"]["tag"] = "eduid_dev-2*"

    # use sunet_dev-2* for some modules in staging
    for dev_module in ["munin"]:
        if host_info.get("environment") == "staging" and dev_module in modules:
            modules[dev_module]["tag"] = "sunet_dev-2*"

    # End local changes
    return modules


def update_cosmos_modules(filename: str, modules: Modules) -> None:
    """Create/update the cosmos-modules.conf file.

    First, we check if the file already has the right content. If so, we do nothing.
    """
    content = "# This file is automatically generated by the setup_cosmos_modules script.\n# Do not edit it manually.\n"
    for k, v in sorted(modules.items()):
        content += f"{k:15} {v['repo']:55} {v['upgrade']:5} {v['tag']}\n"
    _file = Path(filename)
    if _file.exists():
        # Check if the content is already correct, and avoid updating the file if so (so that the timestamp
        # of the file at least indicates when the content was last updated)
        with _file.open("r") as f:
            current = f.read()
        if current == content:
            logger.debug(f"{filename} is up to date")
            return

    # Create/update the file by writing the content to a temporary file and then renaming it
    _tmp_file = _file.with_suffix(".tmp")
    with _tmp_file.open("w") as f:
        f.write(content)
    _tmp_file.rename(_file)
    logger.debug(f"Updated {filename}")


def _setup_logging(my_name: str, args: Arguments):
    level = logging.INFO
    if args.debug:
        level = logging.DEBUG
    logging.basicConfig(level=level, stream=sys.stderr, format="{asctime} | {levelname:7} | {message}", style="{")
    global logger
    logger = logging.getLogger(my_name)
    # If stderr is not a TTY, change the log level of the StreamHandler (stream = sys.stderr above) to ERROR
    if not sys.stderr.isatty() and not args.debug:
        for this_h in logging.getLogger("").handlers:
            this_h.setLevel(logging.ERROR)
    if args.debug:
        logger.setLevel(logging.DEBUG)


def main(my_name: str, args: Arguments) -> bool:
    _setup_logging(my_name, args)

    os_info = get_os_info()
    host_info = get_host_info()
    modules = get_modules(os_info, host_info)

    update_cosmos_modules(args.filename, modules)

    return True


if __name__ == "__main__":
    my_name = os.path.basename(sys.argv[0])
    args = parse_args()
    res = main(my_name, args=args)
    if res:
        sys.exit(0)
    sys.exit(1)
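As an aside, the `_parse_bash_vars()` helper above leans on `csv.reader` to strip the optional quoting from `KEY=value` lines. A minimal, self-contained sketch of that behaviour (the sample `/etc/os-release` content below is made up):

```python
import csv
import re

# Hypothetical /etc/os-release content, only to exercise the parsing approach.
sample = 'NAME="Ubuntu"\nVERSION_ID="22.04"\nID=ubuntu\n'

declarations = re.findall(r"^[a-zA-Z0-9_]+=.*$", sample, flags=re.MULTILINE)
parsed = dict(csv.reader(declarations, delimiter="="))
print(parsed)  # {'NAME': 'Ubuntu', 'VERSION_ID': '22.04', 'ID': 'ubuntu'}
```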
@ -1,134 +1,216 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
""" Write out a puppet cosmos-modules.conf """
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from configobj import ConfigObj
|
from configobj import ConfigObj
|
||||||
|
|
||||||
os_info = ConfigObj("/etc/os-release")
|
OS_INFO = ConfigObj("/etc/os-release")
|
||||||
except (IOError, ModuleNotFoundError):
|
except (IOError, ModuleNotFoundError):
|
||||||
os_info = None
|
OS_INFO = None
|
||||||
|
|
||||||
|
|
||||||
modulesfile: str = "/etc/puppet/cosmos-modules.conf"
|
def get_file_hash(modulesfile):
|
||||||
modules: dict = {
|
"""
|
||||||
"concat": {
|
Based on https://github.com/python/cpython/pull/31930: should use
|
||||||
"repo": "https://github.com/SUNET/puppetlabs-concat.git",
|
hashlib.file_digest() but it is only available in python 3.11
|
||||||
"upgrade": "yes",
|
"""
|
||||||
"tag": "sunet-2*",
|
try:
|
||||||
},
|
with open(modulesfile, "rb") as fileobj:
|
||||||
"stdlib": {
|
digestobj = hashlib.sha256()
|
||||||
"repo": "https://github.com/SUNET/puppetlabs-stdlib.git",
|
_bufsize = 2**18
|
||||||
"upgrade": "yes",
|
buf = bytearray(_bufsize) # Reusable buffer to reduce allocations.
|
||||||
"tag": "sunet-2*",
|
view = memoryview(buf)
|
||||||
},
|
while True:
|
||||||
"cosmos": {
|
size = fileobj.readinto(buf)
|
||||||
"repo": "https://github.com/SUNET/puppet-cosmos.git",
|
if size == 0:
|
||||||
"upgrade": "yes",
|
break # EOF
|
||||||
"tag": "sunet-2*",
|
digestobj.update(view[:size])
|
||||||
},
|
except FileNotFoundError:
|
||||||
"ufw": {
|
return ""
|
||||||
"repo": "https://github.com/SUNET/puppet-module-ufw.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"apt": {
|
|
||||||
"repo": "https://github.com/SUNET/puppetlabs-apt.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"vcsrepo": {
|
|
||||||
"repo": "https://github.com/SUNET/puppetlabs-vcsrepo.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"xinetd": {
|
|
||||||
"repo": "https://github.com/SUNET/puppetlabs-xinetd.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"python": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-python.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"hiera-gpg": {
|
|
||||||
"repo": "https://github.com/SUNET/hiera-gpg.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"pound": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-pound.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"augeas": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-augeas.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"bastion": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-bastion.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"pyff": {
|
|
||||||
"repo": "https://github.com/samlbits/puppet-pyff.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "puppet-pyff-*",
|
|
||||||
},
|
|
||||||
"dhcp": {
|
|
||||||
"repo": "https://github.com/SUNET/puppetlabs-dhcp.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet_dev-2*",
|
|
||||||
},
|
|
||||||
"varnish": {
|
|
||||||
"repo": "https://github.com/samlbits/puppet-varnish.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "puppet-varnish-*",
|
|
||||||
},
|
|
||||||
"apparmor": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-apparmor.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"docker": {
|
|
||||||
"repo": "https://github.com/SUNET/garethr-docker.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"network": {
|
|
||||||
"repo": "https://github.com/SUNET/attachmentgenie-network.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"sunet": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-sunet.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"sysctl": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-sysctl.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
"nagioscfg": {
|
|
||||||
"repo": "https://github.com/SUNET/puppet-nagioscfg.git",
|
|
||||||
"upgrade": "yes",
|
|
||||||
"tag": "sunet-2*",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# When/if we want we can do stuff to modules here
|
return digestobj.hexdigest()
|
||||||
if os_info:
|
|
||||||
if os_info["VERSION_CODENAME"] == "bullseye":
|
|
||||||
pass
|
|
||||||
|
|
||||||
with open(modulesfile, "w") as fh:
|
|
||||||
|
def get_list_hash(file_lines):
|
||||||
|
"""Get hash of list contents"""
|
||||||
|
|
||||||
|
file_lines_hash = hashlib.sha256()
|
||||||
|
for line in file_lines:
|
||||||
|
file_lines_hash.update(line)
|
||||||
|
|
||||||
|
return file_lines_hash.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
def create_file_content(modules):
|
||||||
|
"""
|
||||||
|
Write out the expected file contents to a list so we can check the
|
||||||
|
expected checksum before writing anything
|
||||||
|
"""
|
||||||
|
file_lines = []
|
||||||
|
file_lines.append(
|
||||||
|
"# Generated by {}\n".format( # pylint: disable=consider-using-f-string
|
||||||
|
os.path.basename(sys.argv[0])
|
||||||
|
).encode("utf-8")
|
||||||
|
)
|
||||||
for key in modules:
|
for key in modules:
|
||||||
fh.write(
|
file_lines.append(
|
||||||
"{0:11} {1} {2} {3}\n".format(
|
"{0:11} {1} {2} {3}\n".format( # pylint: disable=consider-using-f-string
|
||||||
key,
|
key,
|
||||||
modules[key]["repo"],
|
modules[key]["repo"],
|
||||||
modules[key]["upgrade"],
|
modules[key]["upgrade"],
|
||||||
modules[key]["tag"],
|
modules[key]["tag"],
|
||||||
)
|
).encode("utf-8")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
return file_lines
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""Starting point of the program"""
|
||||||
|
|
||||||
|
modulesfile: str = "/etc/puppet/cosmos-modules.conf"
|
||||||
|
modulesfile_tmp: str = modulesfile + ".tmp"
|
||||||
|
|
||||||
|
modules: dict = {
|
||||||
|
"concat": {
|
||||||
|
"repo": "https://github.com/SUNET/puppetlabs-concat.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"stdlib": {
|
||||||
|
"repo": "https://github.com/SUNET/puppetlabs-stdlib.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"cosmos": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-cosmos.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"ufw": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-module-ufw.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"apt": {
|
||||||
|
"repo": "https://github.com/SUNET/puppetlabs-apt.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"vcsrepo": {
|
||||||
|
"repo": "https://github.com/SUNET/puppetlabs-vcsrepo.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"xinetd": {
|
||||||
|
"repo": "https://github.com/SUNET/puppetlabs-xinetd.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"python": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-python.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"hiera-gpg": {
|
||||||
|
"repo": "https://github.com/SUNET/hiera-gpg.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"pound": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-pound.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"augeas": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-augeas.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"bastion": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-bastion.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"pyff": {
|
||||||
|
"repo": "https://github.com/samlbits/puppet-pyff.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "puppet-pyff-*",
|
||||||
|
},
|
||||||
|
"dhcp": {
|
||||||
|
"repo": "https://github.com/SUNET/puppetlabs-dhcp.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet_dev-2*",
|
||||||
|
},
|
||||||
|
"varnish": {
|
||||||
|
"repo": "https://github.com/samlbits/puppet-varnish.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "puppet-varnish-*",
|
||||||
|
},
|
||||||
|
"apparmor": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-apparmor.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"docker": {
|
||||||
|
"repo": "https://github.com/SUNET/garethr-docker.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"network": {
|
||||||
|
"repo": "https://github.com/SUNET/attachmentgenie-network.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"sunet": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-sunet.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"sysctl": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-sysctl.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
"nagioscfg": {
|
||||||
|
"repo": "https://github.com/SUNET/puppet-nagioscfg.git",
|
||||||
|
"upgrade": "yes",
|
||||||
|
"tag": "sunet-2*",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
# When/if we want we can do stuff to modules here
|
||||||
|
if OS_INFO:
|
||||||
|
if OS_INFO["VERSION_CODENAME"] == "bullseye":
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Build list of expected file content
|
||||||
|
file_lines = create_file_content(modules)
|
||||||
|
|
||||||
|
# Get hash of the list
|
||||||
|
list_hash = get_list_hash(file_lines)
|
||||||
|
|
||||||
|
# Get hash of the existing file on disk
|
||||||
|
file_hash = get_file_hash(modulesfile)
|
||||||
|
|
||||||
|
# Update the file if necessary
|
||||||
|
if list_hash != file_hash:
|
||||||
|
# Since we are reading the file with 'rb' when computing our hash use 'wb' when
|
||||||
|
# writing so we dont end up creating a file that does not match the
|
||||||
|
# expected hash
|
||||||
|
with open(modulesfile_tmp, "wb") as fileobj:
|
||||||
|
for line in file_lines:
|
||||||
|
fileobj.write(line)
|
||||||
|
|
||||||
|
# Rename it in place so the update is atomic for anything else trying to
|
||||||
|
# read the file
|
||||||
|
os.rename(modulesfile_tmp, modulesfile)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
|
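The rewrite above boils down to a compare-hash-then-rename update. A condensed sketch of the same pattern (the function name is illustrative, not part of the repo):

```python
import hashlib
import os

def write_if_changed(path: str, lines: list) -> bool:
    """Only rewrite the file when its content would change, and rename a
    temporary file into place so readers never observe a partial write."""
    want = hashlib.sha256(b"".join(lines)).hexdigest()
    try:
        with open(path, "rb") as fileobj:
            have = hashlib.sha256(fileobj.read()).hexdigest()
    except FileNotFoundError:
        have = ""
    if want == have:
        return False
    with open(path + ".tmp", "wb") as fileobj:
        fileobj.writelines(lines)
    os.rename(path + ".tmp", path)  # atomic within the same filesystem
    return True
```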
@@ -151,7 +151,7 @@ function edit_file_on_host() {
         edit_gpg_file ${SECRETFILE}
     elif [ -f /etc/hiera/eyaml/public_certkey.pkcs7.pem ]; then
         # default to eyaml if the key exists and none of the secrets-file above exist
-        touch ${EYAMLFILE}
+        echo "---" > ${EYAMLFILE}
         edit_eyaml_file ${EYAMLFILE}
     fi
 }
(deleted file)
@@ -1,49 +0,0 @@
import os
import yaml
import re

def _all_hosts():
    return filter(lambda fn: '.' in fn and not fn.startswith('.') and os.path.isdir(fn), os.listdir("."))

def _load_db():
    rules = dict()
    rules_file = "cosmos-rules.yaml";
    if os.path.exists(rules_file):
        with open(rules_file) as fd:
            rules.update(yaml.load(fd))

    all_hosts = _all_hosts()

    members = dict()
    for node_name in all_hosts:
        for reg, cls in rules.iteritems():
            if re.match(reg, node_name):
                for cls_name in cls.keys():
                    h = members.get(cls_name, [])
                    h.append(node_name)
                    members[cls_name] = h
    members['all'] = all_hosts

    classes = dict()
    for node_name in all_hosts:
        node_classes = dict()
        for reg, cls in rules.iteritems():
            if re.match(reg, node_name):
                node_classes.update(cls)
        classes[node_name] = node_classes

    # Sort member lists for a more easy to read diff
    for cls in members.keys():
        members[cls].sort()

    return dict(classes=classes, members=members)

_db = None
def cosmos_db():
    global _db
    if _db is None:
        _db = _load_db()
    return _db

if __name__ == '__main__':
    print yaml.dump(cosmos_db())
@@ -30,7 +30,7 @@ export DEBIAN_FRONTEND='noninteractive'
 apt-get -y update
 apt-get -y upgrade
-for pkg in rsync git git-core wget gpg; do
+for pkg in rsync git git-core wget gpg jq; do
     # script is running with "set -e", use "|| true" to allow packages to not
     # exist without stopping the script
     apt-get -y install $pkg || true
@@ -56,16 +56,43 @@ mv -f /etc/rc.local.new /etc/rc.local
 touch /etc/run-cosmos-at-boot
 
 # If this cloud-config is set, it will interfere with our changes to /etc/hosts
-if [ -f /etc/cloud/cloud.cfg ]; then
-    sed -i 's/manage_etc_hosts: true/manage_etc_hosts: false/g' /etc/cloud/cloud.cfg
-fi
+# The configuration seems to move around between cloud-config versions
+for file in /etc/cloud/cloud.cfg /etc/cloud/cloud.cfg.d/01_debian_cloud.cfg; do
+    if [ -f ${file} ]; then
+        sed -i 's/manage_etc_hosts: true/manage_etc_hosts: false/g' ${file}
+    fi
+done
 
-# Remove potential $hostname.novalocal line from /etc/hosts, added by cloud-init
-sed -i.bak -e "s/^127\.0\.1\.1 $(hostname)\..*novalocal.*//1" /etc/hosts
+# Remove potential $hostname.novalocal, added by cloud-init or Debian default,
+# from /etc/hosts. We add our own further down.
+#
+# From https://www.debian.org/doc/manuals/debian-reference/ch05.en.html#_the_hostname_resolution:
+# "For a system with a permanent IP address, that permanent IP address should
+# be used here instead of 127.0.1.1."
+sed -i.bak -e "/127\.0\.1\.1/d" /etc/hosts
+
+vendor=$(lsb_release -is)
+version=$(lsb_release -rs)
+min_version=1337
+host_ip=127.0.1.1
+if [ "${vendor}" = "Ubuntu" ]; then
+    min_version=20.04
+elif [ "${vendor}" = "Debian" ]; then
+    min_version=11
+fi
 
 hostname $cmd_hostname
 short=`echo ${cmd_hostname} | awk -F. '{print $1}'`
-echo "127.0.1.1 ${cmd_hostname} ${short}" >> /etc/hosts
+# Only change behavior on modern OS where `ip -j` outputs JSON predictable
+# enough to work with.
+#
+# Use `dpkg` to more easily compare Ubuntu versions.
+if dpkg --compare-versions "${version}" "ge" "${min_version}"; then
+    # When hostname pointed to loopback in /etc/hosts, containers running on the
+    # host tried to connect to the container itself instead of the host.
+    host_ip=$(ip -j address show "$(ip -j route show default | jq -r '.[0].dev')" | jq -r .[0].addr_info[0].local)
+fi
+echo "${host_ip} ${cmd_hostname} ${short}" >> /etc/hosts
 
 # Set up cosmos models. They are in the order of most significant first, so we want
 # <host> <group (if it exists)> <global>
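The `ip -j ... | jq` pipeline above packs a lot into one line; roughly the same lookup expressed in Python (standard library only, and assuming a modern iproute2 that supports JSON output):

```python
import json
import subprocess

def primary_address() -> str:
    """Find the device of the default route, then return its first address,
    mirroring the jq pipeline in the script above."""
    route = json.loads(subprocess.check_output(["ip", "-j", "route", "show", "default"]))
    dev = route[0]["dev"]
    addr = json.loads(subprocess.check_output(["ip", "-j", "address", "show", dev]))
    return addr[0]["addr_info"][0]["local"]
```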
@@ -1,4 +1,6 @@
 SHELL=/bin/sh
 PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
 
-*/15 * * * * root test -f /etc/no-automatic-cosmos || /usr/local/bin/run-cosmos
+*/15 * * * * root /usr/local/libexec/cosmos-cron-wrapper
+
+@reboot root sleep 30; /usr/local/libexec/cosmos-cron-wrapper
(deleted file)
@@ -1,7 +0,0 @@
/var/lib/docker/containers/*/*.log {
    rotate 7
    daily
    compress
    delaycompress
    copytruncate
}
@@ -1,18 +1,37 @@
 #!/usr/bin/env python3
+#
+# Puppet 'External Node Classifier' to tell puppet what classes to apply to this node.
+#
+# Docs: https://puppet.com/docs/puppet/5.3/nodes_external.html
+#
 
-import sys
-import yaml
 import os
 import re
+import sys
+
+import yaml
+
+rules_path = os.environ.get("COSMOS_RULES_PATH", "/etc/puppet")
 
 node_name = sys.argv[1]
 
-db_file = os.environ.get("COSMOS_ENC_DB","/etc/puppet/cosmos-db.yaml")
-db = dict(classes=dict())
-if os.path.exists(db_file):
-    with open(db_file) as fd:
-        db.update(yaml.load(fd))
-
-print(yaml.dump(dict(classes=db['classes'].get(node_name,dict()),parameters=dict(roles=db.get('members',[])))))
+rules = dict()
+for p in rules_path.split(":"):
+    rules_file = os.path.join(p, "cosmos-rules.yaml")
+    if os.path.exists(rules_file):
+        with open(rules_file) as fd:
+            rules.update(yaml.safe_load(fd))
+
+found = False
+classes = dict()
+for reg, cls in rules.items():
+    if re.search(reg, node_name):
+        classes.update(cls)
+        found = True
+
+if not found:
+    sys.stderr.write(f"{sys.argv[0]}: {node_name} not found in cosmos-rules.yaml\n")
+
+print("---\n" + yaml.dump(dict(classes=classes)))
+
+sys.exit(0)
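For context, the rewritten ENC matches the node name against regular-expression keys taken from cosmos-rules.yaml. A tiny illustration of that matching, with made-up rules and class names:

```python
import re

# Made-up cosmos-rules.yaml entries, only to show the matching logic.
rules = {
    r"^idp-[a-z]+-\d+\.eduid\.se$": {"eduid_idp": {}},
    r"\.eduid\.se$": {"eduid_common": {}},
}

node_name = "idp-tug-1.eduid.se"
classes = {}
for reg, cls in rules.items():
    if re.search(reg, node_name):
        classes.update(cls)
print(classes)  # both hypothetical rules match: {'eduid_idp': {}, 'eduid_common': {}}
```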
@@ -1,21 +1,27 @@
+# Hiera version 5 configuration
+#
 ---
-:backends:
-  - yaml
-  - gpg
-
-:logger: console
-
-:hierarchy:
-  - "%{env}/%{location}/%{calling_module}"
-  - "%{env}/%{calling_module}"
-  - local
-  - secrets.yaml
-  - common
-
-:yaml:
-  :datadir: /etc/hiera/data
-
-:gpg:
-  :datadir: /etc/hiera/data
-  :key_dir: /etc/hiera/gpg
+version: 5
+defaults:
+  datadir: /etc/hiera/data
+  data_hash: yaml_data
+
+hierarchy:
+  - name: "Per-node data"
+    path: "local.yaml"
+
+  - name: "Per-group data"
+    path: "group.yaml"
+
+  - name: "Per-host secrets"
+    path: "local.eyaml"
+    lookup_key: eyaml_lookup_key
+    options:
+      pkcs7_private_key: /etc/hiera/eyaml/private_key.pkcs7.pem
+      pkcs7_public_key: /etc/hiera/eyaml/public_certkey.pkcs7.pem
+
+  - name: "Overrides per distribution"
+    path: "dist_%{::lsbdistcodename}_override.yaml"
+
+  - name: "Data common to whole environment"
+    path: "common.yaml"
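The new hierarchy resolves lookups most-specific-first (per-node data, then per-group data, then common data). A rough sketch of that precedence, with made-up keys and values:

```python
# Made-up data, only to illustrate first-match-wins ordering.
node = {}
group = {"ntp_server": "ntp.group.example.org"}
common = {"ntp_server": "ntp.example.org", "syslog_server": "log.example.org"}

def lookup(key):
    for level in (node, group, common):  # most significant first
        if key in level:
            return level[key]
    return None

print(lookup("ntp_server"))     # ntp.group.example.org
print(lookup("syslog_server"))  # log.example.org
```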
@@ -16,29 +16,29 @@ lock() {
     eval "exec $fd>$lock_file"
 
     # acquier the lock
-    flock -n $fd \
+    flock -n "$fd" \
         && return 0 \
         || return 1
 }
 
 eexit() {
-    local error_str="$@"
+    local error_str="$*"
 
-    echo $error_str
+    echo "$error_str"
     exit 1
 }
 
 main () {
-    lock $PROGNAME || eexit "Only one instance of $PROGNAME can run at one time."
+    lock "$PROGNAME" || eexit "Only one instance of $PROGNAME can run at one time."
-    cosmos $* update
+    cosmos "$@" update
-    cosmos $* apply
+    cosmos "$@" apply
 
     touch /var/run/last-cosmos-ok.stamp
 
-    find /var/lib/puppet/reports/ -type f -mtime +10 | xargs rm -f
+    find /var/lib/puppet/reports/ -type f -mtime +10 -print0 | xargs -0 rm -f
 }
 
-main $*
+main "$@"
 
 if [ -f /cosmos-reboot ]; then
     rm -f /cosmos-reboot
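The lock() function keeps run-cosmos single-instance via flock on a file descriptor. The same guard written in Python, shown for comparison (the lock file path here is an assumption, not taken from the repo):

```python
import fcntl
import sys

lock_file = open("/tmp/run-cosmos.lock", "w")
try:
    # Non-blocking exclusive lock, like `flock -n` in the script above.
    fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
except BlockingIOError:
    sys.exit("Only one instance can run at one time.")
```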
global/overlay/usr/local/libexec/cosmos-cron-wrapper (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/usr/bin/env bash

test -f /etc/no-automatic-cosmos && exit 0

RUN_COSMOS='/usr/local/bin/run-cosmos'
SCRIPTHERDER_CMD=''

if [ -x /usr/local/bin/scriptherder ]; then
    SCRIPTHERDER_CMD='/usr/local/bin/scriptherder --mode wrap --syslog --name cosmos --'
fi

exec ${SCRIPTHERDER_CMD} ${RUN_COSMOS} "$@"
@@ -42,7 +42,10 @@ if [ -f $CONFIG -o $LOCALCONFIG ]; then
         if [ "$src" != "$(git config remote.origin.url)" ]; then
             git config remote.origin.url $src
         fi
-        git pull -q
+        # Support master branch being renamed to main
+        git branch --all | grep -q '^[[:space:]]*remotes/origin/main$' && git checkout main
+        # Update repo and clean out any local inconsistencies
+        git pull -q || (git fetch && git reset --hard)
     else
         continue
     fi
@@ -1,13 +1,15 @@
 #!/bin/sh
 
+set -e
+
 if [ "x$COSMOS_VERBOSE" = "xy" ]; then
     args="--verbose --show_diff"
 else
     args="--logdest=syslog"
 fi
 
-if [ -f /usr/bin/puppet -a -d /etc/puppet/manifests ]; then
-    for m in `find /etc/puppet/manifests -name \*.pp`; do
+if [ -f /usr/bin/puppet ] && [ -d /etc/puppet/manifests ]; then
+    find /etc/puppet/manifests -name \*.pp | while read -r m; do
         test "x$COSMOS_VERBOSE" = "xy" && echo "$0: Applying Puppet manifest $m"
         puppet apply $args $m
     done
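The new while-read loop applies every manifest that find produces. An equivalent loop in Python, shown only for comparison (paths and flags copied from the script; the sorting is an assumption):

```python
import subprocess
from pathlib import Path

# Apply each Puppet manifest under /etc/puppet/manifests, as the script above does.
for manifest in sorted(Path("/etc/puppet/manifests").rglob("*.pp")):
    subprocess.run(["puppet", "apply", "--logdest=syslog", str(manifest)], check=True)
```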
|
@ -15,5 +15,9 @@ if ! test -d "$MODEL_OVERLAY"; then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -d "$MODEL_OVERLAY/root" ]; then
|
if [ -d "$MODEL_OVERLAY/root" ]; then
|
||||||
chmod -v 0700 "$MODEL_OVERLAY"/root
|
args=""
|
||||||
|
if [ "x$COSMOS_VERBOSE" = "xy" ]; then
|
||||||
|
args="-v"
|
||||||
|
fi
|
||||||
|
chmod ${args} 0700 "$MODEL_OVERLAY"/root
|
||||||
fi
|
fi
|
||||||
|
|
global/pre-tasks.d/040hiera-eyaml (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/bin/sh
#
# Set up eyaml for Hiera
#

set -e

EYAMLDIR=/etc/hiera/eyaml

vendor=$(lsb_release -is)
version=$(lsb_release -rs)
# eyaml is only used on Ubuntu 20.04 and newer, and Debian 11 and newer (earlier OSes use hiera-gpg instead)
test "${vendor}" = "Ubuntu" && dpkg --compare-versions "${version}" "lt" "18.04" && exit 0
test "${vendor}" = "Debian" && dpkg --compare-versions "${version}" "lt" "10" && exit 0

stamp="$COSMOS_BASE/stamps/hiera-eyaml-v01.stamp"

test -f "$stamp" && exit 0

if [ ! -f /usr/bin/eyaml ] || [ ! -d /usr/share/doc/yaml-mode ]; then
    apt-get update
    apt-get -y install hiera-eyaml yaml-mode
fi

if [ ! -f ${EYAMLDIR}/public_certkey.pkcs7.pem ] || [ ! -f ${EYAMLDIR}/private_key.pkcs7.pem ]; then
    # hiera-eyaml wants a certificate and public key, not just a public key oddly enough
    echo "$0: Generating eyaml key in ${EYAMLDIR} - this might take a while..."
    mkdir -p /etc/hiera/eyaml
    openssl req -x509 -newkey rsa:4096 -keyout ${EYAMLDIR}/private_key.pkcs7.pem \
        -out ${EYAMLDIR}/public_certkey.pkcs7.pem -days 3653 -nodes -sha256 \
        -subj "/C=SE/O=SUNET/OU=EYAML/CN=$(hostname)"
    rm -f ${EYAMLDIR}/public_key.pkcs7.pem  # cleanup
fi

mkdir -p "$(dirname "${stamp}")"
touch "$stamp"
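Both pre-tasks gate on the distribution release with dpkg --compare-versions. The same decision expressed with pkg_resources.parse_version (which this commit already uses elsewhere); the thresholds are copied from the scripts, and the helper name is made up:

```python
from pkg_resources import parse_version

def uses_eyaml(vendor: str, version: str) -> bool:
    """True when the host should get hiera-eyaml rather than hiera-gpg."""
    if vendor == "Ubuntu":
        return parse_version(version) >= parse_version("18.04")
    if vendor == "Debian":
        return parse_version(version) >= parse_version("10")
    return False

print(uses_eyaml("Ubuntu", "22.04"))  # True
print(uses_eyaml("Debian", "9"))      # False
```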
@@ -9,12 +9,21 @@ set -e
 GNUPGHOME=/etc/hiera/gpg
 export GNUPGHOME
 
+vendor=$(lsb_release -is)
+version=$(lsb_release -rs)
+# If the OS is Ubuntu 18.04 or newer, or Debian 10 or newer, we don't need to do anything (those use eyaml instead)
+test "${vendor}" = "Ubuntu" && dpkg --compare-versions "${version}" "ge" "18.04" && exit 0
+test "${vendor}" = "Debian" && dpkg --compare-versions "${version}" "ge" "10" && exit 0
+
+stamp="$COSMOS_BASE/stamps/hiera-gpg-v01.stamp"
+
+test -f "$stamp" && exit 0
+
 if [ ! -f /usr/lib/ruby/vendor_ruby/gpgme.rb ]; then
     apt-get update
     apt-get -y install ruby-gpgme
 fi
 
 
 if [ ! -s $GNUPGHOME/secring.gpg ]; then
 
     if [ "x$1" != "x--force" ]; then

@@ -35,18 +44,21 @@ if [ ! -s $GNUPGHOME/secring.gpg ]; then
     chmod 700 $GNUPGHOME
 
     TMPFILE=$(mktemp /tmp/hiera-gpg.XXXXXX)
-    cat > $TMPFILE <<EOF
+    cat > "$TMPFILE" <<EOF
 %echo Generating a default key
 Key-Type: default
 Subkey-Type: default
 Name-Real: Cosmos Puppet
 Name-Comment: Hiera GPG key
-Name-Email: root@`hostname --fqdn`
+Name-Email: root@$(hostname --fqdn)
 Expire-Date: 0
 # Do a commit here, so that we can later print "done" :-)
 %commit
 %echo done
 EOF
-    gpg2 --batch --gen-key $TMPFILE
+    gpg2 --batch --gen-key "$TMPFILE"
-    rm -f $TMPFILE
+    rm -f "$TMPFILE"
 fi
+
+mkdir -p "$(dirname "${stamp}")"
+touch "$stamp"
@@ -17,15 +17,14 @@ PUPPET_ARGS=${PUPPET_ARGS-"--verbose"}
 
 # Check if cosmos or puppet is already running on host
 echo "Checking if puppet or cosmos is already running..."
-ssh root@$HOSTNAME ps aux | egrep -v "grep|edit-secrets|gpg-agent" | egrep -q "cosmos|puppet"
+ssh root@"$HOSTNAME" ps aux | grep -Ev "grep|edit-secrets|gpg-agent" | grep -Eq "cosmos|puppet"
 
 if [ $? -eq 1 ]
 then
     echo "Copying files to host..."
-    rsync -av --exclude '*~' global/overlay/etc/puppet/cosmos-rules.yaml root@$HOSTNAME:/etc/puppet/cosmos-rules.yaml
+    rsync -av --exclude '*~' global/overlay/etc/puppet/cosmos-rules.yaml root@"$HOSTNAME":/etc/puppet/cosmos-rules.yaml
-    rsync -av --exclude '*~' global/overlay/etc/puppet/manifests/cosmos-site.pp root@$HOSTNAME:/etc/puppet/manifests/cosmos-site.pp
+    rsync -av --exclude '*~' global/overlay/etc/puppet/manifests/cosmos-site.pp root@"$HOSTNAME":/etc/puppet/manifests/cosmos-site.pp
-    rsync -av --exclude '*~' global/overlay/etc/puppet/cosmos-db.yaml root@$HOSTNAME:/etc/puppet/cosmos-db.yaml
-    rsync -av --exclude '*~' global/overlay/etc/hiera/data/common.yaml root@$HOSTNAME:/etc/hiera/data/common.yaml
+    rsync -av --exclude '*~' global/overlay/etc/hiera/data/common.yaml root@"$HOSTNAME":/etc/hiera/data/common.yaml
 
     # Test if the user has symlinked puppet-sunet correctly
     # by first checking if the link exits and then whether

@@ -37,7 +36,7 @@ then
     fi
 
     echo "Running puppet apply..."
-    ssh root@$HOSTNAME /usr/bin/puppet apply $PUPPET_ARGS /etc/puppet/manifests/cosmos-site.pp
+    ssh root@"$HOSTNAME" /usr/bin/puppet apply $PUPPET_ARGS /etc/puppet/manifests/cosmos-site.pp
 else
     echo "Cosmos or puppet already running. Exiting."
     exit 1
@@ -75,6 +75,29 @@ if grep -q '^# en_US.UTF-8 UTF-8$' $locale_gen_file; then
     locale-gen
 fi
 
+if [ "$(lsb_release -is)" == "Debian" ]; then
+    interfaces_file='/etc/network/interfaces.d/50-cloud-init'
+
+    if [ -f "${interfaces_file}" ]; then
+        interface_string='iface ens3 inet6 dhcp'
+        accept_ra_string=' accept_ra 2'
+
+        if ! grep -qPz "${interface_string}\n${accept_ra_string}" ${interfaces_file} ; then
+
+            # By default net.ipv6.conf.ens3.accept_ra is set to 1 which
+            # makes the kernel throw away the IPv6 route when
+            # net.ipv6.conf.all.forwarding is set to 1 by our service for
+            # Docker.
+            echo "Configuring interfaces to always accept Router Advertisements even with IP Forwarding enabled"
+            sed -i -r "s/(${interface_string})/\1\n${accept_ra_string}/" ${interfaces_file}
+        else
+            echo "WARN: Configuration already applied or no match for \"${interface_string}\" in ${interfaces_file}"
+        fi
+    else
+        echo "WARN: ${interfaces_file} not found. File renamed in this image?"
+    fi
+fi
+
 DEBIAN_FRONTEND="noninteractive" apt-get -y update
 DEBIAN_FRONTEND="noninteractive" apt-get -o Dpkg::Options::="--force-confnew" --fix-broken --assume-yes dist-upgrade
 reboot