Diffstat (limited to 'filter_plugins')
-rw-r--r--  filter_plugins/oo_filters.py         501
-rw-r--r--  filter_plugins/oo_zabbix_filters.py   51
-rw-r--r--  filter_plugins/openshift_master.py   536
3 files changed, 1035 insertions, 53 deletions
diff --git a/filter_plugins/oo_filters.py b/filter_plugins/oo_filters.py
index a57b0f895..3dc3f2fe9 100644
--- a/filter_plugins/oo_filters.py
+++ b/filter_plugins/oo_filters.py
@@ -1,37 +1,43 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
-'''
+"""
Custom filters for use in openshift-ansible
-'''
+"""
from ansible import errors
from operator import itemgetter
+import OpenSSL.crypto
+import os
import pdb
import re
import json
+import yaml
+from ansible.utils.unicode import to_unicode
-
+# Disabling too-many-public-methods, since filter methods are necessarily
+# public
+# pylint: disable=too-many-public-methods
class FilterModule(object):
- ''' Custom ansible filters '''
+ """ Custom ansible filters """
@staticmethod
def oo_pdb(arg):
- ''' This pops you into a pdb instance where arg is the data passed in
+ """ This pops you into a pdb instance where arg is the data passed in
from the filter.
Ex: "{{ hostvars | oo_pdb }}"
- '''
+ """
pdb.set_trace()
return arg
@staticmethod
def get_attr(data, attribute=None):
- ''' This looks up dictionary attributes of the form a.b.c and returns
+ """ This looks up dictionary attributes of the form a.b.c and returns
the value.
Ex: data = {'a': {'b': {'c': 5}}}
attribute = "a.b.c"
returns 5
- '''
+ """
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
@@ -43,16 +49,16 @@ class FilterModule(object):
@staticmethod
def oo_flatten(data):
- ''' This filter plugin will flatten a list of lists
- '''
- if not issubclass(type(data), list):
+ """ This filter plugin will flatten a list of lists
+ """
+ if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to flatten a List")
return [item for sublist in data for item in sublist]
@staticmethod
def oo_collect(data, attribute=None, filters=None):
- ''' This takes a list of dict and collects all attributes specified into a
+ """ This takes a list of dict and collects all attributes specified into a
list. If filter is specified then we will include all items that
match _ALL_ of filters. If a dict entry is missing the key in a
filter it will be excluded from the match.
@@ -64,15 +70,15 @@ class FilterModule(object):
attribute = 'a'
filters = {'z': 'z'}
returns [1, 2, 3]
- '''
- if not issubclass(type(data), list):
+ """
+ if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to filter on a List")
if not attribute:
raise errors.AnsibleFilterError("|failed expects attribute to be set")
if filters is not None:
- if not issubclass(type(filters), dict):
+ if not isinstance(filters, dict):
raise errors.AnsibleFilterError("|failed expects filter to be a"
" dict")
retval = [FilterModule.get_attr(d, attribute) for d in data if (
@@ -84,16 +90,16 @@ class FilterModule(object):
@staticmethod
def oo_select_keys_from_list(data, keys):
- ''' This returns a list, which contains the value portions for the keys
+ """ This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
- '''
+ """
- if not issubclass(type(data), list):
+ if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
- if not issubclass(type(keys), list):
+ if not isinstance(keys, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
@@ -103,16 +109,16 @@ class FilterModule(object):
@staticmethod
def oo_select_keys(data, keys):
- ''' This returns a list, which contains the value portions for the keys
+ """ This returns a list, which contains the value portions for the keys
Ex: data = { 'a':1, 'b':2, 'c':3 }
keys = ['a', 'c']
returns [1, 3]
- '''
+ """
- if not issubclass(type(data), dict):
+ if not isinstance(data, dict):
raise errors.AnsibleFilterError("|failed expects to filter on a dict")
- if not issubclass(type(keys), list):
+ if not isinstance(keys, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
# Gather up the values for the list of keys passed in
@@ -122,13 +128,13 @@ class FilterModule(object):
@staticmethod
def oo_prepend_strings_in_list(data, prepend):
- ''' This takes a list of strings and prepends a string to each item in the
+ """ This takes a list of strings and prepends a string to each item in the
list
Ex: data = ['cart', 'tree']
prepend = 'apple-'
returns ['apple-cart', 'apple-tree']
- '''
- if not issubclass(type(data), list):
+ """
+ if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not all(isinstance(x, basestring) for x in data):
raise errors.AnsibleFilterError("|failed expects first param is a list"
@@ -138,10 +144,10 @@ class FilterModule(object):
@staticmethod
def oo_combine_key_value(data, joiner='='):
- '''Take a list of dict in the form of { 'key': 'value'} and
+ """Take a list of dict in the form of { 'key': 'value'} and
arrange them as a list of strings ['key=value']
- '''
- if not issubclass(type(data), list):
+ """
+ if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
rval = []
@@ -152,20 +158,20 @@ class FilterModule(object):
@staticmethod
def oo_combine_dict(data, in_joiner='=', out_joiner=' '):
- '''Take a dict in the form of { 'key': 'value', 'key': 'value' } and
+ """Take a dict in the form of { 'key': 'value', 'key': 'value' } and
arrange them as a string 'key=value key=value'
- '''
- if not issubclass(type(data), dict):
+ """
+ if not isinstance(data, dict):
raise errors.AnsibleFilterError("|failed expects first param is a dict")
return out_joiner.join([in_joiner.join([k, v]) for k, v in data.items()])
@staticmethod
def oo_ami_selector(data, image_name):
- ''' This takes a list of amis and an image name and attempts to return
+ """ This takes a list of amis and an image name and attempts to return
the latest ami.
- '''
- if not issubclass(type(data), list):
+ """
+ if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects first param is a list")
if not data:
@@ -181,7 +187,7 @@ class FilterModule(object):
@staticmethod
def oo_ec2_volume_definition(data, host_type, docker_ephemeral=False):
- ''' This takes a dictionary of volume definitions and returns a valid ec2
+ """ This takes a dictionary of volume definitions and returns a valid ec2
volume definition based on the host_type and the values in the
dictionary.
The dictionary should look similar to this:
@@ -189,7 +195,11 @@ class FilterModule(object):
{ 'root':
{ 'volume_size': 10, 'device_type': 'gp2',
'iops': 500
- }
+ },
+ 'docker':
+ { 'volume_size': 40, 'device_type': 'gp2',
+ 'iops': 500, 'ephemeral': 'true'
+ }
},
'node':
{ 'root':
@@ -202,8 +212,8 @@ class FilterModule(object):
}
}
}
- '''
- if not issubclass(type(data), dict):
+ """
+ if not isinstance(data, dict):
raise errors.AnsibleFilterError("|failed expects first param is a dict")
if host_type not in ['master', 'node', 'etcd']:
raise errors.AnsibleFilterError("|failed expects etcd, master or node"
@@ -214,7 +224,7 @@ class FilterModule(object):
root_vol['delete_on_termination'] = True
if root_vol['device_type'] != 'io1':
root_vol.pop('iops', None)
- if host_type == 'node':
+ if host_type in ['master', 'node'] and 'docker' in data[host_type]:
docker_vol = data[host_type]['docker']
docker_vol['device_name'] = '/dev/xvdb'
docker_vol['delete_on_termination'] = True
@@ -225,7 +235,7 @@ class FilterModule(object):
docker_vol.pop('delete_on_termination', None)
docker_vol['ephemeral'] = 'ephemeral0'
return [root_vol, docker_vol]
- elif host_type == 'etcd':
+ elif host_type == 'etcd' and 'etcd' in data[host_type]:
etcd_vol = data[host_type]['etcd']
etcd_vol['device_name'] = '/dev/xvdb'
etcd_vol['delete_on_termination'] = True
@@ -236,13 +246,28 @@ class FilterModule(object):
@staticmethod
def oo_split(string, separator=','):
- ''' This splits the input string into a list
- '''
+ """ This splits the input string into a list
+ """
return string.split(separator)
@staticmethod
+ def oo_haproxy_backend_masters(hosts):
+ """ This takes an array of dicts and returns an array of dicts
+ to be used as a backend for the haproxy role
+ """
+ servers = []
+ for idx, host_info in enumerate(hosts):
+ server = dict(name="master%s" % idx)
+ server_ip = host_info['openshift']['common']['ip']
+ server_port = host_info['openshift']['master']['api_port']
+ server['address'] = "%s:%s" % (server_ip, server_port)
+ server['opts'] = 'check'
+ servers.append(server)
+ return servers
+
+ @staticmethod
def oo_filter_list(data, filter_attr=None):
- ''' This returns a list, which contains all items where filter_attr
+ """ This returns a list, which contains all items where filter_attr
evaluates to true
Ex: data = [ { a: 1, b: True },
{ a: 3, b: False },
@@ -250,19 +275,81 @@ class FilterModule(object):
filter_attr = 'b'
returns [ { a: 1, b: True },
{ a: 5, b: True } ]
- '''
- if not issubclass(type(data), list):
+ """
+ if not isinstance(data, list):
raise errors.AnsibleFilterError("|failed expects to filter on a list")
- if not issubclass(type(filter_attr), str):
- raise errors.AnsibleFilterError("|failed expects filter_attr is a str")
+ if not isinstance(filter_attr, basestring):
+ raise errors.AnsibleFilterError("|failed expects filter_attr is a str or unicode")
# Gather up the values for the list of keys passed in
- return [x for x in data if x[filter_attr]]
+ return [x for x in data if x.has_key(filter_attr) and x[filter_attr]]
+
+ @staticmethod
+ def oo_nodes_with_label(nodes, label, value=None):
+ """ Filters a list of nodes by label and value (if provided)
+
+ It handles labels that are in the following variables by priority:
+ openshift_node_labels, cli_openshift_node_labels, openshift['node']['labels']
+
+ Examples:
+ data = ['a': {'openshift_node_labels': {'color': 'blue', 'size': 'M'}},
+ 'b': {'openshift_node_labels': {'color': 'green', 'size': 'L'}},
+ 'c': {'openshift_node_labels': {'size': 'S'}}]
+ label = 'color'
+ returns = ['a': {'openshift_node_labels': {'color': 'blue', 'size': 'M'}},
+ 'b': {'openshift_node_labels': {'color': 'green', 'size': 'L'}}]
+
+ data = ['a': {'openshift_node_labels': {'color': 'blue', 'size': 'M'}},
+ 'b': {'openshift_node_labels': {'color': 'green', 'size': 'L'}},
+ 'c': {'openshift_node_labels': {'size': 'S'}}]
+ label = 'color'
+ value = 'green'
+ returns = ['b': {'openshift_node_labels': {'color': 'green', 'size': 'L'}}]
+
+ Args:
+ nodes (list[dict]): list of node to node variables
+ label (str): label to filter `nodes` by
+ value (Optional[str]): value of `label` to filter by. Defaults
+ to None.
+
+ Returns:
+ list[dict]: nodes filtered by label and value (if provided)
+ """
+ if not isinstance(nodes, list):
+ raise errors.AnsibleFilterError("failed expects to filter on a list")
+ if not isinstance(label, basestring):
+ raise errors.AnsibleFilterError("failed expects label to be a string")
+ if value is not None and not isinstance(value, basestring):
+ raise errors.AnsibleFilterError("failed expects value to be a string")
+
+ def label_filter(node):
+ """ filter function for testing if node should be returned """
+ if not isinstance(node, dict):
+ raise errors.AnsibleFilterError("failed expects to filter on a list of dicts")
+ if 'openshift_node_labels' in node:
+ labels = node['openshift_node_labels']
+ elif 'cli_openshift_node_labels' in node:
+ labels = node['cli_openshift_node_labels']
+ elif 'openshift' in node and 'node' in node['openshift'] and 'labels' in node['openshift']['node']:
+ labels = node['openshift']['node']['labels']
+ else:
+ return False
+
+ if isinstance(labels, basestring):
+ labels = yaml.safe_load(labels)
+ if not isinstance(labels, dict):
+ raise errors.AnsibleFilterError(
+ "failed expected node labels to be a dict or serializable to a dict"
+ )
+ return label in labels and (value is None or labels[label] == value)
+
+ return [n for n in nodes if label_filter(n)]
+
@staticmethod
def oo_parse_heat_stack_outputs(data):
- ''' Formats the HEAT stack output into a usable form
+ """ Formats the HEAT stack output into a usable form
The goal is to transform something like this:
@@ -301,7 +388,7 @@ class FilterModule(object):
"value_B2"
]
}
- '''
+ """
# Extract the “outputs” JSON snippet from the pretty-printed array
in_outputs = False
@@ -327,8 +414,304 @@ class FilterModule(object):
return revamped_outputs
+ @staticmethod
+ # pylint: disable=too-many-branches
+ def oo_parse_named_certificates(certificates, named_certs_dir, internal_hostnames):
+ """ Parses names from list of certificate hashes.
+
+ Ex: certificates = [{ "certfile": "/root/custom1.crt",
+ "keyfile": "/root/custom1.key" },
+ { "certfile": "custom2.crt",
+ "keyfile": "custom2.key" }]
+
+ returns [{ "certfile": "/etc/origin/master/named_certificates/custom1.crt",
+ "keyfile": "/etc/origin/master/named_certificates/custom1.key",
+ "names": [ "public-master-host.com",
+ "other-master-host.com" ] },
+ { "certfile": "/etc/origin/master/named_certificates/custom2.crt",
+ "keyfile": "/etc/origin/master/named_certificates/custom2.key",
+ "names": [ "some-hostname.com" ] }]
+ """
+ if not isinstance(named_certs_dir, basestring):
+ raise errors.AnsibleFilterError("|failed expects named_certs_dir is str or unicode")
+
+ if not isinstance(internal_hostnames, list):
+ raise errors.AnsibleFilterError("|failed expects internal_hostnames is list")
+
+ for certificate in certificates:
+ if 'names' in certificate.keys():
+ continue
+ else:
+ certificate['names'] = []
+
+ if not os.path.isfile(certificate['certfile']) or not os.path.isfile(certificate['keyfile']):
+ raise errors.AnsibleFilterError("|certificate and/or key does not exist '%s', '%s'" %
+ (certificate['certfile'], certificate['keyfile']))
+
+ try:
+ st_cert = open(certificate['certfile'], 'rt').read()
+ cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, st_cert)
+ certificate['names'].append(str(cert.get_subject().commonName.decode()))
+ for i in range(cert.get_extension_count()):
+ if cert.get_extension(i).get_short_name() == 'subjectAltName':
+ for name in str(cert.get_extension(i)).replace('DNS:', '').split(', '):
+ certificate['names'].append(name)
+ except:
+ raise errors.AnsibleFilterError(("|failed to parse certificate '%s', " % certificate['certfile'] +
+ "please specify certificate names in host inventory"))
+
+ certificate['names'] = [name for name in certificate['names'] if name not in internal_hostnames]
+ certificate['names'] = list(set(certificate['names']))
+ if not certificate['names']:
+ raise errors.AnsibleFilterError(("|failed to parse certificate '%s' or " % certificate['certfile'] +
+ "detected a collision with internal hostname, please specify " +
+ "certificate names in host inventory"))
+
+ for certificate in certificates:
+ # Update paths for configuration
+ certificate['certfile'] = os.path.join(named_certs_dir, os.path.basename(certificate['certfile']))
+ certificate['keyfile'] = os.path.join(named_certs_dir, os.path.basename(certificate['keyfile']))
+ return certificates
+
+ @staticmethod
+ def oo_pretty_print_cluster(data):
+ """ Read a subset of hostvars and build a summary of the cluster
+ in the following layout:
+
+"c_id": {
+ "master": {
+ "default": [
+ { "name": "c_id-master-12345", "public IP": "172.16.0.1", "private IP": "192.168.0.1" }
+ ]
+ },
+ "node": {
+ "infra": [
+ { "name": "c_id-node-infra-23456", "public IP": "172.16.0.2", "private IP": "192.168.0.2" }
+ ],
+ "compute": [
+ { "name": "c_id-node-compute-23456", "public IP": "172.16.0.3", "private IP": "192.168.0.3" },
+ ...
+ ]
+ }
+}
+ """
+
+ def _get_tag_value(tags, key):
+ """ Extract values of a map implemented as a set.
+ Ex: tags = { 'tag_foo_value1', 'tag_bar_value2', 'tag_baz_value3' }
+ key = 'bar'
+ returns 'value2'
+ """
+ for tag in tags:
+ if tag[:len(key)+4] == 'tag_' + key:
+ return tag[len(key)+5:]
+ raise KeyError(key)
+
+ def _add_host(clusters,
+ clusterid,
+ host_type,
+ sub_host_type,
+ host):
+ """ Add a new host in the clusters data structure """
+ if clusterid not in clusters:
+ clusters[clusterid] = {}
+ if host_type not in clusters[clusterid]:
+ clusters[clusterid][host_type] = {}
+ if sub_host_type not in clusters[clusterid][host_type]:
+ clusters[clusterid][host_type][sub_host_type] = []
+ clusters[clusterid][host_type][sub_host_type].append(host)
+
+ clusters = {}
+ for host in data:
+ try:
+ _add_host(clusters=clusters,
+ clusterid=_get_tag_value(host['group_names'], 'clusterid'),
+ host_type=_get_tag_value(host['group_names'], 'host-type'),
+ sub_host_type=_get_tag_value(host['group_names'], 'sub-host-type'),
+ host={'name': host['inventory_hostname'],
+ 'public IP': host['ansible_ssh_host'],
+ 'private IP': host['ansible_default_ipv4']['address']})
+ except KeyError:
+ pass
+ return clusters
+
+ @staticmethod
+ def oo_generate_secret(num_bytes):
+ """ generate a session secret """
+
+ if not isinstance(num_bytes, int):
+ raise errors.AnsibleFilterError("|failed expects num_bytes is int")
+
+ secret = os.urandom(num_bytes)
+ return secret.encode('base-64').strip()
+
+ @staticmethod
+ def to_padded_yaml(data, level=0, indent=2, **kw):
+ """ returns a yaml snippet padded to match the indent level you specify """
+ if data in [None, ""]:
+ return ""
+
+ try:
+ transformed = yaml.safe_dump(data, indent=indent, allow_unicode=True, default_flow_style=False, **kw)
+ padded = "\n".join([" " * level * indent + line for line in transformed.splitlines()])
+ return to_unicode("\n{0}".format(padded))
+ except Exception as my_e:
+ raise errors.AnsibleFilterError('Failed to convert: %s', my_e)
+
+ @staticmethod
+ def oo_openshift_env(hostvars):
+ ''' Return facts which begin with "openshift_"
+ Ex: hostvars = {'openshift_fact': 42,
+ 'theyre_taking_the_hobbits_to': 'isengard'}
+ returns = {'openshift_fact': 42}
+ '''
+ if not issubclass(type(hostvars), dict):
+ raise errors.AnsibleFilterError("|failed expects hostvars is a dict")
+
+ facts = {}
+ regex = re.compile('^openshift_.*')
+ for key in hostvars:
+ if regex.match(key):
+ facts[key] = hostvars[key]
+ return facts
+
+ @staticmethod
+ # pylint: disable=too-many-branches
+ def oo_persistent_volumes(hostvars, groups, persistent_volumes=None):
+ """ Generate list of persistent volumes based on oo_openshift_env
+ storage options set in host variables.
+ """
+ if not issubclass(type(hostvars), dict):
+ raise errors.AnsibleFilterError("|failed expects hostvars is a dict")
+ if not issubclass(type(groups), dict):
+ raise errors.AnsibleFilterError("|failed expects groups is a dict")
+ if persistent_volumes != None and not issubclass(type(persistent_volumes), list):
+ raise errors.AnsibleFilterError("|failed expects persistent_volumes is a list")
+
+ if persistent_volumes == None:
+ persistent_volumes = []
+ for component in hostvars['openshift']['hosted']:
+ kind = hostvars['openshift']['hosted'][component]['storage']['kind']
+ create_pv = hostvars['openshift']['hosted'][component]['storage']['create_pv']
+ if kind != None and create_pv:
+ if kind == 'nfs':
+ host = hostvars['openshift']['hosted'][component]['storage']['host']
+ if host == None:
+ if len(groups['oo_nfs_to_config']) > 0:
+ host = groups['oo_nfs_to_config'][0]
+ else:
+ raise errors.AnsibleFilterError("|failed no storage host detected")
+ directory = hostvars['openshift']['hosted'][component]['storage']['nfs']['directory']
+ volume = hostvars['openshift']['hosted'][component]['storage']['volume']['name']
+ path = directory + '/' + volume
+ size = hostvars['openshift']['hosted'][component]['storage']['volume']['size']
+ access_modes = hostvars['openshift']['hosted'][component]['storage']['access_modes']
+ persistent_volume = dict(
+ name="{0}-volume".format(volume),
+ capacity=size,
+ access_modes=access_modes,
+ storage=dict(
+ nfs=dict(
+ server=host,
+ path=path)))
+ persistent_volumes.append(persistent_volume)
+ else:
+ msg = "|failed invalid storage kind '{0}' for component '{1}'".format(
+ kind,
+ component)
+ raise errors.AnsibleFilterError(msg)
+ return persistent_volumes
+
+ @staticmethod
+ def oo_persistent_volume_claims(hostvars, persistent_volume_claims=None):
+ """ Generate list of persistent volume claims based on oo_openshift_env
+ storage options set in host variables.
+ """
+ if not issubclass(type(hostvars), dict):
+ raise errors.AnsibleFilterError("|failed expects hostvars is a dict")
+ if persistent_volume_claims != None and not issubclass(type(persistent_volume_claims), list):
+ raise errors.AnsibleFilterError("|failed expects persistent_volume_claims is a list")
+
+ if persistent_volume_claims == None:
+ persistent_volume_claims = []
+ for component in hostvars['openshift']['hosted']:
+ kind = hostvars['openshift']['hosted'][component]['storage']['kind']
+ create_pv = hostvars['openshift']['hosted'][component]['storage']['create_pv']
+ if kind != None and create_pv:
+ volume = hostvars['openshift']['hosted'][component]['storage']['volume']['name']
+ size = hostvars['openshift']['hosted'][component]['storage']['volume']['size']
+ access_modes = hostvars['openshift']['hosted'][component]['storage']['access_modes']
+ persistent_volume_claim = dict(
+ name="{0}-claim".format(volume),
+ capacity=size,
+ access_modes=access_modes)
+ persistent_volume_claims.append(persistent_volume_claim)
+ return persistent_volume_claims
+
+ @staticmethod
+ def oo_31_rpm_rename_conversion(rpms, openshift_version=None):
+ """ Filters a list of 3.0 rpms and return the corresponding 3.1 rpms
+ names with proper version (if provided)
+
+ If 3.1 rpms are passed in they will only be augmented with the
+ correct version. This is important for hosts that are running both
+ Masters and Nodes.
+ """
+ if not isinstance(rpms, list):
+ raise errors.AnsibleFilterError("failed expects to filter on a list")
+ if openshift_version is not None and not isinstance(openshift_version, basestring):
+ raise errors.AnsibleFilterError("failed expects openshift_version to be a string")
+
+ rpms_31 = []
+ for rpm in rpms:
+ if not 'atomic' in rpm:
+ rpm = rpm.replace("openshift", "atomic-openshift")
+ if openshift_version:
+ rpm = rpm + openshift_version
+ rpms_31.append(rpm)
+
+ return rpms_31
+
+ @staticmethod
+ def oo_pods_match_component(pods, deployment_type, component):
+ """ Filters a list of Pods and returns the ones matching the deployment_type and component
+ """
+ if not isinstance(pods, list):
+ raise errors.AnsibleFilterError("failed expects to filter on a list")
+ if not isinstance(deployment_type, basestring):
+ raise errors.AnsibleFilterError("failed expects deployment_type to be a string")
+ if not isinstance(component, basestring):
+ raise errors.AnsibleFilterError("failed expects component to be a string")
+
+ image_prefix = 'openshift/origin-'
+ if deployment_type in ['enterprise', 'online', 'openshift-enterprise']:
+ image_prefix = 'openshift3/ose-'
+ elif deployment_type == 'atomic-enterprise':
+ image_prefix = 'aep3_beta/aep-'
+
+ matching_pods = []
+ image_regex = image_prefix + component + r'.*'
+ for pod in pods:
+ for container in pod['spec']['containers']:
+ if re.search(image_regex, container['image']):
+ matching_pods.append(pod)
+ break # stop here, don't add a pod more than once
+
+ return matching_pods
+
+ @staticmethod
+ def oo_get_hosts_from_hostvars(hostvars, hosts):
+ """ Return a list of hosts from hostvars """
+ retval = []
+ for host in hosts:
+ try:
+ retval.append(hostvars[host])
+ except errors.AnsibleError as _:
+ # host does not exist
+ pass
+
+ return retval
+
def filters(self):
- ''' returns a mapping of filters to methods '''
+ """ returns a mapping of filters to methods """
return {
"oo_select_keys": self.oo_select_keys,
"oo_select_keys_from_list": self.oo_select_keys_from_list,
@@ -342,5 +725,17 @@ class FilterModule(object):
"oo_combine_dict": self.oo_combine_dict,
"oo_split": self.oo_split,
"oo_filter_list": self.oo_filter_list,
- "oo_parse_heat_stack_outputs": self.oo_parse_heat_stack_outputs
+ "oo_parse_heat_stack_outputs": self.oo_parse_heat_stack_outputs,
+ "oo_parse_named_certificates": self.oo_parse_named_certificates,
+ "oo_haproxy_backend_masters": self.oo_haproxy_backend_masters,
+ "oo_pretty_print_cluster": self.oo_pretty_print_cluster,
+ "oo_generate_secret": self.oo_generate_secret,
+ "to_padded_yaml": self.to_padded_yaml,
+ "oo_nodes_with_label": self.oo_nodes_with_label,
+ "oo_openshift_env": self.oo_openshift_env,
+ "oo_persistent_volumes": self.oo_persistent_volumes,
+ "oo_persistent_volume_claims": self.oo_persistent_volume_claims,
+ "oo_31_rpm_rename_conversion": self.oo_31_rpm_rename_conversion,
+ "oo_pods_match_component": self.oo_pods_match_component,
+ "oo_get_hosts_from_hostvars": self.oo_get_hosts_from_hostvars,
}
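
The oo_filters.py additions above are plain static methods, so they can be exercised directly from a Python 2 shell. The following usage sketch (not part of the commit) drives oo_nodes_with_label and oo_haproxy_backend_masters by hand; it assumes it is run from the repository root with the module's own imports available (an Ansible release that still ships ansible.utils.unicode, plus PyOpenSSL and PyYAML), and the addresses and labels below are invented for illustration.

    import sys
    sys.path.insert(0, 'filter_plugins')  # assumes the repository root is the cwd
    from oo_filters import FilterModule

    # Hostvars-shaped dicts, following the layouts described in the docstrings.
    nodes = [
        {'openshift_node_labels': {'region': 'infra', 'zone': 'default'}},
        {'openshift_node_labels': {'region': 'primary', 'zone': 'east'}},
        {'openshift': {'node': {'labels': {'region': 'primary'}}}},
    ]
    # Keep only nodes whose 'region' label equals 'primary' (the last two).
    primary_nodes = FilterModule.oo_nodes_with_label(nodes, 'region', 'primary')
    assert len(primary_nodes) == 2

    masters = [
        {'openshift': {'common': {'ip': '10.0.0.10'}, 'master': {'api_port': '8443'}}},
        {'openshift': {'common': {'ip': '10.0.0.11'}, 'master': {'api_port': '8443'}}},
    ]
    # Builds haproxy backend entries such as
    # {'name': 'master0', 'address': '10.0.0.10:8443', 'opts': 'check'}.
    backends = FilterModule.oo_haproxy_backend_masters(masters)
    assert backends[0]['address'] == '10.0.0.10:8443'
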
diff --git a/filter_plugins/oo_zabbix_filters.py b/filter_plugins/oo_zabbix_filters.py
index c44b874e8..fcfe43777 100644
--- a/filter_plugins/oo_zabbix_filters.py
+++ b/filter_plugins/oo_zabbix_filters.py
@@ -95,6 +95,54 @@ class FilterModule(object):
return data
+ @staticmethod
+ def itservice_results_builder(data, clusters, keys):
+ '''Take a list of dict results,
+ loop through each result and create a hash
+ of:
+ [{clusterid: cluster1, key: 111 }]
+ '''
+ r_list = []
+ for cluster in clusters:
+ for results in data:
+ if cluster == results['item'][0]:
+ results = results['results']
+ if results and len(results) > 0 and all([results[0].has_key(_key) for _key in keys]):
+ tmp = {}
+ tmp['clusterid'] = cluster
+ for key in keys:
+ tmp[key] = results[0][key]
+ r_list.append(tmp)
+
+ return r_list
+
+ @staticmethod
+ def itservice_dependency_builder(data, cluster):
+ '''Take a list of dependency dicts and, for the given cluster,
+ build a list of hard dependencies of the form:
+ [{'name': 'clusterid - description', 'dep_type': 'hard'}]
+ '''
+ r_list = []
+ for dep in data:
+ if cluster == dep['clusterid']:
+ r_list.append({'name': '%s - %s' % (dep['clusterid'], dep['description']), 'dep_type': 'hard'})
+
+ return r_list
+
+ @staticmethod
+ def itservice_dep_builder_list(data):
+ '''Take a list of names and build a list of hard
+ dependencies of the form:
+ [{'name': 'dep_name', 'dep_type': 'hard'}]
+ '''
+ r_list = []
+ for dep in data:
+ r_list.append({'name': '%s' % dep, 'dep_type': 'hard'})
+
+ return r_list
+
def filters(self):
''' returns a mapping of filters to methods '''
return {
@@ -105,4 +153,7 @@ class FilterModule(object):
"create_data": self.create_data,
"oo_build_zabbix_collect": self.oo_build_zabbix_collect,
"oo_remove_attr_from_list_dict": self.oo_remove_attr_from_list_dict,
+ "itservice_results_builder": self.itservice_results_builder,
+ "itservice_dependency_builder": self.itservice_dependency_builder,
+ "itservice_dep_builder_list": self.itservice_dep_builder_list,
}
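
The itservice_* filters added above reshape Zabbix API query results before IT services are created. A rough sketch of the shapes involved, assuming oo_zabbix_filters.py imports cleanly the same way as in the earlier sketch; the ids and descriptions are invented.

    import sys
    sys.path.insert(0, 'filter_plugins')  # assumes the repository root is the cwd
    from oo_zabbix_filters import FilterModule

    data = [
        {'item': ['cluster1'], 'results': [{'serviceid': '111', 'status': '0'}]},
        {'item': ['cluster2'], 'results': []},  # dropped: no result rows
    ]
    # Keep the first result row per cluster, but only when it carries every
    # requested key.
    rows = FilterModule.itservice_results_builder(data, ['cluster1', 'cluster2'],
                                                  ['serviceid'])
    # rows == [{'clusterid': 'cluster1', 'serviceid': '111'}]

    deps = [{'clusterid': 'cluster1', 'description': 'etcd'}]
    # Hard dependencies named "<clusterid> - <description>" for one cluster.
    hard_deps = FilterModule.itservice_dependency_builder(deps, 'cluster1')
    # hard_deps == [{'name': 'cluster1 - etcd', 'dep_type': 'hard'}]
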
diff --git a/filter_plugins/openshift_master.py b/filter_plugins/openshift_master.py
new file mode 100644
index 000000000..3a1d77f53
--- /dev/null
+++ b/filter_plugins/openshift_master.py
@@ -0,0 +1,536 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# vim: expandtab:tabstop=4:shiftwidth=4
+'''
+Custom filters for use in openshift-master
+'''
+import copy
+import sys
+import yaml
+
+from ansible import errors
+from ansible.runner.filter_plugins.core import bool as ansible_bool
+
+
+class IdentityProviderBase(object):
+ """ IdentityProviderBase
+
+ Attributes:
+ name (str): Identity provider Name
+ login (bool): Is this identity provider a login provider?
+ challenge (bool): Is this identity provider a challenge provider?
+ provider (dict): Provider specific config
+ _idp (dict): internal copy of the IDP dict passed in
+ _required (list): List of lists of strings for required attributes
+ _optional (list): List of lists of strings for optional attributes
+ _allow_additional (bool): Does this provider support attributes
+ not in _required and _optional
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ # disabling this check since the number of instance attributes are
+ # necessary for this class
+ # pylint: disable=too-many-instance-attributes
+ def __init__(self, api_version, idp):
+ if api_version not in ['v1']:
+ raise errors.AnsibleFilterError("|failed api version {0} unknown".format(api_version))
+
+ self._idp = copy.deepcopy(idp)
+
+ if 'name' not in self._idp:
+ raise errors.AnsibleFilterError("|failed identity provider missing a name")
+
+ if 'kind' not in self._idp:
+ raise errors.AnsibleFilterError("|failed identity provider missing a kind")
+
+ self.name = self._idp.pop('name')
+ self.login = ansible_bool(self._idp.pop('login', False))
+ self.challenge = ansible_bool(self._idp.pop('challenge', False))
+ self.provider = dict(apiVersion=api_version, kind=self._idp.pop('kind'))
+
+ mm_keys = ('mappingMethod', 'mapping_method')
+ mapping_method = None
+ for key in mm_keys:
+ if key in self._idp:
+ mapping_method = self._idp[key]
+ if mapping_method is None:
+ mapping_method = self.get_default('mappingMethod')
+ self.mapping_method = mapping_method
+
+ valid_mapping_methods = ['add', 'claim', 'generate', 'lookup']
+ if self.mapping_method not in valid_mapping_methods:
+ raise errors.AnsibleFilterError("|failed unkown mapping method "
+ "for provider {0}".format(self.__class__.__name__))
+ self._required = []
+ self._optional = []
+ self._allow_additional = True
+
+ @staticmethod
+ def validate_idp_list(idp_list):
+ ''' validates a list of idps '''
+ login_providers = [x.name for x in idp_list if x.login]
+ if len(login_providers) > 1:
+ raise errors.AnsibleFilterError("|failed multiple providers are "
+ "not allowed for login. login "
+ "providers: {0}".format(', '.join(login_providers)))
+
+ names = [x.name for x in idp_list]
+ if len(set(names)) != len(names):
+ raise errors.AnsibleFilterError("|failed more than one provider configured with the same name")
+
+ for idp in idp_list:
+ idp.validate()
+
+ def validate(self):
+ ''' validate an instance of this idp class '''
+ pass
+
+ @staticmethod
+ def get_default(key):
+ ''' get a default value for a given key '''
+ if key == 'mappingMethod':
+ return 'claim'
+ else:
+ return None
+
+ def set_provider_item(self, items, required=False):
+ ''' set a provider item based on the list of item names provided. '''
+ for item in items:
+ provider_key = items[0]
+ if item in self._idp:
+ self.provider[provider_key] = self._idp.pop(item)
+ break
+ else:
+ default = self.get_default(provider_key)
+ if default is not None:
+ self.provider[provider_key] = default
+ elif required:
+ raise errors.AnsibleFilterError("|failed provider {0} missing "
+ "required key {1}".format(self.__class__.__name__, provider_key))
+
+ def set_provider_items(self):
+ ''' set the provider items for this idp '''
+ for items in self._required:
+ self.set_provider_item(items, True)
+ for items in self._optional:
+ self.set_provider_item(items)
+ if self._allow_additional:
+ for key in self._idp.keys():
+ self.set_provider_item([key])
+ else:
+ if len(self._idp) > 0:
+ raise errors.AnsibleFilterError("|failed provider {0} "
+ "contains unknown keys "
+ "{1}".format(self.__class__.__name__, ', '.join(self._idp.keys())))
+
+ def to_dict(self):
+ ''' translate this idp to a dictionary '''
+ return dict(name=self.name, challenge=self.challenge,
+ login=self.login, mappingMethod=self.mapping_method,
+ provider=self.provider)
+
+
+class LDAPPasswordIdentityProvider(IdentityProviderBase):
+ """ LDAPPasswordIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+ self._required += [['attributes'], ['url'], ['insecure']]
+ self._optional += [['ca'],
+ ['bindDN', 'bind_dn'],
+ ['bindPassword', 'bind_password']]
+
+ self._idp['insecure'] = ansible_bool(self._idp.pop('insecure', False))
+
+ if 'attributes' in self._idp and 'preferred_username' in self._idp['attributes']:
+ pref_user = self._idp['attributes'].pop('preferred_username')
+ self._idp['attributes']['preferredUsername'] = pref_user
+
+ def validate(self):
+ ''' validate this idp instance '''
+ IdentityProviderBase.validate(self)
+ if not isinstance(self.provider['attributes'], dict):
+ raise errors.AnsibleFilterError("|failed attributes for provider "
+ "{0} must be a dictionary".format(self.__class__.__name__))
+
+ attrs = ['id', 'email', 'name', 'preferredUsername']
+ for attr in attrs:
+ if attr in self.provider['attributes'] and not isinstance(self.provider['attributes'][attr], list):
+ raise errors.AnsibleFilterError("|failed {0} attribute for "
+ "provider {1} must be a list".format(attr, self.__class__.__name__))
+
+ unknown_attrs = set(self.provider['attributes'].keys()) - set(attrs)
+ if len(unknown_attrs) > 0:
+ raise errors.AnsibleFilterError("|failed provider {0} has unknown "
+ "attributes: {1}".format(self.__class__.__name__, ', '.join(unknown_attrs)))
+
+
+class KeystonePasswordIdentityProvider(IdentityProviderBase):
+ """ KeystoneIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+ self._required += [['url'], ['domainName', 'domain_name']]
+ self._optional += [['ca'], ['certFile', 'cert_file'], ['keyFile', 'key_file']]
+
+
+class RequestHeaderIdentityProvider(IdentityProviderBase):
+ """ RequestHeaderIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+ self._required += [['headers']]
+ self._optional += [['challengeURL', 'challenge_url'],
+ ['loginURL', 'login_url'],
+ ['clientCA', 'client_ca']]
+
+ def validate(self):
+ ''' validate this idp instance '''
+ IdentityProviderBase.validate(self)
+ if not isinstance(self.provider['headers'], list):
+ raise errors.AnsibleFilterError("|failed headers for provider {0} "
+ "must be a list".format(self.__class__.__name__))
+
+
+class AllowAllPasswordIdentityProvider(IdentityProviderBase):
+ """ AllowAllPasswordIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+
+
+class DenyAllPasswordIdentityProvider(IdentityProviderBase):
+ """ DenyAllPasswordIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+
+
+class HTPasswdPasswordIdentityProvider(IdentityProviderBase):
+ """ HTPasswdPasswordIdentity
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+ self._required += [['file', 'filename', 'fileName', 'file_name']]
+
+ @staticmethod
+ def get_default(key):
+ if key == 'file':
+ return '/etc/origin/htpasswd'
+ else:
+ return IdentityProviderBase.get_default(key)
+
+
+class BasicAuthPasswordIdentityProvider(IdentityProviderBase):
+ """ BasicAuthPasswordIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+ self._required += [['url']]
+ self._optional += [['ca'], ['certFile', 'cert_file'], ['keyFile', 'key_file']]
+
+
+class IdentityProviderOauthBase(IdentityProviderBase):
+ """ IdentityProviderOauthBase
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderBase.__init__(self, api_version, idp)
+ self._allow_additional = False
+ self._required += [['clientID', 'client_id'], ['clientSecret', 'client_secret']]
+
+ def validate(self):
+ ''' validate this idp instance '''
+ IdentityProviderBase.validate(self)
+ if self.challenge:
+ raise errors.AnsibleFilterError("|failed provider {0} does not "
+ "allow challenge authentication".format(self.__class__.__name__))
+
+
+class OpenIDIdentityProvider(IdentityProviderOauthBase):
+ """ OpenIDIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderOauthBase.__init__(self, api_version, idp)
+ self._required += [['claims'], ['urls']]
+ self._optional += [['ca'],
+ ['extraScopes'],
+ ['extraAuthorizeParameters']]
+ if 'claims' in self._idp and 'preferred_username' in self._idp['claims']:
+ pref_user = self._idp['claims'].pop('preferred_username')
+ self._idp['claims']['preferredUsername'] = pref_user
+ if 'urls' in self._idp and 'user_info' in self._idp['urls']:
+ user_info = self._idp['urls'].pop('user_info')
+ self._idp['urls']['userInfo'] = user_info
+ if 'extra_scopes' in self._idp:
+ self._idp['extraScopes'] = self._idp.pop('extra_scopes')
+ if 'extra_authorize_parameters' in self._idp:
+ self._idp['extraAuthorizeParameters'] = self._idp.pop('extra_authorize_parameters')
+
+ if 'extraAuthorizeParameters' in self._idp:
+ if 'include_granted_scopes' in self._idp['extraAuthorizeParameters']:
+ val = ansible_bool(self._idp['extraAuthorizeParameters'].pop('include_granted_scopes'))
+ self._idp['extraAuthorizeParameters']['include_granted_scopes'] = val
+
+
+ def validate(self):
+ ''' validate this idp instance '''
+ IdentityProviderOauthBase.validate(self)
+ if not isinstance(self.provider['claims'], dict):
+ raise errors.AnsibleFilterError("|failed claims for provider {0} "
+ "must be a dictionary".format(self.__class__.__name__))
+
+ if 'extraScopes' in self.provider and not isinstance(self.provider['extraScopes'], list):
+ raise errors.AnsibleFilterError("|failed extraScopes for provider "
+ "{0} must be a list".format(self.__class__.__name__))
+ if ('extraAuthorizeParameters' in self.provider
+ and not isinstance(self.provider['extraAuthorizeParameters'], dict)):
+ raise errors.AnsibleFilterError("|failed extraAuthorizeParameters "
+ "for provider {0} must be a dictionary".format(self.__class__.__name__))
+
+ required_claims = ['id']
+ optional_claims = ['email', 'name', 'preferredUsername']
+ all_claims = required_claims + optional_claims
+
+ for claim in required_claims:
+ if claim in required_claims and claim not in self.provider['claims']:
+ raise errors.AnsibleFilterError("|failed {0} claim missing "
+ "for provider {1}".format(claim, self.__class__.__name__))
+
+ for claim in all_claims:
+ if claim in self.provider['claims'] and not isinstance(self.provider['claims'][claim], list):
+ raise errors.AnsibleFilterError("|failed {0} claims for "
+ "provider {1} must be a list".format(claim, self.__class__.__name__))
+
+ unknown_claims = set(self.provider['claims'].keys()) - set(all_claims)
+ if len(unknown_claims) > 0:
+ raise errors.AnsibleFilterError("|failed provider {0} has unknown "
+ "claims: {1}".format(self.__class__.__name__, ', '.join(unknown_claims)))
+
+ if not isinstance(self.provider['urls'], dict):
+ raise errors.AnsibleFilterError("|failed urls for provider {0} "
+ "must be a dictionary".format(self.__class__.__name__))
+
+ required_urls = ['authorize', 'token']
+ optional_urls = ['userInfo']
+ all_urls = required_urls + optional_urls
+
+ for url in required_urls:
+ if url not in self.provider['urls']:
+ raise errors.AnsibleFilterError("|failed {0} url missing for "
+ "provider {1}".format(url, self.__class__.__name__))
+
+ unknown_urls = set(self.provider['urls'].keys()) - set(all_urls)
+ if len(unknown_urls) > 0:
+ raise errors.AnsibleFilterError("|failed provider {0} has unknown "
+ "urls: {1}".format(self.__class__.__name__, ', '.join(unknown_urls)))
+
+
+class GoogleIdentityProvider(IdentityProviderOauthBase):
+ """ GoogleIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderOauthBase.__init__(self, api_version, idp)
+ self._optional += [['hostedDomain', 'hosted_domain']]
+
+
+class GitHubIdentityProvider(IdentityProviderOauthBase):
+ """ GitHubIdentityProvider
+
+ Attributes:
+
+ Args:
+ api_version(str): OpenShift config version
+ idp (dict): idp config dict
+
+ Raises:
+ AnsibleFilterError:
+ """
+ def __init__(self, api_version, idp):
+ IdentityProviderOauthBase.__init__(self, api_version, idp)
+ self._optional += [['organizations']]
+
+
+class FilterModule(object):
+ ''' Custom ansible filters for use by the openshift_master role'''
+
+ @staticmethod
+ def translate_idps(idps, api_version):
+ ''' Translates a list of dictionaries into a valid identityProviders config '''
+ idp_list = []
+
+ if not isinstance(idps, list):
+ raise errors.AnsibleFilterError("|failed expects to filter on a list of identity providers")
+ for idp in idps:
+ if not isinstance(idp, dict):
+ raise errors.AnsibleFilterError("|failed identity providers must be a list of dictionaries")
+
+ cur_module = sys.modules[__name__]
+ idp_class = getattr(cur_module, idp['kind'], None)
+ idp_inst = idp_class(api_version, idp) if idp_class is not None else IdentityProviderBase(api_version, idp)
+ idp_inst.set_provider_items()
+ idp_list.append(idp_inst)
+
+
+ IdentityProviderBase.validate_idp_list(idp_list)
+ return yaml.safe_dump([idp.to_dict() for idp in idp_list], default_flow_style=False)
+
+ @staticmethod
+ def validate_pcs_cluster(data, masters=None):
+ ''' Validates output from "pcs status", ensuring that each master
+ provided is online.
+ Ex: data = ('...',
+ 'PCSD Status:',
+ 'master1.example.com: Online',
+ 'master2.example.com: Online',
+ 'master3.example.com: Online',
+ '...')
+ masters = ['master1.example.com',
+ 'master2.example.com',
+ 'master3.example.com']
+ returns True
+ '''
+ if not issubclass(type(data), basestring):
+ raise errors.AnsibleFilterError("|failed expects data is a string or unicode")
+ if not issubclass(type(masters), list):
+ raise errors.AnsibleFilterError("|failed expects masters is a list")
+ valid = True
+ for master in masters:
+ if "{0}: Online".format(master) not in data:
+ valid = False
+ return valid
+
+ @staticmethod
+ def certificates_to_synchronize(hostvars):
+ ''' Return certificates to synchronize based on facts. '''
+ if not issubclass(type(hostvars), dict):
+ raise errors.AnsibleFilterError("|failed expects hostvars is a dict")
+ certs = ['admin.crt',
+ 'admin.key',
+ 'admin.kubeconfig',
+ 'master.kubelet-client.crt',
+ 'master.kubelet-client.key',
+ 'openshift-registry.crt',
+ 'openshift-registry.key',
+ 'openshift-registry.kubeconfig',
+ 'openshift-router.crt',
+ 'openshift-router.key',
+ 'openshift-router.kubeconfig',
+ 'serviceaccounts.private.key',
+ 'serviceaccounts.public.key']
+ if bool(hostvars['openshift']['common']['version_gte_3_1_or_1_1']):
+ certs += ['master.proxy-client.crt',
+ 'master.proxy-client.key']
+ if not bool(hostvars['openshift']['common']['version_gte_3_2_or_1_2']):
+ certs += ['openshift-master.crt',
+ 'openshift-master.key',
+ 'openshift-master.kubeconfig']
+ return certs
+
+
+ def filters(self):
+ ''' returns a mapping of filters to methods '''
+ return {"translate_idps": self.translate_idps,
+ "validate_pcs_cluster": self.validate_pcs_cluster,
+ "certificates_to_synchronize": self.certificates_to_synchronize}