---
# fail if we don't have an endpoint for ES to connect to?

- name: Set default image variables based on openshift_deployment_type
  include_vars: "{{ var_file_name }}"
  with_first_found:
    - "{{ openshift_deployment_type }}.yml"
    - "default_images.yml"
  loop_control:
    loop_var: var_file_name

- name: Set kibana image facts
  set_fact:
    openshift_logging_kibana_image_prefix: "{{ openshift_logging_kibana_image_prefix | default(__openshift_logging_kibana_image_prefix) }}"
    openshift_logging_kibana_image_version: "{{ openshift_logging_kibana_image_version | default(__openshift_logging_kibana_image_version) }}"
    openshift_logging_kibana_proxy_image_prefix: "{{ openshift_logging_kibana_proxy_image_prefix | default(__openshift_logging_kibana_proxy_image_prefix) }}"
    openshift_logging_kibana_proxy_image_version: "{{ openshift_logging_kibana_proxy_image_version | default(__openshift_logging_kibana_proxy_image_version) }}"

- include_tasks: determine_version.yaml

# allow passing in a tempdir
- name: Create temp directory for doing work in
  command: mktemp -d /tmp/openshift-logging-ansible-XXXXXX
  register: mktemp
  changed_when: false

- set_fact:
    tempdir: "{{ mktemp.stdout }}"

# This may not be necessary in this role
- name: Create templates subdirectory
  file:
    state: directory
    path: "{{ tempdir }}/templates"
    mode: 0755
  changed_when: false

# we want to make sure we have all the necessary components here

# create service account
- name: Create Kibana service account
  oc_serviceaccount:
    state: present
    name: "aggregated-logging-kibana"
    namespace: "{{ openshift_logging_namespace }}"
    image_pull_secrets: "{{ openshift_logging_image_pull_secret }}"
  when: openshift_logging_image_pull_secret != ''

- name: Create Kibana service account
  oc_serviceaccount:
    state: present
    name: "aggregated-logging-kibana"
    namespace: "{{ openshift_logging_namespace }}"
  when:
    - openshift_logging_image_pull_secret == ''

# ops deployments get a '-ops' suffix on the generated object names
- set_fact:
    kibana_name: "{{ 'logging-kibana' ~ ( (openshift_logging_kibana_ops_deployment | default(false) | bool) | ternary('-ops', '')) }}"
    kibana_component: "{{ 'kibana' ~ ( (openshift_logging_kibana_ops_deployment | default(false) | bool) | ternary('-ops', '')) }}"

# Check {{ generated_certs_dir }} for session_secret and oauth_secret
- name: Checking for session_secret
  stat:
    path: "{{ generated_certs_dir }}/session_secret"
  register: session_secret_file

- name: Checking for oauth_secret
  stat:
    path: "{{ generated_certs_dir }}/oauth_secret"
  register: oauth_secret_file

# gen session_secret if necessary
- name: Generate session secret
  copy:
    content: "{{ 200 | lib_utils_oo_random_word }}"
    dest: "{{ generated_certs_dir }}/session_secret"
  when:
    - not session_secret_file.stat.exists

# gen oauth_secret if necessary
- name: Generate oauth secret
  copy:
    content: "{{ 64 | lib_utils_oo_random_word }}"
    dest: "{{ generated_certs_dir }}/oauth_secret"
  when:
    - not oauth_secret_file.stat.exists

# slurp returns base64-encoded content; individual entries are pulled out later
# with the entry_from_named_pair filter using the 'name' given to each item
- name: Retrieving the cert to use when generating secrets for the logging components
  slurp:
    src: "{{ generated_certs_dir }}/{{ item.file }}"
  register: key_pairs
  with_items:
    - { name: "ca_file", file: "ca.crt" }
    - { name: "kibana_internal_key", file: "kibana-internal.key" }
    - { name: "kibana_internal_cert", file: "kibana-internal.crt" }
    - { name: "server_tls", file: "server-tls.json" }
    - { name: "session_secret", file: "session_secret" }
    - { name: "oauth_secret", file: "oauth_secret" }

# services
- name: Set {{ kibana_name }} service
  oc_service:
    state: present
    name: "{{ kibana_name }}"
    namespace: "{{ openshift_logging_kibana_namespace }}"
    selector:
      component: "{{ kibana_component }}"
      provider: openshift
    labels:
      logging-infra: 'support'
    ports:
      - port: 443
        targetPort: "oaproxy"

# create routes
# TODO: set up these certs differently?
- set_fact:
    kibana_key: "{{ lookup('file', openshift_logging_kibana_key) | b64encode }}"
  when: openshift_logging_kibana_key | trim | length > 0
  changed_when: false

- set_fact:
    kibana_cert: "{{ lookup('file', openshift_logging_kibana_cert) | b64encode }}"
  when: openshift_logging_kibana_cert | trim | length > 0
  changed_when: false

- set_fact:
    kibana_ca: "{{ lookup('file', openshift_logging_kibana_ca) | b64encode }}"
  when: openshift_logging_kibana_ca | trim | length > 0
  changed_when: false

# fall back to the generated CA when no custom CA was provided
- set_fact:
    kibana_ca: "{{ key_pairs | entry_from_named_pair('ca_file') }}"
  when: kibana_ca is not defined
  changed_when: false

- name: Generating Kibana route template
  template:
    src: "{{ __base_file_dir }}/route_reencrypt.j2"
    dest: "{{ tempdir }}/templates/kibana-route.yaml"
  vars:
    obj_name: "{{ kibana_name }}"
    route_host: "{{ openshift_logging_kibana_hostname }}"
    service_name: "{{ kibana_name }}"
    tls_key: "{{ kibana_key | default('') | b64decode }}"
    tls_cert: "{{ kibana_cert | default('') | b64decode }}"
    tls_ca_cert: "{{ kibana_ca | b64decode }}"
    tls_dest_ca_cert: "{{ key_pairs | entry_from_named_pair('ca_file') | b64decode }}"
    edge_term_policy: "{{ openshift_logging_kibana_edge_term_policy | default('') }}"
    labels:
      component: support
      logging-infra: support
      provider: openshift
  changed_when: false

# This currently has an issue if the host name changes
- name: Setting Kibana route
  oc_obj:
    state: present
    name: "{{ kibana_name }}"
    namespace: "{{ openshift_logging_namespace }}"
    kind: route
    files:
      - "{{ tempdir }}/templates/kibana-route.yaml"

# preserve list of current hostnames
- name: Get current oauthclient hostnames
  oc_obj:
    state: list
    name: kibana-proxy
    namespace: "{{ openshift_logging_namespace }}"
    kind: oauthclient
  register: oauth_client_list

- set_fact:
    proxy_hostnames: "{{ oauth_client_list.results.results[0].redirectURIs | default([]) + ['https://' ~ openshift_logging_kibana_hostname] }}"

# create oauth client
- name: Create oauth-client template
  template:
    src: "{{ __base_file_dir }}/oauth-client.j2"
    dest: "{{ tempdir }}/templates/oauth-client.yml"
  vars:
    kibana_hostnames: "{{ proxy_hostnames | unique }}"
    secret: "{{ key_pairs | entry_from_named_pair('oauth_secret') | b64decode }}"

- name: Set kibana-proxy oauth-client
  oc_obj:
    state: present
    name: "kibana-proxy"
    namespace: "{{ openshift_logging_namespace }}"
    kind: oauthclient
    files:
      - "{{ tempdir }}/templates/oauth-client.yml"
    delete_after: true

# create Kibana secret
- name: Set Kibana secret
  oc_secret:
    state: present
    name: "logging-kibana"
    namespace: "{{ openshift_logging_namespace }}"
    files:
      - name: ca
        path: "{{ generated_certs_dir }}/ca.crt"
      - name: key
        path: "{{ generated_certs_dir }}/system.logging.kibana.key"
      - name: cert
        path: "{{ generated_certs_dir }}/system.logging.kibana.crt"

# create Kibana-proxy secret
- name: Set Kibana Proxy secret
  oc_secret:
    state: present
    name: "logging-kibana-proxy"
    namespace: "{{ openshift_logging_namespace }}"
    # TODO: when possible to have both files and contents for oc_secret use this
    #files:
    #- name: server-key
    #  path: "{{ generated_certs_dir }}/kibana-internal.key"
    #- name: server-cert
    #  path: "{{ generated_certs_dir }}/kibana-internal.crt"
    #- name: server-tls.json
    #  path: "{{ generated_certs_dir }}/server-tls.json"
    contents:
      - path: oauth-secret
        data: "{{ key_pairs | entry_from_named_pair('oauth_secret') | b64decode }}"
      - path: session-secret
        data: "{{ key_pairs | entry_from_named_pair('session_secret') | b64decode }}"
      - path: server-key
        data: "{{ key_pairs | entry_from_named_pair('kibana_internal_key') | b64decode }}"
      - path: server-cert
        data: "{{ key_pairs | entry_from_named_pair('kibana_internal_cert') | b64decode }}"
      - path: server-tls.json
        data: "{{ key_pairs | entry_from_named_pair('server_tls') | b64decode }}"

# create Kibana DC
- name: Generate Kibana DC template
  template:
    src: "{{ __base_file_dir }}/kibana.j2"
    dest: "{{ tempdir }}/templates/kibana-dc.yaml"
  vars:
    component: "{{ kibana_component }}"
    logging_component: kibana
    deploy_name: "{{ kibana_name }}"
    image: "{{ openshift_logging_kibana_image_prefix }}logging-kibana:{{ openshift_logging_kibana_image_version }}"
    proxy_image: "{{ openshift_logging_kibana_proxy_image_prefix }}logging-auth-proxy:{{ openshift_logging_kibana_proxy_image_version }}"
    es_host: "{{ openshift_logging_kibana_es_host }}"
    es_port: "{{ openshift_logging_kibana_es_port }}"
    kibana_cpu_limit: "{{ openshift_logging_kibana_cpu_limit }}"
    kibana_cpu_request: "{{ openshift_logging_kibana_cpu_request | min_cpu(openshift_logging_kibana_cpu_limit | default(none)) }}"
    kibana_memory_limit: "{{ openshift_logging_kibana_memory_limit }}"
    kibana_proxy_cpu_limit: "{{ openshift_logging_kibana_proxy_cpu_limit }}"
    kibana_proxy_cpu_request: "{{ openshift_logging_kibana_proxy_cpu_request | min_cpu(openshift_logging_kibana_proxy_cpu_limit | default(none)) }}"
    kibana_proxy_memory_limit: "{{ openshift_logging_kibana_proxy_memory_limit }}"
    kibana_replicas: "{{ openshift_logging_kibana_replicas | default(1) }}"
    kibana_node_selector: "{{ openshift_logging_kibana_nodeselector | default({}) }}"
    kibana_env_vars: "{{ openshift_logging_kibana_env_vars | default({}) }}"

- name: Set Kibana DC
  oc_obj:
    state: present
    name: "{{ kibana_name }}"
    namespace: "{{ openshift_logging_namespace }}"
    kind: dc
    files:
      - "{{ tempdir }}/templates/kibana-dc.yaml"
    delete_after: true

# update master configs?

- name: Delete temp directory
  file:
    name: "{{ tempdir }}"
    state: absent
  changed_when: false