Merge pull request #20 from stelucz/add-enable-force-upload
Add enable_force_upload parameter
diff --git a/.kitchen.travis.yml b/.kitchen.travis.yml
new file mode 100644
index 0000000..b54d661
--- /dev/null
+++ b/.kitchen.travis.yml
@@ -0,0 +1,6 @@
+suites:
+
+ - name: <%= ENV['SUITE'] %>
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/<%= ENV['SUITE'] %>.sls
diff --git a/.kitchen.yml b/.kitchen.yml
index be889a8..01c5fda 100644
--- a/.kitchen.yml
+++ b/.kitchen.yml
@@ -15,39 +15,108 @@
grains:
noservices: True
dependencies:
+ - name: linux
+ repo: git
+ source: https://github.com/salt-formulas/salt-formula-linux
- name: keystone
repo: git
source: https://github.com/salt-formulas/salt-formula-keystone
state_top:
base:
"*":
+ - linux.system
- cinder
pillars:
top.sls:
base:
"*":
+ - linux_repo_openstack
- cinder
+ - release
+ release.sls:
+ cinder:
+ controller:
+ version: <%= ENV['OS_VERSION'] || 'ocata' %>
+ volume:
+ version: <%= ENV['OS_VERSION'] || 'ocata' %>
+ pillars-from-files:
+ linux_repo_openstack.sls: tests/pillar/repo_mcp_openstack_<%= ENV['OS_VERSION'] || 'ocata' %>.sls
verifier:
name: inspec
sudo: true
platforms:
- - name: ubuntu-trusty
+ - name: <%=ENV['PLATFORM'] || 'ubuntu-xenial'%>
driver_config:
- image: trevorj/salty-whales:trusty
- platform: ubuntu
-
- - name: ubuntu-xenial
- driver_config:
- image: trevorj/salty-whales:xenial
+ image: <%=ENV['PLATFORM'] || 'trevorj/salty-whales:xenial'%>
platform: ubuntu
suites:
- - name: <%=ENV['SUITE'] || 'ceph_single'%>
+ - name: ceph_single
provisioner:
pillars-from-files:
- cinder.sls: tests/pillar/<%=ENV['SUITE'] || 'ceph_single'%>.sls
+ cinder.sls: tests/pillar/ceph_single.sls
+
+ - name: control_cluster
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/control_cluster.sls
+ pillars:
+ release.sls:
+ cinder:
+ volume:
+ enabled: false
+
+ - name: control_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/control_single.sls
+ pillars:
+ release.sls:
+ cinder:
+ volume:
+ enabled: false
+
+ - name: gpfs_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/gpfs_single.sls
+
+ - name: hp3par_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/hp3par_single.sls
+
+ - name: lefthand_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/lefthand_single.sls
+
+ - name: solidfire_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/solidfire_single.sls
+
+ - name: storwize_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/storwize_single.sls
+
+ - name: volume_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/volume_single.sls
+
+ - name: vsp_single
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/vsp_single.sls
+
+ - name: volume_single_barbican
+ provisioner:
+ pillars-from-files:
+ cinder.sls: tests/pillar/volume_single_barbican.sls
# vim: ft=yaml sw=2 ts=2 sts=2 tw=125
diff --git a/.travis.yml b/.travis.yml
index c1872fe..62f155b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,23 +17,35 @@
- bundle install
env:
- - SUITE=ceph_single
- - SUITE=control_cluster
- - SUITE=control_single
- - SUITE=gpfs_single
- - SUITE=hp3par_single
- - SUITE=lefthand_single
- - SUITE=solidfire_single
- - SUITE=storwize_single
- - SUITE=volume_single
- - SUITE=vsp_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=ceph_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=ceph_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=control_cluster
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=control_cluster
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=control_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=control_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=gpfs_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=gpfs_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=hp3par_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=hp3par_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=lefthand_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=lefthand_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=solidfire_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=solidfire_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=storwize_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=storwize_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=volume_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=volume_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=vsp_single
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=vsp_single
+ - PLATFORM=trevorj/salty-whales:trusty OS_VERSION=mitaka SUITE=volume_single_barbican
+ - PLATFORM=trevorj/salty-whales:xenial OS_VERSION=ocata SUITE=volume_single_barbican
before_script:
- set -o pipefail
- make test | tail
script:
- - test ! -e .kitchen.yml || bundle exec kitchen test -t tests/integration
+ - KITCHEN_LOCAL_YAML=.kitchen.travis.yml bundle exec kitchen test -t tests/integration
notifications:
webhooks:
diff --git a/README.rst b/README.rst
index 1403e36..0775ac7 100644
--- a/README.rst
+++ b/README.rst
@@ -20,6 +20,10 @@
version: juno
cinder_uid: 304
cinder_gid: 304
+ nas_secure_file_permissions: false
+ nas_secure_file_operations: false
+ cinder_internal_tenant_user_id: f46924c112a14c80ab0a24a613d95eef
+ cinder_internal_tenant_project_id: b7455b8974bb4064ad247c8f375eae6c
default_volume_type: 7k2SaS
enable_force_upload: true
availability_zone_fallback: True
@@ -59,6 +63,8 @@
audit:
enabled: false
osapi_max_limit: 500
+ barbican:
+ enabled: true
cinder:
volume:
@@ -66,6 +72,10 @@
version: juno
cinder_uid: 304
cinder_gid: 304
+ nas_secure_file_permissions: false
+ nas_secure_file_operations: false
+ cinder_internal_tenant_user_id: f46924c112a14c80ab0a24a613d95eef
+ cinder_internal_tenant_project_id: b7455b8974bb4064ad247c8f375eae6c
default_volume_type: 7k2SaS
enable_force_upload: true
database:
@@ -103,6 +113,8 @@
pool: SAS7K2
audit:
enabled: false
+ barbican:
+ enabled: true
Enable CORS parameters
@@ -155,6 +167,62 @@
virtual_host: '/openstack'
....
+
+**Client-side RabbitMQ TLS configuration.**
+
+|
+
+To enable TLS for oslo.messaging you need to provide the CA certificate.
+By default system-wide CA certs are used. Nothing should be specified except `ssl.enabled`.
+
+.. code-block:: yaml
+
+ cinder:
+ controller or volume:
+ ....
+ message_queue:
+ ssl:
+ enabled: True
+
+
+
+Use `cacert_file` option to specify the CA-cert file path explicitly:
+
+.. code-block:: yaml
+
+ cinder:
+ controller or volume:
+ ....
+ message_queue:
+ ssl:
+ enabled: True
+ cacert_file: /etc/ssl/rabbitmq-ca.pem
+
+To manage content of the `cacert_file` use the `cacert` option:
+
+.. code-block:: yaml
+
+ cinder:
+ controller or volume:
+ ....
+ message_queue:
+ ssl:
+ enabled: True
+ cacert: |
+
+ -----BEGIN CERTIFICATE-----
+ ...
+ -----END CERTIFICATE-----
+
+ cacert_file: /etc/openstack/rabbitmq-ca.pem
+
+
+Notice:
+ * The `message_queue.port` is set to **5671** (AMQPS) by default if `ssl.enabled=True`.
+ * Use `message_queue.ssl.version` if you need to specify the protocol version. By default TLSv1 is used for Python < 2.7.9 and TLSv1_2 for later versions.
+
+
+
Cinder setup with zeroing deleted volumes
.. code-block:: yaml
@@ -304,6 +372,8 @@
transport_type: https
lun_space_reservation: enabled
use_multipath_for_image_xfer: True
+ nas_secure_file_operations: false
+ nas_secure_file_permissions: false
devices:
- 172.18.1.2:/vol_1
- 172.18.1.2:/vol_2
@@ -607,6 +677,14 @@
key:
conn_speed: fibre-10G
+Enable Barbican integration
+
+.. code-block:: yaml
+
+ cinder:
+ controller:
+ barbican:
+ enabled: true
Documentation and Bugs
============================
diff --git a/_modules/cinderng.py b/_modules/cinderng.py
index cd71348..a5c7d42 100644
--- a/_modules/cinderng.py
+++ b/_modules/cinderng.py
@@ -1,12 +1,12 @@
# -*- coding: utf-8 -*-
import logging
-from functools import wraps
+
LOG = logging.getLogger(__name__)
# Import third party libs
HAS_CINDER = False
try:
- from cinderclient.v3 import client
+ from cinderclient.client import Client
HAS_CINDER = True
except ImportError:
pass
@@ -15,58 +15,76 @@
def __virtual__():
- '''
+ """
Only load this module if cinder
is installed on this minion.
- '''
+ """
if HAS_CINDER:
return 'cinderng'
return False
+
def _authng(profile=None):
- '''
+ """
Set up cinder credentials
- '''
+ """
credentials = {
'username': profile['user'],
'password': profile['password'],
'project_id': profile['project_id'],
- 'auth_url': profile['protocol'] + "://" + profile['host'] + ":" + str(profile['port']) + "/v3",
+ 'auth_url': "{}://{}:{}/v2.0".format(
+ profile['protocol'],
+ profile['host'],
+ profile['port']
+ ),
'endpoint_type': profile['endpoint_type'],
'certificate': profile['certificate'],
'region_name': profile['region_name']
}
return credentials
+
def create_conn(cred=None):
- '''
+ """
create connection
- '''
- nt = client.Client(username=cred['username'], api_key=cred['password'], project_id=cred['project_id'], auth_url=cred['auth_url'], endpoint_type=cred['endpoint_type'], cacert=cred['certificate'], region_name=cred['region_name'])
+ """
+ nt = Client(
+ '2',
+ username=cred['username'],
+ api_key=cred['password'],
+ project_id=cred['project_id'],
+ auth_url=cred['auth_url'],
+ endpoint_type=cred['endpoint_type'],
+ cacert=cred['certificate'],
+ region_name=cred['region_name']
+ )
return nt
+
def list_volumes(profile=None, **kwargs):
- '''
+ """
Return list of cinder volumes.
- '''
+ """
cred = _authng(profile)
nt = create_conn(cred)
return nt.volumes.list()
+
def list_volume_type(profile=None, **kwargs):
- '''
+ """
Return list of volume types
- '''
+ """
cred = _authng(profile)
nt = create_conn(cred)
return nt.volume_types.list()
+
def get_volume_type(type_name, profile=None, **kwargs):
- '''
+ """
Returns id of the specified volume type name
- '''
+ """
vt_id = None
- vt_list = list_volume_type(profile);
+ vt_list = list_volume_type(profile)
for vt in vt_list:
if vt.name == type_name:
vt_id = vt.id
@@ -82,10 +100,11 @@
else:
return
+
def create_volume_type(type_name, profile=None, **kwargs):
- '''
+ """
Create cinder volume type
- '''
+ """
vt = get_volume_type(type_name, profile)
if not vt:
cred = _authng(profile)
@@ -100,9 +119,9 @@
def get_keys_volume_type(type_name, profile=None, **kwargs):
- '''
+ """
Return extra specs of the specified volume type.
- '''
+ """
vt = get_volume_type(type_name, profile)
if vt:
@@ -113,10 +132,11 @@
else:
return
+
def set_keys_volume_type(type_name, keys={}, profile=None, **kwargs):
- '''
+ """
Set extra specs of the specified volume type.
- '''
+ """
set_keys = False
vt = get_volume_type(type_name, profile)
if vt:
diff --git a/_states/cinderng.py b/_states/cinderng.py
index e39a4d0..18a0979 100644
--- a/_states/cinderng.py
+++ b/_states/cinderng.py
@@ -1,29 +1,33 @@
# -*- coding: utf-8 -*-
-'''
+"""
Management of Cinder resources
===============================
:depends: - cinderclient Python module
-'''
+"""
+
import ast
import logging
-from functools import wraps
+
LOG = logging.getLogger(__name__)
def __virtual__():
- '''
+ """
Only load if python-cinderclient is present in __salt__
- '''
+ """
return 'cinderng'
+
def volume_type_present(name=None, profile=None):
- '''
+ """
Ensures that the specified volume type is present.
- '''
- ret = {'name': name,
- 'changes': {},
- 'result': True,
- 'comment': 'Volume type "{0}" already exists'.format(name)}
+ """
+ ret = {
+ 'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': 'Volume type "{0}" already exists'.format(name)
+ }
signal = __salt__['cinderng.create_volume_type'](name, profile)
if 'exists' in signal:
pass
@@ -31,71 +35,93 @@
ret['comment'] = 'Volume type {0} has been created'.format(name)
ret['changes']['Volume type'] = 'Created'
elif 'failed' in signal:
- ret = {'name': name,
- 'changes': {},
- 'result': False,
- 'comment': 'Volume type "{0}" failed to create'.format(name)}
+ ret = {
+ 'name': name,
+ 'changes': {},
+ 'result': False,
+ 'comment': 'Volume type "{0}" failed to create'.format(name)
+ }
return ret
+
def volume_type_key_present(name=None, key=None, value=None, profile=None):
- '''
+ """
Ensures that the extra specs are present on a volume type.
- '''
+ """
keys = "{u'" + key + "': u'" + value + "'}"
keys = ast.literal_eval(keys)
- ret = {'name': name,
- 'changes': {},
- 'result': True,
- 'comment': 'Volume type keys "{0}" in volume type "{1}" already exist'.format(keys, name)}
+ ret = {
+ 'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': 'Volume type keys "{0}" '
+ 'in volume type "{1}" already exist'.format(keys, name)
+ }
signal = __salt__['cinderng.set_keys_volume_type'](name, keys, profile)
if 'exist' in signal:
pass
elif 'updated' in signal:
- ret['comment'] = 'Volume type keys "{0}" in volume type "{1}" have been updated'.format(keys, name)
+ ret['comment'] = 'Volume type keys "{0}" in volume type "{1}" ' \
+ 'have been updated'.format(keys, name)
ret['changes']['Volume type keys'] = 'Updated'
elif 'failed' in signal:
- ret = {'name': name,
- 'changes': {},
- 'result': False,
- 'comment': 'Volume type keys "{0}" in volume type "{1}" failed to update'.format(keys, name)}
+ ret = {
+ 'name': name,
+ 'changes': {},
+ 'result': False,
+ 'comment': 'Volume type keys "{0}" in volume type "{1}" '
+ 'failed to update'.format(keys, name)
+ }
elif 'not found' in signal:
- ret = {'name': name,
- 'changes': {},
- 'result': False,
- 'comment': 'Volume type "{0}" was not found'.format(name)}
+ ret = {
+ 'name': name,
+ 'changes': {},
+ 'result': False,
+ 'comment': 'Volume type "{0}" was not found'.format(name)
+ }
return ret
def _already_exists(name, resource):
- changes_dict = {'name': name,
- 'changes': {},
- 'result': True}
- changes_dict['comment'] = \
- '{0} {1} already exists'.format(resource, name)
+ changes_dict = {
+ 'name': name,
+ 'changes': {},
+ 'result': True,
+ 'comment': '{0} {1} already exists'.format(resource, name)
+ }
return changes_dict
def _created(name, resource, resource_definition):
- changes_dict = {'name': name,
- 'changes': resource_definition,
- 'result': True,
- 'comment': '{0} {1} created'.format(resource, name)}
+ changes_dict = {
+ 'name': name,
+ 'changes': resource_definition,
+ 'result': True,
+ 'comment': '{0} {1} created'.format(resource, name)
+ }
return changes_dict
+
def _updated(name, resource, resource_definition):
- changes_dict = {'name': name,
- 'changes': resource_definition,
- 'result': True,
- 'comment': '{0} {1} tenant was updated'.format(resource, name)}
+ changes_dict = {
+ 'name': name,
+ 'changes': resource_definition,
+ 'result': True,
+ 'comment': '{0} {1} tenant was updated'.format(resource, name)
+ }
return changes_dict
+
def _update_failed(name, resource):
- changes_dict = {'name': name,
- 'changes': {},
- 'comment': '{0} {1} failed to update'.format(resource, name),
- 'result': False}
+ changes_dict = {
+ 'name': name,
+ 'changes': {},
+ 'comment': '{0} {1} failed to update'.format(resource, name),
+ 'result': False
+ }
return changes_dict
+
def _no_change(name, resource, test=False):
changes_dict = {'name': name,
'changes': {},
diff --git a/cinder/controller.sls b/cinder/controller.sls
index 8a810de..79678de 100644
--- a/cinder/controller.sls
+++ b/cinder/controller.sls
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import controller with context %}
+{%- from "cinder/map.jinja" import controller, system_cacerts_file with context %}
{%- if controller.get('enabled', False) %}
{%- set user = controller %}
@@ -46,7 +46,7 @@
{%- endfor %}
-{%- if controller.version == 'ocata' %}
+{%- if controller.version in ('ocata','pike') %}
/etc/apache2/conf-available/cinder-wsgi.conf:
file.managed:
@@ -63,6 +63,12 @@
- onlyif: /bin/false
{%- endif %}
- watch:
+ {%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
+ - file: rabbitmq_ca_cinder_controller
+ {%- endif %}
+ {%- if controller.database.get('ssl',{}).get('enabled', False) %}
+ - file: mysql_ca_cinder_controller
+ {%- endif %}
- file: /etc/cinder/cinder.conf
- file: /etc/cinder/api-paste.ini
- file: /etc/apache2/conf-available/cinder-wsgi.conf
@@ -77,6 +83,12 @@
- onlyif: /bin/false
{%- endif %}
- watch:
+ {%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
+ - file: rabbitmq_ca_cinder_controller
+ {%- endif %}
+ {%- if controller.database.get('ssl',{}).get('enabled', False) %}
+ - file: mysql_ca_cinder_controller
+ {%- endif %}
- file: /etc/cinder/cinder.conf
- file: /etc/cinder/api-paste.ini
@@ -102,6 +114,12 @@
- onlyif: /bin/false
{%- endif %}
- watch:
+ {%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
+ - file: rabbitmq_ca_cinder_controller
+ {%- endif %}
+ {%- if controller.database.get('ssl',{}).get('enabled', False) %}
+ - file: mysql_ca_cinder_controller
+ {%- endif %}
- file: /etc/cinder/cinder.conf
- file: /etc/cinder/api-paste.ini
@@ -114,9 +132,19 @@
- require:
- service: cinder_controller_services
-{# new way #}
{%- if not grains.get('noservices', False) %}
+{%- set identity = controller.identity %}
+{%- set credentials = {'host': identity.host,
+ 'user': identity.user,
+ 'password': identity.password,
+ 'project_id': identity.tenant,
+ 'port': identity.get('port', 35357),
+ 'protocol': identity.get('protocol', 'http'),
+ 'region_name': identity.get('region_name', 'RegionOne'),
+ 'endpoint_type': identity.get('endpoint_type', 'internalURL'),
+ 'certificate': identity.get('certificate', 'None')} %}
+
{%- for backend_name, backend in controller.get('backend', {}).iteritems() %}
{%- if backend.engine is defined and backend.engine == 'nfs' or (backend.engine == 'netapp' and backend.storage_protocol == 'nfs') %}
@@ -146,47 +174,25 @@
{%- endif %}
cinder_type_create_{{ backend_name }}:
- cmd.run:
- - name: "source /root/keystonerc; cinder type-create {{ backend.type_name }}"
- - unless: "source /root/keystonerc; cinder type-list | grep {{ backend.type_name }}"
- - shell: /bin/bash
+ cinderng.volume_type_present:
+ - name: {{ backend.type_name }}
+ - profile: {{ credentials }}
- require:
- service: cinder_controller_services
cinder_type_update_{{ backend_name }}:
- cmd.run:
- - name: "source /root/keystonerc; cinder type-key {{ backend.type_name }} set volume_backend_name={{ backend_name }}"
- - unless: "source /root/keystonerc; cinder extra-specs-list | grep \"{u'volume_backend_name': u'{{ backend_name }}'}\""
- - shell: /bin/bash
+ cinderng.volume_type_key_present:
+ - name: {{ backend.type_name }}
+ - key: volume_backend_name
+ - value: {{ backend_name }}
+ - profile: {{ credentials }}
- require:
- - cmd: cinder_type_create_{{ backend_name }}
+ - cinderng: cinder_type_create_{{ backend_name }}
{%- endfor %}
{%- endif %}
-{# old way #}
-
-{% for type in controller.get('types', []) %}
-
-cinder_type_create_{{ type.name }}:
- cmd.run:
- - name: "source /root/keystonerc; cinder type-create {{ type.name }}"
- - unless: "source /root/keystonerc; cinder type-list | grep {{ type.name }}"
- - shell: /bin/bash
- - require:
- - service: cinder_controller_services
-
-cinder_type_update_{{ type.name }}:
- cmd.run:
- - name: "source /root/keystonerc; cinder type-key {{ type.name }} set volume_backend_name={{ type.get('backend', type.name) }}"
- - unless: "source /root/keystonerc; cinder extra-specs-list | grep \"{u'volume_backend_name': u'{{ type.get('backend', type.name) }}'}\""
- - shell: /bin/bash
- - require:
- - cmd: cinder_type_create_{{ type.name }}
-
-{% endfor %}
-
{%- if controller.backup.engine != None %}
cinder_backup_packages:
@@ -198,9 +204,42 @@
- names: {{ controller.backup.services }}
- enable: true
- watch:
+ {%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
+ - file: rabbitmq_ca_cinder_controller
+ {%- endif %}
- file: /etc/cinder/cinder.conf
- file: /etc/cinder/api-paste.ini
{%- endif %}
+{%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
+rabbitmq_ca_cinder_controller:
+{%- if controller.message_queue.ssl.cacert is defined %}
+ file.managed:
+ - name: {{ controller.message_queue.ssl.cacert_file }}
+ - contents_pillar: cinder:controller:message_queue:ssl:cacert
+ - mode: 0444
+ - makedirs: true
+{%- else %}
+ file.exists:
+ - name: {{ controller.message_queue.ssl.get('cacert_file', system_cacerts_file) }}
+{%- endif %}
+{%- endif %}
+
+{%- if controller.database.get('ssl',{}).get('enabled', False) %}
+mysql_ca_cinder_controller:
+{%- if controller.database.ssl.cacert is defined %}
+ file.managed:
+ - name: {{ controller.database.ssl.cacert_file }}
+ - contents_pillar: cinder:controller:database:ssl:cacert
+ - mode: 0444
+ - makedirs: true
+
+{%- else %}
+ file.exists:
+ - name: {{ controller.database.ssl.get('cacert_file', system_cacerts_file) }}
+{%- endif %}
+{%- endif %}
+
+
{%- endif %}
diff --git a/cinder/files/backend/_ceph.conf b/cinder/files/backend/_ceph.conf
index 524bb6b..4e06c34 100644
--- a/cinder/files/backend/_ceph.conf
+++ b/cinder/files/backend/_ceph.conf
@@ -17,7 +17,7 @@
# Path to the ceph configuration file (string value)
#rbd_ceph_conf=
-rbd_ceph_conf=/etc/ceph/ceph.conf
+rbd_ceph_conf={{ backend.get('rbd_ceph_conf','/etc/ceph/ceph.conf') }}
# Flatten volumes created from snapshots to remove dependency
# from volume to snapshot (boolean value)
diff --git a/cinder/files/backend/_netapp.conf b/cinder/files/backend/_netapp.conf
index f755b43..5555cf6 100644
--- a/cinder/files/backend/_netapp.conf
+++ b/cinder/files/backend/_netapp.conf
@@ -30,3 +30,9 @@
netapp_lun_space_reservation={{ backend.get('lun_space_reservation', 'disabled') }}
use_multipath_for_image_xfer={{ backend.get('use_multipath_for_image_xfer', False) }}
netapp_copyoffload_tool_path={{ backend.get('copyoffload_tool_path', '') }}
+{%- if backend.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ backend.nas_secure_file_permissions }}
+{%- endif %}
+{%- if backend.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ backend.nas_secure_file_operations }}
+{%- endif %}
diff --git a/cinder/files/grafana_dashboards/cinder_prometheus.json b/cinder/files/grafana_dashboards/cinder_prometheus.json
index de1e2f8..ed1f67d 100644
--- a/cinder/files/grafana_dashboards/cinder_prometheus.json
+++ b/cinder/files/grafana_dashboards/cinder_prometheus.json
@@ -1,5 +1,8 @@
{% raw %}
{
+ "annotations": {
+ "list": []
+ },
"description": "Monitors Cinder cluster using Prometheus. Shows overall cluster processes and usage.",
"editable": true,
"gnetId": 315,
@@ -19,8 +22,8 @@
"colorValue": true,
"colors": [
"rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
+ "rgba(50, 172, 45, 0.97)",
+ "rgba(237, 129, 40, 0.89)"
],
"datasource": "prometheus",
"format": "none",
@@ -74,10 +77,10 @@
"intervalFactor": 2,
"legendFormat": "{{ service }}",
"refId": "A",
- "step": 20
+ "step": 60
}
],
- "thresholds": "1,0",
+ "thresholds": "0.5,1.5",
"title": "API Availability",
"type": "singlestat",
"valueFontSize": "80%",
@@ -89,17 +92,22 @@
},
{
"op": "=",
+ "text": "DOWN",
+ "value": "0"
+ },
+ {
+ "op": "=",
"text": "OK",
"value": "1"
},
{
"op": "=",
- "text": "DOWN",
- "value": "0"
+ "text": "UNKNOWN",
+ "value": "2"
}
],
"valueName": "current"
- },
+ },
{
"cacheTimeout": null,
"colorBackground": false,
@@ -161,7 +169,7 @@
"intervalFactor": 2,
"legendFormat": "per sec",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -176,27 +184,6 @@
}
],
"valueName": "current"
- }
- ],
- "repeat": null,
- "repeatIteration": null,
- "repeatRowId": null,
- "showTitle": true,
- "title": "Service Status",
- "titleSize": "h6"
- },
- {
- "collapse": false,
- "height": "100px",
- "panels": [
- {
- "content": "<br />\n<h3 align=\"center\"> Up </h3>",
- "id": 3,
- "links": [],
- "mode": "html",
- "span": 1,
- "title": "",
- "type": "text"
},
{
"cacheTimeout": null,
@@ -216,7 +203,7 @@
"thresholdLabels": false,
"thresholdMarkers": true
},
- "id": 4,
+ "id": 3,
"interval": null,
"links": [],
"mappingType": 1,
@@ -259,11 +246,11 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
- "title": "API",
+ "title": "Cinder API backends",
"type": "singlestat",
"valueFontSize": "80%",
"valueMaps": [
@@ -280,7 +267,171 @@
"repeatIteration": null,
"repeatRowId": null,
"showTitle": true,
- "title": "Cinder API",
+ "title": "Service Status",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "250",
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "fill": 1,
+ "id": 4,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(openstack_cinder_http_response_times_rate{host=~\"^$host$\"}) by (http_status)",
+ "format": "time_series",
+ "intervalFactor": 2,
+ "legendFormat": "{{ http_status }}",
+ "refId": "A",
+ "step": 10
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Throughput",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "ops",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "prometheus",
+ "fill": 1,
+ "id": 5,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "max(openstack_cinder_http_response_times_upper_90{host=~\"^$host$\"}) by (http_method)",
+ "format": "time_series",
+ "intervalFactor": 2,
+ "legendFormat": "{{ http_method }}",
+ "refId": "A",
+ "step": 10
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Latency",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "s",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": true,
+ "title": "API Performances",
"titleSize": "h6"
},
{
@@ -357,7 +508,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "1,1",
@@ -434,7 +585,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "1,1",
@@ -511,7 +662,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "1,1",
@@ -543,7 +694,7 @@
"min": false,
"show": true,
"total": false,
- "values": false
+ "values": false
},
"lines": true,
"linewidth": 1,
@@ -566,7 +717,7 @@
"legendFormat": "{{ state }}",
"metric": "openstack_cinder_services",
"refId": "A",
- "step": 2
+ "step": 4
}
],
"thresholds": [],
@@ -576,7 +727,7 @@
"tooltip": {
"shared": true,
"sort": 0,
- "value_type": "individual"
+ "value_type": "individual"
},
"type": "graph",
"xaxis": {
@@ -584,7 +735,7 @@
"mode": "time",
"name": null,
"show": true,
- "values": []
+ "values": []
},
"yaxes": [
{
@@ -613,7 +764,7 @@
"span": 2,
"title": "",
"type": "text"
- },
+ },
{
"cacheTimeout": null,
"colorBackground": false,
@@ -675,7 +826,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "1,1",
@@ -752,7 +903,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "1,1",
@@ -829,7 +980,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "1,1",
@@ -861,7 +1012,7 @@
"min": false,
"show": true,
"total": false,
- "values": false
+ "values": false
},
"lines": true,
"linewidth": 1,
@@ -884,7 +1035,7 @@
"legendFormat": "{{ state }}",
"metric": "openstack_cinder_services",
"refId": "A",
- "step": 2
+ "step": 4
}
],
"thresholds": [],
@@ -894,7 +1045,7 @@
"tooltip": {
"shared": true,
"sort": 0,
- "value_type": "individual"
+ "value_type": "individual"
},
"type": "graph",
"xaxis": {
@@ -902,7 +1053,7 @@
"mode": "time",
"name": null,
"show": true,
- "values": []
+ "values": []
},
"yaxes": [
{
@@ -996,7 +1147,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1073,7 +1224,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1150,7 +1301,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1205,7 +1356,7 @@
"legendFormat": "{{ status }}",
"metric": "openstack_cinder_volumes",
"refId": "A",
- "step": 2
+ "step": 4
}
],
"thresholds": [],
@@ -1252,7 +1403,7 @@
"rgba(50, 172, 45, 0.97)"
],
"datasource": "prometheus",
- "format": "none",
+ "format": "decbytes",
"gauge": {
"maxValue": 100,
"minValue": 0,
@@ -1303,7 +1454,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1329,7 +1480,7 @@
"rgba(50, 172, 45, 0.97)"
],
"datasource": "prometheus",
- "format": "none",
+ "format": "decbytes",
"gauge": {
"maxValue": 100,
"minValue": 0,
@@ -1380,7 +1531,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1406,7 +1557,7 @@
"rgba(50, 172, 45, 0.97)"
],
"datasource": "prometheus",
- "format": "none",
+ "format": "decbytes",
"gauge": {
"maxValue": 100,
"minValue": 0,
@@ -1457,7 +1608,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1512,7 +1663,7 @@
"legendFormat": "{{ status }}",
"metric": "openstack_cinder_volumes_size",
"refId": "A",
- "step": 2
+ "step": 4
}
],
"thresholds": [],
@@ -1534,7 +1685,7 @@
},
"yaxes": [
{
- "format": "short",
+ "format": "decbytes",
"logBase": 1,
"max": null,
"min": 0,
@@ -1610,7 +1761,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1687,7 +1838,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1764,7 +1915,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1819,7 +1970,7 @@
"legendFormat": "{{ status }}",
"metric": "openstack_cinder_snapshots",
"refId": "A",
- "step": 2
+ "step": 4
}
],
"thresholds": [],
@@ -1866,7 +2017,7 @@
"rgba(50, 172, 45, 0.97)"
],
"datasource": "prometheus",
- "format": "none",
+ "format": "decbytes",
"gauge": {
"maxValue": 100,
"minValue": 0,
@@ -1917,7 +2068,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -1943,7 +2094,7 @@
"rgba(50, 172, 45, 0.97)"
],
"datasource": "prometheus",
- "format": "none",
+ "format": "decbytes",
"gauge": {
"maxValue": 100,
"minValue": 0,
@@ -1994,7 +2145,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -2020,7 +2171,7 @@
"rgba(50, 172, 45, 0.97)"
],
"datasource": "prometheus",
- "format": "none",
+ "format": "decbytes",
"gauge": {
"maxValue": 100,
"minValue": 0,
@@ -2071,7 +2222,7 @@
"intervalFactor": 2,
"legendFormat": "",
"refId": "A",
- "step": 20
+ "step": 60
}
],
"thresholds": "",
@@ -2126,7 +2277,7 @@
"legendFormat": "{{ status }}",
"metric": "openstack_cinder_snapshots_size",
"refId": "A",
- "step": 2
+ "step": 4
}
],
"thresholds": [],
@@ -2148,7 +2299,7 @@
},
"yaxes": [
{
- "format": "short",
+ "format": "decbytes",
"logBase": 1,
"max": null,
"min": 0,
@@ -2179,7 +2330,29 @@
"cinder"
],
"templating": {
- "list": []
+ "list": [
+ {
+ "allValue": null,
+ "current": {},
+ "datasource": "prometheus",
+ "hide": 0,
+ "includeAll": true,
+ "label": null,
+ "multi": true,
+ "name": "host",
+ "options": [],
+ "query": "label_values(openstack_cinder_http_response_times_count,host)",
+ "refresh": 1,
+ "refresh_on_load": true,
+ "regex": "",
+ "sort": 1,
+ "tagValuesQuery": "",
+ "tags": [],
+ "tagsQuery": "",
+ "type": "query",
+ "useTags": false
+ }
+ ]
},
"time": {
"from": "now-1h",
@@ -2212,6 +2385,6 @@
},
"timezone": "browser",
"title": "Cinder",
- "version": 2
+ "version": 4
}
{% endraw %}
diff --git a/cinder/files/juno/cinder.conf.controller.Debian b/cinder/files/juno/cinder.conf.controller.Debian
index 65bceb1..d83df84 100644
--- a/cinder/files/juno/cinder.conf.controller.Debian
+++ b/cinder/files/juno/cinder.conf.controller.Debian
@@ -92,6 +92,18 @@
use_syslog=false
verbose=True
lock_path=/var/lock/cinder
+{%- if controller.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ controller.nas_secure_file_permissions }}
+{%- endif %}
+{%- if controller.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ controller.nas_secure_file_operations }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ controller.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ controller.cinder_internal_tenant_project_id }}
+{%- endif %}
[keystone_authtoken]
signing_dir=/tmp/keystone-signing-cinder
diff --git a/cinder/files/juno/cinder.conf.volume.Debian b/cinder/files/juno/cinder.conf.volume.Debian
index 9888ad8..9766c2e 100644
--- a/cinder/files/juno/cinder.conf.volume.Debian
+++ b/cinder/files/juno/cinder.conf.volume.Debian
@@ -101,6 +101,18 @@
use_syslog=false
verbose=True
lock_path=/var/lock/cinder
+{%- if volume.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ volume.nas_secure_file_permissions }}
+{%- endif %}
+{%- if volume.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ volume.nas_secure_file_operations }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ volume.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ volume.cinder_internal_tenant_project_id }}
+{%- endif %}
[keystone_authtoken]
signing_dir=/tmp/keystone-signing-cinder
diff --git a/cinder/files/kilo/cinder.conf.controller.Debian b/cinder/files/kilo/cinder.conf.controller.Debian
index bb8320f..71902e2 100644
--- a/cinder/files/kilo/cinder.conf.controller.Debian
+++ b/cinder/files/kilo/cinder.conf.controller.Debian
@@ -86,6 +86,18 @@
use_syslog=false
verbose=True
lock_path=/var/lock/cinder
+{%- if controller.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ controller.nas_secure_file_permissions }}
+{%- endif %}
+{%- if controller.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ controller.nas_secure_file_operations }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ controller.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ controller.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_concurrency]
diff --git a/cinder/files/kilo/cinder.conf.volume.Debian b/cinder/files/kilo/cinder.conf.volume.Debian
index f820362..3c8c720 100644
--- a/cinder/files/kilo/cinder.conf.volume.Debian
+++ b/cinder/files/kilo/cinder.conf.volume.Debian
@@ -95,6 +95,18 @@
use_syslog=false
verbose=True
+{%- if volume.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ volume.nas_secure_file_permissions }}
+{%- endif %}
+{%- if volume.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ volume.nas_secure_file_operations }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ volume.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ volume.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_concurrency]
diff --git a/cinder/files/liberty/cinder.conf.controller.Debian b/cinder/files/liberty/cinder.conf.controller.Debian
index c337bba..e263513 100644
--- a/cinder/files/liberty/cinder.conf.controller.Debian
+++ b/cinder/files/liberty/cinder.conf.controller.Debian
@@ -109,6 +109,18 @@
use_syslog=false
verbose=True
lock_path=/var/lock/cinder
+{%- if controller.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ controller.nas_secure_file_permissions }}
+{%- endif %}
+{%- if controller.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ controller.nas_secure_file_operations }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ controller.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ controller.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_concurrency]
diff --git a/cinder/files/liberty/cinder.conf.volume.Debian b/cinder/files/liberty/cinder.conf.volume.Debian
index 1a3c0bd..98fc66e 100644
--- a/cinder/files/liberty/cinder.conf.volume.Debian
+++ b/cinder/files/liberty/cinder.conf.volume.Debian
@@ -93,6 +93,18 @@
use_syslog=false
verbose=True
+{%- if volume.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ volume.nas_secure_file_permissions }}
+{%- endif %}
+{%- if volume.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ volume.nas_secure_file_operations }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ volume.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ volume.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_concurrency]
diff --git a/cinder/files/mitaka/cinder.conf.controller.Debian b/cinder/files/mitaka/cinder.conf.controller.Debian
index eef54e0..4176181 100644
--- a/cinder/files/mitaka/cinder.conf.controller.Debian
+++ b/cinder/files/mitaka/cinder.conf.controller.Debian
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import controller with context %}
+{%- from "cinder/map.jinja" import controller, system_cacerts_file with context %}
[DEFAULT]
rootwrap_config = /etc/cinder/rootwrap.conf
@@ -98,6 +98,18 @@
{%- set backup_backend_fragment = "cinder/files/backup_backend/_" + controller.backup.engine + ".conf" %}
{%- include backup_backend_fragment %}
{%- endif %}
+{%- if controller.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ controller.nas_secure_file_permissions }}
+{%- endif %}
+{%- if controller.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ controller.nas_secure_file_operations }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ controller.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ controller.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_messaging_notifications]
{%- if controller.notification is mapping %}
@@ -114,14 +126,31 @@
lock_path=/var/lock/cinder
[oslo_messaging_rabbit]
+{%- set rabbit_port = controller.message_queue.get('port', 5671 if controller.message_queue.get('ssl',{}).get('enabled', False) else 5672) %}
{%- if controller.message_queue.members is defined %}
rabbit_hosts = {% for member in controller.message_queue.members -%}
- {{ member.host }}:{{ member.get('port', 5672) }}
+ {{ member.host }}:{{ member.get('port',rabbit_port) }}
{%- if not loop.last -%},{%- endif -%}
{%- endfor -%}
{%- else %}
rabbit_host = {{ controller.message_queue.host }}
-rabbit_port = {{ controller.message_queue.port }}
+rabbit_port = {{ rabbit_port }}
+{%- endif %}
+
+{%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
+rabbit_use_ssl=true
+
+{%- if controller.message_queue.ssl.version is defined %}
+kombu_ssl_version = {{ controller.message_queue.ssl.version }}
+{%- elif salt['grains.get']('pythonversion') > [2,7,8] %}
+kombu_ssl_version = TLSv1_2
+{%- endif %}
+
+{%- if controller.message_queue.ssl.cacert_file is defined %}
+kombu_ssl_ca_certs = {{ controller.message_queue.ssl.cacert_file }}
+{%- else %}
+kombu_ssl_ca_certs={{ system_cacerts_file }}
+{%- endif %}
{%- endif %}
rabbit_userid = {{ controller.message_queue.user }}
@@ -149,7 +178,7 @@
max_pool_size=30
max_retries=-1
max_overflow=40
-connection = {{ controller.database.engine }}+pymysql://{{ controller.database.user }}:{{ controller.database.password }}@{{ controller.database.host }}/{{ controller.database.name }}
+connection = {{ controller.database.engine }}+pymysql://{{ controller.database.user }}:{{ controller.database.password }}@{{ controller.database.host }}/{{ controller.database.name }}?charset=utf8{%- if controller.database.get('ssl',{}).get('enabled',False) %}&ssl_ca={{ controller.database.ssl.get('cacert_file', system_cacerts_file) }}{% endif %}
{%- if controller.backend is defined %}
@@ -208,3 +237,6 @@
{%- if controller.cors.allow_headers is defined %}
allow_headers = {{ controller.cors.allow_headers }}
{%- endif %}
+
+[oslo_middleware]
+secure_proxy_ssl_header = X-Forwarded-Proto
diff --git a/cinder/files/mitaka/cinder.conf.volume.Debian b/cinder/files/mitaka/cinder.conf.volume.Debian
index dafea1a..52f46d7 100644
--- a/cinder/files/mitaka/cinder.conf.volume.Debian
+++ b/cinder/files/mitaka/cinder.conf.volume.Debian
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import volume with context %}
+{%- from "cinder/map.jinja" import volume, system_cacerts_file with context %}
[DEFAULT]
rootwrap_config = /etc/cinder/rootwrap.conf
@@ -83,6 +83,18 @@
{%- set backup_backend_fragment = "cinder/files/backup_backend/_" + volume.backup.engine + ".conf" %}
{%- include backup_backend_fragment %}
{%- endif %}
+{%- if volume.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ volume.nas_secure_file_permissions }}
+{%- endif %}
+{%- if volume.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ volume.nas_secure_file_operations }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ volume.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ volume.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_messaging_notifications]
{%- if volume.notification is mapping %}
@@ -99,14 +111,31 @@
lock_path=/var/lock/cinder
[oslo_messaging_rabbit]
+{%- set rabbit_port = volume.message_queue.get('port', 5671 if volume.message_queue.get('ssl',{}).get('enabled', False) else 5672) %}
{%- if volume.message_queue.members is defined %}
rabbit_hosts = {% for member in volume.message_queue.members -%}
- {{ member.host }}:{{ member.get('port', 5672) }}
+ {{ member.host }}:{{ member.get('port',rabbit_port) }}
{%- if not loop.last -%},{%- endif -%}
{%- endfor -%}
{%- else %}
rabbit_host = {{ volume.message_queue.host }}
-rabbit_port = {{ volume.message_queue.port }}
+rabbit_port = {{ rabbit_port }}
+{%- endif %}
+
+{%- if volume.message_queue.get('ssl',{}).get('enabled', False) %}
+rabbit_use_ssl=true
+
+{%- if volume.message_queue.ssl.version is defined %}
+kombu_ssl_version = {{ volume.message_queue.ssl.version }}
+{%- elif salt['grains.get']('pythonversion') > [2,7,8] %}
+kombu_ssl_version = TLSv1_2
+{%- endif %}
+
+{%- if volume.message_queue.ssl.cacert_file is defined %}
+kombu_ssl_ca_certs = {{ volume.message_queue.ssl.cacert_file }}
+{%- else %}
+kombu_ssl_ca_certs={{ system_cacerts_file }}
+{%- endif %}
{%- endif %}
rabbit_userid = {{ volume.message_queue.user }}
@@ -134,7 +163,7 @@
max_pool_size=30
max_retries=-1
max_overflow=40
-connection = {{ volume.database.engine }}+pymysql://{{ volume.database.user }}:{{ volume.database.password }}@{{ volume.database.host }}/{{ volume.database.name }}
+connection = {{ volume.database.engine }}+pymysql://{{ volume.database.user }}:{{ volume.database.password }}@{{ volume.database.host }}/{{ volume.database.name }}?charset=utf8{%- if volume.database.get('ssl',{}).get('enabled',False) %}&ssl_ca={{ volume.database.ssl.get('cacert_file', system_cacerts_file) }}{% endif %}
{%- if volume.backend is defined %}
diff --git a/cinder/files/newton/cinder.conf.controller.Debian b/cinder/files/newton/cinder.conf.controller.Debian
index da05503..3e16fcd 100644
--- a/cinder/files/newton/cinder.conf.controller.Debian
+++ b/cinder/files/newton/cinder.conf.controller.Debian
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import controller with context %}
+{%- from "cinder/map.jinja" import controller, system_cacerts_file with context %}
[DEFAULT]
rootwrap_config = /etc/cinder/rootwrap.conf
@@ -69,7 +69,6 @@
rpc_response_timeout=3600
#Rabbit
-rpc_backend=rabbit
control_exchange=cinder
@@ -100,20 +99,33 @@
osapi_volume_extension = cinder.api.contrib.standard_extensions
+{%- set rabbit_port = controller.message_queue.get('port', 5671 if controller.message_queue.get('ssl',{}).get('enabled', False) else 5672) %}
{%- if controller.message_queue.members is defined %}
transport_url = rabbit://{% for member in controller.message_queue.members -%}
- {{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ member.host }}:{{ member.get('port', 5672) }}
+ {{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ member.host }}:{{ member.get('port',rabbit_port) }}
{%- if not loop.last -%},{%- endif -%}
{%- endfor -%}
/{{ controller.message_queue.virtual_host }}
{%- else %}
-transport_url = rabbit://{{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ controller.message_queue.host }}:{{ controller.message_queue.port }}/{{ controller.message_queue.virtual_host }}
+transport_url = rabbit://{{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ controller.message_queue.host }}:{{ rabbit_port }}/{{ controller.message_queue.virtual_host }}
{%- endif %}
{%- if controller.backup.engine != None %}
{%- set backup_backend_fragment = "cinder/files/backup_backend/_" + controller.backup.engine + ".conf" %}
{%- include backup_backend_fragment %}
{%- endif %}
+{%- if controller.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ controller.nas_secure_file_permissions }}
+{%- endif %}
+{%- if controller.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ controller.nas_secure_file_operations }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ controller.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ controller.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_messaging_notifications]
{%- if controller.notification is mapping %}
@@ -133,7 +145,23 @@
enable_proxy_headers_parsing = True
+{%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
[oslo_messaging_rabbit]
+rabbit_use_ssl=true
+
+{%- if controller.message_queue.ssl.version is defined %}
+kombu_ssl_version = {{ controller.message_queue.ssl.version }}
+{%- elif salt['grains.get']('pythonversion') > [2,7,8] %}
+kombu_ssl_version = TLSv1_2
+{%- endif %}
+
+{%- if controller.message_queue.ssl.cacert_file is defined %}
+kombu_ssl_ca_certs = {{ controller.message_queue.ssl.cacert_file }}
+{%- else %}
+kombu_ssl_ca_certs={{ system_cacerts_file }}
+{%- endif %}
+{%- endif %}
+
[keystone_authtoken]
signing_dir=/tmp/keystone-signing-cinder
@@ -163,7 +191,7 @@
max_pool_size=30
max_retries=-1
max_overflow=40
-connection = {{ controller.database.engine }}+pymysql://{{ controller.database.user }}:{{ controller.database.password }}@{{ controller.database.host }}/{{ controller.database.name }}?charset=utf8
+connection = {{ controller.database.engine }}+pymysql://{{ controller.database.user }}:{{ controller.database.password }}@{{ controller.database.host }}/{{ controller.database.name }}?charset=utf8{%- if controller.database.get('ssl',{}).get('enabled',False) %}&ssl_ca={{ controller.database.ssl.get('cacert_file', system_cacerts_file) }}{% endif %}
{%- if controller.backend is defined %}
diff --git a/cinder/files/newton/cinder.conf.volume.Debian b/cinder/files/newton/cinder.conf.volume.Debian
index fa68dc9..e81b7c2 100644
--- a/cinder/files/newton/cinder.conf.volume.Debian
+++ b/cinder/files/newton/cinder.conf.volume.Debian
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import volume with context %}
+{%- from "cinder/map.jinja" import volume, system_cacerts_file with context %}
[DEFAULT]
rootwrap_config = /etc/cinder/rootwrap.conf
@@ -66,7 +66,6 @@
rpc_response_timeout=3600
#Rabbit
-rpc_backend=rabbit
control_exchange=cinder
@@ -89,20 +88,33 @@
nova_catalog_admin_info = compute:nova:adminURL
nova_catalog_info = compute:nova:{{ volume.identity.get('endpoint_type', 'publicURL') }}
+{%- set rabbit_port = volume.message_queue.get('port', 5671 if volume.message_queue.get('ssl',{}).get('enabled', False) else 5672) %}
{%- if volume.message_queue.members is defined %}
transport_url = rabbit://{% for member in volume.message_queue.members -%}
- {{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ member.host }}:{{ member.get('port', 5672) }}
+ {{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ member.host }}:{{ member.get('port',rabbit_port) }}
{%- if not loop.last -%},{%- endif -%}
{%- endfor -%}
/{{ volume.message_queue.virtual_host }}
{%- else %}
-transport_url = rabbit://{{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ volume.message_queue.host }}:{{ volume.message_queue.port }}/{{ volume.message_queue.virtual_host }}
+transport_url = rabbit://{{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ volume.message_queue.host }}:{{ rabbit_port }}/{{ volume.message_queue.virtual_host }}
{%- endif %}
{%- if volume.backup.engine != None %}
{%- set backup_backend_fragment = "cinder/files/backup_backend/_" + volume.backup.engine + ".conf" %}
{%- include backup_backend_fragment %}
{%- endif %}
+{%- if volume.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ volume.nas_secure_file_permissions }}
+{%- endif %}
+{%- if volume.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ volume.nas_secure_file_operations }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ volume.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ volume.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_messaging_notifications]
{%- if volume.notification is mapping %}
@@ -122,7 +134,22 @@
enable_proxy_headers_parsing = True
+{%- if volume.message_queue.get('ssl',{}).get('enabled', False) %}
[oslo_messaging_rabbit]
+rabbit_use_ssl=true
+
+{%- if volume.message_queue.ssl.version is defined %}
+kombu_ssl_version = {{ volume.message_queue.ssl.version }}
+{%- elif salt['grains.get']('pythonversion') > [2,7,8] %}
+kombu_ssl_version = TLSv1_2
+{%- endif %}
+
+{%- if volume.message_queue.ssl.cacert_file is defined %}
+kombu_ssl_ca_certs = {{ volume.message_queue.ssl.cacert_file }}
+{%- else %}
+kombu_ssl_ca_certs={{ system_cacerts_file }}
+{%- endif %}
+{%- endif %}
[keystone_authtoken]
signing_dir=/tmp/keystone-signing-cinder
@@ -152,7 +179,7 @@
max_pool_size=30
max_retries=-1
max_overflow=40
-connection = {{ volume.database.engine }}+pymysql://{{ volume.database.user }}:{{ volume.database.password }}@{{ volume.database.host }}/{{ volume.database.name }}?charset=utf8
+connection = {{ volume.database.engine }}+pymysql://{{ volume.database.user }}:{{ volume.database.password }}@{{ volume.database.host }}/{{ volume.database.name }}?charset=utf8{%- if volume.database.get('ssl',{}).get('enabled',False) %}&ssl_ca={{ volume.database.ssl.get('cacert_file', system_cacerts_file) }}{% endif %}
{%- if volume.backend is defined %}
diff --git a/cinder/files/ocata/cinder-wsgi.conf b/cinder/files/ocata/cinder-wsgi.conf
index 3e4de23..b228a06 100644
--- a/cinder/files/ocata/cinder-wsgi.conf
+++ b/cinder/files/ocata/cinder-wsgi.conf
@@ -1,9 +1,8 @@
{%- from "cinder/map.jinja" import controller with context %}
Listen {{ controller.osapi.host }}:8776
-LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\" %D(us)" cinder_combined
<VirtualHost {{ controller.osapi.host }}:8776>
- WSGIDaemonProcess cinder-wsgi processes=5 threads=1 user=cinder display-name=%{GROUP}
+ WSGIDaemonProcess cinder-wsgi processes=5 threads=1 user=cinder group=cinder display-name=%{GROUP}
WSGIProcessGroup cinder-wsgi
WSGIScriptAlias / /usr/bin/cinder-wsgi
WSGIApplicationGroup %{GLOBAL}
@@ -13,10 +12,10 @@
</IfVersion>
ErrorLog /var/log/apache2/cinder_error.log
- CustomLog /var/log/apache2/cinder.log cinder_combined
+ CustomLog /var/log/apache2/cinder.log "%v:%p %h %l %u %t \"%r\" %>s %D %O \"%{Referer}i\" \"%{User-Agent}i\""
<Directory /usr/bin>
- <IfVersion >= 2.4>
+ <IfVersion >= 2.4>
Require all granted
</IfVersion>
<IfVersion < 2.4>
diff --git a/cinder/files/ocata/cinder.conf.controller.Debian b/cinder/files/ocata/cinder.conf.controller.Debian
index da05503..434f87d 100644
--- a/cinder/files/ocata/cinder.conf.controller.Debian
+++ b/cinder/files/ocata/cinder.conf.controller.Debian
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import controller with context %}
+{%- from "cinder/map.jinja" import controller, system_cacerts_file with context %}
[DEFAULT]
rootwrap_config = /etc/cinder/rootwrap.conf
@@ -69,7 +69,6 @@
rpc_response_timeout=3600
#Rabbit
-rpc_backend=rabbit
control_exchange=cinder
@@ -100,20 +99,33 @@
osapi_volume_extension = cinder.api.contrib.standard_extensions
+{%- set rabbit_port = controller.message_queue.get('port', 5671 if controller.message_queue.get('ssl',{}).get('enabled', False) else 5672) %}
{%- if controller.message_queue.members is defined %}
transport_url = rabbit://{% for member in controller.message_queue.members -%}
- {{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ member.host }}:{{ member.get('port', 5672) }}
+ {{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ member.host }}:{{ member.get('port',rabbit_port) }}
{%- if not loop.last -%},{%- endif -%}
{%- endfor -%}
/{{ controller.message_queue.virtual_host }}
{%- else %}
-transport_url = rabbit://{{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ controller.message_queue.host }}:{{ controller.message_queue.port }}/{{ controller.message_queue.virtual_host }}
+transport_url = rabbit://{{ controller.message_queue.user }}:{{ controller.message_queue.password }}@{{ controller.message_queue.host }}:{{ rabbit_port }}/{{ controller.message_queue.virtual_host }}
{%- endif %}
{%- if controller.backup.engine != None %}
{%- set backup_backend_fragment = "cinder/files/backup_backend/_" + controller.backup.engine + ".conf" %}
{%- include backup_backend_fragment %}
{%- endif %}
+{%- if controller.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ controller.nas_secure_file_permissions }}
+{%- endif %}
+{%- if controller.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ controller.nas_secure_file_operations }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ controller.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if controller.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ controller.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_messaging_notifications]
{%- if controller.notification is mapping %}
@@ -133,7 +145,23 @@
enable_proxy_headers_parsing = True
+{%- if controller.message_queue.get('ssl',{}).get('enabled', False) %}
[oslo_messaging_rabbit]
+rabbit_use_ssl=true
+
+{%- if controller.message_queue.ssl.version is defined %}
+kombu_ssl_version = {{ controller.message_queue.ssl.version }}
+{%- elif salt['grains.get']('pythonversion') > [2,7,8] %}
+kombu_ssl_version = TLSv1_2
+{%- endif %}
+
+{%- if controller.message_queue.ssl.cacert_file is defined %}
+kombu_ssl_ca_certs = {{ controller.message_queue.ssl.cacert_file }}
+{%- else %}
+kombu_ssl_ca_certs={{ system_cacerts_file }}
+{%- endif %}
+{%- endif %}
+
[keystone_authtoken]
signing_dir=/tmp/keystone-signing-cinder
@@ -155,15 +183,19 @@
{%- endif %}
auth_version = v3
+{%- if controller.get('barbican', {}).get('enabled', False) %}
+[key_manager]
+api_class = castellan.key_manager.barbican_key_manager.BarbicanKeyManager
[barbican]
-auth_endpoint=http://{{ controller.identity.host }}:5000
+auth_endpoint = {{ controller.identity.get('protocol', 'http') }}://{{ controller.identity.get('host', 'localhost') }}:{{ controller.identity.get('port', '5000') }}/v3
+{%- endif %}
[database]
idle_timeout=3600
max_pool_size=30
max_retries=-1
max_overflow=40
-connection = {{ controller.database.engine }}+pymysql://{{ controller.database.user }}:{{ controller.database.password }}@{{ controller.database.host }}/{{ controller.database.name }}?charset=utf8
+connection = {{ controller.database.engine }}+pymysql://{{ controller.database.user }}:{{ controller.database.password }}@{{ controller.database.host }}/{{ controller.database.name }}?charset=utf8{%- if controller.database.get('ssl',{}).get('enabled',False) %}&ssl_ca={{ controller.database.ssl.get('cacert_file', system_cacerts_file) }}{% endif %}
{%- if controller.backend is defined %}
diff --git a/cinder/files/ocata/cinder.conf.volume.Debian b/cinder/files/ocata/cinder.conf.volume.Debian
index fa68dc9..90eef4f 100644
--- a/cinder/files/ocata/cinder.conf.volume.Debian
+++ b/cinder/files/ocata/cinder.conf.volume.Debian
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import volume with context %}
+{%- from "cinder/map.jinja" import volume, system_cacerts_file with context %}
[DEFAULT]
rootwrap_config = /etc/cinder/rootwrap.conf
@@ -66,7 +66,6 @@
rpc_response_timeout=3600
#Rabbit
-rpc_backend=rabbit
control_exchange=cinder
@@ -89,20 +88,33 @@
nova_catalog_admin_info = compute:nova:adminURL
nova_catalog_info = compute:nova:{{ volume.identity.get('endpoint_type', 'publicURL') }}
+{%- set rabbit_port = volume.message_queue.get('port', 5671 if volume.message_queue.get('ssl',{}).get('enabled', False) else 5672) %}
{%- if volume.message_queue.members is defined %}
transport_url = rabbit://{% for member in volume.message_queue.members -%}
- {{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ member.host }}:{{ member.get('port', 5672) }}
+ {{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ member.host }}:{{ member.get('port',rabbit_port) }}
{%- if not loop.last -%},{%- endif -%}
{%- endfor -%}
/{{ volume.message_queue.virtual_host }}
{%- else %}
-transport_url = rabbit://{{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ volume.message_queue.host }}:{{ volume.message_queue.port }}/{{ volume.message_queue.virtual_host }}
+transport_url = rabbit://{{ volume.message_queue.user }}:{{ volume.message_queue.password }}@{{ volume.message_queue.host }}:{{ rabbit_port }}/{{ volume.message_queue.virtual_host }}
{%- endif %}
{%- if volume.backup.engine != None %}
{%- set backup_backend_fragment = "cinder/files/backup_backend/_" + volume.backup.engine + ".conf" %}
{%- include backup_backend_fragment %}
{%- endif %}
+{%- if volume.nas_secure_file_permissions is defined %}
+nas_secure_file_permissions={{ volume.nas_secure_file_permissions }}
+{%- endif %}
+{%- if volume.nas_secure_file_operations is defined %}
+nas_secure_file_operations={{ volume.nas_secure_file_operations }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_user_id is defined %}
+cinder_internal_tenant_user_id={{ volume.cinder_internal_tenant_user_id }}
+{%- endif %}
+{%- if volume.cinder_internal_tenant_project_id is defined %}
+cinder_internal_tenant_project_id={{ volume.cinder_internal_tenant_project_id }}
+{%- endif %}
[oslo_messaging_notifications]
{%- if volume.notification is mapping %}
@@ -122,7 +134,22 @@
enable_proxy_headers_parsing = True
+{%- if volume.message_queue.get('ssl',{}).get('enabled', False) %}
[oslo_messaging_rabbit]
+rabbit_use_ssl=true
+
+{%- if volume.message_queue.ssl.version is defined %}
+kombu_ssl_version = {{ volume.message_queue.ssl.version }}
+{%- elif salt['grains.get']('pythonversion') > [2,7,8] %}
+kombu_ssl_version = TLSv1_2
+{%- endif %}
+
+{%- if volume.message_queue.ssl.cacert_file is defined %}
+kombu_ssl_ca_certs = {{ volume.message_queue.ssl.cacert_file }}
+{%- else %}
+kombu_ssl_ca_certs={{ system_cacerts_file }}
+{%- endif %}
+{%- endif %}
[keystone_authtoken]
signing_dir=/tmp/keystone-signing-cinder
@@ -144,15 +171,19 @@
{%- endif %}
auth_version = v3
+{%- if volume.get('barbican', {}).get('enabled', False) %}
+[key_manager]
+api_class = castellan.key_manager.barbican_key_manager.BarbicanKeyManager
[barbican]
-auth_endpoint=http://{{ volume.identity.host }}:5000
+auth_endpoint = {{ volume.identity.get('protocol', 'http') }}://{{ volume.identity.get('host', 'localhost') }}:{{ volume.identity.get('port', '5000') }}/v3
+{%- endif %}
[database]
idle_timeout=3600
max_pool_size=30
max_retries=-1
max_overflow=40
-connection = {{ volume.database.engine }}+pymysql://{{ volume.database.user }}:{{ volume.database.password }}@{{ volume.database.host }}/{{ volume.database.name }}?charset=utf8
+connection = {{ volume.database.engine }}+pymysql://{{ volume.database.user }}:{{ volume.database.password }}@{{ volume.database.host }}/{{ volume.database.name }}?charset=utf8{%- if volume.database.get('ssl',{}).get('enabled',False) %}&ssl_ca={{ volume.database.ssl.get('cacert_file', system_cacerts_file) }}{% endif %}
{%- if volume.backend is defined %}
diff --git a/cinder/files/pike b/cinder/files/pike
new file mode 120000
index 0000000..d5e8ce2
--- /dev/null
+++ b/cinder/files/pike
@@ -0,0 +1 @@
+ocata
\ No newline at end of file
diff --git a/cinder/map.jinja b/cinder/map.jinja
index a19420a..b8806e6 100644
--- a/cinder/map.jinja
+++ b/cinder/map.jinja
@@ -1,3 +1,7 @@
+{%- set system_cacerts_file = salt['grains.filter_by']({
+ 'Debian': '/etc/ssl/certs/ca-certificates.crt',
+ 'RedHat': '/etc/pki/tls/certs/ca-bundle.crt'
+})%}
{% set controller = salt['grains.filter_by']({
'Debian': {
@@ -81,3 +85,11 @@
'pkgs': ['python-cinderclient']
},
}, merge=pillar.cinder.get('client', {})) %}
+
+{% set monitoring = salt['grains.filter_by']({
+ 'default': {
+ 'error_log_rate': 0.2,
+ 'services_failed_warning_threshold_percent': 0.3,
+ 'services_failed_critical_threshold_percent': 0.6,
+ },
+}, grain='os_family', merge=salt['pillar.get']('cinder:monitoring')) %}
diff --git a/cinder/meta/collectd.yml b/cinder/meta/collectd.yml
index 5f0ee4b..ee9a2a9 100644
--- a/cinder/meta/collectd.yml
+++ b/cinder/meta/collectd.yml
@@ -1,12 +1,15 @@
{%- if pillar.cinder.controller is defined %}
{%- from "cinder/map.jinja" import controller with context %}
-{%- if pillar.cinder.controller.get('enabled', False) %}
+{%- if controller.get('enabled', False) %}
+{%- if controller.get('osapi') %}
local_plugin:
collectd_check_local_endpoint:
endpoint:
cinder-api:
expected_code: {% if controller.version in ('juno', 'kilo', 'liberty') %}200{% else %}300{% endif %}
url: "http://{{ controller.osapi.host|replace('0.0.0.0', '127.0.0.1') }}:8776/"
+{%- endif %}
+{%- if controller.get('identity') %}
remote_plugin:
openstack_cinder:
plugin: python
@@ -26,3 +29,4 @@
region: {{ controller.identity.region }}
{%- endif %}
{%- endif %}
+{%- endif %}
diff --git a/cinder/meta/grafana.yml b/cinder/meta/grafana.yml
index 3d30b8c..0d0ecc8 100644
--- a/cinder/meta/grafana.yml
+++ b/cinder/meta/grafana.yml
@@ -69,7 +69,7 @@
alias: "Fatal"
rawQuery: true
query: SELECT count(max) FROM openstack_cinder_http_response_times WHERE environment_label = '$environment' AND http_status = '5xx' AND $timeFilter
- main:
+ main_influxdb:
datasource: influxdb
row:
ost-control-plane:
@@ -98,3 +98,18 @@
cluster_status:
rawQuery: true
query: SELECT last(value) FROM cluster_status WHERE cluster_name = 'cinder-data' AND environment_label = '$environment' AND $timeFilter GROUP BY time($interval) fill(null)
+ main_prometheus:
+ datasource: prometheus
+ row:
+ ost-control-plane:
+ title: OpenStack Control Plane
+ panel:
+ cinder:
+ title: Cinder
+ links:
+ - dashboard: Cinder
+ title: Cinder
+ type: dashboard
+ target:
+ cluster_status:
+ expr: avg(openstack_api_check_status{service=~"cinder.*"})
diff --git a/cinder/meta/heka.yml b/cinder/meta/heka.yml
index 7186c50..5a87914 100644
--- a/cinder/meta/heka.yml
+++ b/cinder/meta/heka.yml
@@ -2,6 +2,7 @@
{%- from "cinder/map.jinja" import volume as _volume with context %}
{%- set controller = _controller.get('enabled', False) %}
{%- set volume = _volume.get('enabled', False) %}
+{%- set apache_wsgi = controller and _controller.version not in ('juno', 'kilo', 'liberty', 'mitaka', 'newton') %}
log_collector:
decoder:
cinder:
@@ -9,6 +10,16 @@
module_file: /usr/share/lma_collector/decoders/openstack_log.lua
module_dir: /usr/share/lma_collector/common;/usr/share/heka/lua_modules
adjust_timezone: true
+{%- if apache_wsgi %}
+ cinder_wsgi:
+ engine: sandbox
+ module_file: /usr/share/lma_collector/decoders/apache_wsgi_log.lua
+ module_dir: /usr/share/lma_collector/common;/usr/share/heka/lua_modules
+ config:
+ logger: openstack.cinder
+ apache_log_pattern: >-
+ %v:%p %h %l %u %t \"%r\" %>s %D %O \"%{Referer}i\" \"%{User-Agent}i\"
+{%- endif %}
splitter:
cinder:
engine: token
@@ -22,6 +33,16 @@
priority: ["^Seq"]
decoder: "cinder_decoder"
splitter: "cinder_splitter"
+{%- if apache_wsgi %}
+ cinder_wsgi_log:
+ engine: logstreamer
+ log_directory: "/var/log/apache2"
+ file_match: 'cinder\.log'
+ differentiator: ['cinder-wsgi']
+ priority: ["^Seq"]
+ decoder: "cinder_wsgi_decoder"
+ splitter: "TokenSplitter"
+{%- endif %}
metric_collector:
trigger:
{%- if controller or volume %}
diff --git a/cinder/meta/prometheus.yml b/cinder/meta/prometheus.yml
index 6269b8a..dca35fb 100644
--- a/cinder/meta/prometheus.yml
+++ b/cinder/meta/prometheus.yml
@@ -1,5 +1,4 @@
-{%- from "cinder/map.jinja" import controller as controller with context %}
-{%- from "cinder/map.jinja" import volume as volume with context %}
+{%- from "cinder/map.jinja" import controller, volume, monitoring with context %}
{%- set is_controller = controller.get('enabled', False) %}
{%- set is_volume = volume.get('enabled', False) %}
@@ -10,7 +9,7 @@
{%- raw %}
CinderAPIDown:
if: >-
- max(openstack_api_check_status{service=~"cinder.+"}) by (service) == 0
+ max(openstack_api_check_status{service=~"cinder.*"}) by (service) == 0
for: 2m
labels:
severity: down
@@ -18,11 +17,66 @@
annotations:
summary: "Endpoint check for '{{ $labels.service }}' is down"
description: >-
- Endpoint check for '{{ $labels.service }}' is down for 2 minutes
+ Endpoint check for '{{ $labels.service }}' is down for the last 2 minutes
+ CinderAPIServiceInfo:
+ if: >-
+ http_response_status{service=~"cinder-api"} == 0
+ for: 2m
+ labels:
+ severity: info
+ service: "{{ $labels.service }}"
+ annotations:
+ summary: "HTTP check for '{{ $labels.service }}' down"
+ description: >-
+ The HTTP check for '{{ $labels.service }}' is down on {{ $labels.host }} for the last 2 minutes.
+ CinderServicesInfo:
+ if: >-
+ openstack_cinder_service == 1
+ for: 2m
+ labels:
+ severity: info
+ service: "{{ $labels.service }}"
+ annotations:
+ summary: "'{{ $labels.service }}' is down"
+ description: >-
+ '{{ $labels.service }}' is down on {{ $labels.hostname }} for the last 2 minutes.
+ CinderServicesWarning:
+ if: >-
+ openstack_cinder_services{service=~"cinder-volume|cinder-scheduler", state="down"} >= on (service) sum(openstack_cinder_services{service=~"cinder-volume|cinder-scheduler"}) by (service) * {%- endraw %} {{monitoring.services_failed_warning_threshold_percent}} {%- raw %}
+ for: 2m
+ labels:
+ severity: warning
+ service: "{{ $labels.service }}"
+ annotations:
+ summary: "More than {%- endraw %} {{monitoring.services_failed_warning_threshold_percent*100}}%{%- raw %} of {{ $labels.service }} services are down"
+ description: >-
+ {{ $value }} {{ $labels.service }} services are down for the last 2 minutes (More than {%- endraw %} {{monitoring.services_failed_warning_threshold_percent*100}}%{%- raw %})
+ CinderServicesCritical:
+ if: >-
+ openstack_cinder_services{service=~"cinder-volume|cinder-scheduler", state="down"} >= on (service) sum(openstack_cinder_services{service=~"cinder-volume|cinder-scheduler"}) by (service) * {%- endraw %} {{monitoring.services_failed_critical_threshold_percent}} {%- raw %}
+ for: 2m
+ labels:
+ severity: critical
+ service: "{{ $labels.service }}"
+ annotations:
+ summary: "More than {%- endraw %} {{monitoring.services_failed_critical_threshold_percent*100}}%{%- raw %} of {{ $labels.service }} services are down"
+ description: >-
+ {{ $value }} {{ $labels.service }} services are down for the last 2 minutes (More than {%- endraw %} {{monitoring.services_failed_critical_threshold_percent*100}}%{%- raw %})
+ CinderServicesDown:
+ if: >-
+ openstack_cinder_services{state="up",service=~"cinder-volume|cinder-scheduler"} == 0
+ for: 2m
+ labels:
+ severity: down
+ service: "{{ $labels.service }}"
+ annotations:
+ summary: "All {{ $labels.service }} services are down"
+ description: >-
+ All {{ $labels.service }} services are down for the last 2 minutes
{%- endraw %}
{%- endif %}
CinderErrorLogsTooHigh:
- {%- set log_threshold = prometheus_server.get('alert', {}).get('CinderErrorLogsTooHigh', {}).get('var', {}).get('threshold', 0.2 ) %}
+ {%- set log_threshold = monitoring.error_log_rate|float %}
if: >-
sum(rate(log_messages{service="cinder",level=~"error|emergency|fatal"}[5m])) without (level) > {{ log_threshold }}
{%- raw %}
diff --git a/cinder/meta/telegraf.yml b/cinder/meta/telegraf.yml
index c315be5..c626eaf 100644
--- a/cinder/meta/telegraf.yml
+++ b/cinder/meta/telegraf.yml
@@ -1,9 +1,9 @@
{%- from "cinder/map.jinja" import controller with context %}
-{%- if controller.get('enabled', False) %}
+{%- if controller.get('enabled', False) and controller.get('osapi') %}
agent:
input:
http_response:
cinder-api:
address: "http://{{ controller.osapi.host|replace('0.0.0.0', '127.0.0.1') }}:8776/"
expected_code: {% if controller.version in ('juno', 'kilo', 'liberty') %}200{% else %}300{% endif %}
-{%- endif %}
\ No newline at end of file
+{%- endif %}
diff --git a/cinder/volume.sls b/cinder/volume.sls
index 29f7ef6..383f0ba 100644
--- a/cinder/volume.sls
+++ b/cinder/volume.sls
@@ -1,4 +1,4 @@
-{%- from "cinder/map.jinja" import volume with context %}
+{%- from "cinder/map.jinja" import volume, system_cacerts_file with context %}
{%- if volume.enabled %}
{%- if not pillar.cinder.get('controller', {}).get('enabled', False) %}
@@ -22,6 +22,34 @@
{%- if not pillar.cinder.get('controller', {}).get('enabled', False) %}
+{%- if volume.message_queue.get('ssl',{}).get('enabled', False) %}
+rabbitmq_ca_cinder_volume:
+{%- if volume.message_queue.ssl.cacert is defined %}
+ file.managed:
+ - name: {{ volume.message_queue.ssl.cacert_file }}
+ - contents_pillar: cinder:volume:message_queue:ssl:cacert
+ - mode: 0444
+ - makedirs: true
+{%- else %}
+ file.exists:
+ - name: {{ volume.message_queue.ssl.get('cacert_file', system_cacerts_file) }}
+{%- endif %}
+{%- endif %}
+
+{%- if volume.database.get('ssl',{}).get('enabled', False) %}
+mysql_ca_cinder_volume:
+{%- if volume.database.ssl.cacert is defined %}
+ file.managed:
+ - name: {{ volume.database.ssl.cacert_file }}
+ - contents_pillar: cinder:volume:database:ssl:cacert
+ - mode: 0444
+ - makedirs: true
+{%- else %}
+ file.exists:
+ - name: {{ volume.database.ssl.get('cacert_file', system_cacerts_file) }}
+{%- endif %}
+{%- endif %}
+
/etc/cinder/cinder.conf:
file.managed:
- source: salt://cinder/files/{{ volume.version }}/cinder.conf.volume.{{ grains.os_family }}
@@ -50,6 +78,12 @@
- onlyif: /bin/false
{%- endif %}
- watch:
+ {%- if volume.message_queue.get('ssl',{}).get('enabled', False) %}
+ - file: rabbitmq_ca_cinder_volume
+ {%- endif %}
+ {%- if volume.database.get('ssl',{}).get('enabled', False) %}
+ - file: mysql_ca_cinder_volume
+ {%- endif %}
- file: /etc/cinder/cinder.conf
- file: /etc/cinder/api-paste.ini
@@ -65,11 +99,15 @@
- onlyif: /bin/false
{%- endif %}
- watch:
+ {%- if volume.message_queue.get('ssl',{}).get('enabled', False) %}
+ - file: rabbitmq_ca_cinder_volume
+ {%- endif %}
+ {%- if volume.database.get('ssl',{}).get('enabled', False) %}
+ - file: mysql_ca_cinder_volume
+ {%- endif %}
- file: /etc/cinder/cinder.conf
- file: /etc/cinder/api-paste.ini
-{# new way #}
-
{%- if volume.backend is defined %}
{%- for backend_name, backend in volume.get('backend', {}).iteritems() %}
@@ -174,92 +212,4 @@
{%- endif %}
-{# old way #}
-
-{%- if volume.storage is defined %}
-
-{%- if volume.storage.engine in ['iscsi', 'hp_lefthand'] %}
-
-cinder_iscsi_packages:
- pkg.installed:
- - names:
- - iscsitarget
- - open-iscsi
- - iscsitarget-dkms
- - require:
- - pkg: cinder_volume_packages
-
-/etc/default/iscsitarget:
- file.managed:
- - source: salt://cinder/files/iscsitarget
- - template: jinja
- - require:
- - pkg: cinder_iscsi_packages
-
-cinder_scsi_service:
- service.running:
- - names:
- - iscsitarget
- - open-iscsi
- - enable: true
- {%- if grains.get('noservices') %}
- - onlyif: /bin/false
- {%- endif %}
- - watch:
- - file: /etc/default/iscsitarget
-
-{%- endif %}
-
-{%- if volume.storage.engine == 'hitachi_vsp' %}
-
-{%- if grains.os_family == 'Debian' and volume.version == 'juno' %}
-
-hitachi_pkgs:
- pkg.latest:
- - names:
- - horcm
- - hbsd
-
-cinder_hitachi_vps_dir:
- file.directory:
- - name: /var/lock/hbsd
- - user: cinder
- - group: cinder
-
-{%- endif %}
-
-{%- endif %}
-
-{%- if volume.storage.engine == 'hp3par' %}
-
-hp3parclient:
- pkg.latest:
- - name: python-hp3parclient
-
-{%- endif %}
-
-{%- if volume.storage.engine == 'fujitsu' %}
-
-cinder_driver_fujitsu:
- pkg.latest:
- - name: cinder-driver-fujitsu
-
-{%- for type in volume.get('types', []) %}
-
-/etc/cinder/cinder_fujitsu_eternus_dx_{{ type.name }}.xml:
- file.managed:
- - source: salt://cinder/files/{{ volume.version }}/cinder_fujitsu_eternus_dx.xml
- - template: jinja
- - defaults:
- volume_type_name: "{{ type.pool }}"
- - require:
- - pkg: cinder-driver-fujitsu
-
-{%- endfor %}
-
-{%- endif %}
-
-{%- endif %}
-
-
{%- endif %}
diff --git a/tests/pillar/netapp.sls b/tests/pillar/netapp.sls
index 7746433..1f72880 100644
--- a/tests/pillar/netapp.sls
+++ b/tests/pillar/netapp.sls
@@ -2,6 +2,28 @@
controller:
enabled: true
version: mitaka
+ message_queue:
+ engine: rabbitmq
+ host: 127.0.0.1
+ port: 5672
+ user: openstack
+ password: pwd
+ virtual_host: '/openstack'
+ identity:
+ engine: keystone
+ host: 127.0.0.1
+ port: 35357
+ tenant: service
+ user: cinder
+ password: pwd
+ region: regionOne
+ database:
+ engine: mysql
+ host: 127.0.0.1
+ port: 3306
+ name: cinder
+ user: cinder
+ password: pwd
backend:
netapp:
engine: netapp
@@ -14,6 +36,8 @@
transport_type: https
netapp_lun_space_reservation: enabled
use_multipath_for_image_xfer: True
+ nas_secure_file_operations: false
+ nas_secure_file_permissions: false
devices:
- 172.18.2.2:/vol_1
- 172.18.2.2:/vol_2
@@ -22,6 +46,20 @@
volume:
enabled: true
version: mitaka
+ database:
+ engine: mysql
+ host: 127.0.0.1
+ port: 3306
+ name: cinder
+ user: cinder
+ password: pwd
+ message_queue:
+ engine: rabbitmq
+ host: 127.0.0.1
+ port: 5672
+ user: openstack
+ password: pwd
+ virtual_host: '/openstack'
linux:
system:
package:
diff --git a/tests/pillar/nfs.sls b/tests/pillar/nfs.sls
index c79b3c1..9cec3cb 100644
--- a/tests/pillar/nfs.sls
+++ b/tests/pillar/nfs.sls
@@ -3,6 +3,28 @@
enabled: true
version: liberty
default_volume_type: nfs-driver
+ message_queue:
+ engine: rabbitmq
+ host: 127.0.0.1
+ port: 5672
+ user: openstack
+ password: pwd
+ virtual_host: '/openstack'
+ identity:
+ engine: keystone
+ host: 127.0.0.1
+ port: 35357
+ tenant: service
+ user: cinder
+ password: pwd
+ region: regionOne
+ database:
+ engine: mysql
+ host: 127.0.0.1
+ port: 3306
+ name: cinder
+ user: cinder
+ password: pwd
backend:
nfs-driver:
engine: nfs
@@ -16,9 +38,23 @@
enabled: true
version: liberty
default_volume_type: nfs-driver
+ database:
+ engine: mysql
+ host: 127.0.0.1
+ port: 3306
+ name: cinder
+ user: cinder
+ password: pwd
+ message_queue:
+ engine: rabbitmq
+ host: 127.0.0.1
+ port: 5672
+ user: openstack
+ password: pwd
+ virtual_host: '/openstack'
backend:
nfs-driver:
enabled: true
engine: nfs
type_name: nfs-driver
- volume_group: cinder-volume
\ No newline at end of file
+ volume_group: cinder-volume
diff --git a/tests/pillar/repo_mcp_openstack_mitaka.sls b/tests/pillar/repo_mcp_openstack_mitaka.sls
new file mode 100644
index 0000000..ea24305
--- /dev/null
+++ b/tests/pillar/repo_mcp_openstack_mitaka.sls
@@ -0,0 +1,44 @@
+linux:
+ system:
+ enabled: true
+ repo:
+ mirantis_openstack_repo:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }} mitaka main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }}/archive-mcpmitaka.key"
+ pin:
+ - pin: 'release a=mitaka'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_hotfix:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }} mitaka-hotfix main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }}/archive-mcpmitaka.key"
+ pin:
+ - pin: 'release a=mitaka-hotfix'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_security:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }} mitaka-security main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }}/archive-mcpmitaka.key"
+ pin:
+ - pin: 'release a=mitaka-security'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_updates:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }} mitaka-updates main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }}/archive-mcpmitaka.key"
+ pin:
+ - pin: 'release a=mitaka-updates'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_holdback:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }} mitaka-holdback main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/mitaka/{{ grains.get('oscodename') }}/archive-mcpmitaka.key"
+ pin:
+ - pin: 'release a=mitaka-holdback'
+ priority: 1050
+ package: '*'
diff --git a/tests/pillar/repo_mcp_openstack_newton.sls b/tests/pillar/repo_mcp_openstack_newton.sls
new file mode 100644
index 0000000..9504235
--- /dev/null
+++ b/tests/pillar/repo_mcp_openstack_newton.sls
@@ -0,0 +1,44 @@
+linux:
+ system:
+ enabled: true
+ repo:
+ mirantis_openstack_repo:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }} newton main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }}/archive-mcpnewton.key"
+ pin:
+ - pin: 'release a=newton'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_hotfix:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }} newton-hotfix main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }}/archive-mcpnewton.key"
+ pin:
+ - pin: 'release a=newton-hotfix'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_security:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }} newton-security main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }}/archive-mcpnewton.key"
+ pin:
+ - pin: 'release a=newton-security'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_updates:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }} newton-updates main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }}/archive-mcpnewton.key"
+ pin:
+ - pin: 'release a=newton-updates'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_holdback:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }} newton-holdback main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/newton/{{ grains.get('oscodename') }}/archive-mcpnewton.key"
+ pin:
+ - pin: 'release a=newton-holdback'
+ priority: 1050
+ package: '*'
diff --git a/tests/pillar/repo_mcp_openstack_ocata.sls b/tests/pillar/repo_mcp_openstack_ocata.sls
new file mode 100644
index 0000000..e601208
--- /dev/null
+++ b/tests/pillar/repo_mcp_openstack_ocata.sls
@@ -0,0 +1,44 @@
+linux:
+ system:
+ enabled: true
+ repo:
+ mirantis_openstack_repo:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }} ocata main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }}/archive-mcpocata.key"
+ pin:
+ - pin: 'release a=ocata'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_hotfix:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }} ocata-hotfix main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }}/archive-mcpocata.key"
+ pin:
+ - pin: 'release a=ocata-hotfix'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_security:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }} ocata-security main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }}/archive-mcpocata.key"
+ pin:
+ - pin: 'release a=ocata-security'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_updates:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }} ocata-updates main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }}/archive-mcpocata.key"
+ pin:
+ - pin: 'release a=ocata-updates'
+ priority: 1050
+ package: '*'
+ mirantis_openstack_holdback:
+ source: "deb http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }} ocata-holdback main"
+ architectures: amd64
+ key_url: "http://mirror.fuel-infra.org/mcp-repos/ocata/{{ grains.get('oscodename') }}/archive-mcpocata.key"
+ pin:
+ - pin: 'release a=ocata-holdback'
+ priority: 1050
+ package: '*'
diff --git a/tests/pillar/ssl.sls b/tests/pillar/ssl.sls
new file mode 100644
index 0000000..abb1ec6
--- /dev/null
+++ b/tests/pillar/ssl.sls
@@ -0,0 +1,19 @@
+include:
+ - .ceph_single
+cinder:
+ controller:
+ database:
+ ssl:
+ enabled: True
+ message_queue:
+ port: 5671
+ ssl:
+ enabled: True
+ volume:
+ database:
+ ssl:
+ enabled: True
+ message_queue:
+ port: 5671
+ ssl:
+ enabled: True
diff --git a/tests/pillar/volume_single_barbican.sls b/tests/pillar/volume_single_barbican.sls
new file mode 100644
index 0000000..5f28d06
--- /dev/null
+++ b/tests/pillar/volume_single_barbican.sls
@@ -0,0 +1,40 @@
+cinder:
+ volume:
+ enabled: true
+ version: ocata
+ barbican:
+ enabled: true
+ osapi:
+ host: 127.0.0.1
+ database:
+ engine: mysql
+ host: 127.0.0.1
+ port: 3306
+ name: cinder
+ user: cinder
+ password: password
+ identity:
+ engine: keystone
+ host: 127.0.0.1
+ port: 35357
+ tenant: service
+ user: cinder
+ password: password
+ endpoint_type: internalURL
+ region: regionOne
+ glance:
+ host: 127.0.0.1
+ port: 9292
+ message_queue:
+ engine: rabbitmq
+ host: 127.0.0.1
+ port: 5672
+ user: openstack
+ password: password
+ virtual_host: '/openstack'
+ storage:
+ engine: storwize
+ host: 192.168.0.1
+ port: 22
+ user: username
+ password: pass
diff --git a/tests/run_tests.sh b/tests/run_tests.sh
index 6710a50..688643f 100755
--- a/tests/run_tests.sh
+++ b/tests/run_tests.sh
@@ -126,6 +126,7 @@
run() {
for pillar in ${PILLARDIR}/*.sls; do
+ grep ${FORMULA_NAME}: ${pillar} &>/dev/null || continue
state_name=$(basename ${pillar%.sls})
salt_run --id=${state_name} state.show_sls ${FORMULA_NAME} || (log_err "Execution of ${FORMULA_NAME}.${state_name} failed"; exit 1)
done