blob: 30fdd8dcc9f523c1caf5c4d7625477668e0c86bc [file] [log] [blame]
Hanna Arhipova54fec802020-10-30 12:45:46 +02001import pytest
2
3from tcp_tests import logger
4
5LOG = logger.logger
6
7
@pytest.fixture(scope='module')
def add_xtra_node_to_salt(salt_actions, underlay_actions, config):
    """Register the 'xtra' node on the Salt Master for the whole module.

    Accepts the pending minion key of the node whose name contains
    'xtra', then restarts salt-minion on that node so it reconnects
    with the freshly accepted key.

    :return: None (yield-style fixture; no teardown is performed)
    """
    LOG.info("Executing pytest SETUP from add_xtra_node_to_salt fixture")

    ssh_nodes = config.underlay.ssh
    xtra_node = [n['node_name'] for n in ssh_nodes
                 if 'xtra' in n.get('node_name')][0]
    cfg_node = [n['node_name'] for n in ssh_nodes
                if 'salt_master' in n.get('roles')][0]

    accept_key_cmd = "salt-key -a {node} --include-all -y".format(
        node=xtra_node)
    underlay_actions.check_call(
        accept_key_cmd,
        node_name=cfg_node,
        raise_on_err=False)
    # Need to restart salt-minion service after accepting it in Salt Master
    underlay_actions.check_call(
        "systemctl restart salt-minion",
        node_name=xtra_node,
        raise_on_err=False)
    yield
37
38
@pytest.mark.usefixtures("add_xtra_node_to_salt")
class TestCephOsd(object):
    """Add and remove a Ceph OSD node ('xtra') via Drivetrain pipelines.

    Follows the documented MCP procedure for managing OSD nodes:
    https://docs.mirantis.com/mcp/q4-18/mcp-operations-guide/openstack-operations/ceph-operations/manage-nodes/add-osd-nodes.html
    """

    # Both the add and the remove Jenkins pipelines target the same host.
    _JOB_PARAMETERS = {
        'HOST': 'xtra*',
        'HOST_TYPE': 'osd'
    }

    @pytest.fixture
    def describe_node_in_reclass(self,
                                 reclass_actions, salt_actions):
        """Describe the new OSD node (ceph_osd_node04) in the reclass model.

        Adds the node's hostname/address parameters and its storage-node
        definition so the cluster model knows about the 'xtra' OSD host.
        """
        LOG.info("Executing pytest SETUP "
                 "from describe_node_in_reclass fixture")
        reclass = reclass_actions
        # ---- cluster/*/ceph/init.yml ---------------
        path = "cluster/*/ceph/init.yml"
        reclass.add_key("parameters._param.ceph_osd_node04_hostname",
                        "xtra",
                        path)
        reclass.add_key("parameters._param.ceph_osd_node04_address",
                        "10.6.0.205",
                        path)
        reclass.add_key("parameters._param.ceph_osd_system_codename",
                        "xenial",
                        path)
        reclass.add_key("parameters.linux.network.host.xtra.address",
                        "${_param:ceph_osd_node04_address}",
                        path)
        reclass.add_key(
            key="parameters.linux.network.host.xtra.names",
            value="['${_param:ceph_osd_node04_hostname}', "
                  "'${_param:ceph_osd_node04_hostname}.${_param:cluster_domain}']",
            short_path=path)

        # ------- cluster/*/infra/config/init.yml -----------
        path = "cluster/*/infra/config/init.yml"
        parameter = "parameters.reclass.storage.node.ceph_osd_node04"
        reclass.add_key(parameter + ".name",
                        "${_param:ceph_osd_node04_hostname}",
                        path)
        reclass.add_key(parameter + ".domain",
                        "${_param:cluster_domain}",
                        path)
        reclass.add_key(parameter + ".classes",
                        "['cluster.${_param:cluster_name}.ceph.osd']",
                        path)
        reclass.add_key(parameter + ".params.salt_master_host",
                        "${_param:reclass_config_master}",
                        path)
        reclass.add_key(parameter + ".params.linux_system_codename",
                        "${_param:ceph_osd_system_codename}",
                        path)
        reclass.add_key(parameter + ".params.single_address",
                        "${_param:ceph_osd_node04_address}",
                        path)
        reclass.add_key(parameter + ".params.ceph_crush_parent",
                        "rack02",
                        path)

    def _run_osd_job(self, drivetrain_actions, job_name):
        """Run a Ceph OSD Jenkins job for the 'xtra' host and assert success.

        :param drivetrain_actions: Drivetrain fixture used to talk to Jenkins
        :param job_name: name of the Jenkins pipeline job to start
        """
        job_result, job_description = drivetrain_actions.start_job_on_jenkins(
            job_name=job_name,
            job_parameters=self._JOB_PARAMETERS,
            verbose=True)
        assert job_result == 'SUCCESS', job_description

    def test_add_node_process(self, describe_node_in_reclass,
                              drivetrain_actions):
        """Run the ceph-add-osd-upmap pipeline for the new node.

        https://docs.mirantis.com/mcp/q4-18/mcp-operations-guide/openstack-operations/ceph-operations/manage-nodes/add-osd-nodes.html
        :param describe_node_in_reclass: fixture describing the node in reclass
        :param drivetrain_actions: Drivetrain/Jenkins helper fixture
        :return: None
        test took about 20 min
        """
        self._run_osd_job(drivetrain_actions, "ceph-add-osd-upmap")

    def test_added_node(self):
        """Placeholder: verify the new OSDs appear in the CRUSH tree.

        Expected `ceph osd tree` output after the node is added:
        """
        # root@osd001:~# ceph osd tree in
        # ID CLASS WEIGHT  TYPE NAME       STATUS REWEIGHT PRI-AFF
        # -1       0.18585 root default
        # -3       0.04646     host osd001
        #  0   hdd 0.01549         osd.0       up  1.00000 1.00000
        #  1   hdd 0.01549         osd.1       up  1.00000 1.00000
        #  2   hdd 0.01549         osd.2       up  1.00000 1.00000
        # -5       0.04646     host osd002
        #  3   hdd 0.01549         osd.3       up  1.00000 1.00000
        #  5   hdd 0.01549         osd.5       up  1.00000 1.00000
        #  6   hdd 0.01549         osd.6       up  1.00000 1.00000
        # -7       0.04646     host osd003
        #  4   hdd 0.01549         osd.4       up  1.00000 1.00000
        #  7   hdd 0.01549         osd.7       up  1.00000 1.00000
        #  8   hdd 0.01549         osd.8       up  1.00000 1.00000
        # -9       0.04646     host xtra
        #  9   hdd 0.01549         osd.9       up  1.00000 1.00000
        # 10   hdd 0.01549         osd.10      up  1.00000 1.00000
        # 11   hdd 0.01549         osd.11      up  1.00000 1.00000
        pass

    def test_delete_node_process(self, drivetrain_actions):
        """Run the ceph-remove-node pipeline for the 'xtra' node."""
        self._run_osd_job(drivetrain_actions, "ceph-remove-node")
Hanna Arhipova54fec802020-10-30 12:45:46 +0200150
151
class TestCephMon(object):
    """Placeholder for Ceph MON node add/remove scenarios."""

    def test_add_node(self):
        """Not implemented yet."""

    def test_delete_node(self):
        """Not implemented yet."""
158
159
class TestCephMgr(object):
    """Placeholder for Ceph MGR node add/remove scenarios."""

    def test_add_node(self):
        """Not implemented yet."""

    def test_delete_node(self):
        """Not implemented yet."""