---
# Heat environment: maps MCP2 custom resource types to their template fragments.
resource_registry:
  "MCP2::NetworkAcc": ../fragments/NetworkAccVM.yaml
  "MCP2::NetworkAccStorage": ../fragments/NetworkAccVMStorage.yaml
  "MCP2::NetworkPrvFl": ../fragments/NetworkPrvFl.yaml
  "MCP2::NetworkIronicFlat": ../fragments/NetworkIronicFlat.yaml
  "MCP2::SrvInstances": ../fragments/SrvInstancesVM.yaml
  "MCP2::SrvInstancesCeph": ../fragments/SrvInstancesVMCeph.yaml
  "MCP2::SrvInstancesCephOSD": ../fragments/SrvInstancesVMCephOSD.yaml
  "MCP2::NetworkTun": ../fragments/NetworkTun.yaml

parameters:
  image: bionic-server-cloudimg-amd64-20190612
  public_net_id: public
  # All node-group sizes are 0: this is a single-node (all-in-one) layout;
  # the AIO node is the UCP/ucp_* machine below.
  masters_size: 0
  worker_size: 0
  cmp_size: 0
  gtw_size: 0
  lma_size: 0
  osd_size: 0
  ucp_boot_timeout: 3600
  cluster_public_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCp0evjOaK8c8SKYK4r2+0BN7g+8YSvQ2n8nFgOURCyvkJqOHi1qPGZmuN0CclYVdVuZiXbWw3VxRbSW3EH736VzgY1U0JmoTiSamzLHaWsXvEIW8VCi7boli539QJP0ikJiBaNAgZILyCrVPN+A6mfqtacs1KXdZ0zlMq1BPtFciR1JTCRcVs5vP2Wwz5QtY2jMIh3aiwkePjMTQPcfmh1TkOlxYu5IbQyZ3G1ahA0mNKI9a0dtF282av/F6pwB/N1R1nEZ/9VtcN2I1mf1NW/tTHEEcTzXYo1R/8K9vlqAN8QvvGLZtZduGviNVNoNWvoxaXxDt8CPv2B2NCdQFZp
  private_floating_network_cidr: '10.11.12.0/24'
  private_floating_interface: ''
  tunnel_interface: 'ens3'
  # Node labels for the all-in-one node. The original flow mapping listed
  # openstack-compute-node, openvswitch and role twice each (duplicate keys,
  # silently last-wins in most parsers); duplicates removed, values unchanged.
  ucp_metadata:
    labels:
      openstack-control-plane: 'enabled'
      openstack-compute-node: 'enabled'
      openvswitch: 'enabled'
      openstack-gateway: 'enabled'
      role: 'ceph-osd-node'
      local-volume-provisioner: 'enabled'
  # compact.cid: RAM 32768 | Disk 100 | VCPU 8
  ucp_flavor: 'mosk.aio.ephemeral'
  # ucp_flavor: 'compact.cid'
  # hardware_metadata which is used for Ceph requires flavor with
  # ephemeral storage because it is used for Ceph bluestore.
  workers_flavor: 'system.compact.openstack.control.ephemeral'
  cmps_flavor: 'system.compact.openstack.control.ephemeral'
  storage_frontend_network_cidr: '10.10.0.0/24'
  storage_backend_network_cidr: '10.10.0.0/24'
  # Quoted on purpose: the consumer expects the string 'true', not a boolean.
  single_node: 'true'
  # Per-node Ceph hardware description, keyed by MAC address; delivered to the
  # node as /usr/share/metadata/ceph.yaml via cloud-init write_files.
  hardware_metadata: |
    '00:00:00:00:00:00':
      write_files:
        - path: /usr/share/metadata/ceph.yaml
          content: |
            storageDevices:
            - name: vdb
              role: hdd
              sizeGb: 2
            ramGb: 8
            cores: 2
            # The roles will be assigned based on node labels.
            # roles:
            # - mon
            # - mgr
            ips:
            - 192.168.122.101
            crushPath: {}