---
# Heat environment file (reconstructed from a web-viewer scrape that had
# fused blame/line-number prefixes into the content).
# Maps custom MCP2 resource types to local template fragments.
resource_registry:
  "MCP2::NetworkAcc": ../fragments/NetworkAccVM.yaml
  "MCP2::NetworkAccStorage": ../fragments/NetworkAccVMStorage.yaml
  "MCP2::NetworkPrvFl": ../fragments/NetworkPrvFl.yaml
  "MCP2::NetworkIronicFlat": ../fragments/NetworkIronicFlat.yaml
  "MCP2::NetworkTun": ../fragments/NetworkTun.yaml
  "MCP2::SrvInstances": ../fragments/SrvInstancesVM.yaml
  "MCP2::SrvInstancesCeph": ../fragments/SrvInstancesVMCeph.yaml
  "MCP2::SrvInstancesCephOSD": ../fragments/SrvInstancesVMCephOSD.yaml
  "MCP2::VSRX": ../fragments/vSRX.yaml

# Stack parameters for a Tungsten Fabric + vSRX deployment.
parameters:
  image: bionic-server-cloudimg-amd64-20190612
  public_net_id: public
  # Node counts per role; 0 disables the role in this stack.
  masters_size: 0
  worker_size: 5
  cmp_size: 3
  acmp_size: 2  # cmp-gw
  gtw_size: 0
  lma_size: 0
  osd_size: 0
  ntw_size: 3
  ucp_boot_timeout: 3600
  cluster_public_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCp0evjOaK8c8SKYK4r2+0BN7g+8YSvQ2n8nFgOURCyvkJqOHi1qPGZmuN0CclYVdVuZiXbWw3VxRbSW3EH736VzgY1U0JmoTiSamzLHaWsXvEIW8VCi7boli539QJP0ikJiBaNAgZILyCrVPN+A6mfqtacs1KXdZ0zlMq1BPtFciR1JTCRcVs5vP2Wwz5QtY2jMIh3aiwkePjMTQPcfmh1TkOlxYu5IbQyZ3G1ahA0mNKI9a0dtF282av/F6pwB/N1R1nEZ/9VtcN2I1mf1NW/tTHEEcTzXYo1R/8K9vlqAN8QvvGLZtZduGviNVNoNWvoxaXxDt8CPv2B2NCdQFZp
  tungstenfabric_enabled: true
  vsrx_enabled: true
  # Private floating / MetalLB addressing.
  metallb_address_pools: '10.11.12.201-10.11.12.254'
  private_floating_network_cidr: '10.11.12.0/24'
  private_floating_network_ipam_pool_start: '10.11.12.3'
  private_floating_network_ipam_pool_end: '10.11.12.70'
  private_floating_network_gateway: '10.11.12.1'
  control_network_ext_router_ip: '10.10.0.131'
  tun_network_ext_router_ip: '10.15.0.131'
  private_floating_interface: 'ens4'
  tunnel_interface: 'ens8'
  # Kubernetes node labels per role (Heat json-type parameters).
  worker_metadata: {"labels": {"openstack-control-plane": "enabled", "local-volume-provisioner": "enabled"}}
  cmp_metadata: {"labels": {"openstack-compute-node": "enabled", "tfvrouter": "enabled", "role": "ceph-osd-node"}}
  acmp_metadata: {"labels": {"openstack-compute-node": "enabled", "tfvrouter": "enabled", "role": "ceph-osd-node", "openstack-gateway": "enabled"}}
  ntw_metadata: {"labels": {"tfconfig": "enabled", "tfconfigdb": "enabled", "tfcontrol": "enabled", "tfanalytics": "enabled", "tfanalyticsdb": "enabled", "tfwebui": "enabled", "local-volume-provisioner": "enabled"}}
  # hardware_metadata which is used for Ceph requires flavor with
  # ephemeral storage because it is used for Ceph bluestore.
  workers_flavor: 'system.compact.openstack.control.ephemeral'
  cmps_flavor: 'mosk.s.compute.ephemeral'
  storage_frontend_network_cidr: '10.12.1.0/24'
  storage_backend_network_cidr: '10.12.0.0/24'
  # Per-node hardware description keyed by MAC address; passed downstream
  # as an opaque string (block scalar) and parsed as YAML by the consumer.
  # NOTE(review): inner indentation was reconstructed from the scraped
  # source — confirm against the consumer's expected schema.
  hardware_metadata: |
    '00:00:00:00:00:00':
      write_files:
        - path: /usr/share/metadata/ceph.yaml
          content: |
            storageDevices:
              - name: vdb
                role: hdd
                sizeGb: 20
            ramGb: 8
            cores: 2
            # The roles will be assigned based on node labels.
            # roles:
            #   - mon
            #   - mgr
            ips:
              - 192.168.122.101
            crushPath: {}