# updated templates and scripts for Airship 1.3
# [yaml_builds.git] / site_type / sriov-a13 / templates / software / charts / osh / openstack-tenant-ceph / ceph-osd.j2
---
# The purpose of this file is to define environment-specific parameters for
# ceph-osd
schema: armada/Chart/v1
metadata:
  schema: metadata/Document/v1
  name: tenant-ceph-osd
  layeringDefinition:
    abstract: false
    layer: site
    parentSelector:
      name: tenant-ceph-osd-global
    actions:
      # Replace (not merge) the global OSD disk list with the site-specific
      # list rendered below, then merge everything else over the parent.
      - method: replace
        path: .values.conf.storage.osd
      - method: merge
        path: .
  storagePolicy: cleartext
data:
  values:
    labels:
      osd:
        node_selector_key: tenant-ceph-osd
        node_selector_value: enabled
    conf:
      storage:
        failure_domain: "rack"
        # NEWSITE-CHANGEME: The OSD count and configuration here should not need
        # to change if your HW matches the HW used in this environment.
        # Otherwise you may need to add or subtract disks to this list.
        # No need to create the journal partitions below, as the ceph charts
        # will create them. Default size of journal partitions is 10GB.
        osd:
{% for osd in yaml.tenant_storage.osds %}
          - data:
              type: block-logical
              location: "{{ osd.data }}"
            journal:
              type: block-logical
              location: "{{ osd.journal }}"
{% endfor %}
      overrides:
        ceph_osd:
          hosts:
            # One entry per storage host (genesis, masters, and optional
            # workers); each host gets its own "<hostname>_rack" failure
            # domain so CRUSH treats every host as a separate rack.
            - name: "{{ yaml.genesis.name }}"
              conf:
                storage:
                  failure_domain_name: "{{ yaml.genesis.name }}_rack"
{% for server in yaml.masters %}
            - name: "{{ server.name }}"
              conf:
                storage:
                  failure_domain_name: "{{ server.name }}_rack"
{% endfor %}
{% if 'workers' in yaml %}{% for server in yaml.workers %}
            - name: "{{ server.name }}"
              conf:
                storage:
                  failure_domain_name: "{{ server.name }}_rack"
{% endfor %}{% endif %}
...