I'd like to load a zshenv file (using the source command) and then use the environment variables it defines in another task.
This is what I have; I'm hoping there's a better solution.
files
directory structure
.
├── ansible.cfg
├── hosts.yaml
├── profiles
│   └── macos.yaml
├── roles
│   └── base
│       ├── tasks
│       │   ├── git.yaml
│       │   └── main.yaml
│       └── vars
└── tools
    └── zsh
        └── .zshenv
./ansible.cfg
[defaults]
inventory = ./hosts.yaml
roles_path = ./roles/
stdout_callback = yaml
./hosts.yaml
---
all:
  hosts:
    localhost
./profiles/macos.yaml
---
# run MacOS configs
# - hosts: localhost
#   connection: local
#   tags: macos
#   roles:
#     - macos
#   # when: ansible_distribution == "MacOSX"
- hosts: localhost
  connection: local
  tags: base
  roles:
    - base
./roles/base/tasks/main.yaml
---
- import_tasks: tasks/git.yaml
./roles/base/tasks/git.yaml
---
- name: source zshenv
  shell:
    cmd: source ../tools/zsh/.zshenv; echo $GIT_CONFIG_PATH
  register: gitConfigPath

- name: Link gitconfig file
  file:
    # PWD: ./profiles
    src: "{{ ansible_env.PWD }}/../tools/git/.gitconfig"
    dest: "{{ gitConfigPath.stdout }}"
    state: link

# - name: print ansible_env
#   debug:
#     msg: "{{ ansible_env }}"
#
# - name: print gitConfigPath
#   debug:
#     msg: "{{ gitConfigPath.stdout }}"
#
./tools/zsh/.zshenv
export XDG_CONFIG_HOME="$HOME/.config"
export GIT_CONFIG_PATH="$XDG_CONFIG_HOME/git/config"
command to run
ansible-playbook profiles/macos.yaml -v
PS: It'd be even better if Ansible let me do something as simple as this:
source tools/zsh/.zshenv && ansible-playbook profiles/macos.yaml -v
Given the simplified project without a role
shell> tree -a .
.
├── ansible.cfg
├── hosts
├── pb.yml
└── tools
    ├── git
    └── zsh
        └── .zshenv
shell> cat hosts
localhost
shell> cat tools/zsh/.zshenv
export GIT_CONFIG_PATH=/home/admin/git/.gitconfig
export ENV1=env1
export ENV2=env2
export ENV3=env3
eval "$(direnv hook zsh)"
Parse the environment on your own. For example
zshenv: "{{ dict(lookup('file', 'tools/zsh/.zshenv').splitlines()|
select('match', '^\\s*export .*$')|
map('regex_replace', '^\\s*export\\s+', '')|
map('split', '=')) }}"
gives
zshenv:
  ENV1: env1
  ENV2: env2
  ENV3: env3
  GIT_CONFIG_PATH: /home/admin/git/.gitconfig
Then, use the dictionary zshenv
- name: Link gitconfig file
  file:
    dest: "{{ playbook_dir }}/tools/git/.gitconfig"
    src: "{{ zshenv.GIT_CONFIG_PATH }}"
    state: link
gives, running with the --check --diff options
TASK [Link gitconfig file] *******************************************************************
--- before
+++ after
@@ -1,2 +1,2 @@
path: /export/scratch/tmp7/test-116/tools/git/.gitconfig
-state: absent
+state: link
changed: [localhost]
Notes
Example of a complete playbook for testing
shell> cat pb.yml
- hosts: localhost
  vars:
    zshenv: "{{ dict(lookup('file', 'tools/zsh/.zshenv').splitlines()|
                select('match', '^\\s*export .*$')|
                map('regex_replace', '^\\s*export\\s+', '')|
                map('split', '=')) }}"
  tasks:
    - debug:
        var: zshenv
    - name: Link gitconfig file
      file:
        dest: "{{ playbook_dir }}/tools/git/.gitconfig"
        src: "{{ zshenv.GIT_CONFIG_PATH }}"
        state: link
gives
shell> ansible-playbook pb.yml
PLAY [localhost] *****************************************************************************
TASK [debug] *********************************************************************************
ok: [localhost] =>
  zshenv:
    ENV1: env1
    ENV2: env2
    ENV3: env3
    GIT_CONFIG_PATH: /home/admin/git/.gitconfig
TASK [Link gitconfig file] *******************************************************************
changed: [localhost]
PLAY RECAP ***********************************************************************************
localhost: ok=2 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
The link tools/git/.gitconfig -> /home/admin/git/.gitconfig was created
shell> tree -a .
.
├── ansible.cfg
├── hosts
├── pb.yml
└── tools
    ├── git
    │   └── .gitconfig -> /home/admin/git/.gitconfig
    └── zsh
        └── .zshenv
You can use the dictionary zshenv to set the environment. For example,
- command: echo $GIT_CONFIG_PATH
  environment: "{{ zshenv }}"
  register: out

- debug:
    var: out.stdout
gives
out.stdout: /home/admin/git/.gitconfig
Cache the dictionary if you want to use this environment globally in the whole playbook. For example,
shell> grep fact_caching ansible.cfg
fact_caching = jsonfile
fact_caching_connection = /tmp/ansible_cache
fact_caching_prefix = ansible_facts_
fact_caching_timeout = 86400
- set_fact:
    zshenv: "{{ dict(lookup('file', 'tools/zsh/.zshenv').splitlines()|
                select('match', '^\\s*export .*$')|
                map('regex_replace', '^\\s*export\\s+', '')|
                map('split', '=')) }}"
    cacheable: true
Then,
- hosts: localhost
  environment: "{{ zshenv }}"
  tasks:
    - command: echo $GIT_CONFIG_PATH
      register: out
    - debug:
        var: out.stdout
gives
PLAY [localhost] *****************************************************************************
TASK [command] *******************************************************************************
changed: [localhost]
TASK [debug] *********************************************************************************
ok: [localhost] =>
out.stdout: /home/admin/git/.gitconfig
PLAY RECAP ***********************************************************************************
localhost: ok=7 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
With help from Vladimir Botka's answer (https://stackoverflow.com/a/74924664/3053548),
I modified his code a little.
TLDR
source the zshenv file
print out all the ENV in the shell session
store the output as an Ansible fact
access the ENV in a task
Files
./profiles/macos.yaml
---
# run MacOS configs
- hosts: localhost
  connection: local
  tags: always
  tasks:
    - name: source zshenv
      shell:
        cmd: source ../tools/zsh/.zshenv; env
      register: out
      changed_when: false

    - name: store zshenv as fact
      set_fact:
        zshenv: "{{ dict(out.stdout.splitlines() | map('split', '=')) }}"
      changed_when: false

# - hosts: localhost
#   connection: local
#   tags: macos
#   roles:
#     - macos
#   # when: ansible_distribution == "MacOSX"

- hosts: localhost
  connection: local
  tags: base
  roles:
    - base
./roles/base/tasks/git.yaml
---
- name: Link gitconfig file
  file:
    src: "{{ ansible_env.PWD }}/../tools/git/.gitconfig"
    dest: "{{ zshenv.GIT_CONFIG_PATH }}"
    state: link
command to run
ansible-playbook profiles/macos.yaml
Related
I have the following node role:
$ tree roles/node
roles/node
├── defaults
│   └── main.yaml
└── tasks
    ├── main.yaml
    ├── reset.yaml
    └── unmount.yaml
The current provisioning.yaml playbook uses the main tasks:
- name: Node Provisioning
  hosts: node
  become: true
  gather_facts: true
  roles:
    - role: node
I would like to create a separate reset.yaml playbook which uses the reset tasks:
- name: Node Reset
  hosts: node
  become: true
  gather_facts: true
  roles:
    - role: node
I understand I could create a separate role or use tags, but my goal is to keep the same role name and specify in the playbook which tasks file to run (reset instead of main).
Is there a proper solution that lets me use a specific tasks_from in my playbook scenario? The example above is a simplified playbook, for proof of concept.
There are three ways to include a role in your playbook:
using the classic roles: directive in the play;
using the dynamic include_role task;
using the static import_role task.
While the roles: directive doesn't support a tasks_from argument, the other two options do. You could write:
- name: Node Reset
  hosts: node
  become: true
  gather_facts: true
  tasks:
    - import_role:
        name: node
        tasks_from: reset.yaml
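The dynamic variant looks almost identical; a minimal sketch, reusing the same role and tasks file:
- name: Node Reset (dynamic variant)
  hosts: node
  become: true
  gather_facts: true
  tasks:
    - include_role:
        name: node
        tasks_from: reset.yaml
The practical difference is that import_role is processed when the playbook is parsed (so the role's tasks show up in --list-tasks and inherit tags statically), while include_role is evaluated at runtime.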
Here's a complete test walk-through. I used the following layout:
.
├── playbook.yaml
└── roles
    └── node
        └── tasks
            ├── main.yaml
            ├── reset.yaml
            └── umount.yaml
Where roles/node/tasks/reset.yaml contains:
- debug:
    msg: "This is reset.yaml"

- name: Umount filesystem
  ansible.builtin.include_tasks:
    file: umount.yaml
  with_items:
    - /run/netns
    - /var/lib/kubelet
  loop_control:
    loop_var: mounted_fs
And roles/node/tasks/umount.yaml contains:
- debug:
    msg: "This is umount.yaml; fs: {{ mounted_fs }}"
If I run this playbook.yaml:
- hosts: localhost
  gather_facts: false
  tasks:
    - import_role:
        name: node
        tasks_from: reset
I get as output:
PLAY [localhost] ***************************************************************
TASK [node : debug] ************************************************************
ok: [localhost] => {
"msg": "This is reset.yaml"
}
TASK [node : Umount filesystem] ************************************************
included: /home/lars/tmp/ansible/roles/node/tasks/umount.yaml for localhost => (item=/run/netns)
included: /home/lars/tmp/ansible/roles/node/tasks/umount.yaml for localhost => (item=/var/lib/kubelet)
TASK [node : debug] ************************************************************
ok: [localhost] => {
"msg": "This is umount.yaml; fs: /run/netns"
}
TASK [node : debug] ************************************************************
ok: [localhost] => {
"msg": "This is umount.yaml; fs: /var/lib/kubelet"
}
PLAY RECAP *********************************************************************
localhost : ok=5 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
You can find my complete test setup here.
How can I change the value of src to use the first file found, and fall back to another path/file (or skip, ignoring errors) if none is found? For example, I want to use a host-specific file, but default to another file if the host-specific one doesn't exist, or skip otherwise. I can solve this by duplicating the following block with a different src in each:
- name: create file unique per host
  template:
    src: "{{ ansible_host }}/{{ item }}.conf.j2"
    dest: "/etc/wireguard/{{ item }}.conf"
  loop: "{{ files }}"

- name: create file
  template:
    src: "{{ item }}.conf.j2"
    dest: "/etc/wireguard/{{ item }}.conf"
  loop: "{{ files }}"
In the role directory, inside templates, I have a directory per host (IP), for example:
roles/
└── templates/
    ├── default.conf.j2
    └── 10.1.2.3/
        ├── wg.100.conf.j2
        └── wg.200.conf.j2
When the task runs against 10.1.2.3 I want to use the files wg.100.conf.j2 and wg.200.conf.j2, but for other hosts use default.conf.j2, or skip if nothing is defined.
Any idea how to do it in a single task?
For example, I would like to call the playbook like this:
- roles:
    - role: test_role
      configs:
        - wg.test-1
        - wg.test-2
The expected result is to have the files wg.test-1 and wg.test-2 on all hosts:
/etc/wireguard/wg.test-1.conf
/etc/wireguard/wg.test-2.conf
But it should also include unique files per host. For example, given the following structure within the role, host 10.1.2.3 will have the files wg.100 and wg.200, and host 10.1.2.4 the file wg.300:
test_role
├── tasks
│   └── main.yml
└── templates
    ├── 10.1.2.3
    │   ├── wg.100.conf.j2
    │   └── wg.200.conf.j2
    ├── 10.1.2.4
    │   └── wg.300.conf.j2
    ├── wg.test-2.conf.j2
    └── wg.test-1.conf.j2
The reason I want to do it all within a single task is to simplify restarting only the changed services even when the src differs, using something like:
- name: create config
  template:
    # pseudocode: fall back to the second src when the host-specific one is missing
    src: "{{ ansible_host }}/{{ item }}.conf.j2" || {{ item }}.conf.j2
    dest: "/etc/wireguard/{{ item }}.conf"
  loop: "{{ configs }}"
  notify: restart service
  register: changes

- set_fact:
    restart_service: "{{ restart_service | default([]) + [item.item] }}"
  when: item.changed
  loop: "{{ changes.results }}"
  no_log: true
This is the current working role that I have, but I would like to find a better approach (to avoid duplicating blocks):
---
- name: Create wg.file
  template:
    src: "{{ item }}.conf.j2"
    dest: "/etc/wireguard/{{ item }}.conf"
  loop: "{{ vpns }}"
  notify: restart vpn
  register: changes

- set_fact:
    restart_service: "{{ restart_service | default([]) + [item.item] }}"
  when: item.changed
  loop: "{{ changes.results }}"
  no_log: true

- name: find per host wg.files
  set_fact:
    per_host_wg_files: "{{ lookup('fileglob', 'templates/{{ ansible_host }}/*.j2', wantlist=True) }}"

- name: Create per host wg.files
  template:
    src: "{{ ansible_host }}/{{ item | basename }}"
    dest: "/etc/wireguard/{{ (item | basename).split('.')[:3] | join('.') }}"
  loop: "{{ per_host_wg_files }}"
  notify: restart vpn
  register: changes

- name: create fact with changes
  set_fact:
    restart_service: "{{ restart_service | default([]) + [(item.item | basename).split('.')[:2] | join('.')] }}"
  when: item.changed
  loop: "{{ changes.results }}"
  no_log: true
Just in case this is the restart handler:
- name: restart vpn
  systemd:
    daemon_reload: true
    state: restarted
    enabled: true
    name: "wg-quick@{{ item }}"
  async: 10
  poll: 5
  loop: "{{ restart_service }}"
  when: restart_service is defined
Example 1.
Given the tree for testing
shell> pwd
/scratch/tmp7/test-151
shell> tree .
.
├── ansible.cfg
├── hosts
├── pb.yml
└── roles
    └── role_A
        ├── tasks
        │   └── main.yml
        └── templates
            ├── 10.1.2.3
            │   ├── test-1.conf.j2
            │   └── test-2.conf.j2
            ├── 10.1.2.4
            │   └── test-2.conf.j2
            └── default.conf.j2
6 directories, 8 files
shell> cat ansible.cfg
[defaults]
gathering = explicit
inventory = $PWD/hosts
roles_path = $PWD/roles
remote_tmp = ~/.ansible/tmp
retry_files_enabled = false
stdout_callback = yaml
shell> cat hosts
host1 ansible_host=10.1.2.3
host2 ansible_host=10.1.2.4
host3 ansible_host=10.1.2.5
shell> cat pb.yml
- hosts: all
  vars:
    files: [test-1, test-2, test-3]
    findme: "{{ [ansible_host]|
                product(files|
                        product(['.conf.j2'])|
                        map('join'))|
                map('join', '/') + ['default.conf.j2'] }}"
    found_file: "{{ lookup('ansible.builtin.first_found', found_params) }}"
    found_params:
      files: "{{ findme }}"
      paths: ['templates']
      skip: true
  roles:
    - role_A
shell> cat roles/role_A/tasks/main.yml
- debug:
    var: findme
- debug:
    var: found_file
- name: Do it in this single task
  debug:
    msg: "{{ lookup('template', found_file) }}"
shell> cat roles/role_A/templates/10.1.2.3/test-1.conf.j2
10.1.2.3 test-1
shell> cat roles/role_A/templates/10.1.2.3/test-2.conf.j2
10.1.2.3 test-2
shell> cat roles/role_A/templates/10.1.2.4/test-2.conf.j2
10.1.2.4 test-2
shell> cat roles/role_A/templates/default.conf.j2
{{ ansible_host }} default
Create a list of files. Declare the variables
files: [test-1, test-2, test-3]
findme: "{{ [ansible_host]|
product(files|
product(['.conf.j2'])|
map('join'))|
map('join', '/') + ['default.conf.j2'] }}"
gives
TASK [role_A : debug] ************************************************************************
ok: [host3] =>
findme:
- 10.1.2.5/test-1.conf.j2
- 10.1.2.5/test-2.conf.j2
- 10.1.2.5/test-3.conf.j2
- default.conf.j2
ok: [host1] =>
findme:
- 10.1.2.3/test-1.conf.j2
- 10.1.2.3/test-2.conf.j2
- 10.1.2.3/test-3.conf.j2
- default.conf.j2
ok: [host2] =>
findme:
- 10.1.2.4/test-1.conf.j2
- 10.1.2.4/test-2.conf.j2
- 10.1.2.4/test-3.conf.j2
- default.conf.j2
Find the first file available.
The plugin first_found by default doesn't look for files in templates. You must add this subdirectory to the parameter paths when you put files into templates.
The lookup below always succeeds because each list of files is terminated by default.conf.j2.
skip: true means the lookup returns an empty list [] when no files are matched.
found_file: "{{ lookup('ansible.builtin.first_found', found_params) }}"
found_params:
files: "{{ findme }}"
paths: ['templates']
skip: true
gives
TASK [role_A : debug] ************************************************************************
ok: [host2] =>
found_file: /export/scratch/tmp7/test-151/roles/role_A/templates/10.1.2.4/test-2.conf.j2
ok: [host3] =>
found_file: /export/scratch/tmp7/test-151/roles/role_A/templates/default.conf.j2
ok: [host1] =>
found_file: /export/scratch/tmp7/test-151/roles/role_A/templates/10.1.2.3/test-1.conf.j2
Use the file.
- debug:
    msg: "{{ lookup('template', found_file) }}"
If you want to allow found_file to be empty, test it:
- debug:
    msg: "{{ lookup('template', found_file) }}"
  when: found_file|length > 0
gives
TASK [role_A : debug] ************************************************************************
ok: [host1] =>
msg: |-
10.1.2.3 test-1
ok: [host2] =>
msg: |-
10.1.2.4 test-2
ok: [host3] =>
msg: |-
10.1.2.5 default
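To actually render the selected template instead of just printing it, found_file can feed the template module directly. A minimal sketch, assuming the rendered file should land under /etc/wireguard (the destination path is illustrative, the guard mirrors the test above):
- name: Render the first template found
  template:
    src: "{{ found_file }}"
    # e.g. .../templates/10.1.2.3/test-1.conf.j2 -> /etc/wireguard/test-1.conf
    dest: "/etc/wireguard/{{ found_file | basename | splitext | first }}"
  when: found_file | length > 0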
Example 2.
Q: "Given the following structure within the role, the expected result is to have the files wg.test-1 and wg.test-2 in all hosts and also include unique files per host. Host 10.1.2.3 will have the files wg.100 and wg.200, and host 10.1.2.4 the file wg.300."
test_role
├── tasks
│   └── main.yml
└── templates
    ├── 10.1.2.3
    │   ├── wg.100.conf.j2
    │   └── wg.200.conf.j2
    ├── 10.1.2.4
    │   └── wg.300.conf.j2
    ├── wg.test-2.conf.j2
    └── wg.test-1.conf.j2
A: In this case the plugin first_found is not needed. Use the plugin fileglob instead.
Given the tree for testing
shell> tree .
.
├── ansible.cfg
├── hosts
├── pb.yml
└── roles
    └── role_A
        ├── defaults
        │   └── main.yml
        ├── tasks
        │   └── main.yml
        └── templates
            ├── 10.1.2.3
            │   ├── wg.100.conf.j2
            │   └── wg.200.conf.j2
            ├── 10.1.2.4
            │   └── wg.300.conf.j2
            ├── wg.test-1.conf.j2
            └── wg.test-2.conf.j2
shell> cat pb.yml
- hosts: all
  roles:
    - role_A
shell> cat roles/role_A/defaults/main.yml
files_global_pattern: templates/*.j2
files_global: "{{ q('ansible.builtin.fileglob', files_global_pattern) }}"
files_local_pattern: "templates/{{ ansible_host }}/*.j2"
files_local: "{{ q('ansible.builtin.fileglob', files_local_pattern) }}"
shell> cat roles/role_A/tasks/main.yml
- debug:
    msg: |
      files_global:
      {{ files_global|to_nice_yaml }}
      files_local:
      {{ files_local|to_nice_yaml }}

- debug:
    msg: |
      src: {{ item }}
      dest: {{ _dest }}
      content: {{ lookup('template', item) }}
  loop: "{{ files_global + files_local }}"
  vars:
    _dest: "{{ item|basename|splitext|first }}"
shell> find roles/role_A/templates -type f | xargs cat
{{ ansible_host }} wg.test-1
{{ ansible_host }} wg.test-2
10.1.2.3 wg.200
10.1.2.3 wg.100
10.1.2.4 wg.300
Declare the lists of global and local files
shell> cat roles/role_A/defaults/main.yml
files_global_pattern: templates/*.j2
files_global: "{{ q('ansible.builtin.fileglob', files_global_pattern) }}"
files_local_pattern: "templates/{{ ansible_host }}/*.j2"
files_local: "{{ q('ansible.builtin.fileglob', files_local_pattern) }}"
gives
TASK [role_A : debug] ************************************************************************
ok: [host1] =>
msg: |-
files_global:
- /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2
- /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2
files_local:
- /export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.3/wg.200.conf.j2
- /export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.3/wg.100.conf.j2
[WARNING]: Unable to find 'templates/10.1.2.5' in expected paths (use -vvvvv to see paths)
ok: [host3] =>
msg: |-
files_global:
- /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2
- /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2
files_local:
[]
ok: [host2] =>
msg: |-
files_global:
- /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2
- /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2
files_local:
- /export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.4/wg.300.conf.j2
Use the lists
- debug:
    msg: |
      src: {{ item }}
      dest: {{ _dest }}
      content: {{ lookup('template', item) }}
  loop: "{{ files_global + files_local }}"
  vars:
    _dest: "{{ item|basename|splitext|first }}"
gives
TASK [role_A : debug] ************************************************************************
[WARNING]: Unable to find 'templates/10.1.2.5' in expected paths (use -vvvvv to see paths)
ok: [host1] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2
dest: wg.test-1.conf
content: 10.1.2.3 wg.test-1
ok: [host2] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2
dest: wg.test-1.conf
content: 10.1.2.4 wg.test-1
ok: [host1] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2
dest: wg.test-2.conf
content: 10.1.2.3 wg.test-2
ok: [host2] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2
dest: wg.test-2.conf
content: 10.1.2.4 wg.test-2
ok: [host3] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-1.conf.j2
dest: wg.test-1.conf
content: 10.1.2.5 wg.test-1
ok: [host2] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.4/wg.300.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.4/wg.300.conf.j2
dest: wg.300.conf
content: 10.1.2.4 wg.300
ok: [host1] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.3/wg.200.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.3/wg.200.conf.j2
dest: wg.200.conf
content: 10.1.2.3 wg.200
ok: [host3] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/wg.test-2.conf.j2
dest: wg.test-2.conf
content: 10.1.2.5 wg.test-2
ok: [host1] => (item=/export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.3/wg.100.conf.j2) =>
msg: |-
src: /export/scratch/tmp7/test-152/roles/role_A/templates/10.1.2.3/wg.100.conf.j2
dest: wg.100.conf
content: 10.1.2.3 wg.100
Q: "How do you prevent the error: 'ansible_host' is undefined?"
A: Use the default filter if you want to allow an undefined variable. For example,
files_local_pattern: "templates/{{ ansible_host|d('udefined') }}/*.j2"
The result will be the warning below
[WARNING]: Unable to find 'templates/udefined' in expected paths
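To turn the debug loop above into real files, the same lists can drive the template module. A minimal sketch, assuming the destination directory /etc/wireguard and the restart vpn handler from the question (both illustrative):
- name: Create global and per-host wg files
  template:
    src: "{{ item }}"
    # e.g. .../templates/10.1.2.3/wg.100.conf.j2 -> /etc/wireguard/wg.100.conf
    dest: "/etc/wireguard/{{ item | basename | splitext | first }}"
  loop: "{{ files_global + files_local }}"
  notify: restart vpn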
I have this vars file in Ansible:
for_create:
  client: ["VK","SB"]
  folders: ["toula","tina"]
for_delete:
  client: ["VK","SB"]
  folders: ["invoices","scripts"]
for_rename:
  client: ["VK", "SB"]
  old_name: ["home"]
  new_name: ["town"]
Is it possible to do something like that in YAML? Here is the equivalent code in Ruby:
clients = ["VK", "SB"]
folders = ["toula","tina"]
clients.each do |client|
  folders.each do |folder|
    puts "folder #{folder} for client #{client} created"
  end
end
To sum up, I want to create the folders ["toula", "tina"] for each client.
I have tried a lot of things, but I can't manage to make the loop go over each folder for each client.
The output of the above code
folder toula for client VK created
folder tina for client VK created
folder toula for client SB created
folder tina for client SB created
Iterate the lists with with_nested. For example,
- debug:
    msg: "folder {{ item.1 }} for client {{ item.0 }} created"
  with_nested:
    - "{{ for_create.client }}"
    - "{{ for_create.folders }}"
  vars:
    for_create:
      client: [VK, SB]
      folders: [toula, tina]
gives (abridged)
msg: folder toula for client VK created
msg: folder tina for client VK created
msg: folder toula for client SB created
msg: folder tina for client SB created
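To actually create the folders rather than just print a message, the same nested loop can drive the file module. A minimal sketch, assuming the folders live under /tmp/home/<client> as in the examples further below:
- name: Create folders per client
  file:
    state: directory
    path: "/tmp/home/{{ item.0 }}/{{ item.1 }}"
  with_nested:
    - "{{ for_create.client }}"
    - "{{ for_create.folders }}"
  vars:
    for_create:
      client: [VK, SB]
      folders: [toula, tina]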
To rename folders, zip the lists:
- debug:
    msg: "folder {{ item.1 }} renamed to {{ item.2 }} for client {{ item.0 }}"
  with_nested:
    - "{{ for_rename.client }}"
    - "{{ for_rename.old_name|zip(for_rename.new_name) }}"
  vars:
    for_rename:
      client: [VK, SB]
      old_name: [home]
      new_name: [town]
gives (abridged)
msg: folder home renamed to town for client VK
msg: folder home renamed to town for client SB
Q: "Is it possible to check if the folder exists inside the loop?"
A: Yes, it is. Use the parameters creates or removes. For example, given the tree
shell> tree /tmp/home/
/tmp/home/
├── SB
│   └── home
└── VK
    └── home
The playbook below
shell> cat pb.yml
- hosts: localhost
  tasks:
    - command:
        cmd: "mv {{ main_path }}/{{ item.1 }} {{ main_path }}/{{ item.2 }}"
        removes: "{{ main_path }}/{{ item.1 }}"
      with_nested:
        - "{{ for_rename.client }}"
        - "{{ for_rename.old_name|zip(for_rename.new_name) }}"
      vars:
        main_path: "/tmp/home/{{ item.0 }}"
        for_rename:
          client: [VK, SB]
          old_name: [home]
          new_name: [town]
moves the files only if they exist
shell> ansible-playbook pb.yml
PLAY [localhost] *****************************************************************************
TASK [command] *******************************************************************************
changed: [localhost] => (item=['VK', 'home', 'town'])
changed: [localhost] => (item=['SB', 'home', 'town'])
PLAY RECAP ***********************************************************************************
localhost: ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
shell> tree /tmp/home/
/tmp/home/
├── SB
│   └── town
└── VK
    └── town
2 directories, 2 files
The playbook is idempotent. The commands will not execute if the files are missing
shell> ansible-playbook pb.yml
PLAY [localhost] *****************************************************************************
TASK [command] *******************************************************************************
ok: [localhost] => (item=['VK', 'home', 'town'])
ok: [localhost] => (item=['SB', 'home', 'town'])
PLAY RECAP ***********************************************************************************
localhost: ok=1 changed=0 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
You can use the command module the same way to create or remove files as well. Another option is the file module. For example, the playbook below
shell> cat pb.yml
- hosts: localhost
  tasks:
    - file:
        state: touch
        path: "{{ main_path }}/{{ item.1 }}"
      with_nested:
        - "{{ for_create.client }}"
        - "{{ for_create.folders }}"
      vars:
        main_path: "/tmp/home/{{ item.0 }}"
        for_create:
          client: [VK, SB]
          folders: [toula, tina]
creates the files
shell> ansible-playbook pb.yml
PLAY [localhost] *****************************************************************************
TASK [file] **********************************************************************************
changed: [localhost] => (item=['VK', 'toula'])
changed: [localhost] => (item=['VK', 'tina'])
changed: [localhost] => (item=['SB', 'toula'])
changed: [localhost] => (item=['SB', 'tina'])
PLAY RECAP ***********************************************************************************
localhost: ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
shell> tree /tmp/home/
/tmp/home/
├── SB
│   ├── tina
│   ├── toula
│   └── town
└── VK
    ├── tina
    ├── toula
    └── town
This task is not idempotent because the files will receive updated file access and modification times (similar to the way touch works from the command line). Preserve access_time and modification_time to make the task idempotent
- file:
    state: touch
    path: "{{ main_path }}/{{ item.1 }}"
    access_time: preserve
    modification_time: preserve
...
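For completeness, the full idempotent variant of the play above simply merges the two snippets (same loop and vars as before, nothing new added):
- hosts: localhost
  tasks:
    - file:
        state: touch
        path: "{{ main_path }}/{{ item.1 }}"
        access_time: preserve
        modification_time: preserve
      with_nested:
        - "{{ for_create.client }}"
        - "{{ for_create.folders }}"
      vars:
        main_path: "/tmp/home/{{ item.0 }}"
        for_create:
          client: [VK, SB]
          folders: [toula, tina]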
I'm having a very difficult time understanding how to organize large playbooks with many roles, using an inventory with multiple "environments" and sub-plays to organize things, all while keeping common variables in the parent playbook and sharing them with the sub-plays. I have only used Ansible in a very limited way, so I'm trying to expand my knowledge with this exercise.
Directory structure (simplified for testing)
├── inventory
│   ├── dev
│   │   ├── group_vars
│   │   │   └── all.yml
│   │   └── hosts
│   └── prod
│       ├── group_vars
│       │   └── all.yml
│       └── hosts
├── playbooks
│   └── infra
│       └── site.yml
├── site.yml
└── vars
    └── secrets.yml
Various secrets are in the secrets.yml file, including the ansible_ssh_user and ansible_become_pass.
Contents of all.yml
---
ansible_ssh_user: "{{ vault_ansible_ssh_user }}"
ansible_become_pass: "{{ vault_ansible_become_pass }}"
Contents of site.yml
---
- name: test plays
  hosts: all
  vars_files:
    - vars/secrets.yml
  become: true
  gather_facts: true
  pre_tasks:
    - include_vars: secrets.yml
  tasks:
    - debug:
        var: ansible_ssh_user

- import_playbook: playbooks/infra/site.yml
Content of playbooks/infra/site.yml
---
- name: test sub-play
  hosts: all
  become: true
  gather_facts: true
  tasks:
    - debug:
        var: ansible_ssh_user
The main parent playbook is called with ansible-playbook -i inventory/dev site.yml. The problem is that I can't access vault_ansible_ssh_user or vault_ansible_become_pass (or any secrets in the vault) from within the sub-play unless I include both vars_files AND pre_tasks: - include_vars.
If I remove vars_files, I can't access the secrets in the parent playbook. If I remove pre_tasks: - include_vars, I can't access any secrets in the imported sub-play. Any idea why I need both of these variable include statements for this to work? Also, is this just a terrible design and am I doing things completely wrong? I'm having a hard time wrapping my head around the best way to organize huge playbooks with a lot of required variables, so I ended up with a directory structure like this to compartmentalize the variables and avoid both very large variable files and duplicating variable files all over the place. This probably boils down to me wanting to fit a round peg into a square hole, but I can't find a good best-practices example for something like this.
This issue might also have to do with me putting Ansible Vault variables in an inventory vars file. If so, is that something I should or shouldn't be doing? As I was writing this, I may have had a "light bulb" moment and finally understood how I should handle this, but I need to test a few things to understand it fully. Regardless, I'm still very interested in what the Stack Overflow community has to say about how I'm currently doing this.
EDIT: it turns out my "light bulb" idea is the same as what I have here, just moved around in a different way, with the same issues.
Q: "If I remove ... include_vars, I can't access any secrets in the imported sub-play."
A: To share variables among the plays use include_vars or set_fact. Quoting from Variable scope: how long is a value available?
Variable values associated directly with a host or group, including variables defined in inventory, by vars plugins, or using modules like set_fact and include_vars, are available to all plays. These ‘host scope’ variables are also available via the hostvars[] dictionary.
Given the files below
shell> cat inventory/prod/hosts
test_01
test_02
shell> cat inventory/prod/group_vars/all.yml
ansible_ssh_user: "{{ vault_ansible_ssh_user }}"
ansible_become_pass: "{{ vault_ansible_become_pass }}"
shell> cat vars/secrets.yml
vault_ansible_ssh_user: ansible-ssh-user
vault_ansible_become_pass: ansible-become-pass
shell> cat site.yml
- name: test plays
  hosts: all
  gather_facts: false
  vars_files: vars/secrets.yml
  tasks:
    - debug:
        var: ansible_ssh_user
    - debug:
        var: ansible_become_pass

- import_playbook: playbooks/infra/site.yml
shell> cat playbooks/infra/site.yml
- name: test sub-plays
  hosts: all
  gather_facts: false
  tasks:
    - debug:
        var: ansible_ssh_user
The variables declared by vars_files are not shared among the plays and the second play will fail. The abridged result is
shell> ANSIBLE_INVENTORY=$PWD/inventory/prod/hosts ansible-playbook site.yml
PLAY [test plays] ****
TASK [debug] ****
ok: [test_01] => {
"ansible_ssh_user": "ansible-ssh-user"
}
ok: [test_02] => {
"ansible_ssh_user": "ansible-ssh-user"
}
TASK [debug] ****
ok: [test_01] => {
"ansible_become_pass": "ansible-become-pass"
}
ok: [test_02] => {
"ansible_become_pass": "ansible-become-pass"
}
PLAY [test sub-plays] ****
TASK [debug] ****
fatal: [test_01]: FAILED! => {"msg": "The field 'become_pass' has an invalid value, which includes an undefined variable. The error was: 'vault_ansible_become_pass' is undefined"}
fatal: [test_02]: FAILED! => {"msg": "The field 'become_pass' has an invalid value, which includes an undefined variable. The error was: 'vault_ansible_become_pass' is undefined"}
The problem will disappear if you use include_vars or set_fact, i.e. "instantiate" the variables. Commenting set_fact and uncommenting include_vars, or uncommenting both, will give the same result
- name: test plays
  hosts: all
  gather_facts: false
  vars_files: vars/secrets.yml
  tasks:
    - debug:
        var: ansible_ssh_user
    - debug:
        var: ansible_become_pass
    # - include_vars: secrets.yml
    - set_fact:
        ansible_ssh_user: "{{ ansible_ssh_user }}"
        ansible_become_pass: "{{ ansible_become_pass }}"

- import_playbook: playbooks/infra/site.yml
Then the abridged result is
shell> ANSIBLE_INVENTORY=$PWD/inventory/prod/hosts ansible-playbook site.yml
PLAY [test plays] ****
TASK [debug] ****
ok: [test_01] => {
"ansible_ssh_user": "ansible-ssh-user"
}
ok: [test_02] => {
"ansible_ssh_user": "ansible-ssh-user"
}
TASK [debug] ****
ok: [test_01] => {
"ansible_become_pass": "ansible-become-pass"
}
ok: [test_02] => {
"ansible_become_pass": "ansible-become-pass"
}
TASK [set_fact] ****
ok: [test_01]
ok: [test_02]
PLAY [test sub-plays] ****
TASK [debug] ****
ok: [test_02] => {
"ansible_ssh_user": "ansible-ssh-user"
}
ok: [test_01] => {
"ansible_ssh_user": "ansible-ssh-user"
}
Notes
In this example, it's not important whether the variables are encrypted or not.
become and gather_facts don't influence this problem.
There might be other issues. It's a good idea to review include and import issues.
Q: "Why is the vars_files needed?"
A: The variable ansible_become_pass is needed to escalate the user's privilege when a task is sent to the remote host. As a result, when the variable vault_ansible_become_pass is declared in the task include_vars only, this variable won't be available before the tasks are executed, and the play will fail with the error
fatal: [test_01]: FAILED! => {"msg": "The field 'become_pass' has an invalid value, which includes an undefined variable. The error was: 'vault_ansible_become_pass' is undefined"}
See
Understanding variable precedence
Understanding privilege escalation: become
No vars_files is needed if there are user-defined variables only. For example, the playbook below works as expected
shell> cat inventory/prod/group_vars/all.yml
var1: "{{ vault_var1 }}"
var2: "{{ vault_var2 }}"
shell> cat vars/secrets2.yml
vault_var1: test-var1
vault_var2: test-var2
shell> cat site2.yml
- name: test plays
  hosts: all
  gather_facts: false
  tasks:
    - include_vars: secrets2.yml
    - debug:
        var: var1
    - debug:
        var: var2

- import_playbook: playbooks/infra/site2.yml
shell> cat playbooks/infra/site2.yml
- name: test sub-plays
  hosts: all
  gather_facts: false
  tasks:
    - debug:
        var: var1
    - debug:
        var: var2
Use case
List all files in a directory with the format - a1.{{ env }}.js, a2.{{ env }}.js
Find corresponding files in the destination directory with the format - a1.js, a2.js
Copy a1.{{ env }}.js into the directory where a1.js exists, and a2.{{ env }}.js into the directory where a2.js exists
Sample code: This code does a direct find and replace
- name: Find files in archive
  find:
    paths: "archive/"
    file_type: file
    recurse: yes
  register: tmp_file_path

- name: Find files in code matching names in archive
  find:
    paths: "code/"
    file_type: file
    recurse: yes
    patterns: "{{ tmp_file_path.files | map(attribute='path') | map('basename') | list }}"
  register: code_file_path

- set_fact:
    code_files: "{{ code_files|default([]) +
                    [{'path': item, 'name': item|basename}] }}"
  loop: "{{ code_file_path.files|map(attribute='path')|list }}"

- name: Copy files from archive to code directory
  command: cp "{{ item.0 }}" "{{ item.1.path }}"
  when:
    - item.0|basename == item.1.path|basename
  with_together:
    - "{{ tmp_file_path.files|map(attribute='path')|list|sort }}"
    - "{{ code_files|sort(attribute='name') }}"
Listed below is the directory structure:
├── archive
│   ├── a1.test.js
│   ├── a2.test.js
│   ├── a3.test.js
│   └── a4.test.js
└── code
    ├── a1.js
    └── dir1
        ├── a2.js
        └── dir2
            ├── a4.js
            └── dir3
                └── a3.js
Copy archive/a1.test.js to code/
Copy archive/a2.test.js to code/dir1/
Copy archive/a3.test.js to code/dir1/dir2/dir3/
Copy archive/a4.test.js to code/dir1/dir2/
Is there a solution to do a direct copy as per the above use case?
Some explanation on the approach:
The entire idea of it is based on the creation of a dictionary instructing the playbook on which file should be added where.
The dictionary, for your use case would look like this:
{
  "a1.js": {
    "archive": "archive/a1.test.js",
    "paths": [
      "code"
    ]
  },
  "a2.js": {
    "archive": "archive/a2.test.js",
    "paths": [
      "code/dir1"
    ]
  },
  "a3.js": {
    "archive": "archive/a3.test.js",
    "paths": [
      "code/dir1/dir2/dir3"
    ]
  },
  "a4.js": {
    "archive": "archive/a4.test.js",
    "paths": [
      "code/dir1/dir2"
    ]
  }
}
Where the keys are the files we are searching for under the code folder, the key archive represents the file we aim to copy from the archive folder, and paths is an array of the directories where that file should end up.
Most of the logic is done by the Ansible filter regex_replace, which extracts the name of the file to look for in the code folder via a fairly simple expression: (.*)\..*\.js$
item.path | basename | regex_replace('(.*)\\..*\\.js$', '\\1.js')
Another filter used here that might be interesting to explore is combine, with the parameter recursive=true, which makes it possible to build up the paths where the files should find their destination(s).
There is also a Python method used here, dict.keys(), to create the comma-separated list of files to search for in the code folder from the keys of the dictionary above.
It also makes use of loop with the subelements filter, to traverse the dictionary and its paths sub-arrays at the same time.
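For illustration, here is what the two key filters do in isolation (a standalone sketch with hypothetical literal values, not part of the playbook below):
- debug:
    msg: "{{ 'archive/a1.test.js' | basename | regex_replace('(.*)\\..*\\.js$', '\\1.js') }}"
  # prints "a1.js": the archive filename is mapped to the name to look for under code/

- debug:
    msg: "{{ {'a1.js': {'archive': 'archive/a1.test.js', 'paths': []}}
             | combine({'a1.js': {'paths': ['code']}}, recursive=true) }}"
  # recursive=true merges the nested dicts, so 'archive' is kept while 'paths' is updated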
And to be complete, here are the other, more commonly used filters used in this playbook:
default: to specify a default value when a variable is not defined
basename: to get only the name of a file out of its full path
dirname: to get only the directory from a full path to a file
join: to concatenate the elements of an array, here, based on a separator
Yes, it might be over-engineered for your use case, but the playbook below is able to cope with a1.js existing in two different folders and will copy a1.test.js into both of them.
So, here is a solution:
- hosts: localhost
  gather_facts: no

  tasks:
    - find:
        paths: archive
        file_type: file
        recurse: yes
      register: archives

    - set_fact:
        searches: "{{ searches | default({}) | combine({ key: value }) }}"
      vars:
        key: "{{ item.path | basename | regex_replace('(.*)\\..*\\.js$', '\\1.js') }}"
        value: "{{ { 'archive': item.path, 'paths': [] } }}"
      loop: "{{ archives.files }}"
      loop_control:
        label: "{{ item.path }}"

    - find:
        path: code
        file_type: file
        recurse: yes
        pattern: "{{ searches.keys() | join(',') }}"
      register: paths

    - set_fact:
        searches: "{{ searches | combine({key: value}, recursive=true) }}"
      vars:
        key: "{{ item.path | basename }}"
        value: "{{ { 'paths': [item.path | dirname] + searches[item.path | basename].paths } }}"
      loop: "{{ paths.files }}"
      loop_control:
        label: "{{ item.path }}"

    - copy:
        src: "{{ item.0.archive }}"
        dest: "{{ item.1 ~ '/' ~ item.0.archive | basename }}"
      loop: "{{ searches | subelements('paths') }}"
      loop_control:
        label: "{{ item.0.archive }}"
Situation before:
tree archive code
archive
├── a1.test.js
├── a2.test.js
├── a3.test.js
└── a4.test.js
code
├── a1.js
└── dir1
    ├── a2.js
    └── dir2
        ├── a4.js
        └── dir3
            └── a3.js
3 directories, 8 files
Recap of the playbook:
PLAY [localhost] **************************************************************************************************
TASK [find] *******************************************************************************************************
ok: [localhost]
TASK [set_fact] ***************************************************************************************************
ok: [localhost] => (item=archive/a3.test.js)
ok: [localhost] => (item=archive/a2.test.js)
ok: [localhost] => (item=archive/a1.test.js)
ok: [localhost] => (item=archive/a4.test.js)
TASK [find] *******************************************************************************************************
ok: [localhost]
TASK [set_fact] ***************************************************************************************************
ok: [localhost] => (item=code/a1.js)
ok: [localhost] => (item=code/dir1/a2.js)
ok: [localhost] => (item=code/dir1/dir2/a4.js)
ok: [localhost] => (item=code/dir1/dir2/dir3/a3.js)
TASK [copy] *******************************************************************************************************
changed: [localhost] => (item=archive/a3.test.js)
changed: [localhost] => (item=archive/a2.test.js)
changed: [localhost] => (item=archive/a1.test.js)
changed: [localhost] => (item=archive/a4.test.js)
PLAY RECAP ********************************************************************************************************
localhost : ok=5 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
Situation after:
tree code
code
├── a1.js
├── a1.test.js
└── dir1
    ├── a2.js
    ├── a2.test.js
    └── dir2
        ├── a4.js
        ├── a4.test.js
        └── dir3
            ├── a3.js
            └── a3.test.js
3 directories, 8 files