Compare commits


392 commits

Author SHA1 Message Date
Matt Clay
611d34acf3 Pin httptester SHA. 2018-03-20 12:16:56 -07:00
Matt Clay
e72b41d3eb Disable failing subversion test on osx. 2018-03-20 09:09:58 -07:00
Matt Clay
ebae05adf5 Fix pip integration test.
(cherry picked from commit ea70b49b11)
2018-01-31 23:36:40 -08:00
Matt Clay
b08dc0c056 Remove EOL openSUSE Leap 42.2 from CI. 2018-01-31 21:54:35 -08:00
Matt Clay
90623b1ffb Update FreeBSD version used in CI. 2017-12-05 21:20:45 -08:00
Matt Clay
0b16e9257a
Pin Shippable build image to v5.4.1. (#32993)
* Pin Shippable build image to v5.4.1.
* Remove `pre_ci` to eliminate extra git sync.
* Upgrade pip in venv for pip test.
* Limit paramiko and cryptography versions.
* Update opensuse versions used in CI.
* Fix zypper* integration tests.
2017-11-20 17:47:53 -08:00
Matt Clay
df14dceb1f Use the new Parallels servers for OS X in CI. 2017-10-13 16:20:37 -07:00
Matt Clay
6c82b90587 Upgrade pip to support cryptography 2.1 and later. 2017-10-13 16:07:14 -07:00
Matt Clay
5d0ccb68af Use python-crypto instead of python2-cryptography.
(cherry picked from commit c869423e0c)
2017-08-03 13:45:06 -07:00
Matt Clay
672a8b3ec9 Update RPM spec and make targets. (#27712)
(cherry picked from commit b54d00f2de)
2017-08-03 13:44:59 -07:00
James Cammarata
4d585c9035 Don't include dependent roles in the dep chain for include_role
The dependency chain should not include roles below the parent, as it
can introduce very weird things like conditionals from child deps impacting
unrelated roles.

Fixes #25136

(cherry picked from commit 495a809f46)
2017-07-17 23:57:32 -04:00
Brian Coca
d189b668cb updated chlog 2017-06-26 16:01:57 -04:00
Eugen C
55339cd0b4 Fix ansible ad-hoc to respect ANSIBLE_STDOUT_CALLBACK (#26098)
* Fix ansible ad-hoc to respect ANSIBLE_STDOUT_CALLBACK

* Ansible ad-hoc 'stdout_callback' should work only with 'bin_ansible_callbacks'

(cherry picked from commit 6d59160744)
2017-06-26 16:01:57 -04:00
Matt Clay
df551246a3 Use stronger password for MySQL (#25821)
Previously we were getting "Your password does not satisfy the current policy requirements",
possibly caused by a software update on Fedora.

(cherry picked from commit 7ee7fa7)
2017-06-22 12:11:27 -07:00
Matt Clay
6e3f5404f9 MySQL Password updates (#25825)
* MySQL Password updates

* Disable user_password_update_test

(cherry picked from commit 448efdb)
2017-06-22 11:26:50 -07:00
Brian Coca
6e474bb995 updated chnlog with backport 2017-06-22 13:48:51 -04:00
Matt Clay
9adfb2d6f5 Fix git tests. 2017-06-15 16:17:18 -07:00
Brian Coca
0cd22c730c updated with view patch 2017-06-15 11:33:28 -04:00
Brian Coca
9e6fc2b182 pager should not log
fixes #25719

(cherry picked from commit df2fcecd62)
2017-06-15 11:32:52 -04:00
James Cammarata
f5be18f409 New release v2.2.3.0-1 2017-05-09 09:50:19 -05:00
James Cammarata
da260cb013 Updating CHANGELOG for 2.2.3 (and 2.2.2) 2017-05-09 09:44:21 -05:00
James Cammarata
f0e348f5ee Fixing security issue with lookup returns not tainting the jinja2 environment
CVE-2017-7481

Lookup returns wrap the result in unsafe, however when used through the
standard templar engine, this does not result in the jinja2 environment being
marked as unsafe as a whole. This means the lookup result loses the unsafe
protection and may become simple unicode strings, which can result in bad
things being re-templated.

This also adds a global lookup param and cfg options for lookups to allow
unsafe returns, so users can force the previous (insecure) behavior.

(cherry picked from commit 72dfb1570d22ac519350a8c09e76c458789120ed)
2017-05-08 15:57:20 -05:00
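A minimal sketch of the tainting described above. `wrap_var` is Ansible's real helper (in recent trees it lives in `ansible.utils.unsafe_proxy`); the surrounding function and the `allow_unsafe` knob are illustrative:

```
from ansible.utils.unsafe_proxy import wrap_var

def run_lookup(lookup, terms, variables, allow_unsafe=False):
    # recursively mark the lookup result unsafe so it is never re-templated;
    # allow_unsafe opts back into the previous (insecure) behavior
    result = lookup.run(terms, variables=variables)
    if allow_unsafe:
        return result
    return wrap_var(result)
```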
Trishna Guha
f779227859 fix timeout assignment eos (#24235)
Signed-off-by: Trishna Guha <trishnaguha17@gmail.com>
2017-05-03 19:04:47 +05:30
Nick Piper
341ffef08e Minor typo correction varibles -> variables
No impact as variable wasn't used.
(cherry picked from commit 403c142750)
2017-05-01 08:45:11 -04:00
Matt Clay
d600266d52 Use Shippable image: drydock/u16pytall:master 2017-04-26 16:46:53 +08:00
abirami-n
0feb231a9c Fixes #18663 - Bad handling of existing config in OS6 module (#23701)
* module_utils_fix_dellos6

* module_utils_fix
2017-04-25 13:15:44 +01:00
Brian Coca
8899f5e068 fix hashing when path is symlink
(cherry picked from commit 631a10745d)
2017-04-20 10:36:12 -04:00
Matt Martz
ad6ccf8ea9 When become_method is su, self._play_context.prompt is a function. Fixes #23689
(cherry picked from commit f82d95ae28)
2017-04-18 13:02:27 -04:00
Brian Coca
7b6557a23b tolerate 'batch' systems that mess with stdin (#23596)
* tolerate 'batch' systems that mess with stdin

fixes #23541

* have pause on windows tolerate devnull

* tuple fix

(cherry picked from commit 586fcae398)
2017-04-18 11:41:46 -04:00
Brian Coca
7cc351a237 removed debug print
(cherry picked from commit 313591f8b8)
2017-04-13 15:13:06 -04:00
Brian Coca
34511d1fc9 fix environment validation, make setup exception
removed bare vars
now environment inheritance correctly prepends
this allows more local to override more general

fixes #23180

(cherry picked from commit df5895e585)
2017-04-12 18:36:36 -04:00
Brian Coca
3f8c56a7e3 removed failing and unused import 2017-04-12 12:49:24 -04:00
Brian Coca
b12b5c376d correctly get all hosts in 'all'
(cherry picked from commit 0d5d5f2bf6)
2017-04-12 12:49:24 -04:00
Pierre-Alexandre
792832b53e BUGFIX: with a YAML hosts inventory, hosts in groups weren't added to the group 'all'
(cherry picked from commit 283a88444f)
2017-04-12 12:49:24 -04:00
Sorin Sbarnea
6ee3a42673 py3 compatibility fix reported on #17038 (#19569)
Signed-off-by: Sorin Sbarnea <ssbarnea@redhat.com>
(cherry picked from commit 4ebd763de0)
2017-04-11 09:26:17 -07:00
James Cammarata
7ff9fa52cf Revert "Fixing another corner case for security related to CVE-2016-9587"
This reverts commit eb8c26c105.
2017-04-06 10:49:33 -05:00
Nathaniel Case
4a852ec287 Update submodule ref 2017-04-05 12:55:52 -04:00
James Cammarata
df3e71a83f New release v2.2.3.0-0.1.rc1 2017-04-03 11:53:09 -05:00
Brian Coca
42afc7dc45 fix 'ungrouped' issue with some inventory formats (#23018)
fixes #23016
(cherry picked from commit c4cff44e77)
2017-03-29 16:47:56 -04:00
Toshio Kuratomi
04f65a4d29 Split on newlines when searching for become prompt
The fix for leading junk in sudo output (fee6e29) causes problems with
ssh + sudo.  On the initial connection using ControlPersist, the output
that we scan for the prompt contains both the command we're sending to
configure the prompt and the prompt itself.  The code in fee6e29 ends up
sending the password when it sees the line configuring the prompt which
is too early.

Switch to a version that splits on lines and then checks whether the
first or last line starts with the prompt to decide if it's time to send
the password.

Fixes #23054
References #20858

(cherry picked from commit 6f77498700)
2017-03-29 12:12:10 -07:00
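A minimal sketch of the line-based check described above (the function name and exact matching rules are illustrative):

```
def check_password_prompt(b_output, b_prompt):
    # split on lines so the command that *configures* the prompt (present in
    # the initial ControlPersist output) cannot trigger an early password send
    b_lines = b_output.splitlines()
    return bool(b_lines) and (b_lines[0].startswith(b_prompt) or
                              b_lines[-1].startswith(b_prompt))
```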
Pavel Glushchak
9b71a6a0b0 Added Virtuozzo distribution support
Virtuozzo Linux is based on CentOS sources. Thus OS family
should be recognized as 'RedHat'.

Signed-off-by: Pavel Glushchak <pglushchak@virtuozzo.com>
(cherry picked from commit 097173c6f5)
2017-03-29 08:50:29 -07:00
Victor Perron
972660968a paramiko_ssh: fix crash upon pass prompt in py3
The pass prompt expects an answer and compares a `str` to a binary buffer, thus crashing.

It's an obvious fix that helps the transition towards Python 3 and hopefully does not need a specific test.
(cherry picked from commit bc44175d8d)
2017-03-29 07:48:09 -07:00
Toshio Kuratomi
2273800f7c Update submodule ref 2017-03-28 07:52:25 -07:00
James Cammarata
31300bd36b New release v2.2.2.0-1 2017-03-27 14:04:34 -05:00
Toshio Kuratomi
147c29f0e9 Update submodule refs 2017-03-22 20:49:07 -07:00
Brian Coca
de331a9488 clarify facts assignment for several corner cases
run_once/delegate_facts:
 now delegate_facts > run_once, previously run_once always published facts to all hosts in play

include_vars/delegate_to:
now include_vars allows delegating facts to a specific host

also fix task_vars exception in delegate_facts/loop as var was removed

fixes #15365

(cherry picked from commit 519f5db7ccb2f0114485521d432fb008c5b2dfce)
2017-03-15 16:02:36 -04:00
Brian Coca
36b462ba85 deal with other 'ungrouped' corner cases
(cherry picked from commit 2d9bf88897)
2017-03-09 20:12:01 -05:00
Brian Coca
b33c41dc5f readd all.add_host as for loop uses diff data
(cherry picked from commit 78e116077a)
2017-03-08 15:57:01 -05:00
Toshio Kuratomi
8b8f0597ed update submodule 2017-03-08 12:34:56 -08:00
Brian Coca
3ba057d28d backport inventory fix, ensure all/ungrouped 2017-03-08 14:58:10 -05:00
Brian Coca
ac4ce95810 backport of file cache perms fix
see #13093
2017-03-08 10:28:23 -05:00
Brian Coca
dae298e429 catch bad extra vars data earlier
Bad extra early (#22322)
(cherry picked from commit c71b15a696)
2017-03-07 13:46:30 -05:00
Matt Davis
c40d9e63bb refresh azure_rm.py from devel 2017-03-06 15:52:21 -08:00
Matt Davis
75c04b6773 refresh azure_rm.py from devel 2017-03-06 15:52:16 -08:00
James Cammarata
6ca528aea4 New release v2.2.2.0-0.2.rc2 2017-03-03 16:34:42 -06:00
Matt Davis
1a6e27a6ac fix azure_rm version checks (#22270)
* Use packaging.version.Version instead of LooseVersion for better support of prerelease tags (eg, 0.30.0 > 0.30.0rc6)
* Add explicit check/error for msrestazure package
(cherry picked from commit d12c93ef2b)
2017-03-03 13:29:06 -08:00
Toshio Kuratomi
d45f2d3288 Fix for traceback when we encounter non-utf8 characters in diff
We can diff non-utf8 files (as part of copy, for instance) but when we
try to turn the bytes into text for display, the characters cause
a traceback.  Since diff output is only informational, we can replace
those problematic bytes with replacement characters.  We do not want to
do this to other fields because those fields may be used inside of the
playbook (for templating another variable or matching in a conditional).

Fixes #21803
Fixes #21804

(cherry picked from commit 49db03c384)
2017-02-24 12:19:28 -08:00
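Since the diff is display-only, the conversion amounts to decoding with replacement characters; a plain-Python sketch:

```
b_diff = b'--- before\n+++ after\n-\xff\xfe old\n+new\n'
# undecodable bytes become U+FFFD placeholders instead of raising
# UnicodeDecodeError; acceptable here because diff output is informational
text_diff = b_diff.decode('utf-8', errors='replace')
```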
Will Thames
e9e39e4fd6 Ensure ssh hostkey checks respect server port (#20840)
* Add tests for `get_fqdn_and_port` method.

Currently tests verify original behavior - returning default `ssh-keyscan` port
Add test around `add_host_key` to verify underlying command arguments
Add some new expectations for `get_fqdn_and_port`
Test that non-standard port is passed to `ssh-keyscan` command

* Ensure ssh hostkey checks respect server port

ssh-keyscan will default to getting the host key for port 22.
If the ssh service is running on a different port, ssh-keyscan
will need to know this.

Tidy up minor flake8 issues

* Update known_hosts tests for port being None

Ensure that git urls don't try to set port when a path
is specified

Update known_hosts tests to meet flake8

* Fix stdin swap context for test_known_hosts

Move test_known_hosts from under basic, as it is its own library.
Remove module_utils.known_hosts from pep8 legacy files list

(cherry picked from commit 103ede26df)
2017-02-24 12:19:28 -08:00
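A sketch of the port handling the commit describes (the helper name is illustrative):

```
def keyscan_command(fqdn, port=None):
    # ssh-keyscan assumes port 22; a non-standard port must be passed via -p
    cmd = ['ssh-keyscan']
    if port is not None and int(port) != 22:
        cmd.extend(['-p', str(port)])
    cmd.append(fqdn)
    return cmd

keyscan_command('git.example.com', 2222)
# -> ['ssh-keyscan', '-p', '2222', 'git.example.com']
```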
Matt Clay
1eda29bfa8 Test www.redhat.com instead of docs.ansible.com.
(cherry picked from commit 9462707f21)
2017-02-21 20:36:40 -08:00
James Cammarata
6791061395 New release v2.2.2.0-0.1.rc1 2017-02-21 18:03:37 -06:00
James Cammarata
694011a897 Additional lock down of conditionals
(cherry picked from commit 9751bf440e2b026874e70f950920e6dbee2e9115)
2017-02-21 17:42:44 -06:00
James Cammarata
cfd57fcae2 Rework how the Conditional class deals with undefined vars
Previously, the Conditional class did a simple check when an
AnsibleUndefinedVariable error was raised to see if certain strings were
present. This patch tries to be smarter by evaluating the variable contained
in the error string and compared to the defined/not defined conditionals in
the conditional string.

This also modifies the UndefinedError message from HostVars slightly to
match the format returned by jinja2 in general, making it easier to match the
error message in the Conditional code.

Fixes #18514

(cherry picked from commit 81aa12eb1b)
2017-02-21 17:42:23 -06:00
James Cammarata
097dbb2daf Use proper YAML constructor class for safe loading
(cherry picked from commit 9f0b354023)
2017-02-21 16:24:45 -06:00
jctanner
c459f87a42 Skip fact gathering if the entire play was included via conditional and False (#21734)
Addresses #21528
(cherry picked from commit 40235d7b99)
2017-02-21 14:18:31 -06:00
jjlorenzo
547dcf4b9e set no_log for url_password
(cherry picked from commit 3befc894e1)
2017-02-21 10:44:17 -06:00
Virgil Dupras
f47356f5c7 Add missing entry in changelog for v2.2.1 (#19943)
That change was introduced in PR #18617
2017-02-17 08:18:08 -06:00
James Cammarata
fe3ede881d Relocate creation of Templar in process_pending_results
Moving it to after the blocks where per-item results are calculated,
as it's not used there and causes quite a performance hit being there.

Fixes #21340

(cherry picked from commit 7bf56ceee3)
2017-02-17 00:26:40 -06:00
Toshio Kuratomi
5dcce0666a Retain vault password as bytes in 2.2
Prior to 2.2.1, the vault password was read in as bytes and then remained
bytes all the way through the code.  A bug existed where bytes and text
were mixed, leading to a traceback with non-ascii passwords.  In devel,
this was fixed by changing the read in password to text type to match
with our overall strategy of converting at the borders.  This was
backported to stable-2.2 for the 2.2.1 release.

On reflection, this should not have been backported as it causes
passwords which were originally non-utf-8 to become utf-8.  People will
then have their working 2.2.x vault files become inaccessible.

This commit pipes bytes all the way through the system for the vault
password.  That way if a password is read in as a non-utf-8 character
sequence, it will continue to work in 2.2.2+.  This change is only for
the 2.2 branch, not for 2.3 and beyond.

Why not everywhere?  The reason is that non-utf-8 passwords will cause
problems when vault files are shared between systems or users.  If the
password is read from the prompt and one user/machine has a latin1
encoded locale while a second one has utf-8, the non-ascii password
typed in won't match between machines.  Deal with this by making sure
that when we encrypt the data, we always use valid utf-8.

Fixes #20398
2017-02-16 13:18:20 -08:00
James Cammarata
c92ce0c2ca Fix bug introduced in 0df3767 regarding undefined entries in HostVars
Fixes #21084

(cherry picked from commit eec88b63c2)
2017-02-15 16:23:24 -06:00
Toshio Kuratomi
1262e5fdca Fix hash filter for non-ascii strings and Python3
hashlib hashes operate on byte strings.  When given a text string on
Python3, hashlib backtraces.  When given a text string on Python2,
hashlib will backtrace if the string contains non-ascii characters.
Encode the text string to utf-8 prior to hashing to avoid this problem.

Fixes #21452

(cherry picked from commit 99fd2328af)
2017-02-15 11:59:42 -08:00
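The fix boils down to one encode before hashing; a sketch with an illustrative helper name:

```
import hashlib

def hash_text(text, algorithm='sha1'):
    # hashlib needs bytes: py3 rejects text outright, and py2 implicitly
    # encodes with ascii, which fails on non-ascii characters
    return hashlib.new(algorithm, text.encode('utf-8')).hexdigest()

hash_text(u'pässwörd')
```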
Matt Clay
cb93ecaef9 Fix @contextmanager leak on exception. (#21031)
* Fix @contextmanager leak on exception.
* Fix test leaks of global module args cache.

(cherry picked from commit 272ff10fa1)
2017-02-15 11:57:16 -08:00
James Cammarata
6176c95838 Also clean template data even if marked unsafe
Fixes #20568

(cherry picked from commit 86beb55a90)
2017-02-10 10:09:50 -06:00
Brian Coca
ef24d56c8f removed warn, which is added in 2.3
(cherry picked from commit 68dbed2a5e)
2017-02-10 08:31:22 -05:00
Brian Coca
f1217a9b94 use regex vs list to weed out password fields
- also warn as module SHOULD have no_log
 - make password regex exportable for testing
 - avoids boolean fields

(cherry picked from commit 403e9d35df)
2017-02-09 18:18:48 -05:00
Matt Davis
9029547603 bump submodule refs 2017-02-09 11:27:12 -08:00
Brian Coca
81e96c754a add url_password to 'cleanse' list
(cherry picked from commit 2f1ab29855)
2017-02-09 08:53:50 -05:00
jerry
c4b09cbcb4 removing unwanted variables and using to_native instead of str() 2017-02-08 15:53:19 -05:00
jeronimog
ef1ecfd2c2 Adding self.log() on IOErrors and OSErrors 2017-02-08 15:53:19 -05:00
Matt Clay
39acdde5aa Use older setuptools for sanity tests. 2017-02-06 14:53:05 -08:00
Toshio Kuratomi
7b2fcb2d4e Make sure that we're comparing text all the way through.
On Darwin, sys.platform returns byte strings on both python2 and
python3.  Turn it into a text string everywhere in order to remedy that.

Fixes #19845

(cherry picked from commit bfffd1952f)
2017-02-06 13:10:25 -08:00
Eugene Krokhalev
4e257fad84 [cloud] unicode is absent on Python 3, use text_type (#20861) 2017-02-03 16:34:52 -05:00
Toshio Kuratomi
3551de98c6 Split on newlines when checking for prompt matches (#20945)
* Check for the prompt as a substring of the output

sudo sometimes spits out warnings to stdout before getting to the
password prompt.  Account for that when trying to match a password
prompt.

Fixes #20858
(cherry picked from commit fee6e2953b)
2017-02-02 14:31:58 -08:00
Toshio Kuratomi
a0104cfe81 Do not substitute ssh_executable until we need to
We need to use ssh_executable instead of hardcoding ssh in the command
we run but we need to use "ssh" when we lookup the value of the
{command}_extra_args variable.  Do this by leaving binary as "ssh" and
only expanding when we place it into b_command.

Fixes #20862

(cherry picked from commit 62ba084003)
2017-02-02 13:44:55 -08:00
Toshio Kuratomi
5f5d143194 Fix import of urlparse on python3
Should fix the error reported here:
https://github.com/ansible/ansible/issues/17495#issuecomment-267921719

(cherry picked from commit 4f960a4f42)
2017-01-30 09:44:04 -08:00
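The usual compatibility pattern for that import (Ansible routes it through its bundled six, but the plain form is equivalent):

```
try:
    from urllib.parse import urlparse   # python3
except ImportError:
    from urlparse import urlparse       # python2
```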
Peter Oliver
04b2e1fbc0 Fix setup on Solaris 8 and 9 (#20780)
Neither the `virtinfo` nor `smbios` commands exist on these systems, which was causing `setup` to fail with the error `Argument 'args' to run_command must be list or string`.
2017-01-27 17:46:45 -05:00
Brian Coca
65ee9d2e46 only ignore become for making tmpdir 2017-01-26 15:18:10 -05:00
Brian Coca
75de5737f7 fix improper setting of become user for tmpdir
fixes #20706

(cherry picked from commit 0a8b856102)
2017-01-26 14:39:49 -05:00
Brian Coca
21106cc95b fix powershell mkdtemp
(cherry picked from commit 9e0fd313d7)
2017-01-25 13:19:51 -05:00
Brian Coca
89fdca0929 make sure tmpdir resolves user dirs (#20486)
* make sure tmpdir resolves user dirs

fixes #20332
supersedes #20484

* typo fix

(cherry picked from commit 10fa2cd0ef)
2017-01-25 13:19:51 -05:00
Matt Martz
7164956cc6 Add jinja2 groupby filter override to cast namedtuple to tuple. Fixes #20098 (#20362)
* Add jinja2 groupby filter override to cast namedtuple to tuple. Fixes #20098

* Address some of the requested changes

* Quoting

* Print the python path and version

* Be less explicitly verbose, rely on implicit verbosity
2017-01-19 12:23:03 -08:00
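A sketch of the override named in the commit title. `do_groupby` is jinja2's real filter; how the wrapper is registered (and the environment-filter marking it must keep) varies with the jinja2 version:

```
from jinja2.filters import do_groupby as _do_groupby

def ansible_groupby(*args, **kwargs):
    # jinja2's groupby yields namedtuples; cast each to a plain tuple so
    # later templating/serialization does not trip over the namedtuple type
    return [tuple(t) for t in _do_groupby(*args, **kwargs)]

# e.g. environment.filters['groupby'] = ansible_groupby
```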
Matt Clay
0240ffe220 Disable pull of updated Shippable docker image.
This should improve reliability of our jobs on Shippable.

The Shippable AMIs should already have the latest docker image,
per Shippable support, so pull doesn't provide any benefit for us.

(cherry picked from commit 25caa67d41)
2017-01-19 11:45:35 -08:00
Matt Clay
db22594955 Update test to use keyserver.ubuntu.com.
(cherry picked from commit 4d616366c0)
2017-01-19 11:39:39 -08:00
Matt Clay
9e9d202d6f Use jinja2 import instead of pip to get version.
This resolves issues with older versions of pip.

(cherry picked from commit a8fb6f0958)
2017-01-19 11:07:43 -08:00
Matt Clay
b2942f1084 Only test map on jinja2 >= 2.7
(cherry picked from commit ad65274643)
2017-01-19 09:01:25 -08:00
Toshio Kuratomi
26f5e4b32d Fix linenos being wrong in inventory errors
Fixes #20356

(cherry picked from commit 05879d331a)
2017-01-19 08:41:05 -08:00
James Cammarata
e85f3f4461 Add representer to AnsibleDumper for AnsibleUnsafeText
Fixes #20253
Fixes #20290
2017-01-19 10:12:22 -06:00
James Cammarata
f4ecbc4c3f Fixing iterator bug related to reworking of end-of-role detection
Bug was introduced in cae682607

(cherry picked from commit 9d549c3498)
2017-01-19 10:11:52 -06:00
James Cammarata
b12256acf2 Reworking the way end of role detection is done
Rather than trying to enumerate tasks or track an ever-changing cur_role
flag in PlayIterator, this change simply sets a flag on the last block in
the list of blocks returned by Role.compile(). The PlayIterator then checks
for that flag when the cur_block number is incremented, and marks the role
as complete if the given host had any tasks run in that role.

Fixes #20224

(cherry picked from commit cae682607c)
2017-01-19 10:11:44 -06:00
Toshio Kuratomi
f649a87a19 Remove restriction on maximum jinja2 version as we're hoping the next release will work with jinja2-2.9 2017-01-16 11:24:22 -08:00
James Cammarata
2f0dba4f36 Fix for bug in Conditional for older jinja2 versions
Fixes #20309

(cherry picked from commit af96cba7e1)
2017-01-16 13:17:06 -06:00
Toshio Kuratomi
6a9572e1e5 Update submodule refs 2017-01-16 11:11:25 -08:00
James Cammarata
5362910000 New release v2.2.1.0-1 2017-01-16 10:13:31 -06:00
James Cammarata
009ac7b65e Forgot to add the release date to the CHANGELOG 2017-01-16 09:59:06 -06:00
James Cammarata
c6ef74d81b Updating CHANGELOG 2017-01-16 09:51:22 -06:00
James Cammarata
0c153146e3 New release v2.2.1.0-0.5.rc5 2017-01-13 16:44:45 -06:00
James Cammarata
86eadc5814 Additional security fixes for CVE-2016-9587
(cherry picked from commit b7cdc21aee)
2017-01-13 16:23:14 -06:00
Brian Coca
4b495d7e43 fix issue when proc read returns None
fixes #20225
2017-01-13 14:06:11 -05:00
Matt Clay
c846e915f8 Correct freebsd startup test for CI. 2017-01-13 10:03:11 -08:00
Matt Clay
a93e7506a8 Test with jinja2 < 2.9. 2017-01-12 14:00:39 -08:00
Brian Coca
49fc0cfe4f deal with remote_src and tmp dirs properly
fixes #20128

(cherry picked from commit 8c6b5621f8)
2017-01-12 14:10:16 -05:00
Brian Coca
d1c5a39420 ansible doc does not need plugin deprecation error
(cherry picked from commit 74421f42e1)
2017-01-12 13:10:44 -05:00
Toshio Kuratomi
06ed25e788 Add jinja2 version constraint.
We're not yet compatible with jinja2-2.9, so this helps people using pip
to get a working installation.
2017-01-12 10:01:29 -08:00
James Cammarata
b3daa9dd64 New release v2.2.1.0-0.4.rc4 2017-01-11 16:25:28 -06:00
James Cammarata
6dee2b21e6 Partial revert of 76f7ce55
(cherry picked from commit a94a48f85f)
2017-01-11 15:53:39 -06:00
James Cammarata
cc4634a5e7 Additional fixes for security related to CVE-2016-9587
(cherry picked from commit d316068831)
2017-01-11 15:53:38 -06:00
Computest
eb8c26c105 Fixing another corner case for security related to CVE-2016-9587
(cherry picked from commit bcceada5d9)
2017-01-11 15:53:37 -06:00
Marius Gedminas
240c388e6c Fix a test failure on Python 3.6 (#20030)
* Fix a test failure on Python 3.6

tox -e py36 failed with

    ======================================================================
    ERROR: test_action_base__execute_module (units.plugins.action.test_action.TestActionBase)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "/home/mg/src/ansible/test/units/plugins/action/test_action.py", line 507, in test_action_base__execute_module
        self.assertEqual(action_base._execute_module(), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))
      File "/home/mg/src/ansible/lib/ansible/plugins/action/__init__.py", line 596, in _execute_module
        remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
      File "/home/mg/opt/python36/lib/python3.6/unittest/mock.py", line 939, in __call__
        return _mock_self._mock_call(*args, **kwargs)
      File "/home/mg/opt/python36/lib/python3.6/unittest/mock.py", line 1005, in _mock_call
        ret_val = effect(*args, **kwargs)
      File "/home/mg/src/ansible/.tox/py36/lib/python3.6/posixpath.py", line 92, in join
        genericpath._check_arg_types('join', a, *p)
      File "/home/mg/src/ansible/.tox/py36/lib/python3.6/genericpath.py", line 149, in _check_arg_types
        (funcname, s.__class__.__name__)) from None
    TypeError: join() argument must be str or bytes, not 'MagicMock'

because os.path.join() now checks argument types since Python 3.6 (due
to pathlib support, I expect).

* Use a more realistic module name in test

(cherry picked from commit d9b89ca577)
2017-01-10 07:37:36 -08:00
Matt Clay
941552d107 Fix group_by test to work with jinja2 >= 2.9.
(cherry picked from commit cc3d131f50)
2017-01-09 15:26:52 -08:00
James Cammarata
d8c9b8d347 New release v2.2.1.0-0.3.rc3 2017-01-09 10:49:01 -06:00
James Cammarata
ec84ff6de6 Fixing security bugs for CVE-2016-9587
(cherry picked from c8f8d0607c5c123522951835603ccb7948e663d5)
2017-01-09 10:43:38 -06:00
Toshio Kuratomi
56de9d8ae7 Update submodule refs 2017-01-05 15:56:25 -08:00
Toshio Kuratomi
3e2f6e5094 Allow OSError to skip scriptdir removal
On Ubuntu the scriptdir gets placed into sys.path.  This makes some
modules (copy) fail because the ansible module gets loaded instead of
the stdlib copy module.  So we remove scriptdir there.  Unfortunately,
the scriptdir code uses abspath().  When pipelining, abspath() has to
find the cwd.  On OSX, finding the cwd when that directory is not
executable by the user raises an OSError.  Since OSX does not suffer
from the scriptdir problem we're able to just skip scriptdir handling if
we get that exception.

Fixes #19729

(cherry picked from commit 03510ec4ce)
2017-01-05 15:56:00 -08:00
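A sketch of the guarded scriptdir handling described above:

```
import os
import sys

try:
    # abspath() may need getcwd(); on OSX that raises OSError when the cwd
    # is not executable by the current user (e.g. when pipelining)
    scriptdir = os.path.dirname(os.path.abspath(__file__))
except OSError:
    scriptdir = None  # OSX does not suffer the sys.path shadowing problem

# on Ubuntu the script dir shadows stdlib modules (e.g. copy), so drop it
if scriptdir is not None and scriptdir in sys.path:
    sys.path.remove(scriptdir)
```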
Brian Coca
577ea88f78 bad module commit 2017-01-05 18:01:30 -05:00
Brian Coca
7e10994b6d allow modules to set custom stats
can be per run or per host, also aggregate or not
set_stats action plugin as reference implementation
added doc stub
2017-01-05 16:39:39 -05:00
Toshio Kuratomi
f6a3c4f071 Update submodule refs 2017-01-04 17:19:08 -08:00
Toshio Kuratomi
8439d1813e Fix for atomic_move on RHEL5
When becoming an unprivileged user using a non-sudo become method on
a platform where getlogin() failed in our situation, we were not able to
detect that the user had switched.  This meant that all of our logic to
use move vs copy if the user had switched was attempting the wrong thing.
This change tries to do the right thing but then falls back to an
acceptable second choice if it doesn't work.

The bug wasn't easily detected because:
* sudo was not affected because sudo records that the user's have been
  switched so we were able to detect that.
* getlogin() works on most platforms.  RHEL5 with python-2.4 seems to be
  the only platform we still care about where getlogin() fails for this
  case.
* It had to be becoming an unprivileged user.  When becoming
  a privileged user, the user would be able to successfully perform the
  best case tasks.

(cherry picked from commit 02e3f4b526)
2017-01-04 14:10:36 -08:00
Brian Coca
e223349edc fixed id queries, should rely on effective
(cherry picked from commit 92e8c53879)
2017-01-04 14:23:50 -05:00
Toshio Kuratomi
b25c06725a Update submodule refs 2017-01-04 10:17:05 -08:00
Adrian Likins
22f7ca8c97 Add a encode() to AnsibleVaultEncryptedUnicode (#19840)
* Add a encode() to AnsibleVaultEncryptedUnicode

Without it, calling encode() on it results in a bytestring
of the encrypted !vault-encrypted string.

ssh connection plugin triggers this if ansible_password
is from a var using !vault-encrypted. That path ends up
calling .encode() instead of using the __str__.

Fixes #19795

* Fix str.encode() errors on py2.6

py2.6 str.encode() does not take keyword arguments.

(cherry picked from commit c771ab34c7)
2017-01-04 12:17:38 -05:00
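A simplified stand-in showing the shape of the fix (the real class is AnsibleVaultEncryptedUnicode); the positional-only encode() call matches the py2.6 note above:

```
class VaultString(object):
    """Illustrative stand-in for AnsibleVaultEncryptedUnicode."""

    def __init__(self, ciphertext, vault):
        self._ciphertext = ciphertext
        self._vault = vault

    @property
    def data(self):
        # the decrypted plaintext, the same value __str__ renders
        return self._vault.decrypt(self._ciphertext)

    def encode(self, encoding=None, errors=None):
        # delegate to the decrypted text; otherwise encode() yields the bytes
        # of the raw !vault payload. Positional args only, since py2.6
        # str.encode() does not accept keyword arguments.
        return self.data.encode(encoding or 'utf-8', errors or 'strict')
```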
James Cammarata
a0a2392c87 Fixing an iteration bug introduced by fed079e4
(cherry picked from commit ac00c8a660)
2017-01-04 10:34:38 -06:00
Toshio Kuratomi
3c4ac877f6 Fix traceback on Darwin with Python3
os.write() needs bytes objects on python3 while python2 can work with
either a byte or unicode string.  Mark the DUMMY_CA_CERT string as
a byte string so it will work.

Fixes #19265
Fixes #19266
2017-01-04 00:34:52 -08:00
Matt Clay
38bf7ab71f Use newer test images for Fedora and openSUSE. 2017-01-03 13:56:53 -08:00
James Cammarata
47e16bef08 Fix role completion detection problem
When the same role is listed consecutively in a play, the previous role
completion detection failed to mark it as complete as it only checked to
see if the role changed.

This patch addresses that by also keeping track of which task in the role
we are on, so that even if the same role is encountered during later passes
the task number will be less than or equal to the last noted task position.

Related to #15409

(cherry picked from commit fed079e4cb)
2017-01-03 14:33:11 -06:00
Arne Demmers
e05222a4cc Fix key lookup in gather facts flag.
(cherry picked from commit bd7466d56c)
2017-01-03 13:53:33 -05:00
Antonio Huete Jimenez
32dad09d2e Check for DragonFly BSD as well for DATE
(cherry picked from commit b9e4a4dede)
2017-01-02 12:59:28 -05:00
Brian Coca
a04d0f485b warn when examined group_vars is not a dir
(cherry picked from commit 6f5ecb7efd)
2017-01-02 12:59:28 -05:00
Matt Davis
7683715caf fix multiple handler notifications
Fixes #19647
Adds integration test to catch multiple handler notifications
(cherry picked from commit c2495677b0364d7e31dfcb51865976ba46586732)
2016-12-23 09:48:23 -08:00
James Cammarata
3c7987f3a4 Removing print debug statements
(cherry picked from commit 78d4f6bbc1)
2016-12-22 19:14:53 -06:00
Gordon Gao
3e72e0c173 let chdir support relative path in more modules (#16736)
(cherry picked from commit d9e1e374b2)
2016-12-22 00:21:30 -08:00
James Cammarata
10a3053525 Correctly set loader path when using list tags/tasks
Fixes #19398

(cherry picked from commit b688f11474)
2016-12-21 21:04:57 -06:00
bart2
29762c87c8 Fixes #16936 - Digest authentication not working in uri module (#18736) 2016-12-21 18:15:21 -08:00
James Cammarata
477043c422 Wrap unhandled errors from lookups in an AnsibleError
This provides better error handling, and prevents errors like KeyError
from bubbling up to code in odd places.

Fixes #17482

(cherry picked from commit 85bbce9d6b)
2016-12-21 11:49:26 -06:00
James Cammarata
f89abc705b Also fix default callback to use ignore_errors param
Rather than the value in the task, which may not be templated.

Related to #18289

(cherry picked from commit d70d279c4e)
2016-12-21 11:37:25 -06:00
James Cammarata
2d8ebbfe8c Template "original_task" fields in _process_pending_results
Since we no longer use a post-validated task in _process_pending_results, we
need to be sure to template fields used in original_task as they are raw and
may contain variables.

This patch also moves the handler tracking to be per-uuid, not per-object.
Doing it per-object had implications for the above due to the fact that the
copy of the original task is now being used, so the only sure way is to track
based on the uuid instead.

Fixes #18289

(cherry picked from commit dd0257b995)
2016-12-21 10:11:59 -06:00
Brian Coca
e1b459470d log on target based on nolog, not verbosity
fixes #18569

(cherry picked from commit b1cefcf176dea8b3bb2a33aebda0b8f6e4dc6cfb)
2016-12-21 11:09:33 -05:00
Brian Coca
1bdf25561a initialize module name
removing verbosity exposed a missing name at certain stages; initialize to the file name
and update later once module args are parsed

(cherry picked from commit cea1acf1462a323dea976ecfa0b1a9e403b4e31b)
2016-12-21 11:09:32 -05:00
Brian Coca
b1e44d1195 still needs static for task object
(cherry picked from commit 48dee1b6d0)
2016-12-16 23:52:25 -05:00
Brian Coca
8ad67b44dc removed unused static
backport of #19454
2016-12-16 15:17:39 -05:00
Jake Morrison
cfbb58adae Handle inability to read /proc for ansible_service_mgr. Fixes #18957
(cherry picked from commit 00859a4ced)
2016-12-16 12:28:55 -05:00
Brian Coca
f90a6439c4 correct template lookup path
now all paths get 'templates/'

(cherry picked from commit ed933421fe)
2016-12-16 12:28:55 -05:00
Toshio Kuratomi
d8449b3013 Update submodule refs 2016-12-14 14:42:51 -08:00
Toshio Kuratomi
1168524f22 Change release templates so they work with py3
(cherry picked from 0b440a9289)
2016-12-14 14:42:23 -08:00
Toshio Kuratomi
99472c42e3 Pull in f5 py3 fix 2016-12-14 10:17:03 -08:00
Toshio Kuratomi
00378515e2 * Add test for git with local modifications
(cherry picked from afca957396)

From PR: Fix UnboundLocalError remote_head in git (#19057)
2016-12-14 08:46:11 -08:00
Toshio Kuratomi
2639016847 Update submodule refs for git module fix 2016-12-14 08:45:01 -08:00
Andrea Tartaglia
be07fcc6d9 Removed dict.iteritems() in several other files.
This is for py3 compatibility #18506

(cherry picked from commit 59227d8c31)
2016-12-14 08:39:29 -08:00
Dag Wieers
268645c17a Fix regression in search path behaviour
This PR fixes a few issues:

- Missing role parent directory for relative paths
- Fix integration tests (add missing stage)
- Redesign integration tests
- Incorrect order with tasks-lookups
- Duplicate paths are listed
- Repetitive tasks/tasks or files/files were possible

==== Using copy with test.txt
Before:
```
   491 1481281038.29393: search_path:
        /home/dag/home-made/ansible.testing/roles/test134/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/tasks/test.txt
        /home/dag/home-made/ansible.testing/files/test.txt
        /home/dag/home-made/ansible.testing/test.txt
```

After:
```
 32505 1481280963.22418: search_path:
        /home/dag/home-made/ansible.testing/roles/test134/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/test.txt
        /home/dag/home-made/ansible.testing/files/test.txt
        /home/dag/home-made/ansible.testing/test.txt
```

==== Using copy with files/test.txt

Before:
```
 31523 1481280499.63052: search_path:
        /home/dag/home-made/ansible.testing/roles/test134/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/tasks/files/test.txt
        /home/dag/home-made/ansible.testing/files/files/test.txt
        /home/dag/home-made/ansible.testing/files/test.txt
```

After:
```
 31110 1481280299.38778: search_path:
        /home/dag/home-made/ansible.testing/roles/test134/files/test.txt
        /home/dag/home-made/ansible.testing/roles/test134/tasks/files/test.txt
        /home/dag/home-made/ansible.testing/files/test.txt
```

==== Using template with files/test.txt.j2
Before:
```
 30074 1481280064.15191: search_path:
        /home/dag/home-made/ansible.testing/roles/test134/templates/files/test.txt.j2
        /home/dag/home-made/ansible.testing/roles/test134/tasks/files/test.txt.j2
        /home/dag/home-made/ansible.testing/roles/test134/tasks/templates/files/test.txt.j2
        /home/dag/home-made/ansible.testing/roles/test134/tasks/tasks/files/test.txt.j2
        /home/dag/home-made/ansible.testing/templates/files/test.txt.j2
        /home/dag/home-made/ansible.testing/files/test.txt.j2
```

After:
```
 29201 1481279823.52752: search_path:
        /home/dag/home-made/ansible.testing/roles/test134/templates/files/test.txt.j2
        /home/dag/home-made/ansible.testing/roles/test134/files/test.txt.j2
        /home/dag/home-made/ansible.testing/roles/test134/tasks/templates/files/test.txt.j2
        /home/dag/home-made/ansible.testing/roles/test134/tasks/files/test.txt.j2
        /home/dag/home-made/ansible.testing/templates/files/test.txt.j2
        /home/dag/home-made/ansible.testing/files/test.txt.j2
```

This fixes #19048

(cherry picked from commit 7c71c678fa)
2016-12-14 11:18:31 -05:00
René Moser
e715221a66 cloudstack: utils: fail friendlier if no zones available (#19332)
(cherry picked from commit 18b7852940)
2016-12-14 16:37:09 +01:00
James Cammarata
fe33c937c4 New release v2.2.1.0-0.2.rc2 2016-12-14 09:09:02 -06:00
Toshio Kuratomi
cbf1f23e7e Fixes for uri under python3 and local (non-httptester) testing
(cherry picked from c6b42028c4)
2016-12-13 15:55:06 -08:00
Lumír 'Frenzy' Balhar
0db1c77041 Fix AST nodes for Python 3 and enable dependent test_uri (#18597)
* Enable tests on python 3 for uri

* Added one more node type to SAFE_NODES in the safe_eval module.

ast.USub represents unary operators. This is necessary for
parsing some unusual but still valid JSON files during testing
with Python 3.

(cherry picked from commit 84544ee8fd)
2016-12-13 07:38:07 -08:00
Toshio Kuratomi
ddc8d3d988 Update submodule refs 2016-12-13 05:57:46 -08:00
Toshio Kuratomi
3beac89893 Remove itervalues (not available on py3)
(cherry picked from 51491c9904)
2016-12-13 05:57:46 -08:00
ikelos
abc0eeac02 Fix overwrite parameter in module_utils.junos (#18671)
The overwrite parameter is forcibly set to false, meaning a module
passing that parameter will have no effect.  The overwrite facility
is necessary to ensure that conflicting options can be written to the
configuration (which, in replace mode, they cannot).

This change ensures that if overwrite is set, it will not be changed
to False in the logic.
(cherry picked from commit 9e5d4de49a)
2016-12-12 15:58:03 -05:00
Patrick Ogenstad
886f8d224e Disable CLI errors when typing enable (#18531)
(cherry picked from commit 05e5474c52)
2016-12-12 15:55:56 -05:00
Juan Antonio Valiño García
76be9aa693 Fixes #18663. Bad handling of existing config in dellos9 module. (#18664)
* Fixes #18663. Bad handling of existing config in dellos9 module.

The dellos9 module doesn't correctly build the internal
structures used to represent the existing config of the managed
network device. This leads to applying changes every time the
playbook is run, even if the existing config is the same as the
one you are trying to push into the device.

Probably this problem also exists in the dellos6 and dellos10
modules, but I only fixed it in the dellos9 module.

The fix modifies two methods. The first one is `get_config`,
where the return clause didn't work correctly when the flow
doesn't enter the `if` block. In that case the `contents`
variable is not an array and this should be handled.

The second fix is in the `get_sublevel_config` method. In this
case the indentation whitespace of the parents should be rebuilt
because further functions and methods require it to correctly
handle comparisons used to check whether changes should be pushed
into the device.

* Fixes #18663 for dellos10 module with the same patches as dellos9.

(cherry picked from commit 40ddbe026d)
2016-12-12 15:52:52 -05:00
Matt Clay
acad2ba246 Support script interpreters for async_wrapper. (#18592)
(cherry picked from commit d61b2ed0a3)
2016-12-06 06:55:15 -08:00
Toshio Kuratomi
47cd4867a1 Update submodule refs 2016-12-05 04:02:39 -08:00
Toshio Kuratomi
69301f2823 make hash_params more robust in the face of many corner cases (#18701)
* make hash_params more robust in the face of many corner cases

Fixes #18680
Alternative fix to #18681

* add test case for role.hash_params

* Add role.hash_params test for more types

A set, a generator/iterable, and a Container that
is not Iterable.

(cherry picked from commit 5f5ea06ca4)
2016-12-05 04:02:21 -08:00
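A sketch of the idea; the real hash_params also covers generators and non-iterable Containers, which this elides:

```
def hash_params(params):
    # reduce arbitrary, possibly nested and unhashable, param structures
    # to a hashable, order-insensitive representation
    if isinstance(params, dict):
        return frozenset((k, hash_params(v)) for k, v in params.items())
    if isinstance(params, (list, tuple, set)):
        return frozenset(hash_params(v) for v in params)
    return params  # scalars are already hashable

hash_params({'a': [1, 2], 'b': {'c': True}})
```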
Toshio Kuratomi
6025e97d13 ssh-keyscan can fail to find keys for a host.
When it does, we need to fail otherwise other code will fail later.

Fixes #18676

(cherry picked from commit 0d9afa84d5)
2016-12-01 23:44:22 -08:00
Toshio Kuratomi
01fa3d3024 Transform vault_pass into text when we read it in from the user or a file. (#18699)
Fixes #18684
(cherry picked from commit 74a10d7ca2)
2016-12-01 23:43:11 -08:00
Benoît Allard
18aba3ebec with_sequence: Fix indentation
This doesn't need to run for every parameter. Once is enough.
(cherry picked from commit 7db4ed02ee)
2016-11-30 21:14:17 -08:00
Alberto Murillo
b15e1b743f Fix fetching files with scp (#18673)
Commit ec2521f intended to fix the scp command to fetch files
from a remote machine but it has src and dest swapped.

This change correctly treats src as the location in the remote machine
and dest as the location in the local machine.

Signed-off-by: Alberto Murillo Silva <alberto.murillo.silva@intel.com>
(cherry picked from commit 7542dae26b)
2016-11-30 20:13:07 -08:00
Matt Clay
97444c5e59 Corrected placement templates for test_lookups. 2016-11-29 15:16:38 -08:00
Matt Davis
4ef8493a11 bump submodule refs 2016-11-29 14:28:32 -08:00
Virgil Dupras
4854705267 Fix regression in jinja2 include search path (#18617)
* Fix regression in jinja2 include search path

Since commit 3c39bb5, the 'ansible_search_path' variable is used to set
jinja2's search path for {% include %} directives. However, this path is
not the proper one because our templates live in 'templates' subdirs in
our search path.

This is a regression because previously, our include search path would
include the dirname of the currently interpreted file, which worked most
of the time.

fixes #18526

* Fix template lookup search path

Improve fix in commit c96c853 so that the search path contains both
template-suffixed paths as well as original paths.

ref PR #18617

* Add integration test for template lookups

Tests regression at #18526

This test fails on current devel branch and succeeds on PR #18617

(cherry picked from commit bf48383610)
2016-11-29 16:23:23 -05:00
James Cammarata
ff60245e2b Updating core submodules for mysql fix 2016-11-29 13:25:46 -06:00
Daniel Miranda
a7abe4be19 Make sure include_role inherit variables from parent role (#18627)
* Make sure include_role inherit variables from parent role

Setting the parent of task blocks generated by include_role after they
have been produced is not sufficient - it means the tasks don't have the
correct dependency chain set afterwards, and therefore, don't properly
inherit variables from outer roles.

In addition to manually setting the parents, pass the dep_chain when
compiling the role, such that variables are correctly imported.

Fixes #18540.

* Add tests for include_role

* Fix include_role variable inheritance for multiple parent levels

(cherry picked from commit 57f4a9885e)
2016-11-29 11:26:17 -05:00
Toshio Kuratomi
faaabec397 Fix for AnsiballZ when the remote clock is behind (#18660)
Some machines have system clocks which can fall behind (for instance,
a host without a CMOS battery like Raspberry Pi).  When managing those
machines we have to work around the fact that the zip format does not
handle file timestamps before 1980.  The workaround is to substitute in
the timestamp from the controller instead of from the managed machine.

Fixes #18640
(cherry picked from commit 3c6d71522e)
2016-11-29 01:22:22 -08:00
Adrian Likins
288f6684cf Fix traceback in atomic_move (#18649)
Commit 8b08a28c89 removed a
call to get_exception() that was needed. Without it, the fail_json
references an undefined variable ('exception') and throws an exception.

Add the get_exception() back in where needed and update references.

Now the proper module failure is returned.

Fixes #18628
(cherry picked from commit dbbd2d79ff)
2016-11-28 15:32:30 -05:00
Peter Sprygada
b878e8f0f0 fixes timeout param in netconf provider for junos (#18634)
This change will now cause the netconf provider to honor the module
timeout value when making calls to pyez.
(cherry picked from commit a757a77159)
2016-11-26 22:25:37 -05:00
Peter Sprygada
4d7760c0b1 pass module timeout value to fetch_url in eapi connection (#18633)
This change causes the eapi connection to honor the module timeout
value when calling fetch_url
(cherry picked from commit eec6980f3e)
2016-11-26 22:23:43 -05:00
Alberto Murillo
b9a1b2836a Fix ssh plugin to correctly fetch files when using scp (#18614)
Fetch module uses fetch_file() from plugin/connection/ssh.py to
retrieve files from the remote hosts which in turns uses
_file_transport_command(self, in_path, out_path, sftp_action) being
sftp_action = 'get'

When using scp rather than sftp, the sftp_action variable is not used
and the scp command is formed in a way that the file is always
sent to the remote machine

This patch fixes _file_transport_command() to correctly form the scp
command, swapping src and dest if sftp_action is 'get'

Bug introduced at 8e47b9b
Fixes #18603

Signed-off-by: Alberto Murillo Silva <alberto.murillo.silva@intel.com>
(cherry picked from commit ec2521f6af)
2016-11-26 17:01:53 -06:00
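A sketch of the corrected direction handling (`shlex.quote` is py3; code of this era used the pipes/six equivalent):

```
import shlex

def scp_transport(scp, host, in_path, out_path, sftp_action):
    # for 'get' the remote side is the source; for 'put' it is the destination
    if sftp_action == 'get':
        return [scp, '{0}:{1}'.format(host, shlex.quote(in_path)), out_path]
    return [scp, in_path, '{0}:{1}'.format(host, shlex.quote(out_path))]

scp_transport('scp', 'web1', '/etc/motd', '/tmp/motd', 'get')
# -> ['scp', 'web1:/etc/motd', '/tmp/motd']
```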
Toshio Kuratomi
8dee7f3138 Update core submodule for mount on BSD's fix 2016-11-23 13:27:16 -08:00
Michael Schuett
f4a2332d48 Fix auth in collins.py inventory
This forces basic auth to be used. Using the normal HTTPPasswordMgrWithDefaultRealm
password manager from urllib2 fails since collins doesn't send a 401 retry on failure.
More about this can be seen here http://stackoverflow.com/questions/2407126/python-urllib2-basic-auth-problem.
I added a small comment about the format of the host so others don't waste time like I did.

(cherry picked from commit 21813ed83e)
2016-11-23 14:39:37 -05:00
James Cammarata
a60a7279d0 Look for _get_parent_attribute method in both src and dst dict
When determining which getter style to use for the object in question,
the BaseMeta class should look at both dicts to try to locate the method.

Fixes #18522

(cherry picked from commit 4859e0a419)
2016-11-23 12:51:54 -06:00
Toshio Kuratomi
f1f6752686 Add mount module fixes to changelog
Add Py3 fixes to changelog
2016-11-23 07:27:55 -08:00
Toshio Kuratomi
42e0efbbbf Update submodule refs 2016-11-23 07:24:44 -08:00
Toshio Kuratomi
ac076dfc12 Fix the Solaris POSIX acl fix
For setfacl on Solaris we need to specify permissions like r-x.
For chmod, we need to specify them as rx (r-x means to make the file
readable and *not* executable)

(cherry picked from commit 255a5b5d75)
2016-11-22 12:38:06 -08:00
Toshio Kuratomi
0bed5d4d85 Update core submodule ref for another py3 fix 2016-11-22 11:45:23 -08:00
Toshio Kuratomi
b39f48121d Update submodule refs 2016-11-22 11:27:44 -08:00
koralsky
bf8902f371 import reduce from six label:python3 (#18561)
* import 'reduce' from six
* import reduce in facts fix

(cherry picked from commit 4e194d71bd)
2016-11-22 11:23:57 -08:00
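The pattern behind that change, for reference:

```
# 'reduce' is a builtin on py2 but lives in functools on py3; six.moves
# (bundled as ansible.module_utils.six) papers over the difference
from functools import reduce

reduce(lambda a, b: a + b, [1, 2, 3], 0)  # -> 6
```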
Michael Noseworthy
27be8a1022 Fix unicode handling in fixup_perms2 errorhandling (#18565)
The _fixup_perms2 method checks to see if the user that is being sudo'd
is an unprivileged user or root. If it is an unprivileged user, some
checks are done to see if becoming this user would lock the ssh user out
of temp files, among other things. If this check fails, an error prints
telling the user to check the documentation for becoming an unprivileged
user.

On some systems, stderr prints out the unprivileged user the ssh
user was trying to become wrapped in smartquotes. These quotes aren't
in the ASCII range, and so when we're trying to call `str.format()` to
combine the stderr message with the error text we get a
UnicodeEncodeError as python can't coerce the smartquotes using the
system default encoding. By calling `to_native()` on the error message
we can ensure that the error message is a native string for the
`Exception` handling, as `Exception` messages need to be native strings
to avoid errors (byte strings in python2, and text strings in python3)

Fixes: #18444
(cherry picked from commit bb5d8fb476)
2016-11-22 11:20:21 -08:00
Toshio Kuratomi
f62224497e Fix setfacl for Solaris with POSIX acl support.
Tested on Linux and freebsd.

Fixes #17919

(cherry picked from commit d90638ad40)
2016-11-21 13:25:23 -08:00
James Cammarata
529adb574a Cache dynamically included blocks for later lookup via uuid
Fixes #18357

(cherry picked from commit 7e2305f953)
2016-11-21 13:05:14 -06:00
James Cammarata
528426ce0c Check for negative defined logic in conditionals
(cherry picked from commit 8ee3ef587a)
2016-11-21 12:22:56 -06:00
James Cammarata
a34793d7fb When iterating over hostvars yield the hostname not the host object
Also fixes HostVars to raise the correct jinja2 error type.

Fixes #16836

(cherry picked from commit 0df3767d4d)
2016-11-21 12:22:56 -06:00
Toshio Kuratomi
a91788e25a Convert playbook basedir into unicode at the borders
(cherry picked from commit 0b96d61162)
2016-11-21 09:25:22 -08:00
Brian Coca
74bb122598 change to ~ instead of $HOME to avoid undefined (#18551)
fixes #16032
(cherry picked from commit 6dece90a57)
2016-11-21 07:36:15 -08:00
Andrea Tartaglia
66779698f0 Replaced iterkeys with 'for key in dict' for #18507 (#18538)
(cherry picked from commit 62697ad77f)
2016-11-21 07:19:32 -08:00
James Cammarata
d4e8cdc84f New release v2.2.1.0-0.1.rc1 2016-11-18 23:56:46 -06:00
James Cammarata
ead92bee3d Updating CHANGELOG for 2.2.1 release 2016-11-18 23:53:04 -06:00
James Cammarata
23b5764b57 Properly sort groups by name as well as depth when getting vars
Fixes #17243
2016-11-18 22:56:37 -06:00
James Cammarata
3ce19f4c58 With role params also keep special params in the ds as well as params
With 2.0, we decided to create a special list of param names which were
taken out of the role data structure and stored as params instead (connection,
port, and remote_user). This causes problems with inheritance of these params,
so we are now deprecating that while also keeping those keys in the ds so they
are brought in as attributes on the Role correctly.

Fixes #17395

(cherry picked from commit f36926f8d3)
2016-11-18 22:20:08 -06:00
Toshio Kuratomi
585c57fca4 Update submodule refs 2016-11-18 12:47:36 -08:00
Ben Cordero
aed616ab31 openstack: iterate through nova addresses with six (#18408)
(cherry picked from commit a9a2f12adb)
2016-11-18 12:45:55 -08:00
Matt Davis
f6b47c53d7 async fix changelog update 2016-11-18 11:50:08 -08:00
Matt Davis
8696ce9e00 bump submodule refs 2016-11-18 11:48:19 -08:00
Matt Davis
d637559825 manual backport of win_async test changes from devel 2016-11-18 11:47:08 -08:00
jamessewell
c2f9846278 Moved the _inventory.clear_group_dict_cache() from creating a group w… (#17766)
* Moved the _inventory.clear_group_dict_cache() call from the creation of a group which doesn't exist to the addition of members to the group.

* Update __init__.py

Update to use changed: block to catch all changes for cache clear as suggested

(cherry picked from commit b91d4d884d)
2016-11-18 13:27:52 -06:00
Andrew Haines
35e198a616 Default include_role results to empty list in linear strategy plugin
Fixes #18544.

When a loop is over an empty list, the result is set to

    {'skipped_reason': u'No items in the list', 'skipped': True, 'changed': False}

which means that accessing `hr._result['results']` throws a `KeyError`.

(cherry picked from commit 200d6bdb23)
2016-11-18 12:14:02 -05:00
James Cammarata
0871d955fe Reworking iterator logic regarding failed states during always
Previous changes addressed a corner case, which unfortunately introduced
another bug. This patch adds a new flag to the host state (did_rescue) which
is set to true when the rescue portion of a block completes. This flag is
then checked in _check_failed_state() when the fail_state != FAILED_NONE.

This led to the discovery of another bug - current strategies are not advancing
hosts to ITERATING_COMPLETE after doing a peek at the next task, leaving the
host state in the run_state of the final task. To address this, before gathering
the list of failed hosts in StrategyBase.run(), a final pass through the iterator
for all hosts is done to ensure each host is in its final state. This way, no
strategy derived from StrategyBase has to worry about it and it's handled.

Fixes #17983

(cherry picked from commit ca5b361ad8)
2016-11-18 11:09:36 -06:00
James Cammarata
159399fea4 Return failed instead of raising an error when an include_vars file is missing
Fixes #18508

(cherry picked from commit 937d872f4b)
2016-11-18 09:57:57 -06:00
Adrian Likins
47d5f0f0a8 Fix 'ansible-vault edit' crash on changed nonascii
ansible-vault edit was attempting to decode the file contents
and failing.

Fixes #18428

(cherry picked from commit c09060e8ff)
2016-11-18 10:11:00 -05:00
Brian Coca
bc539adddc added changes from devel needed by systemd fixes 2016-11-18 10:06:12 -05:00
Brian Coca
ecbac4cf73 fix for filter fix
(cherry picked from commit 5d043b65d3)
2016-11-17 13:42:44 -05:00
Brian Coca
1a247de0b6 remove rsync path from returned facts
(cherry picked from commit 7c960d440f)
2016-11-17 13:21:14 -05:00
James Cammarata
30d5d5fa67 Don't fail on missing vars files for delegated hosts
Fixes #17965

(cherry picked from commit f7fe6dc19c)
2016-11-17 11:59:42 -06:00
James Tanner
39cb6797a3 Port has_key to python3 compatible syntax
(cherry picked from commit 2d2bb626d4)
2016-11-16 13:02:46 -08:00
Brian Coca
6f5ec79e91 corrected service filtered option to singular
(cherry picked from commit afaec3da82)
2016-11-16 10:13:02 -05:00
Brian Coca
42fb088807 always template when called from template (#18490)
* Have template action plugin call do_template

Avoids all the magic done for 'inline templating' for ansible plays.
renamed _do_template to do_template in templar to make it externally accessible.
fixes #18192

* added backwards compat as per feedback

(cherry picked from commit bd70397e24)
2016-11-15 15:17:06 -05:00
James Cammarata
7602a2a030 Catch loop eval errors and only raise them again if the task is not skipped
This should help on issues like #16222.

(cherry picked from commit 57cf5e431c)
2016-11-15 10:39:27 -06:00
James Cammarata
413f6ab7f1 Add option to prepend inherited attributes when extending values
Fixes #18483

(cherry picked from commit 435ca620b2)
2016-11-14 16:42:46 -06:00
Brian Coca
832cc5bb5b added arguments alias 'args' to blacklist
this should fix https://github.com/ansible/ansible-modules-core/issues/5584

(cherry picked from commit f4391d34e4)
2016-11-14 15:19:21 -05:00
Chris Church
28883975bf Increment changed stat for a failed task if changed. (#18014)
(cherry picked from commit 534bd12ae9)
2016-11-14 01:24:01 -06:00
James Cammarata
c23d99b786 Alternately track listening handlers by uuid if no name is set
Fixes #17846

(cherry picked from commit 4f06a86161)
2016-11-13 15:28:38 -06:00
Matt Clay
0cde6fdaca Add changelog entry for make deb changes. 2016-11-11 13:15:35 -08:00
Matt Davis
fad7f1de7c bump extras submodule ref for win_nssm bugfix 2016-11-11 10:44:20 -08:00
James Cammarata
6bd4bec9de Fixing incorrect use of version_compare in docker integration test 2016-11-11 11:45:37 -06:00
James Cammarata
debfb798dd Don't copy the parent block of TaskIncludes when loading statically
When loading an include statically, we previously were simply doing a
copy() of the TaskInclude object, which recurses up the parents creating
a new lineage of objects. This caused problems when used inside load_list_of_blocks
as the new parent Block of the new TaskInclude was not actually in the list
of blocks being operated on. In most circumstances, this did not cause a
problem as the new parent block was a proper copy, however when used in
combination with PlaybookInclude (which copies conditionals to the list of
blocks loaded) this untracked parent was not being properly updated, leading
to tasks being run improperly.

Fixes #18206

(cherry picked from commit 5b87951d6c)
2016-11-11 08:10:00 -06:00
Shane McDonald
dfad25bd38 Build debs with pbuilder (#18165)
* Build debs with pbuilder
* Update README in packaging/debian
* Add Dockerfile for building debs
* Add local_deb makefile target - Allows users to build debs using locally installed dependencies. This was the `deb` target before moving to pbuilder.

(cherry picked from commit 4ae0d5b843)
2016-11-10 14:29:54 -08:00
Matt Clay
b482cdcf03 Fix docker connection unit tests.
- Use assertRaisesRegexp to make sure correct exceptions are raised.
- Set docker_command to avoid docker dependency (skips find_executable).
- Use a fake path for docker_command to make sure mock.patch is working.

(cherry picked from commit 8552ad6bf1)
2016-11-09 10:45:47 -08:00
James Tanner
c920c8bc3b ini lookup: add 'default' to the list of parsed keys
Fixes #18369

(cherry picked from commit 20fb74b1b1)
2016-11-08 11:07:50 -05:00
Brian Coca
9499ed5360 restore play_hosts variables to not show removed
Also adds ansible_play_hosts_all with the original list of hosts the play targeted

(cherry picked from commit 5dd195b52f)
2016-11-08 10:51:51 -05:00
James Cammarata
caba50c778 Merge class dict with parent dict when creating meta attributes
In some situations, where the Base class defines an Attribute, the
BaseMeta class doesn't properly see the _get_parent_attribute or
_get_attr_<whatever> methods because of multiple layers of subclasses
(ie. Handler, which subclasses Task). This addresses that by merging
the __dict__ of the parent with the current classes __dict__ meaning
all future iterations see available special methods.

Fixes #18378

(cherry picked from commit 4794b5da45)
2016-11-07 22:36:09 -06:00
Toshio Kuratomi
58d8a0fca0 Text's .translate() is easier to use than bytes
Text strings and byte strings both have a translate method but the byte
string version is harder to use.  It requires a mapping of all 256 bytes
to a translation value.  Text strings only require a mapping from the
characters that are changing to the new string.  Switching to text
strings on both py2 and py3 allows us to state what we're getting rid of
simply without having to rely on the maketrans() helper function.

(cherry picked from commit ee14e0cc2a)
2016-11-07 10:24:51 -08:00
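A minimal sketch of the difference described above (illustrative values, not the actual facts.py code):

```python
# Text .translate() takes a mapping keyed by ordinal; only the characters
# being removed need an entry (same behaviour for py2 unicode and py3 str).
text = u"total: 83090 1K-blocks"
print(text.translate({ord(u"K"): None}))   # total: 83090 1-blocks

# Bytes .translate() needs a full 256-byte table (or None for identity)
# plus a separate delete argument, hence the maketrans() helper on py2.
data = b"total: 83090 1K-blocks"
print(data.translate(None, b"K"))          # b'total: 83090 1-blocks'
```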
Michael Scherer
aaedf0bd73 Make facts detection work on OpenBSD with Python3
The traceback is the following:

    Traceback (most recent call last):
      File \"/tmp/ansible_8s0bj604/ansible_module_setup.py\", line 134, in <module>
        main()
      File \"/tmp/ansible_8s0bj604/ansible_module_setup.py\", line 126, in main
        data = get_all_facts(module)
      File \"/tmp/ansible_8s0bj604/ansible_modlib.zip/ansible/module_utils/facts.py\", line 3641, in get_all_facts
      File \"/tmp/ansible_8s0bj604/ansible_modlib.zip/ansible/module_utils/facts.py\", line 3584, in ansible_facts
      File \"/tmp/ansible_8s0bj604/ansible_modlib.zip/ansible/module_utils/facts.py\", line 1600, in populate
      File \"/tmp/ansible_8s0bj604/ansible_modlib.zip/ansible/module_utils/facts.py\", line 1649, in get_memory_facts
    TypeError: translate() takes exactly one argument (2 given)

And the swapctl output is this:

    # /sbin/swapctl -sk
    total: 83090 1K-blocks allocated, 0 used, 83090 available

The only use of the code is to remove prefixes in case they are present, so just
replacing them with an empty string is sufficient.

(cherry picked from commit df145df962)
2016-11-07 09:51:10 -08:00
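A hedged sketch of the fix as described, using str.replace(), which takes the same arguments on Python 2 and 3 (field positions are illustrative, based on the swapctl output above):

```python
line = "total: 83090 1K-blocks allocated, 0 used, 83090 available"
data = line.split()
# Strip the unit prefix if present; replace() needs no translation table.
swap_total_kb = int(data[1].replace("K", ""))
swap_free_kb = int(data[-2].replace("K", ""))
print(swap_total_kb, swap_free_kb)  # 83090 83090
```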
Adrian Likins
366bfe14c3 Fix bug (#18355) where encrypted inventories fail (#18373)
* Fix bug (#18355) where encrypted inventories fail

This is first part of fix for #18355
* Make DataLoader._get_file_contents return bytes

The issue #18355 is caused by a change to inventory to
stop using _get_file_contents so that it can handle text
encoding itself to better protect against harmless text
encoding errors in ini files (invalid unicode text in
comment fields).

So this makes _get_file_contents return bytes so it and other
callers can handle the to_text().

The data returned by _get_file_contents() is now a bytes object
instead of a text object. The callers of _get_file_contents() have
been updated to call to_text() themselves on the results.

Previously, the ini parser attempted to work around
ini files that potentially include invalid unicode
in comment lines. To do this, it stopped using
DataLoader._get_file_contents() which does the decryption of
files if vault encrypted. It didn't use that because _get_file_contents
previously did to_text() on the read data itself.

_get_file_contents() returns a bytestring now, so ini.py
can call it and still special case ini file comments when
converting to_text(). That also means encrypted inventory files
are decrypted first.

Fixes #18355

(cherry picked from commit dd0189839e)
2016-11-07 10:15:09 -05:00
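A self-contained sketch of the resulting pattern (simplified: the real DataLoader also performs vault decryption before returning; assumes a local 'hosts' file):

```python
def _get_file_contents(path):
    # Return raw bytes; vault decryption would happen here, but text
    # decoding is now left entirely to the caller.
    with open(path, "rb") as f:
        return f.read()

# Callers convert themselves, so e.g. the ini parser can special-case
# comment lines containing invalid unicode before converting.
b_data = _get_file_contents("hosts")
try:
    data = b_data.decode("utf-8")
except UnicodeDecodeError:
    data = b_data.decode("utf-8", errors="replace")  # or per-line handling
```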
Dag Wieers
780d2c4bc4 Remove unnecessary warnings (#18121)
(cherry picked from commit cd784cd345)
2016-11-07 14:52:13 +01:00
Brian Coca
1cc26cf7cd resolve inventory path on init
This allows meta refresh_inventory to work with relative paths
Added option to unfrackpath to not resolv symlinks
fixes #16857

(cherry picked from commit 8217c1c39c)
2016-11-04 17:11:44 -04:00
Toshio Kuratomi
de7fbd407a Limit how much of the file we read to test if it's an encrypted vault file
Fixes memory errors reported in #16391

(cherry picked from commit ed134d81f1)
2016-11-04 12:31:28 -07:00
Jasper Lievisse Adriaanse
9d81ad6423 Rework how OpenBSD processor facts are resolved
(cherry picked from commit c17dad0def)
2016-11-04 08:58:58 -07:00
Matt Clay
5bf1269aaf Python 3 fixes for ansible-doc.
(cherry picked from commit 88dbb5a630)
2016-11-03 16:10:08 -07:00
James Cammarata
3f25088bf0 Correctly reassign implicit block parents when an include is involved
(cherry picked from commit 7ff9942ec6)
2016-11-03 15:28:35 -05:00
James Cammarata
09fc911a48 Correctly assign parent when squashing adjacent implict Blocks
Related to: #18315
Related to: #18206
Related to: #17810

(cherry picked from commit d12475c98b)
2016-11-03 15:28:28 -05:00
Tony Kinsley
2b9659945c Resolves #18312 python3 support for ec2.py
(cherry picked from commit 73da2663a3)
2016-11-03 11:47:21 -07:00
Robin Roth
182943f3b3 Exclude floppy disks from lsblk call for uuids
Fixes #18326

(cherry picked from commit 6ca6a9a291)
2016-11-03 10:52:28 -07:00
Toshio Kuratomi
b229898f80 Add include_vars fix to changelog 2016-11-03 07:33:13 -07:00
Brian Coca
5c4a4703d9 only validate extensions when using dir loading
fixes #18223

(cherry picked from commit 32a7b4ce71)
2016-11-03 07:31:32 -07:00
Patrick Uiterwijk
06599f49eb Fix adding the same trusted certificates multiple times (#18296)
If there is an intermittent network failure, we might be trying to reach
an URL multiple times. Without this patch, we would be re-adding the same
certificate to the OpenSSL default context multiple times.
Normally, this is no big issue, as OpenSSL will just silently ignore them,
after registering the error in its own error stack.
However, when python-cryptography initializes, it verifies that the current
error stack of the default OpenSSL context is empty, which it no longer is
due to us adding the certificates multiple times.
This results in cryptography throwing an Unknown OpenSSL Error with details:

OpenSSLErrorWithText(code=185057381L, lib=11, func=124, reason=101,
reason_text='error:0B07C065:x509 certificate routines:X509_STORE_add_cert:cert already in hash table'),

Signed-off-by: Patrick Uiterwijk <puiterwijk@redhat.com>
(cherry picked from commit 77af3a68de)
2016-11-02 10:41:14 -07:00
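An illustrative guard for the behaviour described (hypothetical helper names; the real fix lives in the code that loads CA certificates for url fetching):

```python
_added_cert_hashes = set()

def add_trusted_cert(ctx, pem_text):
    # Register each certificate only once: re-adding pushes an
    # X509_STORE_add_cert error onto OpenSSL's error stack, which later
    # trips python-cryptography's import-time sanity check.
    cert_hash = hash(pem_text)
    if cert_hash in _added_cert_hashes:
        return
    ctx.load_verify_locations(cadata=pem_text)  # ctx: an ssl.SSLContext
    _added_cert_hashes.add(cert_hash)
```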
Toshio Kuratomi
3a577966ba Add dnf fixes to changelog 2016-11-02 07:43:46 -07:00
Toshio Kuratomi
6824b1ea1e Update submodule refs 2016-11-02 07:42:07 -07:00
Daniel Menet
00bdada50e fix iteritems for python 3
(cherry picked from commit 19fdb58948)
2016-11-01 12:47:46 -07:00
Adrian Likins
32971e8639 Fix 'vault rekey' with vault secret env var
if ANSIBLE_VAULT_PASSWORD_FILE is set, 'ansible-vault rekey myvault.yml'
will fail to prompt for the new vault password, and will use
None.

Fix is to split out 'ask_vault_passwords' into 'ask_vault_passwords'
and 'ask_new_vault_passwords' to make the logic simpler. And then
make sure new_vault_pass is always set for 'rekey', and if not, then
call ask_new_vault_passwords() to set it.

ask_vault_passwords() would return values for vault_pass and new
vault_pass, and vault cli previously would not prompt for new_vault_pass
if there was a vault_pass set via a vault password file.

Fixes #18247

(cherry picked from commit 309f54b709)
2016-11-01 13:13:10 -04:00
Toshio Kuratomi
70824e06b5 Update submodule refs 2016-11-01 08:04:24 -07:00
Steve Kuznetsov
e13f3e3c07 Change v2_playbook_on_start logic to positively detect legacy plugins
In order to support legacy plugins, the following two method signatures
are allowed for `CallbackBase.v2_playbook_on_start`:

def v2_playbook_on_start(self):
def v2_playbook_on_start(self, playbook):

Previously, the logic to handle this divergence checked to see if the
callback plugin being called supported an argument named `playbook`
in its `v2_playbook_on_start` method. This was fragile in a few ways:
 - if a plugin author did not use the literal `playbook` to name their
   method argument, their plugin would not be called correctly
 - if a plugin author wrapped their `v2_playbook_on_start` method and
   by doing so changed the argspec to no longer expose an argument
   with that literal name, their plugin would not be called correctly

In order to continue to support both types of callback for backwards
compatibility while making the call more robust for plugin authors,
the logic can be reversed in order to have a positive check for the old
method signature instead of a positive check for the new one.

Signed-off-by: Steve Kuznetsov <skuznets@redhat.com>
(cherry picked from commit 0bc35354ce)
2016-11-01 07:53:04 -07:00
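A hedged sketch of the reversed check, using the inspect API of the era (inspect.getargspec() is deprecated in modern Python in favour of inspect.signature()):

```python
import inspect

def fire_playbook_on_start(plugin, playbook):
    method = plugin.v2_playbook_on_start
    # Positively detect the *old* signature: a bound method that declared
    # only 'self' takes no further arguments. Argument names and wrapped
    # argspecs no longer matter for the new-style case.
    if len(inspect.getargspec(method).args) == 1:  # just 'self' -> legacy
        method()
    else:
        method(playbook)
```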
jasdeep-hundal
6adbc7d64a Fix OpenSSH-related ssh process exit race
Mitigate the effects of observing the ssh process still running
after seeing an EOF on stdout when using OpenSSH with
ControlPersist, since it does not close the stderr file descriptor
in this case.

(cherry picked from commit 679da00236)
2016-11-01 07:53:04 -07:00
Matt Robinson
325bf617e9 Set ansible_os_family correctly under KDE neon
As neon is derived from Ubuntu, ansible_os_family should have the value
"Debian" instead of "Neon".  Add a test case for KDE neon and set
os_family correctly for it.

(cherry picked from commit 4ff8890ec1)
2016-11-01 07:53:04 -07:00
Michael Riss
2c572ba786 Improved caching for urls
- When there is no file at the destination yet, we have no modification time for the `If-Modified-Since` header. In this case, trust the cache to make the right decision: either serve a cached version or refresh from origin. This should help with mass-deployment scenarios where you want to use a local cache to relieve your uplink.
- If you don't trust the cache to make the right decision you can still force it to refresh by providing the `force: yes` option.

(cherry picked from commit c05bad9f74)
2016-11-01 07:53:04 -07:00
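Roughly, the header logic being described (a sketch; helper names are illustrative except `force`, which is quoted in the message):

```python
import os
from email.utils import formatdate

def conditional_headers(dest, force=False):
    headers = {}
    if force:
        # Caller does not trust the cache: demand a refresh from origin.
        headers["cache-control"] = "no-cache"
    elif os.path.exists(dest):
        # Local copy exists: ask only for newer content.
        mtime = os.path.getmtime(dest)
        headers["If-Modified-Since"] = formatdate(mtime, usegmt=True)
    # No destination file and no force: send no validator and let the
    # cache decide whether to serve its copy or revalidate upstream.
    return headers
```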
Toshio Kuratomi
503537eb25 Update for pip fix 2016-11-01 07:45:36 -07:00
James Cammarata
cdec853e37 New release v2.2.0.0-1 2016-10-31 22:20:38 -05:00
James Cammarata
23812ab87d Updating CHANGELOG for CVEs fixed in 2.2.0 2016-10-31 22:19:37 -05:00
Nathaniel Case
1f80e35312 Exception.message gone in 3.x (#18221)
* Exception.message gone in 3.x
(cherry picked from commit 4a067c3f50)
2016-10-31 11:17:06 -07:00
Toshio Kuratomi
c07f6d1bdd Update submodule refs 2016-10-31 11:11:07 -07:00
Toshio Kuratomi
d559355b29 Add tests for dnf modelled after the yum tests (#18226)
(cherry picked from commit 02859a3e32)
2016-10-31 10:57:11 -07:00
Sijis Aviles
219a20277f Simplify surrogate check in to_text() (#18211)
* Simplify surrogate check in to_text()

* Simplify surrogateescape check even further

(cherry picked from commit b365f44fa1)
2016-10-29 09:12:39 -07:00
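The simplified check amounts to asking the codecs registry whether the handler exists at all (a sketch of the pattern, not the exact diff):

```python
import codecs

try:
    codecs.lookup_error("surrogateescape")
    HAS_SURROGATEESCAPE = True
except LookupError:
    # py2 (and very old py3) ship no surrogateescape error handler
    HAS_SURROGATEESCAPE = False
```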
Michael Scherer
3de9d8373b Cleanup StringIO import for module_utils/shell.py
(cherry picked from commit 6052c1294b)
2016-10-28 08:01:42 -07:00
James Cammarata
9d4ce0a94e New release v2.2.0.0-0.4.rc4 2016-10-27 13:41:06 -05:00
Toshio Kuratomi
c5d4134f37 Add hint that python3 might be too old
This limitation of python-3.4 mkstemp() is the final reason we made
python-3.5 our minimum version.  Since we know about it, give a nice
error to the user with a hint that Python3.4 could be the issue.

Fixes #18160

(cherry picked from commit fda933723c)
2016-10-27 07:45:34 -07:00
jctanner
95a8bbdbda iterate through task results only if the key is not at the root level (#18214)
Fixes https://github.com/ansible/ansible-modules-core/issues/5396
(cherry picked from commit 5a0621db55)
2016-10-27 09:46:06 -04:00
Rene Moser
1ebc94f290 tasks_queue_manager: fix fork calculation if serial in %
(cherry picked from commit 3763283d01)
2016-10-26 15:27:05 -04:00
Toshio Kuratomi
a2df07ade3 A few fixes for python3
* socket interfaces take bytes so convert text strings to bytes when
  using them.
* Use b64encode() instead of str.encode('base64')

(cherry picked from commit 56086f3b9e)
2016-10-26 11:49:05 -07:00
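Both fixes in miniature (illustrative, not the touched module code):

```python
import base64

def send_line(sock, text):
    # Socket interfaces take bytes on py3, so encode text explicitly.
    sock.sendall(text.encode("utf-8") + b"\n")

# str.encode('base64') was a py2-only codec that is gone on py3;
# base64.b64encode takes and returns bytes on both.
token = base64.b64encode(b"payload")
```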
Foxlik
6b603b026c Fix #10865
Slightly better handling of http headers from an http (CONNECT) proxy. Buffers up to 128KiB of headers and raises an exception if this size is exceeded.

This could be optimized further, but for the time being it does the trick.
(cherry picked from commit 8bb01d4c29)
2016-10-26 11:48:47 -07:00
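A self-contained sketch of the bounded header read (constant and exception types illustrative):

```python
MAX_PROXY_HEADER_BYTES = 128 * 1024  # the 128KiB cap from the commit above

def read_connect_response(sock):
    buf = b""
    while b"\r\n\r\n" not in buf:
        chunk = sock.recv(4096)
        if not chunk:
            raise Exception("proxy closed the connection mid-headers")
        buf += chunk
        if len(buf) > MAX_PROXY_HEADER_BYTES:
            raise Exception("proxy sent more than 128KiB of headers")
    headers, _, rest = buf.partition(b"\r\n\r\n")
    return headers, rest  # 'rest' is already-buffered tunnel payload
```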
Matt Davis
deb1e3ebc7 bump core submodule ref for win_shell/win_command fixes 2016-10-25 17:40:08 -07:00
Matt Davis
79e43925b1 add large interleaved stdout/stderr integration tests for win_shell/win_command
(cherry picked from commit c1b7d2e560)
2016-10-25 17:38:25 -07:00
Matt Davis
0eb23f5a86 fix version check to support >=rc5
(cherry picked from commit d1e1898b0e)
(cherry picked from commit 12a38bc75f)
2016-10-25 17:37:16 -07:00
Matt Davis
066a360a36 backport various docker_common fixes from devel
(cherry picked from commit b5c95ea6fa)
2016-10-25 17:34:56 -07:00
Will
731422a6dc Fix lxd_container module fails if certificate already in trust store
When the client certificate is already stored, lxd returns a JSON error with the message "Certificate already in trust store". This "error" will occur on every task run after the initial one: the cert is in the trust store after the first run, so the message should really only be viewed as informational, as it does not indicate a real problem.

Fixes:
ansible/ansible-modules-extras#2750
(cherry picked from commit 1f30bc8a6f)
2016-10-24 20:41:24 -07:00
Toshio Kuratomi
96d3f06743 Add a new potential su prompt
Two parts to this change:
* Add a new string that requests password
* Add a new glyph that can be used to separate the prompt from the
  user's input, as some systems use a fullwidth colon rather than an ASCII colon.

Fixes #17867

(cherry picked from commit 188ae18b1c)
2016-10-24 16:58:13 -07:00
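Illustratively, the prompt match gains a localized password string plus the fullwidth colon (U+FF1A); the exact strings live in Ansible's source:

```python
import re

# ASCII or fullwidth colon, optional surrounding whitespace.
SU_PROMPT = re.compile(u"(?:[Pp]assword|\u5bc6\u7801)\\s*[:\uff1a]\\s*$")

assert SU_PROMPT.search(u"Password: ")
assert SU_PROMPT.search(u"\u5bc6\u7801\uff1a")  # e.g. a zh_CN su prompt
```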
James Cammarata
eafb4043c9 New release v2.2.0.0-0.3.rc3 2016-10-24 18:39:31 -05:00
Thomas Quinot
35938b907d Filter out internal magic and connection variables from facts returns
Fixes #15925

(cherry picked from commit f826370ab8befacf2e8867ee3d7e2b814a3da385)
2016-10-24 17:27:33 -05:00
Sam Doran
bab1ac1d5c Fish hacking setup fix (#18084)
* Remove old egg-info files before creating new ones

Currently, setup.py generates egg files then they are deleted. This change
fixes this behavior and matches that in env-setup.

* Do not try to move ansible*egg-info to lib/

setup.py creates the ansible.egg-info in lib/ so this step is unnecessary. Matches env-setup behavior.

* Better test for number of arguments in argv

This prevents an erroneous error message from being thrown, since set -q returns an error code equal to the number of variables not defined, resulting in a non-zero exit if no arguments are passed.

Indent case statement within switch statement.

(cherry picked from commit cf8639ff62)
2016-10-24 10:35:36 -07:00
Robin Roth
f3fc029726 Change all links in readme to https
Fixes #17954
2016-10-24 09:50:01 -07:00
Rene Moser
e4ebe721f5 handler: notify a handler by name _and_ listen
Before we only allowed either notify by name or listen and name had precedence.

(cherry picked from commit e69d26270f)
2016-10-24 10:59:38 -04:00
Toshio Kuratomi
2fa12438dd Update submodule refs to pick up python3 fix for uri 2016-10-24 07:20:24 -07:00
Toshio Kuratomi
9d82a3aa0c Fix the uri testserver to run on python3
(cherry picked from commit 589e71dbc5)
2016-10-24 06:51:10 -07:00
Adrian Likins
fef9de30d9 test-module _ansible_selinux_special_fs arg added
modules need to have _ansible_selinux_special_fs passed in
as an arg, so add the default to the args.

(cherry picked from commit cf39a1abab)
2016-10-24 09:14:35 -04:00
Matt Davis
5169252641 bump core submodule ref for win async bugfix 2016-10-24 00:04:16 -07:00
Matt Davis
4d5368e93b reenable win_async loop test
(cherry picked from commit 9a78273665)
2016-10-24 00:03:42 -07:00
Toshio Kuratomi
02ed599035 Update submodule refs 2016-10-23 16:55:29 -07:00
Michael Scherer
bf503e4ff2 Fix 18151, by converting float to int
(cherry picked from commit 6a76a9299d)
2016-10-23 14:01:18 -07:00
Michael Scherer
9022862624 Fix some errors in CHANGELOG.md (#18149)
(cherry picked from commit a1032bc44b)
2016-10-23 13:17:53 +02:00
Daniel Yates
6c118252b6 Correctly read use_private_network as boolean
This fixes the use of public IPs in the discovered hosts by
ensuring that the use_private_network check doesn't always evaluate
to False if the associated .ini file specifies this option.

(cherry picked from commit 39e86ae2bc)
2016-10-23 13:17:26 +02:00
Nijin Ashok
e97a00de9e Fix improper handling of machine_type in ovirt inventory (#16251)
Currently the machine_type will not work if the instance type is set in ovirt. In that case, inst.get_instance_type will be an object and will fail while converting to json. This only works if the instance type is not set in ovirt, where inst.get_instance_type is a Null value. The current change makes sure that the correct "instance type" is passed when the instance type is set in ovirt and Null when it's not set in ovirt.
(cherry picked from commit 1f3d82dd18)
2016-10-23 02:15:22 +02:00
stephane
f5240d2953 Set Suse family for openSUSE Tumbleweed & Leap
On openSUSE Tumbleweed, lsb-release -a currently reports
the distributor ID as "openSUSE Tumbleweed". On openSUSE
Leap, the distributor ID is "SUSE LINUX".

Add them to the OS_FAMILY dict as Suse family systems.

Also add an entry to TESTSETS in test_distribution_version.py
for openSUSE Tumbleweed.

(cherry picked from commit 77868a4104)
2016-10-23 02:05:32 +02:00
Matt Robinson
92c851a894 Make bcrypt + passlib work in password_hash filter
If hashtype for the password_hash filter is 'blowfish' and passlib is
available, hashing fails as the hash function for this is named 'bcrypt'
(and not 'blowfish_crypt').  Special case this so that the correct
function is called.

(cherry picked from commit 692bfa872a)
2016-10-23 01:50:26 +02:00
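A sketch of the special case (assumes passlib >= 1.7 and a bcrypt backend installed; the mapping shape is illustrative of the fix, not the verbatim filter code):

```python
import passlib.hash

def passlib_hasher(hashtype):
    if hashtype == "blowfish":
        # passlib names this hash "bcrypt", so the usual "%s_crypt"
        # attribute lookup would fail for it.
        return passlib.hash.bcrypt
    return getattr(passlib.hash, "%s_crypt" % hashtype)

print(passlib_hasher("sha512").hash("secret"))    # sha512_crypt hash
print(passlib_hasher("blowfish").hash("secret"))  # bcrypt hash
```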
Toshio Kuratomi
84485c29ee Add changelog for apt_key 2016-10-22 09:06:33 -07:00
Toshio Kuratomi
806fc1ac74 submodule ref update to pull in apt and apt_key fixes 2016-10-22 08:57:30 -07:00
Ryan S. Brown
b702d3810e Bump core modules submodule ref 2016-10-21 13:28:03 -04:00
Toshio Kuratomi
8c6d749ad9 Update submodule ref for py3 compile test fix 2016-10-21 09:39:16 -07:00
Toshio Kuratomi
8dbc564fc6 Enable the git test on py3
(cherry picked from commit 2ac12432ef)
2016-10-21 09:07:18 -07:00
Toshio Kuratomi
e8c97768b7 Update submodule refs for py3 and git fixes 2016-10-21 09:06:38 -07:00
Ssawa
90d3824678 Handle 'smart' scp_if_ssh option for fetch (#18125)
(cherry picked from commit 8e47b9bc70)
2016-10-21 10:02:51 -04:00
Michael Scherer
5bd6a9b76c Enable filters test for python 3
(cherry picked from commit 2804e64ed5)
2016-10-20 23:37:32 -07:00
Toshio Kuratomi
a15d3106e9 Now that we convert salt inside of do_encrypt as needed, keep salt as text type until then.
(cherry picked from commit 6d9f780937)
2016-10-20 22:50:01 -07:00
Toshio Kuratomi
3ee4effb7a pip tests now pass on python3. Enable them.
(cherry picked from commit 67ac375188)
2016-10-20 13:59:03 -07:00
Toshio Kuratomi
90b06bc8b4 Update submodule refs 2016-10-20 13:53:43 -07:00
Michael Scherer
b6e51d670a Enable test for lookups on python 3
Since passlib algos sometimes take bytes and sometimes
not, depending on an internal variable, we have to convert
based on it, or it fails with "TypeError: salt must be bytes,
not str" (or unicode instead of bytes)

However, that's not great to use internal structure for that.

(cherry picked from commit 578da9a615)
2016-10-20 13:24:34 -07:00
Brian Coca
dcc6a15ce3 role now fails if specified file not found
fixes https://github.com/ansible/ansible-modules-core/issues/5166

(cherry picked from commit ea428e716d)
2016-10-20 14:40:06 -04:00
Brian Coca
25e4398d5b fix for check_mode/async interaction
fixes #18110

(cherry picked from commit 432633e4c1)
2016-10-20 14:38:54 -04:00
Toshio Kuratomi
f15ec38788 Thanks to mscherer, these tests are now passing
(cherry picked from commit fd14048d46)
2016-10-20 10:59:46 -07:00
Michael Scherer
ddbc01dfe5 Let authorized_keys be tested on python 3
(cherry picked from commit e93a8814a3)
2016-10-20 10:58:10 -07:00
Toshio Kuratomi
6e36d1899c Update submodule refs 2016-10-20 10:46:47 -07:00
John R Barker
ba8e1f88a9 Port validate-modules to stable-2.2 (#18119) 2016-10-20 18:22:58 +01:00
James Tanner
1dfa0e06eb Update 2.2 submodule refs for core 2016-10-20 10:51:31 -04:00
Michael Scherer
e9b7d42205 Use six.move for module in module_utils/facts.py
(cherry picked from commit 4549604cc7)
2016-10-19 21:33:11 -07:00
Toshio Kuratomi
d2998f0811 Update submodule refs to pick up git fix 2016-10-19 15:27:18 -07:00
Adrian Likins
fa8f9e9ead Fix test_filters fail because of dict sort (#18105)
Fixes #17308
(cherry picked from commit f99ffb5620)
2016-10-19 16:11:37 -04:00
Toshio Kuratomi
257182e46a Pixelrebel amc pr2654 (#18089)
* Add tag verification test (ansible-modules-core PR 2654)

* Fix typo

* Use smaller repo for testing, add dependency control

* Test is gpg exists before running git signing tasks

* Correct the test conditionals so that gpg1 is tested

(cherry picked from commit b902b5d046)
2016-10-19 08:42:17 -07:00
James Cammarata
bce9bfce51 New release v2.2.0.0-0.2.rc2 2016-10-18 15:35:11 -05:00
Toshio Kuratomi
cc91c34f36 Update submodule ref 2016-10-18 13:25:34 -07:00
Toshio Kuratomi
a0a4d0e3f4 Add changelog entries for ansible_managed and yum fix 2016-10-18 13:24:53 -07:00
Toshio Kuratomi
b9e8aa72be Make the default Ansible_managed string static so it doesn't interfere with idempotency
(cherry picked from commit 5037dc4e69)
2016-10-18 13:24:32 -07:00
Brian Coca
fb921042db properly propagate loop vars
fixes #17966

(cherry picked from commit 99220a5f6c)
2016-10-18 15:27:28 -04:00
Brian Coca
885b218a7b fix include_role dynamic path
fixes #17966

(cherry picked from commit 01b75f966b)
2016-10-18 13:28:24 -04:00
Brian Coca
c5b155ba1a include_role now allows duplicates by default
setting is overridable by user

(cherry picked from commit 38d0f77a0f)
2016-10-18 11:14:24 -04:00
Toshio Kuratomi
f6295677c9 Update submodule refs 2016-10-18 07:21:24 -07:00
Toshio Kuratomi
1e54f424ec epdb doesn't work on python3 so we need a different package to test pip
(cherry picked from commit 7e0bd5632d)
2016-10-17 18:45:38 -07:00
Toshio Kuratomi
b373f67368 Fix ansible-pull on python3
On python3, we can't write bytes directly to sys.stdout.

(cherry picked from commit 60acfd1e87)
2016-10-17 16:32:47 -07:00
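The portable pattern for the fix (sketch):

```python
import sys

def write_bytes(b_data):
    # py3's sys.stdout is a text stream; raw bytes must go via .buffer.
    if hasattr(sys.stdout, "buffer"):
        sys.stdout.buffer.write(b_data)
    else:
        sys.stdout.write(b_data)  # py2: stdout is already a byte stream

write_bytes(b"hello from ansible-pull\n")
```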
James Cammarata
924e0726df Break out of linear strategy if all hosts are failed and the result is not OK
Fixes #18021

(cherry picked from commit 5be2a3a9e0)
2016-10-17 16:32:20 -05:00
Toshio Kuratomi
69ff46b8ca Update extras submodule ref to pick up no_log fix for ovirt_auth 2016-10-17 13:07:50 -07:00
Toshio Kuratomi
19516d8c19 Note subversion no_log fix 2016-10-17 12:49:05 -07:00
Toshio Kuratomi
531023ad1f Update submodule ref to pick up subversion no_log fix 2016-10-17 12:47:01 -07:00
Aaron Bieber
1af1cca59f Remove -b option from pbrun.
The -b option reads as follows:
` The target job is directed to ignore hangup signals. This is particularly
useful for running the target program in the background.`

If needed, '-b' can be added to become_flags

Squashed commit of the following:

commit f2c9f5c011ae8be610301d597a34bfba1a391e08
Author: Aaron Bieber <aaron@bolddaemon.com>
Date:   Mon Oct 17 10:58:14 2016 -0600

    remove pbrun flags

commit f402679ac177c931ad64bd13306f62512a14fcd6
Author: Aaron Bieber <aaron@bolddaemon.com>
Date:   Fri Oct 14 15:29:29 2016 -0600

    use Password: vs assword: for matching pbrun prompt

commit cd2e90cb65854c4cc5dd8773404e520d40f82765
Author: Aaron Bieber <aaron@bolddaemon.com>
Date:   Fri Oct 14 15:28:58 2016 -0600

    move -b to pbrun_flags

(cherry picked from commit 3fc1b4da53)
2016-10-17 14:43:02 -04:00
Brian Coca
dde882c91f updated pbrun test to match expected output
(cherry picked from commit 04b86df815)
2016-10-17 14:43:02 -04:00
Andrew Gaffney
7ee14f466e Fix search path for relative paths in roles (fixes #17877)
(cherry picked from commit 72f0aaf606aa3399c0713ad1eaac394c3846813c)
(cherry picked from commit ef3fa115d3ba319b7f6a7a86a85655d9cf766120)
2016-10-17 11:20:44 -04:00
Brian Coca
9a9f767857 fixes to ansible_search_path
now gets basedir (no need to frontload as dwim already does that)
added comment about basedir to search_path usage to avoid dupes

(cherry picked from commit b14bce867d645af716641a8bf674fa9eba9ddb98)
2016-10-17 11:20:44 -04:00
Brian Coca
d4b8178b4b avoid errors when dynamic role
(cherry picked from commit 6f2936e57b268e9f8a2ed5c23c6af49888f869ae)
2016-10-17 11:20:44 -04:00
Toshio Kuratomi
141ed26e02 Update extras submodule ref to pull in password hiding fix for postgresql_lang and postgresql_ext 2016-10-17 08:12:19 -07:00
Toshio Kuratomi
5bf850568a Update submodule refs 2016-10-16 22:47:47 -07:00
Robin Roth
24d7555d0b Allow unicode inventory filename
Fixes #17932

(cherry picked from commit 3922328954)
2016-10-15 16:55:19 -07:00
Bill Nottingham
d3dd82f3cf Change <support@ansible.com> - it's being retired. 2016-10-15 16:48:22 -07:00
Toshio Kuratomi
e1101f78bd Fixes to handle non-ascii become passwords
Fixes for non-ascii passwords on
* both python2 and python3,
* local and paramiko_ssh (ssh tested working with these changes)
* sudo and su

Fixes #16557

(cherry picked from commit f24c10c32b)
2016-10-15 16:26:23 -07:00
Toshio Kuratomi
3d3ebbf98e Fix become password using non-ascii for local connection
Fixes #18029
2016-10-15 11:27:14 -07:00
Toshio Kuratomi
261013354f unarchive tests now pass on python3
(cherry picked from commit 91c1fdec3d)
2016-10-15 08:51:00 -07:00
James Cammarata
15f4b83564 Don't mark parent role complete for nested include_role calls
The PlayIterator was written without nested roles in mind, but since
include_role can nest them we need to check to see if we've moved into
a new role which is a child via nesting.

Fixes #18026

(cherry picked from commit 0d5206f90c)
2016-10-14 14:37:46 -05:00
Ryan S. Brown
747a5ef791 update extras submodule ref 2016-10-14 12:30:39 -04:00
Brian Coca
c1a34b5eff Better error for bad role def
(cherry picked from commit ff1e52184f)
2016-10-14 11:38:18 -04:00
James Cammarata
b8c25d8f70 Make sure free strategy is returning proper TQM constants
Fixes #18023

(cherry picked from commit 6bdcb3a392)
2016-10-14 09:24:01 -05:00
James Tanner
05dfed7575 Update submodule refs 2016-10-13 19:34:22 -04:00
Brian Coca
b6e317c045 toggle missing handler errors/warnings via config
(cherry picked from commit b169a61c20)
2016-10-13 16:56:31 -04:00
Peter Sprygada
a94db01b89 fixes issue when checking if sessions are supported (#18013)
the supports_sessions() call was sending the command as a string instead
of a list, which is required when transport is eapi.  This fixes that bug
(cherry picked from commit 936bca9fc6)
2016-10-13 15:47:58 -04:00
Peter Sprygada
6d909bd65c adds additional exception handling in open() (#18003)
The open() method will now catch a socket.timeout exception and raise
a ShellError.
(cherry picked from commit b1666020a9)
2016-10-13 15:14:04 -04:00
Peter Sprygada
ab3d4731a3 minor fix for checking kwargs in get_config() (#18002)
The get_config() method was checking for a nonexistent kwarg that would
cause an exception.  This fixes that problem.
(cherry picked from commit bce31a11c2)
2016-10-13 15:13:55 -04:00
Brian J. Dowling
ed9d0cdf4a Quick ansible-doc fix -- don't run pager if there was an error (no text)
(cherry picked from commit 2be2f35373)
2016-10-13 15:10:55 -04:00
Brian Coca
cb8c28870c always log unexpected exceptions
(cherry picked from commit 18d3e0533f)
2016-10-13 13:34:11 -04:00
Brian Coca
2accc28d14 restored 'results' filters
tests do not work the same, restoring old filters to keep backwards compat
tests now only implement the new normalized 'tense'

(cherry picked from commit cdb5a222c5)
2016-10-13 13:27:44 -04:00
Nathaniel Case
d8155cc4fa Update submodule refs 2016-10-13 13:09:32 -04:00
Peter Sprygada
edff94f96d adds log message for successful connection and disconnection (#17993)
The network module will now log a message when it connects to a remote host
successfully and specify the transport used.  It will also log a message
when the module disconnect() method is called.
(cherry picked from commit 65ea24f4bb)
2016-10-12 21:48:59 -04:00
Peter Sprygada
1acd258931 fixes issue in eos shared module for earlier versions of EOS (#17980)
Earlier versions of EOS that do not support config sessions would
raise an exception.  This fix will now check if the device supports
sessions and if it doesn't, it will fall back to not using sessions
(cherry picked from commit 3badb212fb)
2016-10-12 20:17:13 -04:00
Toshio Kuratomi
7034a34ce4 Update submodule refs 2016-10-12 15:18:04 -07:00
Toshio Kuratomi
4cd32ee1ac Only display failure to use cryptography at a higher verbosity
Fixes #17982

(cherry picked from commit bf3d546d9a)
2016-10-12 10:49:13 -07:00
stephane
9f4a656929 Correct delegated_host_name check
In fb50698da3 a check for delegated_host_name being defined was added. Make this
check safer, as the original breaks some playbooks.

(cherry picked from commit a32e48555d)
2016-10-11 11:17:32 -07:00
Bruno Rocha
3db274ac21 Fix unbound method call for JSONEncoder (#17970)
* Fix unbound method call for JSONEncoder

The way it is currently it will lead to unbound method error

```python
In [1]: import json

In [2]: json.JSONEncoder.default('object_here')
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-2-872fdacfda50> in <module>()
----> 1 json.JSONEncoder.default('object_here')

TypeError: unbound method default() must be called with JSONEncoder instance as first argument (got str instance instead)

```

But what is really wanted is to let the json module raise the "is not serializable" error, which demands a bound instance of `JSONEncoder()`

```python
In [3]: json.JSONEncoder().default('object_here')
---------------------------------------------------------------------------
TypeError: 'object_here' is not JSON serializable

```

BTW: I think it should try to call `.to_json` on the object before raising, as that is a common pattern.

* Calling JSONEncoder's bound `default` method using super()

(cherry picked from commit b06fb2022c)
2016-10-11 08:33:02 -07:00
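Putting both points together, a minimal subclass (the `.to_json` probe is the reviewer's suggestion above, not necessarily the merged code):

```python
import json

class Encoder(json.JSONEncoder):
    def default(self, o):
        if hasattr(o, "to_json"):
            return o.to_json()
        # Bound call via super(), so json raises its normal
        # "is not JSON serializable" TypeError for unknown objects.
        return super(Encoder, self).default(o)

json.dumps({"x": 1}, cls=Encoder)           # fine
# json.dumps({"x": object()}, cls=Encoder)  # TypeError, as intended
```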
Ryan S. Brown
3a822faeae Update extras submodule reference 2016-10-07 16:42:46 -04:00
Pavlo Shchelokovskyy
3c9966d6fc Make interprocess polling interval configurable (#16560)
As there was recently back-and-forth with this hardcoded value
(0.001 -> 0.01 -> 0.005), the optimal value obviously depends on
the Ansible usage scenario and is better made configurable.

This patch adds a new config option in DEFAULT section,
`internal_poll_interval`, with default of 0.001 corresponding to the
value hardcoded in Ansible v2.1.
This config option is then used instead of hardcoded values where
needed.

Related GH issue: 14219
(cherry picked from commit aa1ec8af17)
2016-10-06 14:33:09 -05:00
Toshio Kuratomi
2a7f728fdf Remove archive from the 2.2 will be working to get it updated for 2.3 2016-10-06 11:23:19 -07:00
Toshio Kuratomi
aafa7ab471 Update submodule refs 2016-10-06 11:01:16 -07:00
Toshio Kuratomi
f59430aba8 Add a whitelist for checking for six. Use it for digital_ocean.py
(cherry picked from commit 6a61b6d431)
2016-10-06 11:00:26 -07:00
Strahinja Kustudic
448cac16db Moves 'statically included' messages to -vv verbosity (#17918)
(cherry picked from commit 9962245b92)
2016-10-06 08:55:54 -05:00
James Cammarata
ede5eb78ab Sleep briefly while waiting for pending results to reduce CPU churn
(cherry picked from commit e26bce5221)
2016-10-06 08:50:50 -05:00
Peter Sprygada
7063ed8ceb adds new option to get_config to grab config with passwords (#17915)
In order for the config to be returned with vpn passwords, the get_config()
method now supports a keyword arg include=passwords to return the desired
configuration.  This replaces the show_command argument
(cherry picked from commit 087fb4265f)
2016-10-05 22:12:03 -04:00
Nathaniel Case
be6396d5e9 Update submodule refs 2016-10-05 12:21:14 -04:00
James Cammarata
e4efe0b2f1 Move searching for roles in the cur basedir to last
Searching the DEFAULT_ROLES_PATH and the roles basedir should come
before this, and it has been a long standing oversight.

Fixes #17882

(cherry picked from commit 0a86ddc251)
2016-10-05 01:25:48 -05:00
Brian Coca
dea2cabe94 fixed storing of cwd
(cherry picked from commit d9d7e413a5)
2016-10-04 14:25:52 -04:00
Toshio Kuratomi
08b646684b Make ini parsing slightly more robust
Prior to this commit, the ini parser would fail if the inventory was
not 100% utf-8.  This commit makes this slightly more robust by
omitting full line comments from that requirement.

Fixes #17593

(cherry picked from commit 23305540b4)
2016-10-04 11:25:45 -07:00
Brian Coca
255b9364ab better inventory error messages
(cherry picked from commit 74b7590211)
2016-10-04 07:24:12 -07:00
Adrian Likins
9596b9218c Specify run_command decode error style as arg (#17886)
* Specify run_command decode error style as arg

Instead of getting the stdout/stderr text from
run_command, and then decoding to utf-8 with a
particular error scheme, use the 'errors' arg
to run_command so it does that itself.

* Use 'surrogate_or_replace' instead of 'replace'

For the text decoding error scheme in run_command calls.

* Let the local_facts run_command use default errors

* fix typo

(cherry picked from commit d0bdfc2abb)
2016-10-04 06:25:01 -07:00
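In effect the decode moves inside run_command; below is a simplified stand-in for the error scheme being passed (not the real module_utils code):

```python
def to_text(b_data, errors="surrogate_or_replace"):
    # 'surrogate_or_replace': prefer surrogateescape, fall back to
    # 'replace' where the handler is unavailable (e.g. py2).
    if errors == "surrogate_or_replace":
        try:
            return b_data.decode("utf-8", "surrogateescape")
        except LookupError:
            return b_data.decode("utf-8", "replace")
    return b_data.decode("utf-8", errors)

# run_command(cmd, errors="surrogate_or_replace") now applies this kind
# of decode to stdout/stderr itself, instead of every caller doing it.
print(repr(to_text(b"ok \xff")))
```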
John R Barker
66ebe7a461 asa_template wasn't ever officially released
asa_template was added during 2.2 development, and removed before 2.2 hit RC1 so no need to give notice of deprecation.
2016-10-04 14:07:55 +01:00
James Cammarata
6a76d7fbef Check for substates in is_failed before checking main state failure
Fixes #17882

(cherry picked from commit d09f57fb3a)
2016-10-04 01:43:27 -05:00
Toshio Kuratomi
b878c47d5e Fix for run_command tests now that it returns native strings
(cherry picked from commit 08a58ae025)
2016-10-03 18:46:55 -07:00
Toshio Kuratomi
f7d3ed6eb3 Update submodule refs 2016-10-03 18:46:36 -07:00
Toshio Kuratomi
8456686f4b Make run_command return native strings
This changes the return value on python3.  The return value on python2 was
already byte strings (which is the native str type there).

(cherry picked from commit ddd20627a4)
2016-10-03 18:46:36 -07:00
Brian Coca
fb4c0a085f no need for warnings in first_found
(cherry picked from commit 125a8d3c65)
2016-10-03 20:23:41 -04:00
Brian Coca
6b85c31fdf fix for include_role conflating vars/directives
(cherry picked from commit 54ce6a9b7a)
2016-10-03 19:25:22 -04:00
Brian Coca
767dba8f24 include_role process name from options, not task
(cherry picked from commit d4b2ea3ec8)
2016-10-03 18:30:08 -04:00
Adrian Likins
5109d50adb cast/copy keys() to list to avoid py3 errors
In py3, dict.keys() is a view and not a copy of the
dict's keys, so attempting to delete items from the dict
while iterating over the keys results in

RuntimeError: dictionary changed size during iteration

Resolve by casting .keys() to a list() type.

(cherry picked from commit 2addc09050)
2016-10-03 13:12:41 -07:00
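The failure and the fix in two lines (py3):

```python
d = {"a": 1, "b": 2, "c": 3}

# for k in d.keys(): del d[k]   # py3: RuntimeError (view, not a copy)
for k in list(d.keys()):        # snapshot the keys first
    if d[k] > 1:
        del d[k]
print(d)  # {'a': 1}
```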
Adrian Likins
f4b1d87ec0 open anziballs payload 'wb' for py3
(cherry picked from commit 9f673e0725)
2016-10-03 13:08:30 -07:00
Matt Clay
ae52943719 Update default branch for generate-tests. 2016-10-03 12:34:51 -07:00
Matt Clay
6ba009f913 Update submodule refs. 2016-10-03 12:16:04 -07:00
Matt Clay
52173e7707 Update CI config for stable-2.2 branch. (#17880) 2016-10-03 11:39:01 -07:00
James Cammarata
44faad0593 New release v2.2.0.0-0.1.rc1 2016-10-03 10:00:08 -05:00
224 changed files with 4918 additions and 1501 deletions

.gitignore vendored
View file

@@ -43,7 +43,7 @@ docsite/htmlout
docs-api/rst/
docs-api/_build/
# deb building stuff...
debian/
/debian/
deb-build
# Vim swap files
*.swp

CHANGELOG.md
View file

@@ -1,10 +1,88 @@
Ansible Changes By Release
==========================
## 2.2 "The Battle of Evermore" - ACTIVE DEVELOPMENT
## 2.2.4 "The Battle of Evermore" - TBD
* avoid vault view writing to logs
* moved htpasswd module to use LooseVersion vs StrictVersion to make usable on Debian
* fix for adhoc not obeying callback options
## 2.2.3 "The Battle of Evermore" - 05-09-2017
### Major Changes:
* [SECURITY] (HIGH): fix for CVE-2017-7466, which was caused by an incomplete cherry-picking of commits related to CVE-2016-9587. This can lead to some jinja2 syntax not being stripped out of templated results.
* [SECURITY] (MODERATE): fix for CVE-2017-7481, in which data for lookup plugins used as variables was not being correctly marked as "unsafe".
### Minor Changes:
* Fixes a bug when using YAML inventory where hosts were not put in the 'all' group, and some other 'ungrouped' issues in inventory.
* Fixes a bug when using ansible commands without a tty for stdin.
* Split on newlines when searching for become prompt.
* Fix crash upon pass prompt in py3 when using the paramiko connection type.
## 2.2.2 "The Battle of Evermore" - 03-27-2017
### Major Changes:
* [SECURITY] (HIGH): (continued fix for CVE-2016-9587) Handle some additional corner cases in the way conditionals are parsed and evaluated.
* [SECURITY] (LOW): properly filter passwords out of URLs when displaying output from some modules.
### Minor Changes:
* Fix azure_rm version checks (#22270).
* Fix for traceback when we encounter non-utf8 characters when using --diff.
* Ensure ssh hostkey checks respect server port.
* Use proper PyYAML classes for safe loading YAML files.
* Fix for bug related to when statements for older jinja2 versions.
* Fix a bug/traceback when using to_yaml/to_nice_yaml.
* Properly clean data of jinja2-like syntax, even if that data came from an unsafe source.
* Fix bug regarding undefined entries in HostVars.
* Skip fact gathering if the entire play was included via conditional which evaluates to False.
* Fixed a performance regression when using a large number of items in a with loop.
* Fixed a bug in the way the end of role was detected, which in some cases could cause a role to be run more than once.
* Add jinja2 groupby filter override to cast namedtuple to tuple to handle a non-compatible change in jinja2 2.9.4-2.9.5.
* Fixed several bugs related to temp directory creation on remote systems when using shell expansions and become privilege escalation.
* Fixed a bug related to spliting/parsing the output of a become privilege escalation when looking for a password prompt.
* Several unicode/bytes fixes.
## 2.2.1 "The Battle of Evermore" - 01-16-2017
### Major Changes:
* Security fix for CVE-2016-9587 - An attacker with control over a client system being managed by Ansible and the ability to send facts back to the Ansible server could use this flaw to execute arbitrary code on the Ansible server as the user and group Ansible is running as.
### Minor Changes:
* Fixes a bug where undefined variables in with_* loops would cause a task failure even if the when condition would cause the task to be skipped.
* Fixed a bug related to roles where in certain situations a role may be run more than once despite not allowing duplicates.
* Fixed some additional bugs related to atomic_move for modules.
* Fixes multiple bugs related to field/attribute inheritance in nested blocks and includes, as well as task iteration logic during failures.
* Fixed pip installing packages into virtualenvs using the system pip instead of the virtualenv pip.
* Fixed dnf on systems with dnf-2.0.x (some changes in the API).
* Fixed traceback with dnf install of groups.
* Fixes a bug in which include_vars was not working with failed_when.
* Fix for include_vars only loading files with .yml, .yaml, and .json extensions. This was only supposed to apply to loading a directory of vars files.
* Fixes several bugs related to properly incrementing the failed count in the host statistics.
* Fixes a bug with listening handlers which did not specify a `name` field.
* Fixes a bug with the `play_hosts` internal variable, so that it properly reflects the current list of hosts.
* Fixes a bug related to the v2_playbook_on_start callback method and legacy (v1) plugins.
* Fixes an openssh related process exit race condition, related to the fact that connections using ControlPersist do not close stderr.
* Improvements and fixes to OpenBSD fact gathering.
* Updated `make deb` to use pbuilder. Use `make local_deb` for the previous non-pbuilder build.
* Fixed Windows async to avoid blocking due to handle inheritance.
* Fixed bugs in the mount module on older Linux kernels and *BSDs
* Fix regression in jinja2 include search path.
* Various minor fixes for Python 3
* Inserted some checks for jinja2-2.9, which can cause some issues with Ansible currently.
## 2.2 "The Battle of Evermore" - 11-01-2016
###Major Changes:
* Security fix for CVE-2016-8628 - Command injection by compromised server via fact variables. In some situations, facts returned by modules could overwrite connection-based facts or some other special variables, leading to injected commands running on the Ansible controller as the user running Ansible (or via escalated permissions).
* Security fix for CVE-2016-8614 - apt_key module not properly validating keys in some situations.
* Added the `listen` feature for modules. This feature allows tasks to more easily notify multiple handlers, as well as making it easier for handlers from decoupled roles to be notified.
* Major performance improvements.
* Added support for binary modules
@@ -33,16 +111,15 @@ Ansible Changes By Release
* Tech Preview: Work has been done to get Ansible running under Python3. This work is not complete enough to depend upon in production environments but it is enough to begin testing it.
* Most of the controller side should now work. Users should be able to run python3 /usr/bin/ansible and python3 /usr/bin/ansible-playbook and have core features of ansible work.
* A few of the most essential modules have been audited and are known to work. Others work out of the box.
* We are using unit and integration tests to help us port code and not regress later. Even if you are not famiriar with python you can still help by contributing integration tests (just ansible roles) that exercise more of the code to make sure it continues to run on both Python2 and Python3.
* We are using unit and integration tests to help us port code and not regress later. Even if you are not familiar with python you can still help by contributing integration tests (just ansible roles) that exercise more of the code to make sure it continues to run on both Python2 and Python3.
* scp_if_ssh now supports True, False and "smart". "smart" is the default and will retry failed sftp transfers with scp.
* Network:
* Refactored all network modules to remove dulicate code and take advantage of Ansiballz implementation
* Refactored all network modules to remove duplicate code and take advantage of Ansiballz implementation
* All functionality from *_template network modules have been combined into *_config module
* Network *_command modules not longer allow configuration mode statements
####New Modules:
- apache2_mod_proxy
- archive
- asa
* asa_acl
* asa_command
@@ -283,6 +360,13 @@ Ansible Changes By Release
* Fix a problem with the pip module updating the python pip package itself.
* ansible_play_hosts is a new magic variable to provide a list of hosts in scope for the current play. Unlike play_hosts it is not subject to the 'serial' keyword.
* ansible_play_batch is a new magic variable meant to substitute the current play_hosts.
* The subversion module from core now marks its password parameter as no_log so
the password is obscured when logging.
* The postgresql_lang and postgresql_ext modules from extras now mark
login_password as no_log so the password is obscured when logging.
* Fix for yum module incorrectly thinking it succeeded in installing packages
* Make the default ansible_managed template string into a static string since
all of the replacable values lead to non-idempotent behaviour.
###For custom front ends using the API:
* ansible.parsing.vault:
@@ -292,7 +376,7 @@ Ansible Changes By Release
Ansible. The feature it was intended to support has now been implemented
without using this.
* VaultAES, the older, insecure encrypted format that debuted in Ansible-1.5
and was relaced by VaultAES256 less than a week later, now has a deprecation
and was replaced by VaultAES256 less than a week later, now has a deprecation
warning. **It will be removed in 2.3**. In the unlikely event that you
wrote a vault file in that 1 week window and have never modified the file
since (ansible-vault automatically re-encrypts the file using VaultAES256
@@ -308,7 +392,6 @@ Ansible Changes By Release
###Deprecations
Notice given that the following will be removed in Ansible 2.4:
* Modules
* asa_template
* eos_template
* ios_template
* iosxr_template

Makefile
View file

@@ -47,7 +47,7 @@ else
GITINFO = ""
endif
ifeq ($(shell echo $(OS) | egrep -c 'Darwin|FreeBSD|OpenBSD'),1)
ifeq ($(shell echo $(OS) | egrep -c 'Darwin|FreeBSD|OpenBSD|DragonFly'),1)
DATE := $(shell date -j -r $(shell git log -n 1 --format="%at") +%Y%m%d%H%M)
else
DATE := $(shell date --utc --date="$(GIT_DATE)" +%Y%m%d%H%M)
@@ -77,6 +77,12 @@ DEB_PPA ?= ppa
# Choose the desired Ubuntu release: lucid precise saucy trusty
DEB_DIST ?= unstable
# pbuilder parameters
PBUILDER_ARCH ?= amd64
PBUILDER_CACHE_DIR = /var/cache/pbuilder
PBUILDER_BIN ?= pbuilder
PBUILDER_OPTS ?= --debootstrapopts --variant=buildd --architecture $(PBUILDER_ARCH) --debbuildopts -b
# RPM build parameters
RPMSPECDIR= packaging/rpm
RPMSPEC = $(RPMSPECDIR)/ansible.spec
@@ -85,6 +91,10 @@ RPMRELEASE = $(RELEASE)
ifneq ($(OFFICIAL),yes)
RPMRELEASE = 100.git$(DATE)$(GITINFO)
endif
ifeq ($(PUBLISH),nightly)
# https://fedoraproject.org/wiki/Packaging:Versioning#Snapshots
RPMRELEASE = $(RELEASE).$(DATE)git.$(GIT_HASH)
endif
RPMNVR = "$(NAME)-$(VERSION)-$(RPMRELEASE)$(RPMDIST)"
# MOCK build parameters
@@ -176,17 +186,17 @@ sdist_upload: clean docs
rpmcommon: $(MANPAGES) sdist
@mkdir -p rpm-build
@cp dist/*.gz rpm-build/
@sed -e 's#^Version:.*#Version: $(VERSION)#' -e 's#^Release:.*#Release: $(RPMRELEASE)%{?dist}#' $(RPMSPEC) >rpm-build/$(NAME).spec
@sed -e 's#^Version:.*#Version: $(VERSION)#' -e 's#^Release:.*#Release: $(RPMRELEASE)%{?dist}$(REPOTAG)#' $(RPMSPEC) >rpm-build/$(NAME).spec
mock-srpm: /etc/mock/$(MOCK_CFG).cfg rpmcommon
$(MOCK_BIN) -r $(MOCK_CFG) --resultdir rpm-build/ --buildsrpm --spec rpm-build/$(NAME).spec --sources rpm-build/
$(MOCK_BIN) -r $(MOCK_CFG) $(MOCK_ARGS) --resultdir rpm-build/ --buildsrpm --spec rpm-build/$(NAME).spec --sources rpm-build/
@echo "#############################################"
@echo "Ansible SRPM is built:"
@echo rpm-build/*.src.rpm
@echo "#############################################"
mock-rpm: /etc/mock/$(MOCK_CFG).cfg mock-srpm
$(MOCK_BIN) -r $(MOCK_CFG) --resultdir rpm-build/ --rebuild rpm-build/$(NAME)-*.src.rpm
$(MOCK_BIN) -r $(MOCK_CFG) $(MOCK_ARGS) --resultdir rpm-build/ --rebuild rpm-build/$(NAME)-*.src.rpm
@echo "#############################################"
@echo "Ansible RPM is built:"
@echo rpm-build/*.noarch.rpm
@@ -230,7 +240,23 @@ debian: sdist
sed -ie "s|%VERSION%|$(VERSION)|g;s|%RELEASE%|$(DEB_RELEASE)|;s|%DIST%|$${DIST}|g;s|%DATE%|$(DEB_DATE)|g" deb-build/$${DIST}/$(NAME)-$(VERSION)/debian/changelog ; \
done
deb: debian
deb: deb-src
@for DIST in $(DEB_DIST) ; do \
PBUILDER_OPTS="$(PBUILDER_OPTS) --distribution $${DIST} --basetgz $(PBUILDER_CACHE_DIR)/$${DIST}-$(PBUILDER_ARCH)-base.tgz --buildresult $(CURDIR)/deb-build/$${DIST}" ; \
$(PBUILDER_BIN) create $${PBUILDER_OPTS} --othermirror "deb http://archive.ubuntu.com/ubuntu $${DIST} universe" ; \
$(PBUILDER_BIN) update $${PBUILDER_OPTS} ; \
$(PBUILDER_BIN) build $${PBUILDER_OPTS} deb-build/$${DIST}/$(NAME)_$(VERSION)-$(DEB_RELEASE)~$${DIST}.dsc ; \
done
@echo "#############################################"
@echo "Ansible DEB artifacts:"
@for DIST in $(DEB_DIST) ; do \
echo deb-build/$${DIST}/$(NAME)_$(VERSION)-$(DEB_RELEASE)~$${DIST}_amd64.changes ; \
done
@echo "#############################################"
# Build package outside of pbuilder, with locally installed dependencies.
# Install BuildRequires as noted in packaging/debian/control.
local_deb: debian
@for DIST in $(DEB_DIST) ; do \
(cd deb-build/$${DIST}/$(NAME)-$(VERSION)/ && $(DEBUILD) -b) ; \
done

README.md
View file

@@ -1,5 +1,5 @@
[![PyPI version](https://img.shields.io/pypi/v/ansible.svg)](https://pypi.python.org/pypi/ansible)
[![Build Status](https://api.shippable.com/projects/573f79d02a8192902e20e34b/badge?branch=devel)](https://app.shippable.com/projects/573f79d02a8192902e20e34b)
[![Build Status](https://api.shippable.com/projects/573f79d02a8192902e20e34b/badge?branch=stable-2.2)](https://app.shippable.com/projects/573f79d02a8192902e20e34b)
Ansible
@@ -7,13 +7,13 @@ Ansible
Ansible is a radically simple IT automation system. It handles configuration-management, application deployment, cloud provisioning, ad-hoc task-execution, and multinode orchestration - including trivializing things like zero downtime rolling updates with load balancers.
Read the documentation and more at http://ansible.com/
Read the documentation and more at https://ansible.com/
Many users run straight from the development branch (it's generally fine to do so), but you might also wish to consume a release.
You can find instructions [here](http://docs.ansible.com/intro_getting_started.html) for a variety of platforms. If you decide to go with the development branch, be sure to run `git submodule update --init --recursive` after doing a checkout.
You can find instructions [here](https://docs.ansible.com/intro_getting_started.html) for a variety of platforms. If you decide to go with the development branch, be sure to run `git submodule update --init --recursive` after doing a checkout.
If you want to download a tarball of a release, go to [releases.ansible.com](http://releases.ansible.com/ansible), though most users use `yum` (using the EPEL instructions linked above), `apt` (using the PPA instructions linked above), or `pip install ansible`.
If you want to download a tarball of a release, go to [releases.ansible.com](https://releases.ansible.com/ansible), though most users use `yum` (using the EPEL instructions linked above), `apt` (using the PPA instructions linked above), or `pip install ansible`.
Design Principles
=================
@@ -31,11 +31,11 @@ Design Principles
Get Involved
============
* Read [Community Information](http://docs.ansible.com/community.html) for all kinds of ways to contribute to and interact with the project, including mailing list information and how to submit bug reports and code to Ansible.
* Read [Community Information](https://docs.ansible.com/community.html) for all kinds of ways to contribute to and interact with the project, including mailing list information and how to submit bug reports and code to Ansible.
* All code submissions are done through pull requests. Take care to make sure no merge commits are in the submission, and use `git rebase` vs `git merge` for this reason. If submitting a large code change (other than modules), it's probably a good idea to join ansible-devel and talk about what you would like to do or add first and to avoid duplicate efforts. This not only helps everyone know what's going on, it also helps save time and effort if we decide some changes are needed.
* Users list: [ansible-project](http://groups.google.com/group/ansible-project)
* Development list: [ansible-devel](http://groups.google.com/group/ansible-devel)
* Announcement list: [ansible-announce](http://groups.google.com/group/ansible-announce) - read only
* Users list: [ansible-project](https://groups.google.com/group/ansible-project)
* Development list: [ansible-devel](https://groups.google.com/group/ansible-devel)
* Announcement list: [ansible-announce](https://groups.google.com/group/ansible-announce) - read only
* irc.freenode.net: #ansible
Branch Info
@@ -45,13 +45,13 @@ Branch Info
* The devel branch corresponds to the release actively under development.
* As of 1.8, modules are kept in different repos, you'll want to follow [core](https://github.com/ansible/ansible-modules-core) and [extras](https://github.com/ansible/ansible-modules-extras)
* Various release-X.Y branches exist for previous releases.
* We'd love to have your contributions, read [Community Information](http://docs.ansible.com/community.html) for notes on how to get started.
* We'd love to have your contributions, read [Community Information](https://docs.ansible.com/community.html) for notes on how to get started.
Authors
=======
Ansible was created by [Michael DeHaan](https://github.com/mpdehaan) (michael.dehaan/gmail/com) and has contributions from over 1000 users (and growing). Thanks everyone!
Ansible is sponsored by [Ansible, Inc](http://ansible.com)
Ansible is sponsored by [Ansible, Inc](https://ansible.com)

RELEASES.txt
View file

@@ -1,74 +1,81 @@
Ansible Releases at a Glance
============================
Active Development
++++++++++++++++++
2.2 TBD - in progress
Released
++++++++
2.1.0 "The Song Remains the Same" in progress
2.0.2 "Over the Hills and Far Away" 04-19-2016
2.0.1 "Over the Hills and Far Away" 02-24-2016
2.0.0 "Over the Hills and Far Away" 01-12-2016
1.9.6 "Dancing In the Streets" 04-15-2016
1.9.5 "Dancing In the Streets" 03-21-2016
1.9.4 "Dancing In the Streets" 10-09-2015
1.9.3 "Dancing In the Streets" 09-03-2015
1.9.2 "Dancing In the Streets" 06-24-2015
1.9.1 "Dancing In the Streets" 04-27-2015
1.9.0 "Dancing In the Streets" 03-25-2015
1.8.4 "You Really Got Me" ---- 02-19-2015
1.8.3 "You Really Got Me" ---- 02-17-2015
1.8.2 "You Really Got Me" ---- 12-04-2014
1.8.1 "You Really Got Me" ---- 11-26-2014
1.7.2 "Summer Nights" -------- 09-24-2014
1.7.1 "Summer Nights" -------- 08-14-2014
1.7 "Summer Nights" -------- 08-06-2014
1.6.10 "The Cradle Will Rock" - 07-25-2014
1.6.9 "The Cradle Will Rock" - 07-24-2014
1.6.8 "The Cradle Will Rock" - 07-22-2014
1.6.7 "The Cradle Will Rock" - 07-21-2014
1.6.6 "The Cradle Will Rock" - 07-01-2014
1.6.5 "The Cradle Will Rock" - 06-25-2014
1.6.4 "The Cradle Will Rock" - 06-25-2014
1.6.3 "The Cradle Will Rock" - 06-09-2014
1.6.2 "The Cradle Will Rock" - 05-23-2014
1.6.1 "The Cradle Will Rock" - 05-07-2014
1.6 "The Cradle Will Rock" - 05-05-2014
1.5.5 "Love Walks In" -------- 04-18-2014
1.5.4 "Love Walks In" -------- 04-01-2014
1.5.3 "Love Walks In" -------- 03-13-2014
1.5.2 "Love Walks In" -------- 03-11-2014
1.5.1 "Love Walks In" -------- 03-10-2014
1.5 "Love Walks In" -------- 02-28-2014
1.4.5 "Could This Be Magic?" - 02-12-2014
1.4.4 "Could This Be Magic?" - 01-06-2014
1.4.3 "Could This Be Magic?" - 12-20-2013
1.4.2 "Could This Be Magic?" - 12-18-2013
1.4.1 "Could This Be Magic?" - 11-27-2013
1.4 "Could This Be Magic?" - 11-21-2013
1.3.4 "Top of the World" ----- 10-29-2013
1.3.3 "Top of the World" ----- 10-09-2013
1.3.2 "Top of the World" ----- 09-19-2013
1.3.1 "Top of the World" ----- 09-16-2013
1.3 "Top of the World" ----- 09-13-2013
1.2.3 "Hear About It Later" -- 08-21-2013
1.2.2 "Hear About It Later" -- 07-05-2013
1.2.1 "Hear About It Later" -- 07-04-2013
1.2 "Right Now" ------------ 06-10-2013
1.1 "Mean Street" ---------- 04-02-2013
1.0 "Eruption" ------------- 02-01-2013
0.9 "Dreams" --------------- 11-30-2012
0.8 "Cathedral" ------------ 10-19-2012
0.7 "Panama" --------------- 09-06-2012
0.6 "Cabo" ----------------- 08-06-2012
0.5 "Amsterdam" ------------ 07-04-2012
0.4 "Unchained" ------------ 05-23-2012
0.3 "Baluchitherium" ------- 04-23-2012
0.0.2 Untitled
0.0.1 Untitled
VERSION RELEASE CODE NAME
++++++++++++++++++++++++++++++
2.4.0 TBD "Dancing Days"
2.3.0 04-12-2017 "Ramble On"
2.2.3 05-09-2017 "The Battle of Evermore"
2.2.2 03-27-2017 "The Battle of Evermore"
2.2.1 01-16-2017 "The Battle of Evermore"
2.2.0 11-01-2016 "The Battle of Evermore"
2.1.5 03-27-2017 "The Song Remains the Same"
2.1.4 01-16-2017 "The Song Remains the Same"
2.1.3 11-04-2016 "The Song Remains the Same"
2.1.2 09-29-2016 "The Song Remains the Same"
2.1.1 07-28-2016 "The Song Remains the Same"
2.1.0 05-25-2016 "The Song Remains the Same"
2.0.2 04-19-2016 "Over the Hills and Far Away"
2.0.1 02-24-2016 "Over the Hills and Far Away"
2.0.0 01-12-2016 "Over the Hills and Far Away"
1.9.6 04-15-2016 "Dancing In the Streets"
1.9.5 03-21-2016 "Dancing In the Streets"
1.9.4 10-09-2015 "Dancing In the Streets"
1.9.3 09-03-2015 "Dancing In the Streets"
1.9.2 06-24-2015 "Dancing In the Streets"
1.9.1 04-27-2015 "Dancing In the Streets"
1.9.0 03-25-2015 "Dancing In the Streets"
1.8.4 02-19-2015 "You Really Got Me"
1.8.3 02-17-2015 "You Really Got Me"
1.8.2 12-04-2014 "You Really Got Me"
1.8.1 11-26-2014 "You Really Got Me"
1.8.0 11-25-2014 "You Really Got Me"
1.7.2 09-24-2014 "Summer Nights"
1.7.1 08-14-2014 "Summer Nights"
1.7.0 08-06-2014 "Summer Nights"
1.6.10 07-25-2014 "The Cradle Will Rock"
1.6.9 07-24-2014 "The Cradle Will Rock"
1.6.8 07-22-2014 "The Cradle Will Rock"
1.6.7 07-21-2014 "The Cradle Will Rock"
1.6.6 07-01-2014 "The Cradle Will Rock"
1.6.5 06-25-2014 "The Cradle Will Rock"
1.6.4 06-25-2014 "The Cradle Will Rock"
1.6.3 06-09-2014 "The Cradle Will Rock"
1.6.2 05-23-2014 "The Cradle Will Rock"
1.6.1 05-07-2014 "The Cradle Will Rock"
1.6.0 05-05-2014 "The Cradle Will Rock"
1.5.5 04-18-2014 "Love Walks In"
1.5.4 04-01-2014 "Love Walks In"
1.5.3 03-13-2014 "Love Walks In"
1.5.2 03-11-2014 "Love Walks In"
1.5.1 03-10-2014 "Love Walks In"
1.5.0 02-28-2014 "Love Walks In"
1.4.5 02-12-2014 "Could This Be Magic?"
1.4.4 01-06-2014 "Could This Be Magic?"
1.4.3 12-20-2013 "Could This Be Magic?"
1.4.2 12-18-2013 "Could This Be Magic?"
1.4.1 11-27-2013 "Could This Be Magic?"
1.4.0 11-21-2013 "Could This Be Magic?"
1.3.4 10-29-2013 "Top of the World"
1.3.3 10-09-2013 "Top of the World"
1.3.2 09-19-2013 "Top of the World"
1.3.1 09-16-2013 "Top of the World"
1.3.0 09-13-2013 "Top of the World"
1.2.3 08-21-2013 "Right Now"
1.2.2 07-05-2013 "Right Now"
1.2.1 07-04-2013 "Right Now"
1.2.0 06-10-2013 "Right Now"
1.1.0 04-02-2013 "Mean Street"
1.0.0 02-01-2013 "Eruption"
0.9.0 11-30-2012 "Dreams"
0.8.0 10-19-2012 "Cathedral"
0.7.0 09-06-2012 "Panama"
0.6.0 08-06-2012 "Cabo"
0.5.0 07-04-2012 "Amsterdam"
0.4.0 05-23-2012 "Unchained"
0.3.0 04-23-2012 "Baluchitherium"
0.2.0 ? "Untitled"
0.1.0 ? "Untitled"
0.0.2 ? "Untitled"
0.0.1 ? "Untitled"

View file

@ -1 +1 @@
2.2.0 0.0.devel
2.2.3.0 1

View file

@ -126,9 +126,11 @@ if __name__ == '__main__':
have_cli_options = cli is not None and cli.options is not None
display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False)
if not have_cli_options or have_cli_options and cli.options.verbosity > 2:
display.display(u"the full traceback was:\n\n%s" % to_text(traceback.format_exc()))
log_only = False
else:
display.display("to see the full traceback, use -vvv")
log_only = True
display.display(u"the full traceback was:\n\n%s" % to_text(traceback.format_exc()), log_only=log_only)
exit_code = 250
finally:
# Remove ansible tempdir

View file

@ -23,7 +23,7 @@
Azure External Inventory Script
===============================
Generates dynamic inventory by making API requests to the Azure Resource
Manager using the AAzure Python SDK. For instruction on installing the
Manager using the Azure Python SDK. For instruction on installing the
Azure Python SDK see http://azure-sdk-for-python.readthedocs.org/
Authentication
@ -32,7 +32,7 @@ The order of precedence is command line arguments, environment variables,
and finally the [default] profile found in ~/.azure/credentials.
If using a credentials file, it should be an ini formatted file with one or
more sections, which we refer to as profiles. The script looks for a
more sections, which we refer to as profiles. The script looks for a
[default] section, if a profile is not specified either on the command line
or with an environment variable. The keys in a profile will match the
list of command line arguments below.
@ -42,7 +42,7 @@ in your ~/.azure/credentials file, or a service principal or Active Directory
user.
Command line arguments:
- profile
- profile
- client_id
- secret
- subscription_id
@ -61,7 +61,7 @@ Environment variables:
Run for Specific Host
-----------------------
When run for a specific host using the --host option, a resource group is
When run for a specific host using the --host option, a resource group is
required. For a specific host, this script returns the following variables:
{
@ -191,7 +191,7 @@ import os
import re
import sys
from distutils.version import LooseVersion
from packaging.version import Version
from os.path import expanduser
@ -309,7 +309,7 @@ class AzureRM(object):
def _get_env_credentials(self):
env_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.iteritems():
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
env_credentials[attribute] = os.environ.get(env_variable, None)
if env_credentials['profile'] is not None:
@ -328,7 +328,7 @@ class AzureRM(object):
self.log('Getting credentials')
arg_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.iteritems():
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
arg_credentials[attribute] = getattr(params, attribute)
# try module params
@ -362,7 +362,11 @@ class AzureRM(object):
resource_client = self.rm_client
resource_client.providers.register(key)
except Exception as exc:
self.fail("One-time registration of {0} failed - {1}".format(key, str(exc)))
self.log("One-time registration of {0} failed - {1}".format(key, str(exc)))
self.log("You might need to register {0} using an admin account".format(key))
self.log(("To register a provider using the Python CLI: "
"https://docs.microsoft.com/azure/azure-resource-manager/"
"resource-manager-common-deployment-errors#noregisteredproviderfound"))
@property
def network_client(self):
@ -442,7 +446,7 @@ class AzureInventory(object):
def _parse_cli_args(self):
# Parse command line arguments
parser = argparse.ArgumentParser(
description='Produce an Ansible Inventory file for an Azure subscription')
description='Produce an Ansible Inventory file for an Azure subscription')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--debug', action='store_true', default=False,
@ -664,7 +668,7 @@ class AzureInventory(object):
self._inventory['azure'].append(host_name)
if self.group_by_tag and vars.get('tags'):
for key, value in vars['tags'].iteritems():
for key, value in vars['tags'].items():
safe_key = self._to_safe(key)
safe_value = safe_key + '_' + self._to_safe(value)
if not self._inventory.get(safe_key):
@ -724,7 +728,7 @@ class AzureInventory(object):
def _get_env_settings(self):
env_settings = dict()
for attribute, env_variable in AZURE_CONFIG_SETTINGS.iteritems():
for attribute, env_variable in AZURE_CONFIG_SETTINGS.items():
env_settings[attribute] = os.environ.get(env_variable, None)
return env_settings
@ -786,11 +790,11 @@ class AzureInventory(object):
def main():
if not HAS_AZURE:
sys.exit("The Azure python sdk is not installed (try 'pip install azure==2.0.0rc5') - {0}".format(HAS_AZURE_EXC))
sys.exit("The Azure python sdk is not installed (try `pip install 'azure>=2.0.0rc5' --upgrade`) - {0}".format(HAS_AZURE_EXC))
if LooseVersion(azure_compute_version) != LooseVersion(AZURE_MIN_VERSION):
if Version(azure_compute_version) < Version(AZURE_MIN_VERSION):
sys.exit("Expecting azure.mgmt.compute.__version__ to be {0}. Found version {1} "
"Do you have Azure == 2.0.0rc5 installed?".format(AZURE_MIN_VERSION, azure_compute_version))
"Do you have Azure >= 2.0.0rc5 installed? (try `pip install 'azure>=2.0.0rc5' --upgrade`)".format(AZURE_MIN_VERSION, azure_compute_version))
AzureInventory()
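
For context, a quick sketch of how the new check behaves (`packaging` is the third-party distribution the diff now imports; pre-releases sort below their final release, so the pin accepts anything from 2.0.0rc5 upward)::

    from packaging.version import Version

    # release candidates order below the final release
    assert Version("2.0.0rc5") < Version("2.0.0") < Version("2.1.0")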

View file

@ -3,6 +3,8 @@
[collins]
# You should not have a trailing slash or collins
# will not properly match the URI
host = http://localhost:9000
username = blake

View file

@ -201,7 +201,8 @@ class CollinsInventory(object):
response = open_url(query_url,
timeout=self.collins_timeout_secs,
url_username=self.collins_username,
url_password=self.collins_password)
url_password=self.collins_password,
force_basic_auth=True)
json_response = json.loads(response.read())
# Adds any assets found to the array of assets.
assets += json_response['data']['Data']
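
Why the new flag matters, sketched below: the urllib machinery behind `open_url` typically sends credentials only after a 401 challenge, while `force_basic_auth=True` attaches the Authorization header on the first request, which APIs that reply 403 without challenging require. A minimal illustration using the values from the sample collins.ini (the exact API path here is illustrative)::

    from ansible.module_utils.urls import open_url

    # credentials go out preemptively instead of after a 401 round trip
    response = open_url('http://localhost:9000/api/assets',
                        url_username='blake', url_password='secret',
                        force_basic_auth=True)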

View file

@ -260,7 +260,7 @@ or environment variables (DO_API_TOKEN)\n''')
# Private IP Address
if config.has_option('digital_ocean', 'use_private_network'):
self.use_private_network = config.get('digital_ocean', 'use_private_network')
self.use_private_network = config.getboolean('digital_ocean', 'use_private_network')
# Group variables
if config.has_option('digital_ocean', 'group_variables'):
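
The pitfall this fixes, in a short sketch (ConfigParser.get returns a string, and any non-empty string, 'False' included, is truthy; Python 3 spelling shown for brevity)::

    from configparser import ConfigParser

    config = ConfigParser()
    config.read_string("[digital_ocean]\nuse_private_network = False\n")

    assert config.get('digital_ocean', 'use_private_network') == 'False'   # truthy string
    assert config.getboolean('digital_ocean', 'use_private_network') is False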

View file

@ -1313,7 +1313,7 @@ class Ec2Inventory(object):
elif key == 'ec2_tags':
for k, v in value.items():
if self.expand_csv_tags and ',' in v:
v = map(lambda x: x.strip(), v.split(','))
v = list(map(lambda x: x.strip(), v.split(',')))
key = self.to_safe('ec2_tag_' + k)
instance_vars[key] = v
elif key == 'ec2_groups':

View file

@ -261,7 +261,7 @@ class GceInventory(object):
if inst is None:
return {}
if inst.extra['metadata'].has_key('items'):
if 'items' in inst.extra['metadata']:
for entry in inst.extra['metadata']['items']:
md[entry['key']] = entry['value']
@ -326,7 +326,7 @@ class GceInventory(object):
if zones and zone not in zones:
continue
if groups.has_key(zone): groups[zone].append(name)
if zone in groups: groups[zone].append(name)
else: groups[zone] = [name]
tags = node.extra['tags']
@ -335,25 +335,25 @@ class GceInventory(object):
tag = t[6:]
else:
tag = 'tag_%s' % t
if groups.has_key(tag): groups[tag].append(name)
if tag in groups: groups[tag].append(name)
else: groups[tag] = [name]
net = node.extra['networkInterfaces'][0]['network'].split('/')[-1]
net = 'network_%s' % net
if groups.has_key(net): groups[net].append(name)
if net in groups: groups[net].append(name)
else: groups[net] = [name]
machine_type = node.size
if groups.has_key(machine_type): groups[machine_type].append(name)
if machine_type in groups: groups[machine_type].append(name)
else: groups[machine_type] = [name]
image = node.image and node.image or 'persistent_disk'
if groups.has_key(image): groups[image].append(name)
if image in groups: groups[image].append(name)
else: groups[image] = [name]
status = node.extra['status']
stat = 'status_%s' % status.lower()
if groups.has_key(stat): groups[stat].append(name)
if stat in groups: groups[stat].append(name)
else: groups[stat] = [name]
groups["_meta"] = meta

View file

@ -26,7 +26,7 @@ import re
import os
import ConfigParser
from novaclient import client as nova_client
from six import iteritems
from six import iteritems, itervalues
try:
import json
@ -105,7 +105,7 @@ def get_ips(server, access_ip=True):
# Iterate through each servers network(s), get addresses and get type
addresses = getattr(server, 'addresses', {})
if len(addresses) > 0:
for network in addresses.itervalues():
for network in itervalues(addresses):
for address in network:
if address.get('OS-EXT-IPS:type', False) == 'fixed':
private.append(address['addr'])

View file

@ -201,7 +201,7 @@ class NSoTInventory(object):
_inventory_group()
'''
inventory = dict()
for group, contents in self.config.iteritems():
for group, contents in self.config.items():
group_response = self._inventory_group(group, contents)
inventory.update(group_response)
inventory.update({'_meta': self._meta})

View file

@ -211,7 +211,7 @@ class OVirtInventory(object):
'ovirt_uuid': inst.get_id(),
'ovirt_id': inst.get_id(),
'ovirt_image': inst.get_os().get_type(),
'ovirt_machine_type': inst.get_instance_type(),
'ovirt_machine_type': self.get_machine_type(inst),
'ovirt_ips': ips,
'ovirt_name': inst.get_name(),
'ovirt_description': inst.get_description(),
@ -230,6 +230,11 @@ class OVirtInventory(object):
"""
return [x.get_name() for x in inst.get_tags().list()]
def get_machine_type(self,inst):
inst_type = inst.get_instance_type()
if inst_type:
return self.driver.instancetypes.get(id=inst_type.id).name
# noinspection PyBroadException,PyUnusedLocal
def get_instance(self, instance_name):
"""Gets details about a specific instance """

View file

@ -14,7 +14,7 @@ class RackhdInventory(object):
for nodeid in nodeids:
self._load_inventory_data(nodeid)
inventory = {}
for nodeid,info in self._inventory.iteritems():
for nodeid,info in self._inventory.items():
inventory[nodeid]= (self._format_output(nodeid, info))
print(json.dumps(inventory))
@ -24,7 +24,7 @@ class RackhdInventory(object):
info['lookup'] = RACKHD_URL + '/api/common/lookups/?q={0}'.format(nodeid)
results = {}
for key,url in info.iteritems():
for key,url in info.items():
r = requests.get( url, verify=False)
results[key] = r.text
self._inventory[nodeid] = results
@ -36,7 +36,7 @@ class RackhdInventory(object):
if len(node_info) > 0:
ipaddress = node_info[0]['ipAddress']
output = { 'hosts':[ipaddress],'vars':{}}
for key,result in info.iteritems():
for key,result in info.items():
output['vars'][key] = json.loads(result)
output['vars']['ansible_ssh_user'] = 'monorail'
except KeyError:

View file

@ -210,7 +210,7 @@ class VMWareInventory(object):
config.read(vmware_ini_path)
# apply defaults
for k,v in defaults['vmware'].iteritems():
for k,v in defaults['vmware'].items():
if not config.has_option('vmware', k):
config.set('vmware', k, str(v))
@ -356,7 +356,7 @@ class VMWareInventory(object):
# Reset the inventory keys
for k,v in name_mapping.iteritems():
for k,v in name_mapping.items():
if not host_mapping or not k in host_mapping:
continue
@ -389,7 +389,7 @@ class VMWareInventory(object):
continue
self.debugl('# filter: %s' % hf)
filter_map = self.create_template_mapping(inventory, hf, dtype='boolean')
for k,v in filter_map.iteritems():
for k,v in filter_map.items():
if not v:
# delete this host
inventory['all']['hosts'].remove(k)
@ -402,7 +402,7 @@ class VMWareInventory(object):
# Create groups
for gbp in self.groupby_patterns:
groupby_map = self.create_template_mapping(inventory, gbp)
for k,v in groupby_map.iteritems():
for k,v in groupby_map.items():
if v not in inventory:
inventory[v] = {}
inventory[v]['hosts'] = []
@ -417,7 +417,7 @@ class VMWareInventory(object):
''' Return a hash of uuid to templated string from pattern '''
mapping = {}
for k,v in inventory['_meta']['hostvars'].iteritems():
for k,v in inventory['_meta']['hostvars'].items():
t = jinja2.Template(pattern)
newkey = None
try:

View file

@ -251,7 +251,7 @@ Tower Support Questions
Ansible `Tower <http://ansible.com/tower>`_ is a UI, Server, and REST endpoint for Ansible, produced by Ansible, Inc.
If you have a question about Tower, email `support@ansible.com <mailto:support@ansible.com>`_ rather than using the IRC
If you have a question about Tower, visit `support.ansible.com <https://support.ansible.com/>`_ rather than using the IRC
channel or the general project mailing list.
IRC Channel

View file

@ -85,6 +85,20 @@ different locations::
Most users will not need to use this feature. See :doc:`developing_plugins` for more details.
.. _allow_unsafe_lookups:
allow_unsafe_lookups
====================
.. versionadded:: 2.2.3, 2.3.1
When enabled, this option allows lookup plugins (whether used in variables as `{{lookup('foo')}}` or in a loop as `with_foo`) to return data that is **not** marked "unsafe". By default, such data is marked as unsafe to prevent the templating engine from evaluating any jinja2 templating language it may contain, as this could represent a security risk.
This option is provided for backwards compatibility; however, users should first consider adding `allow_unsafe=True` to any individual lookups that are expected to return data which will later be run through the templating engine. For example::
{{lookup('pipe', '/path/to/some/command', allow_unsafe=True)}}
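
The same behavior can also be switched on globally through the matching config option (a sketch of ansible.cfg; the per-lookup `allow_unsafe` shown above is the safer, more targeted choice)::

    [defaults]
    allow_unsafe_lookups = True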
.. _allow_world_readable_tmpfiles:
allow_world_readable_tmpfiles
@ -443,6 +457,20 @@ implications and wish to disable it, you may do so here by setting the value to
host_key_checking=True
.. _internal_poll_interval:
internal_poll_interval
======================
.. versionadded:: 2.2
This sets the interval (in seconds) at which Ansible's internal processes poll each other.
Lower values improve performance with large playbooks at the expense of extra CPU load.
Higher values are more suitable for Ansible usage in automation scenarios, when UI responsiveness is not required but CPU usage might be a concern.
Default corresponds to the value hardcoded in Ansible ≤ 2.1::
internal_poll_interval=0.001
.. _inventory_file:
inventory

View file

@ -13,8 +13,8 @@
#inventory = /etc/ansible/hosts
#library = /usr/share/my_modules/
#remote_tmp = $HOME/.ansible/tmp
#local_tmp = $HOME/.ansible/tmp
#remote_tmp = ~/.ansible/tmp
#local_tmp = ~/.ansible/tmp
#forks = 5
#poll_interval = 15
#sudo_user = root
@ -70,6 +70,9 @@
#task_includes_static = True
#handler_includes_static = True
# Controls if a missing handler for a notification event is an error or a warning
#error_on_missing_handler = True
# change this for alternative sudo implementations
#sudo_exe = sudo
@ -121,8 +124,9 @@
# templates indicates to users editing templates files will be replaced.
# replacing {file}, {host} and {uid} and strftime codes with proper values.
#ansible_managed = Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S by {uid} on {host}
# This short version is better used in templates as it won't flag the file as changed every run.
#ansible_managed = Ansible managed: {file} on {host}
# {file}, {host}, {uid}, and the timestamp can all interfere with idempotence
# in some situations so the default is a static string:
#ansible_managed = Ansible managed
# by default, ansible-playbook will display "Skipping [host]" if it determines a task
# should not be run on a host. Set this to "False" if you don't want to see these "Skipping"
@ -261,6 +265,12 @@
# set to 0 for unlimited (RAM may suffer!).
#max_diff_size = 1048576
# When enabled, this option allows lookups (via variables like {{lookup('foo')}} or when used as
# a loop with `with_foo`) to return data that is not marked "unsafe". This means the data may contain
# jinja2 templating language which will be run through the templating engine.
# ENABLING THIS COULD BE A SECURITY RISK
#allow_unsafe_lookups = False
[privilege_escalation]
#become=True
#become_method=sudo

View file

@ -4,16 +4,16 @@
set HACKING_DIR (dirname (status -f))
set FULL_PATH (python -c "import os; print(os.path.realpath('$HACKING_DIR'))")
set ANSIBLE_HOME (dirname $FULL_PATH)
set PREFIX_PYTHONPATH $ANSIBLE_HOME/lib
set PREFIX_PATH $ANSIBLE_HOME/bin
set PREFIX_PYTHONPATH $ANSIBLE_HOME/lib
set PREFIX_PATH $ANSIBLE_HOME/bin
set PREFIX_MANPATH $ANSIBLE_HOME/docs/man
# set quiet flag
if set -q argv
if test (count $argv) -ge 1
switch $argv
case '-q' '--quiet'
set QUIET "true"
case '*'
case '-q' '--quiet'
set QUIET "true"
case '*'
end
end
@ -49,15 +49,14 @@ set -gx ANSIBLE_LIBRARY $ANSIBLE_HOME/library
# Generate egg_info so that pkg_resources works
pushd $ANSIBLE_HOME
if test -e $PREFIX_PYTHONPATH/ansible*.egg-info
rm -r $PREFIX_PYTHONPATH/ansible*.egg-info
end
if [ $QUIET ]
python setup.py -q egg_info
else
python setup.py egg_info
end
if test -e $PREFIX_PYTHONPATH/ansible*.egg-info
rm -r $PREFIX_PYTHONPATH/ansible*.egg-info
end
mv ansible*egg-info $PREFIX_PYTHONPATH
find . -type f -name "*.pyc" -delete
popd

View file

@ -105,6 +105,10 @@ def boilerplate_module(modfile, args, interpreter, check, destfile):
#included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1
complex_args = {}
# default selinux fs list is pass in as _ansible_selinux_special_fs arg
complex_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
if args.startswith("@"):
# Argument is a YAML file (JSON is a subset of YAML)
complex_args = utils_vars.combine_vars(complex_args, loader.load_from_file(args[1:]))

View file

@ -157,33 +157,37 @@ class CLI(object):
@staticmethod
def ask_vault_passwords(ask_new_vault_pass=False, rekey=False):
def ask_vault_passwords():
''' prompt for vault password and/or password change '''
vault_pass = None
new_vault_pass = None
try:
if rekey or not ask_new_vault_pass:
vault_pass = getpass.getpass(prompt="Vault password: ")
vault_pass = getpass.getpass(prompt="Vault password: ")
if ask_new_vault_pass:
new_vault_pass = getpass.getpass(prompt="New Vault password: ")
new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
if new_vault_pass != new_vault_pass2:
raise AnsibleError("Passwords do not match")
except EOFError:
pass
# enforce no newline chars at the end of passwords
if vault_pass:
vault_pass = to_bytes(vault_pass, errors='strict', nonstring='simplerepr').strip()
vault_pass = to_bytes(vault_pass, errors='surrogate_or_strict', nonstring='simplerepr').strip()
return vault_pass
@staticmethod
def ask_new_vault_passwords():
new_vault_pass = None
try:
new_vault_pass = getpass.getpass(prompt="New Vault password: ")
new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
if new_vault_pass != new_vault_pass2:
raise AnsibleError("Passwords do not match")
except EOFError:
pass
if new_vault_pass:
new_vault_pass = to_bytes(new_vault_pass, errors='strict', nonstring='simplerepr').strip()
new_vault_pass = to_bytes(new_vault_pass, errors='surrogate_or_strict', nonstring='simplerepr').strip()
if ask_new_vault_pass and not rekey:
vault_pass = new_vault_pass
return vault_pass, new_vault_pass
return new_vault_pass
def ask_passwords(self):
''' prompt for connection and become passwords if needed '''
@ -515,10 +519,10 @@ class CLI(object):
''' find reasonable way to display text '''
# this is a much simpler form of what is in pydoc.py
if not sys.stdout.isatty():
display.display(text)
display.display(text, screen_only=True)
elif 'PAGER' in os.environ:
if sys.platform == 'win32':
display.display(text)
display.display(text, screen_only=True)
else:
self.pager_pipe(text, os.environ['PAGER'])
else:
@ -527,7 +531,7 @@ class CLI(object):
if p.returncode == 0:
self.pager_pipe(text, 'less')
else:
display.display(text)
display.display(text, screen_only=True)
@staticmethod
def pager_pipe(text, cmd):
@ -573,7 +577,7 @@ class CLI(object):
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError("Vault password script %s returned non-zero (%s): %s" % (this_path, p.returncode, p.stderr))
vault_pass = stdout.strip('\r\n')
vault_pass = stdout.strip(b'\r\n')
else:
try:
f = open(this_path, "rb")

View file

@ -107,7 +107,7 @@ class AdHocCLI(CLI):
sshpass = None
becomepass = None
vault_pass = None
b_vault_pass = None
self.normalize_become_options()
(sshpass, becomepass) = self.ask_passwords()
@ -117,11 +117,11 @@ class AdHocCLI(CLI):
if self.options.vault_password_file:
# read vault_pass from a file
vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader=loader)
loader.set_vault_password(vault_pass)
b_vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader=loader)
loader.set_vault_password(b_vault_pass)
elif self.options.ask_vault_pass:
vault_pass = self.ask_vault_passwords()[0]
loader.set_vault_password(vault_pass)
b_vault_pass = self.ask_vault_passwords()
loader.set_vault_password(b_vault_pass)
variable_manager = VariableManager()
variable_manager.extra_vars = load_extra_vars(loader=loader, options=self.options)
@ -176,6 +176,9 @@ class AdHocCLI(CLI):
cb = self.callback
elif self.options.one_line:
cb = 'oneline'
# Respect custom 'stdout_callback' only with enabled 'bin_ansible_callbacks'
elif C.DEFAULT_LOAD_CALLBACK_PLUGINS and C.DEFAULT_STDOUT_CALLBACK != 'default':
cb = C.DEFAULT_STDOUT_CALLBACK
else:
cb = 'minimal'
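
The settings this new branch consults, as an ansible.cfg sketch (`oneline` is just an example callback name)::

    [defaults]
    bin_ansible_callbacks = True
    stdout_callback = oneline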

View file

@ -89,7 +89,7 @@ class DocCLI(CLI):
try:
# if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = module_loader.find_plugin(module, mod_type='.py')
filename = module_loader.find_plugin(module, mod_type='.py', ignore_deprecated=True)
if filename is None:
display.warning("module %s not found in %s\n" % (module, DocCLI.print_paths(module_loader)))
continue
@ -100,7 +100,7 @@ class DocCLI(CLI):
try:
doc, plainexamples, returndocs = module_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
except:
display.vvv(traceback.print_exc())
display.vvv(traceback.format_exc())
display.error("module %s has a documentation error formatting or is missing documentation\nTo see exact traceback use -vvv" % module)
continue
@ -133,10 +133,11 @@ class DocCLI(CLI):
# probably a quoting issue.
raise AnsibleError("Parsing produced an empty object.")
except Exception as e:
display.vvv(traceback.print_exc())
display.vvv(traceback.format_exc())
raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module, str(e)))
self.pager(text)
if text:
self.pager(text)
return 0
def find_modules(self, path):
@ -173,7 +174,7 @@ class DocCLI(CLI):
continue
# if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = module_loader.find_plugin(module, mod_type='.py')
filename = module_loader.find_plugin(module, mod_type='.py', ignore_deprecated=True)
if filename is None:
continue

View file

@ -94,7 +94,7 @@ class PlaybookCLI(CLI):
# Manage passwords
sshpass = None
becomepass = None
vault_pass = None
b_vault_pass = None
passwords = {}
# don't deal with privilege escalation or passwords when we don't need to
@ -107,11 +107,11 @@ class PlaybookCLI(CLI):
if self.options.vault_password_file:
# read vault_pass from a file
vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader=loader)
loader.set_vault_password(vault_pass)
b_vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader=loader)
loader.set_vault_password(b_vault_pass)
elif self.options.ask_vault_pass:
vault_pass = self.ask_vault_passwords()[0]
loader.set_vault_password(vault_pass)
b_vault_pass = self.ask_vault_passwords()
loader.set_vault_password(b_vault_pass)
# initial error check, to make sure all specified playbooks are accessible
# before we start running anything through the playbook executor
@ -163,6 +163,12 @@ class PlaybookCLI(CLI):
display.display('\nplaybook: %s' % p['playbook'])
for idx, play in enumerate(p['plays']):
if play._included_path is not None:
loader.set_basedir(play._included_path)
else:
pb_dir = os.path.realpath(os.path.dirname(p['playbook']))
loader.set_basedir(pb_dir)
msg = "\n play #%d (%s): %s" % (idx + 1, ','.join(play.hosts), play.name)
mytags = set(play.tags)
msg += '\tTAGS: [%s]' % (','.join(mytags))

View file

@ -42,8 +42,8 @@ class VaultCLI(CLI):
def __init__(self, args):
self.vault_pass = None
self.new_vault_pass = None
self.b_vault_pass = None
self.b_new_vault_pass = None
super(VaultCLI, self).__init__(args)
def parse(self):
@ -99,23 +99,25 @@ class VaultCLI(CLI):
if self.options.vault_password_file:
# read vault_pass from a file
self.vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader)
else:
newpass = False
rekey = False
if not self.options.new_vault_password_file:
newpass = (self.action in ['create', 'rekey', 'encrypt'])
rekey = (self.action == 'rekey')
self.vault_pass, self.new_vault_pass = self.ask_vault_passwords(ask_new_vault_pass=newpass, rekey=rekey)
self.b_vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader)
if self.options.new_vault_password_file:
# for rekey only
self.new_vault_pass = CLI.read_vault_password_file(self.options.new_vault_password_file, loader)
self.b_new_vault_pass = CLI.read_vault_password_file(self.options.new_vault_password_file, loader)
if not self.vault_pass:
if not self.b_vault_pass or self.options.ask_vault_pass:
self.b_vault_pass = self.ask_vault_passwords()
if not self.b_vault_pass:
raise AnsibleOptionsError("A password is required to use Ansible's Vault")
self.editor = VaultEditor(self.vault_pass)
if self.action == 'rekey':
if not self.b_new_vault_pass:
self.b_new_vault_pass = self.ask_new_vault_passwords()
if not self.b_new_vault_pass:
raise AnsibleOptionsError("A password is required to rekey Ansible's Vault")
self.editor = VaultEditor(self.b_vault_pass)
self.execute()
@ -171,6 +173,6 @@ class VaultCLI(CLI):
raise AnsibleError(f + " does not exist")
for f in self.args:
self.editor.rekey_file(f, self.new_vault_pass)
self.editor.rekey_file(f, self.b_new_vault_pass)
display.display("Rekey successful", stderr=True)

View file

@ -43,6 +43,7 @@ if PY3:
class_types = type,
text_type = str
binary_type = bytes
cmp = lambda a, b: (a > b) - (a < b)
MAXSIZE = sys.maxsize
else:
@ -51,6 +52,7 @@ else:
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
cmp = cmp
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
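
A quick illustration of the shim on Python 3, where the built-in was removed (`(a > b) - (a < b)` reproduces Python 2's three-way compare)::

    cmp = lambda a, b: (a > b) - (a < b)

    assert cmp(1, 2) == -1
    assert cmp(2, 2) == 0
    assert cmp(3, 2) == 1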

View file

@ -146,7 +146,7 @@ DEFAULT_COW_WHITELIST = ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'd
DEFAULTS='defaults'
# FIXME: add deprecation warning when these get set
#### DEPRECATED VARS ####
#### DEPRECATED VARS ####
# use more sanely named 'inventory'
DEPRECATED_HOST_LIST = get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', '/etc/ansible/hosts', ispath=True)
# this is not used since 0.5 but people might still have in config
@ -157,8 +157,8 @@ DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE
DEFAULT_HOST_LIST = get_config(p, DEFAULTS,'inventory', 'ANSIBLE_INVENTORY', DEPRECATED_HOST_LIST, ispath=True)
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None, ispathlist=True)
DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles', ispathlist=True, expand_relative_paths=True)
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp')
DEFAULT_LOCAL_TMP = get_config(p, DEFAULTS, 'local_tmp', 'ANSIBLE_LOCAL_TEMP', '$HOME/.ansible/tmp', istmppath=True)
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '~/.ansible/tmp')
DEFAULT_LOCAL_TMP = get_config(p, DEFAULTS, 'local_tmp', 'ANSIBLE_LOCAL_TEMP', '~/.ansible/tmp', istmppath=True)
DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, integer=True)
DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
@ -176,7 +176,7 @@ DEFAULT_VAULT_PASSWORD_FILE = get_config(p, DEFAULTS, 'vault_password_file', 'AN
DEFAULT_TRANSPORT = get_config(p, DEFAULTS, 'transport', 'ANSIBLE_TRANSPORT', 'smart')
DEFAULT_SCP_IF_SSH = get_config(p, 'ssh_connection', 'scp_if_ssh', 'ANSIBLE_SCP_IF_SSH', 'smart')
DEFAULT_SFTP_BATCH_MODE = get_config(p, 'ssh_connection', 'sftp_batch_mode', 'ANSIBLE_SFTP_BATCH_MODE', True, boolean=True)
DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None, 'Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S by {uid} on {host}')
DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None, 'Ansible managed')
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', False, boolean=True)
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
@ -190,6 +190,9 @@ DEFAULT_LOG_PATH = get_config(p, DEFAULTS, 'log_path', 'ANSIB
DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
DEFAULT_INVENTORY_IGNORE = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE', ["~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo"], islist=True)
DEFAULT_VAR_COMPRESSION_LEVEL = get_config(p, DEFAULTS, 'var_compression_level', 'ANSIBLE_VAR_COMPRESSION_LEVEL', 0, integer=True)
DEFAULT_INTERNAL_POLL_INTERVAL = get_config(p, DEFAULTS, 'internal_poll_interval', None, 0.001, floating=True)
DEFAULT_ALLOW_UNSAFE_LOOKUPS = get_config(p, DEFAULTS, 'allow_unsafe_lookups', None, False, boolean=True)
ERROR_ON_MISSING_HANDLER = get_config(p, DEFAULTS, 'error_on_missing_handler', 'ANSIBLE_ERROR_ON_MISSING_HANDLER', True, boolean=True)
# static includes
DEFAULT_TASK_INCLUDES_STATIC = get_config(p, DEFAULTS, 'task_includes_static', 'ANSIBLE_TASK_INCLUDES_STATIC', False, boolean=True)

View file

@ -1,4 +1,4 @@
# (c) 2016 - Red Hat, Inc. <support@ansible.com>
# (c) 2016 - Red Hat, Inc. <info@ansible.com>
#
# This file is part of Ansible
#

View file

@ -22,6 +22,7 @@ __metaclass__ = type
import ast
import base64
import datetime
import imp
import json
import os
@ -106,10 +107,21 @@ import __main__
# Ubuntu15.10 with python2.7 Works
# Ubuntu15.10 with python3.4 Fails without this
# Ubuntu16.04.1 with python3.5 Fails without this
# To test on another platform:
# * use the copy module (since this shadows the stdlib copy module)
# * Turn off pipelining
# * Make sure that the destination file does not exist
# * ansible ubuntu16-test -m copy -a 'src=/etc/motd dest=/var/tmp/m'
# This will traceback in shutil. Looking at the complete traceback will show
# that shutil is importing copy which finds the ansible module instead of the
# stdlib module
scriptdir = None
try:
scriptdir = os.path.dirname(os.path.abspath(__main__.__file__))
except AttributeError:
except (AttributeError, OSError):
# Some platforms don't set __file__ when reading from stdin
# OSX raises OSError if using abspath() in a directory we don't have
# permission to read.
pass
if scriptdir is not None:
sys.path = [p for p in sys.path if p != scriptdir]
@ -212,12 +224,12 @@ def debug(command, zipped_mod, json_params):
directory = os.path.dirname(dest_filename)
if not os.path.exists(directory):
os.makedirs(directory)
f = open(dest_filename, 'w')
f = open(dest_filename, 'wb')
f.write(z.read(filename))
f.close()
# write the args file
f = open(args_path, 'w')
f = open(args_path, 'wb')
f.write(json_params)
f.close()
@ -317,7 +329,12 @@ if __name__ == '__main__':
# py3: zipped_mod will be text, py2: it's bytes. Need bytes at the end
sitecustomize = u'import sys\\nsys.path.insert(0,"%%s")\\n' %% zipped_mod
sitecustomize = sitecustomize.encode('utf-8')
z.writestr('sitecustomize.py', sitecustomize)
# Use a ZipInfo to work around zipfile limitation on hosts with
# clocks set to a pre-1980 year (for instance, Raspberry Pi)
zinfo = zipfile.ZipInfo()
zinfo.filename = 'sitecustomize.py'
zinfo.date_time = ( %(year)i, %(month)i, %(day)i, %(hour)i, %(minute)i, %(second)i)
z.writestr(zinfo, sitecustomize)
z.close()
exitcode = invoke_module(module, zipped_mod, ANSIBALLZ_PARAMS)
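
The limitation being worked around, sketched (a plain writestr stamps each entry with the host clock, and the ZIP format cannot encode years before 1980; supplying an explicit ZipInfo sidesteps the clock)::

    import io
    import zipfile

    buf = io.BytesIO()
    z = zipfile.ZipFile(buf, mode='w')

    zinfo = zipfile.ZipInfo()                 # default date_time is (1980, 1, 1, 0, 0, 0)
    zinfo.filename = 'sitecustomize.py'
    zinfo.date_time = (2017, 1, 1, 0, 0, 0)   # any valid build-time stamp works
    z.writestr(zinfo, b'pass\n')
    z.close()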
@ -680,6 +697,7 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
interpreter_parts = interpreter.split(u' ')
interpreter = u"'{0}'".format(u"', '".join(interpreter_parts))
now=datetime.datetime.utcnow()
output.write(to_bytes(ACTIVE_ANSIBALLZ_TEMPLATE % dict(
zipdata=zipdata,
ansible_module=module_name,
@ -687,6 +705,12 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
shebang=shebang,
interpreter=interpreter,
coding=ENCODING_STRING,
year=now.year,
month=now.month,
day=now.day,
hour=now.hour,
minute=now.minute,
second=now.second,
)))
module_data = output.getvalue()

View file

@ -28,6 +28,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.playbook.block import Block
from ansible.playbook.task import Task
from ansible.playbook.role_include import IncludeRole
from ansible.utils.boolean import boolean
@ -48,7 +49,6 @@ class HostState:
self.cur_regular_task = 0
self.cur_rescue_task = 0
self.cur_always_task = 0
self.cur_role = None
self.cur_dep_chain = None
self.run_state = PlayIterator.ITERATING_SETUP
self.fail_state = PlayIterator.FAILED_NONE
@ -56,6 +56,7 @@ class HostState:
self.tasks_child_state = None
self.rescue_child_state = None
self.always_child_state = None
self.did_rescue = False
self.did_start_at_task = False
def __repr__(self):
@ -80,18 +81,18 @@ class HostState:
ret.append(states[i])
return "|".join(ret)
return "HOST STATE: block=%d, task=%d, rescue=%d, always=%d, role=%s, run_state=%s, fail_state=%s, pending_setup=%s, tasks child state? %s, rescue child state? %s, always child state? %s, did start at task? %s" % (
return "HOST STATE: block=%d, task=%d, rescue=%d, always=%d, run_state=%s, fail_state=%s, pending_setup=%s, tasks child state? (%s), rescue child state? (%s), always child state? (%s), did rescue? %s, did start at task? %s" % (
self.cur_block,
self.cur_regular_task,
self.cur_rescue_task,
self.cur_always_task,
self.cur_role,
_run_state_to_string(self.run_state),
_failed_state_to_string(self.fail_state),
self.pending_setup,
self.tasks_child_state,
self.rescue_child_state,
self.always_child_state,
self.did_rescue,
self.did_start_at_task,
)
@ -101,7 +102,7 @@ class HostState:
for attr in (
'_blocks', 'cur_block', 'cur_regular_task', 'cur_rescue_task', 'cur_always_task',
'cur_role', 'run_state', 'fail_state', 'pending_setup', 'cur_dep_chain',
'run_state', 'fail_state', 'pending_setup', 'cur_dep_chain',
'tasks_child_state', 'rescue_child_state', 'always_child_state'
):
if getattr(self, attr) != getattr(other, attr):
@ -118,10 +119,10 @@ class HostState:
new_state.cur_regular_task = self.cur_regular_task
new_state.cur_rescue_task = self.cur_rescue_task
new_state.cur_always_task = self.cur_always_task
new_state.cur_role = self.cur_role
new_state.run_state = self.run_state
new_state.fail_state = self.fail_state
new_state.pending_setup = self.pending_setup
new_state.did_rescue = self.did_rescue
new_state.did_start_at_task = self.did_start_at_task
if self.cur_dep_chain is not None:
new_state.cur_dep_chain = self.cur_dep_chain[:]
@ -177,6 +178,9 @@ class PlayIterator:
if gather_timeout:
setup_task.args['gather_timeout'] = gather_timeout
setup_task.set_loader(self._play._loader)
# short circuit fact gathering if the entire playbook is conditional
if self._play._included_conditional is not None:
setup_task.when = self._play._included_conditional[:]
setup_block.block = [setup_task]
setup_block = setup_block.filter_tagged_tasks(play_context, all_vars)
@ -198,7 +202,7 @@ class PlayIterator:
self._host_states[host.name] = HostState(blocks=self._blocks)
# if the host's name is in the variable manager's fact cache, then set
# its _gathered_facts flag to true for smart gathering tests later
if host.name in variable_manager._fact_cache and variable_manager._fact_cache.get('module_setup', False):
if host.name in variable_manager._fact_cache and variable_manager._fact_cache.get(host.name).get('module_setup', False):
host._gathered_facts = True
# if we're looking to start at a specific task, iterate through
# the tasks for this host until we find the specified task
@ -259,19 +263,6 @@ class PlayIterator:
old_s = s
(s, task) = self._get_next_task_from_state(s, host=host, peek=peek)
def _roles_are_different(ra, rb):
if ra != rb:
return True
else:
return old_s.cur_dep_chain != task.get_dep_chain()
if task and task._role:
# if we had a current role, mark that role as completed
if s.cur_role and _roles_are_different(task._role, s.cur_role) and host.name in s.cur_role._had_task_run and not peek:
s.cur_role._completed[host.name] = True
s.cur_role = task._role
s.cur_dep_chain = task.get_dep_chain()
if not peek:
self._host_states[host.name] = s
@ -281,7 +272,7 @@ class PlayIterator:
return (s, task)
def _get_next_task_from_state(self, state, host, peek):
def _get_next_task_from_state(self, state, host, peek, in_child=False):
task = None
@ -347,7 +338,7 @@ class PlayIterator:
# have one recurse into it for the next task. If we're done with the child
state, we clear it and drop back to getting the next task from the list.
if state.tasks_child_state:
(state.tasks_child_state, task) = self._get_next_task_from_state(state.tasks_child_state, host=host, peek=peek)
(state.tasks_child_state, task) = self._get_next_task_from_state(state.tasks_child_state, host=host, peek=peek, in_child=True)
if self._check_failed_state(state.tasks_child_state):
# failed child state, so clear it and move into the rescue portion
state.tasks_child_state = None
@ -376,7 +367,6 @@ class PlayIterator:
if isinstance(task, Block) or state.tasks_child_state is not None:
state.tasks_child_state = HostState(blocks=[task])
state.tasks_child_state.run_state = self.ITERATING_TASKS
state.tasks_child_state.cur_role = state.cur_role
# since we've created the child state, clear the task
# so we can pick up the child state on the next pass
task = None
@ -386,7 +376,7 @@ class PlayIterator:
# The process here is identical to ITERATING_TASKS, except instead
# we move into the always portion of the block.
if state.rescue_child_state:
(state.rescue_child_state, task) = self._get_next_task_from_state(state.rescue_child_state, host=host, peek=peek)
(state.rescue_child_state, task) = self._get_next_task_from_state(state.rescue_child_state, host=host, peek=peek, in_child=True)
if self._check_failed_state(state.rescue_child_state):
state.rescue_child_state = None
self._set_failed_state(state)
@ -401,12 +391,12 @@ class PlayIterator:
if len(block.rescue) > 0:
state.fail_state = self.FAILED_NONE
state.run_state = self.ITERATING_ALWAYS
state.did_rescue = True
else:
task = block.rescue[state.cur_rescue_task]
if isinstance(task, Block) or state.rescue_child_state is not None:
state.rescue_child_state = HostState(blocks=[task])
state.rescue_child_state.run_state = self.ITERATING_TASKS
state.rescue_child_state.cur_role = state.cur_role
task = None
state.cur_rescue_task += 1
@ -416,13 +406,14 @@ class PlayIterator:
# run state to ITERATING_COMPLETE in the event of any errors, or when we
# have hit the end of the list of blocks.
if state.always_child_state:
(state.always_child_state, task) = self._get_next_task_from_state(state.always_child_state, host=host, peek=peek)
(state.always_child_state, task) = self._get_next_task_from_state(state.always_child_state, host=host, peek=peek, in_child=True)
if self._check_failed_state(state.always_child_state):
state.always_child_state = None
self._set_failed_state(state)
else:
if task is None or state.always_child_state.run_state == self.ITERATING_COMPLETE:
state.always_child_state = None
continue
else:
if state.cur_always_task >= len(block.always):
if state.fail_state != self.FAILED_NONE:
@ -436,12 +427,17 @@ class PlayIterator:
state.tasks_child_state = None
state.rescue_child_state = None
state.always_child_state = None
state.did_rescue = False
# we're advancing blocks, so if this was an end-of-role block we
# mark the current role complete
if block._eor and host.name in block._role._had_task_run and not in_child:
block._role._completed[host.name] = True
else:
task = block.always[state.cur_always_task]
if isinstance(task, Block) or state.always_child_state is not None:
state.always_child_state = HostState(blocks=[task])
state.always_child_state.run_state = self.ITERATING_TASKS
state.always_child_state.cur_role = state.cur_role
task = None
state.cur_always_task += 1
@ -492,6 +488,7 @@ class PlayIterator:
s = self._set_failed_state(s)
display.debug("^ failed state is now: %s" % s)
self._host_states[host.name] = s
self._play._removed_hosts.append(host.name)
def get_failed_hosts(self):
return dict((host, True) for (host, state) in iteritems(self._host_states) if self._check_failed_state(state))
@ -499,23 +496,23 @@ class PlayIterator:
def _check_failed_state(self, state):
if state is None:
return False
elif state.run_state == self.ITERATING_RESCUE and self._check_failed_state(state.rescue_child_state):
return True
elif state.run_state == self.ITERATING_ALWAYS and self._check_failed_state(state.always_child_state):
return True
elif state.fail_state != self.FAILED_NONE:
if state.run_state == self.ITERATING_RESCUE and state.fail_state&self.FAILED_RESCUE == 0:
return False
elif state.run_state == self.ITERATING_ALWAYS and state.fail_state&self.FAILED_ALWAYS == 0:
return False
else:
return True
return not state.did_rescue
elif state.run_state == self.ITERATING_TASKS and self._check_failed_state(state.tasks_child_state):
cur_block = self._blocks[state.cur_block]
if len(cur_block.rescue) > 0 and state.fail_state & self.FAILED_RESCUE == 0:
return False
else:
return True
elif state.run_state == self.ITERATING_RESCUE and self._check_failed_state(state.rescue_child_state):
return True
elif state.run_state == self.ITERATING_ALWAYS and self._check_failed_state(state.always_child_state):
return True
return False
def is_failed(self, host):

View file

@ -68,21 +68,25 @@ class WorkerProcess(multiprocessing.Process):
self._variable_manager = variable_manager
self._shared_loader_obj = shared_loader_obj
# dupe stdin, if we have one
self._new_stdin = sys.stdin
try:
fileno = sys.stdin.fileno()
if fileno is not None:
try:
self._new_stdin = os.fdopen(os.dup(fileno))
except OSError:
# couldn't dupe stdin, most likely because it's
# not a valid file descriptor, so we just rely on
# using the one that was passed in
pass
except (AttributeError, ValueError):
# couldn't get stdin's fileno, so we just carry on
pass
if sys.stdin.isatty():
# dupe stdin, if we have one
self._new_stdin = sys.stdin
try:
fileno = sys.stdin.fileno()
if fileno is not None:
try:
self._new_stdin = os.fdopen(os.dup(fileno))
except OSError:
# couldn't dupe stdin, most likely because it's
# not a valid file descriptor, so we just rely on
# using the one that was passed in
pass
except (AttributeError, ValueError):
# couldn't get stdin's fileno, so we just carry on
pass
else:
# set to /dev/null
self._new_stdin = os.devnull
def run(self):
'''

View file

@ -19,6 +19,8 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.utils.vars import merge_hash
class AggregateStats:
''' holds stats about per-host activity during playbook runs '''
@ -31,6 +33,9 @@ class AggregateStats:
self.changed = {}
self.skipped = {}
# user defined stats, which can be per host or global
self.custom = {}
def increment(self, what, host):
''' helper function to bump a statistic '''
@ -49,3 +54,31 @@ class AggregateStats:
skipped = self.skipped.get(host, 0)
)
def set_custom_stats(self, which, what, host=None):
''' allow setting of a custom fact '''
if host is None:
host = '_run'
if host not in self.custom:
self.custom[host] = {which: what}
else:
self.custom[host][which] = what
def update_custom_stats(self, which, what, host=None):
''' allow aggregation of a custom fact '''
if host is None:
host = '_run'
if host not in self.custom or which not in self.custom[host]:
return self.set_custom_stats(which, what, host)
# mismatching types
if type(what) != type(self.custom[host][which]):
return None
if isinstance(what, dict):
self.custom[host][which] = merge_hash(self.custom[host][which], what)
else:
# let overloaded + take care of other types
self.custom[host][which] += what
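
A short usage sketch of the new helpers (module path assumed from the diff's location)::

    from ansible.executor.stats import AggregateStats

    stats = AggregateStats()
    stats.set_custom_stats('restarts', 1, host='web1')
    stats.update_custom_stats('restarts', 2, host='web1')  # matching ints: '+' gives 3
    stats.update_custom_stats('meta', {'a': 1})            # no host: stored under '_run'
    stats.update_custom_stats('meta', {'b': 2})            # dicts: merge_hash -> {'a': 1, 'b': 2}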

View file

@ -71,6 +71,7 @@ class TaskExecutor:
self._shared_loader_obj = shared_loader_obj
self._connection = None
self._rslt_q = rslt_q
self._loop_eval_error = None
self._task.squash()
@ -85,10 +86,13 @@ class TaskExecutor:
display.debug("in run()")
try:
# get search path for this task to pass to lookup plugins
self._job_vars['ansible_search_path'] = self._task.get_search_path()
try:
items = self._get_loop_items()
except AnsibleUndefinedVariable as e:
# save the error raised here for use later
items = None
self._loop_eval_error = e
items = self._get_loop_items()
if items is not None:
if len(items) > 0:
item_results = self._run_loop(items)
@ -126,17 +130,26 @@ class TaskExecutor:
if 'changed' not in res:
res['changed'] = False
def _clean_res(res):
def _clean_res(res, errors='surrogate_or_strict'):
if isinstance(res, UnsafeProxy):
return res._obj
elif isinstance(res, binary_type):
return to_text(res, errors='surrogate_or_strict')
return to_text(res, errors=errors)
elif isinstance(res, dict):
for k in res:
res[k] = _clean_res(res[k])
try:
res[k] = _clean_res(res[k], errors=errors)
except UnicodeError:
if k == 'diff':
# If this is a diff, substitute a replacement character if the value
# is undecodable as utf8. (Fix #21804)
display.warning("We were unable to decode all characters, replaced some in an effort to return as much as possible")
res[k] = _clean_res(res[k], errors='surrogate_then_replace')
else:
raise
elif isinstance(res, list):
for idx,item in enumerate(res):
res[idx] = _clean_res(item)
res[idx] = _clean_res(item, errors=errors)
return res
display.debug("dumping result to json")
@ -173,6 +186,10 @@ class TaskExecutor:
old_vars[k] = self._job_vars[k]
self._job_vars[k] = play_context_vars[k]
# get search path for this task to pass to lookup plugins
self._job_vars['ansible_search_path'] = self._task.get_search_path()
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
items = None
if self._task.loop:
@ -212,6 +229,11 @@ class TaskExecutor:
for idx, item in enumerate(items):
if item is not None and not isinstance(item, UnsafeProxy):
items[idx] = UnsafeProxy(item)
# ensure basedir is always in (dwim already searches here but we need to display it)
if self._loader.get_basedir() not in self._job_vars['ansible_search_path']:
self._job_vars['ansible_search_path'].append(self._loader.get_basedir())
return items
def _run_loop(self, items):
@ -396,6 +418,11 @@ class TaskExecutor:
if not self._task.evaluate_conditional(templar, variables):
display.debug("when evaluation failed, skipping this task")
return dict(changed=False, skipped=True, skip_reason='Conditional check failed', _ansible_no_log=self._play_context.no_log)
# since we're not skipping, if there was a loop evaluation error
# raised earlier we need to raise it now to halt the execution of
# this task
if self._loop_eval_error is not None:
raise self._loop_eval_error
except AnsibleError:
# skip conditional exception in the case of includes, as the vars needed might not be available except in the included tasks or due to tags
if self._task.action not in ['include', 'include_role']:
@ -416,14 +443,10 @@ class TaskExecutor:
include_file = templar.template(include_file)
return dict(include=include_file, include_variables=include_variables)
# TODO: not needed?
# if this task is a IncludeRole, we just return now with a success code so the main thread can expand the task list for the given host
elif self._task.action == 'include_role':
include_variables = self._task.args.copy()
role = templar.template(self._task._role_name)
if not role:
return dict(failed=True, msg="No role was specified to include")
return dict(include_role=role, include_variables=include_variables)
return dict(include_role=self._task, include_variables=include_variables)
# Now we do final validation on the task, which sets all fields to their final values.
self._task.post_validate(templar=templar)
@ -499,7 +522,7 @@ class TaskExecutor:
vars_copy[self._task.register] = wrap_var(result.copy())
if self._task.async > 0:
if self._task.poll > 0:
if self._task.poll > 0 and not result.get('skipped'):
result = self._poll_async_result(result=result, templar=templar, task_vars=vars_copy)
# ensure no log is preserved

View file

@ -138,8 +138,8 @@ class TaskQueueManager:
# then initialize it with the given handler list
for handler in handler_list:
if handler not in self._notified_handlers:
self._notified_handlers[handler] = []
if handler._uuid not in self._notified_handlers:
self._notified_handlers[handler._uuid] = []
if handler.listen:
listeners = handler.listen
if not isinstance(listeners, list):
@ -147,7 +147,7 @@ class TaskQueueManager:
for listener in listeners:
if listener not in self._listening_handlers:
self._listening_handlers[listener] = []
self._listening_handlers[listener].append(handler.get_name())
self._listening_handlers[listener].append(handler._uuid)
def load_callbacks(self):
'''
@ -222,7 +222,7 @@ class TaskQueueManager:
)
# Fork # of forks, # of hosts or serial, whichever is lowest
num_hosts = len(self._inventory.get_hosts(new_play.hosts))
num_hosts = len(self._inventory.get_hosts(new_play.hosts, ignore_restrictions=True))
max_serial = 0
if new_play.serial:
@ -353,17 +353,20 @@ class TaskQueueManager:
for method in methods:
try:
# temporary hack, required due to a change in the callback API, so
# we don't break backwards compatibility with callbacks which were
# designed to use the original API
# Previously, the `v2_playbook_on_start` callback API did not accept
# any arguments. In recent versions of the v2 callback API, the play-
# book that started execution is given. In order to support both of
# these method signatures, we need to use this `inspect` hack to send
no arguments to the methods that don't accept them. This way, we avoid
breaking backwards compatibility until that API is deprecated.
# FIXME: target for removal and revert to the original code here after a year (2017-01-14)
if method_name == 'v2_playbook_on_start':
import inspect
(f_args, f_varargs, f_keywords, f_defaults) = inspect.getargspec(method)
if 'playbook' in f_args:
method(*args, **kwargs)
else:
argspec = inspect.getargspec(method)
if argspec.args == ['self']:
method()
else:
method(*args, **kwargs)
else:
method(*args, **kwargs)
except Exception as e:

View file

@ -62,11 +62,13 @@ class TaskResult:
return self._check_key('unreachable')
def _check_key(self, key):
if self._result.get('results', []):
'''get a specific key from the result or its items'''
if isinstance(self._result, dict) and key in self._result:
return self._result.get(key, False)
else:
flag = False
for res in self._result.get('results', []):
if isinstance(res, dict):
flag |= res.get(key, False)
return flag
else:
return self._result.get(key, False)
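
A sketch of the per-item aggregation (the TaskResult(host, task, return_data) constructor shape is assumed; _check_key never touches host or task here)::

    from ansible.executor.task_result import TaskResult

    tr = TaskResult('web1', None, {'results': [{'changed': False}, {'changed': True}]})
    assert tr._check_key('changed')   # True: the flag is OR-ed across loop items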

View file

@ -21,7 +21,6 @@ __metaclass__ = type
import fnmatch
import os
import subprocess
import sys
import re
import itertools
@ -38,6 +37,7 @@ from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins import vars_loader
from ansible.utils.vars import combine_vars
from ansible.utils.path import unfrackpath
try:
from __main__ import display
@ -58,7 +58,7 @@ class Inventory(object):
# the host file file, or script path, or list of hosts
# if a list, inventory data will NOT be loaded
self.host_list = host_list
self.host_list = unfrackpath(host_list, follow=False)
self._loader = loader
self._variable_manager = variable_manager
self.localhost = None
@ -158,17 +158,39 @@ class Inventory(object):
self._vars_plugins = [ x for x in vars_loader.all(self) ]
### POST PROCESS groups and hosts after specific parser was invoked
hosts = []
group_names = set()
# set group vars from group_vars/ files and vars plugins
for g in self.groups:
group = self.groups[g]
group.vars = combine_vars(group.vars, self.get_group_variables(group.name))
self.get_group_vars(group)
group_names.add(group.name)
hosts.extend(group.get_hosts())
# get host vars from host_vars/ files and vars plugins
for host in self.get_hosts(ignore_limits=True, ignore_restrictions=True):
for host in hosts:
host.vars = combine_vars(host.vars, self.get_host_variables(host.name))
self.get_host_vars(host)
mygroups = host.get_groups()
# ensure hosts are always in 'all'
if all not in mygroups:
all.add_host(host)
if ungrouped in mygroups:
# clear ungrouped of any hosts the parser stored there incorrectly
if set(mygroups).difference(set([all, ungrouped])):
host.remove_group(ungrouped)
else:
# add ungrouped hosts to ungrouped
length = len(mygroups)
if length == 0 or (length == 1 and all in mygroups):
ungrouped.add_host(host)
def _match(self, str, pattern_str):
try:
if pattern_str.startswith('~'):
@ -780,7 +802,10 @@ class Inventory(object):
path = os.path.realpath(os.path.join(basedir, 'group_vars'))
found_vars = set()
if os.path.exists(path):
found_vars = set(os.listdir(to_text(path)))
if os.path.isdir(path):
found_vars = set(os.listdir(to_text(path)))
else:
display.warning("Found group_vars that is not a directory, skipping: %s" % path)
return found_vars
def _find_host_vars_files(self, basedir):

View file

@ -25,6 +25,7 @@ import os
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.utils.vars import combine_vars
from ansible.module_utils._text import to_native
#FIXME: make into plugins
from ansible.inventory.ini import InventoryParser as InventoryINIParser
@ -44,11 +45,10 @@ def get_file_parser(hostsfile, groups, loader):
parser = None
try:
inv_file = open(hostsfile)
first_line = inv_file.readlines()[0]
inv_file.close()
if first_line.startswith('#!'):
shebang_present = True
with open(hostsfile, 'rb') as inv_file:
initial_chars = inv_file.read(2)
if initial_chars.startswith(b'#!'):
shebang_present = True
except:
pass
@ -59,7 +59,7 @@ def get_file_parser(hostsfile, groups, loader):
parser = InventoryScript(loader=loader, groups=groups, filename=hostsfile)
processed = True
except Exception as e:
myerr.append(str(e))
myerr.append('Attempted to execute "%s" as inventory script: %s' % (hostsfile, to_native(e)))
elif shebang_present:
myerr.append("The inventory file \'%s\' looks like it should be an executable inventory script, but is not marked executable. Perhaps you want to correct this with `chmod +x %s`?" % (hostsfile, hostsfile))
@ -70,7 +70,7 @@ def get_file_parser(hostsfile, groups, loader):
parser = InventoryYAMLParser(loader=loader, groups=groups, filename=hostsfile)
processed = True
except Exception as e:
myerr.append(str(e))
myerr.append('Attempted to read "%s" as YAML: %s' % (to_native(hostsfile), to_native(e)))
# ini
if not processed and not shebang_present:
@ -78,7 +78,7 @@ def get_file_parser(hostsfile, groups, loader):
parser = InventoryINIParser(loader=loader, groups=groups, filename=hostsfile)
processed = True
except Exception as e:
myerr.append(str(e))
myerr.append('Attempted to read "%s" as ini file: %s ' % (to_native(hostsfile), to_native(e)))
if not processed and myerr:
raise AnsibleError('\n'.join(myerr))

View file

@ -114,6 +114,12 @@ class Group:
host.add_group(self)
self.clear_hosts_cache()
def remove_host(self, host):
self.hosts.remove(host)
host.remove_group(self)
self.clear_hosts_cache()
def set_variable(self, key, value):
self.vars[key] = value

View file

@ -112,6 +112,10 @@ class Host:
self.groups.append(group)
def remove_group(self, group):
self.groups.remove(group)
def set_variable(self, key, value):
self.vars[key] = value
@ -138,6 +142,6 @@ class Host:
def get_group_vars(self):
results = {}
groups = self.get_groups()
for group in sorted(groups, key=lambda g: g.depth):
for group in sorted(groups, key=lambda g: (g.depth, g.name)):
results = combine_vars(results, group.get_vars())
return results
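Sorting by (depth, name) rather than depth alone makes the merge order deterministic when several groups share a depth: same-depth groups now apply alphabetically instead of in whatever order the group list happened to have. A toy illustration (dict.update stands in for Ansible's combine_vars):

class Group(object):
    def __init__(self, name, depth, variables):
        self.name = name
        self.depth = depth
        self.vars = variables

    def get_vars(self):
        return self.vars

def merged_group_vars(groups):
    results = {}
    for group in sorted(groups, key=lambda g: (g.depth, g.name)):
        results.update(group.get_vars())
    return results

groups = [Group('beta', 1, {'x': 2}), Group('alpha', 1, {'x': 1})]
# 'beta' sorts after 'alpha' at the same depth, so its value always wins
assert merged_group_vars(groups)['x'] == 2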

View file

@ -38,9 +38,10 @@ class InventoryParser(object):
Takes an INI-format inventory file and builds a list of groups and subgroups
with their associated hosts and variable settings.
"""
_COMMENT_MARKERS = frozenset((u';', u'#'))
b_COMMENT_MARKERS = frozenset((b';', b'#'))
def __init__(self, loader, groups, filename=C.DEFAULT_HOST_LIST):
self._loader = loader
self.filename = filename
# Start with an empty host list and whatever groups we're passed in
@ -52,13 +53,28 @@ class InventoryParser(object):
# Read in the hosts, groups, and variables defined in the
# inventory file.
if loader:
(data, private) = loader._get_file_contents(filename)
(b_data, private) = loader._get_file_contents(filename)
else:
with open(filename) as fh:
data = to_text(fh.read())
data = data.split('\n')
with open(filename, 'rb') as fh:
b_data = fh.read()
try:
# Faster to do to_text once on a long string than many
# times on smaller strings
data = to_text(b_data, errors='surrogate_or_strict').splitlines()
except UnicodeError:
# Handle non-utf8 in comment lines: https://github.com/ansible/ansible/issues/17593
data = []
for line in b_data.splitlines():
if line and line[0] in self.b_COMMENT_MARKERS:
# Replace is okay for comment lines
#data.append(to_text(line, errors='surrogate_or_replace'))
# Currently we only need these lines for accurate lineno in errors
data.append(u'')
else:
# Non-comment lines still have to be valid utf-8
data.append(to_text(line, errors='surrogate_or_strict'))
self._parse(data)
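The fallback above tolerates non-UTF-8 bytes as long as they are confined to comment lines, while appending an empty string per skipped line so error messages keep accurate line numbers. The same decode strategy as a self-contained sketch:

B_COMMENT_MARKERS = frozenset((b';', b'#'))

def decode_ini_lines(b_data):
    try:
        # Decoding one long string once is faster than decoding per line
        return b_data.decode('utf-8').splitlines()
    except UnicodeError:
        lines = []
        for b_line in b_data.splitlines():
            # [:1] keeps a one-byte slice on both py2 and py3
            # (b_line[0] would be an int on py3)
            if b_line and b_line[:1] in B_COMMENT_MARKERS:
                # Blank out undecodable comment lines but keep a placeholder
                # so line numbers in later errors stay accurate
                lines.append(u'')
            else:
                # Non-comment lines must still be valid utf-8; this raises
                # just like the strict path above
                lines.append(b_line.decode('utf-8'))
        return lines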
@ -89,7 +105,7 @@ class InventoryParser(object):
line = line.strip()
# Skip empty lines and comments
if line == '' or line.startswith(";") or line.startswith("#"):
if not line or line[0] in self._COMMENT_MARKERS:
continue
# Is this a [section] header? That tells us what group we're parsing

View file

@ -70,7 +70,7 @@ class InventoryParser(object):
# 'all' at the time it was created.
for group in self.groups.values():
if group.depth == 0 and group.name not in ('all', 'ungrouped'):
self.groups['all'].add_child_group(Group(group_name))
self.groups['all'].add_child_group(group)
def _parse_groups(self, group, group_data):

View file

@ -173,12 +173,6 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
else:
errors = 'strict'
if errors is None:
if PY3:
errors = 'surrogateescape'
else:
errors = 'replace'
if isinstance(obj, binary_type):
return obj.decode(encoding, errors)

View file

@ -35,8 +35,6 @@ from ansible.module_utils.network import to_list
from ansible.module_utils.shell import CliBase
from ansible.module_utils.netcli import Command
add_argument('show_command', dict(default='show running-config',
choices=['show running-config', 'more system:running-config']))
add_argument('context', dict(required=False))
@ -64,8 +62,13 @@ class Cli(CliBase):
def authorize(self, params, **kwargs):
passwd = params['auth_pass']
errors = self.shell.errors
# Disable errors (if already in enable mode)
self.shell.errors = []
cmd = Command('enable', prompt=self.NET_PASSWD_RE, response=passwd)
self.execute([cmd, 'no terminal pager'])
# Reapply error handling
self.shell.errors = errors
def change_context(self, params):
context = params['context']
@ -88,10 +91,16 @@ class Cli(CliBase):
responses = self.execute(cmds)
return responses[1:]
def get_config(self, include_defaults=False):
def get_config(self, include=None):
if include not in [None, 'defaults', 'passwords']:
raise ValueError('include must be one of None, defaults, passwords')
cmd = 'show running-config'
if include_defaults:
cmd += ' all'
if include == 'passwords':
cmd = 'more system:running-config'
elif include == 'defaults':
cmd = 'show running-config all'
else:
cmd = 'show running-config'
return self.run_commands(cmd)[0]
def load_config(self, commands):

View file

@ -27,7 +27,7 @@ import copy
import importlib
import inspect
from distutils.version import LooseVersion
from packaging.version import Version
from os.path import expanduser
from ansible.module_utils.basic import *
@ -72,9 +72,18 @@ AZURE_FAILED_STATE = "Failed"
HAS_AZURE = True
HAS_AZURE_EXC = None
HAS_MSRESTAZURE = True
HAS_MSRESTAZURE_EXC = None
# NB: packaging issues sometimes cause msrestazure not to be installed, check it separately
try:
from msrest.serialization import Serializer
except ImportError as exc:
HAS_MSRESTAZURE_EXC = exc
HAS_MSRESTAZURE = False
try:
from enum import Enum
from msrest.serialization import Serializer
from msrestazure.azure_exceptions import CloudError
from azure.mgmt.network.models import PublicIPAddress, NetworkSecurityGroup, SecurityRule, NetworkInterface, \
NetworkInterfaceIPConfiguration, Subnet
@ -92,7 +101,6 @@ except ImportError as exc:
HAS_AZURE_EXC = exc
HAS_AZURE = False
def azure_id_to_dict(id):
pieces = re.sub(r'^\/', '', id).split('/')
result = {}
@ -112,14 +120,6 @@ AZURE_EXPECTED_VERSIONS = dict(
AZURE_MIN_RELEASE = '2.0.0rc5'
def check_client_version(client_name, client_version, expected_version):
# Pinning Azure modules to 2.0.0rc5.
if LooseVersion(client_version) != LooseVersion(expected_version):
self.fail("Installed {0} client version is {1}. The supported version is {2}. Try "
"`pip install azure=={3}`".format(client_name, client_version, expected_version,
AZURE_MIN_RELEASE))
class AzureRMModuleBase(object):
def __init__(self, derived_arg_spec, bypass_checks=False, no_log=False,
@ -150,8 +150,12 @@ class AzureRMModuleBase(object):
supports_check_mode=supports_check_mode,
required_if=merged_required_if)
if not HAS_MSRESTAZURE:
self.fail("Do you have msrestazure installed? Try `pip install msrestazure`"
"- {0}".format(HAS_MSRESTAZURE_EXC))
if not HAS_AZURE:
self.fail("Do you have azure=={1} installed? Try `pip install azure=={1}`"
self.fail("Do you have azure>={1} installed? Try `pip install 'azure>={1}' --upgrade`"
"- {0}".format(HAS_AZURE_EXC, AZURE_MIN_RELEASE))
self._network_client = None
@ -192,6 +196,13 @@ class AzureRMModuleBase(object):
res = self.exec_module(**self.module.params)
self.module.exit_json(**res)
def check_client_version(self, client_name, client_version, expected_version):
# Ensure Azure modules are at least 2.0.0rc5.
if Version(client_version) < Version(expected_version):
self.fail("Installed {0} client version is {1}. The supported version is {2}. Try "
"`pip install azure>={3} --upgrade`".format(client_name, client_version, expected_version,
AZURE_MIN_RELEASE))
def exec_module(self, **kwargs):
self.fail("Error: {0} failed to implement exec_module method.".format(self.__class__.__name__))
@ -241,12 +252,12 @@ class AzureRMModuleBase(object):
new_tags = copy.copy(tags) if isinstance(tags, dict) else dict()
changed = False
if isinstance(self.module.params.get('tags'), dict):
for key, value in self.module.params['tags'].iteritems():
for key, value in self.module.params['tags'].items():
if not new_tags.get(key) or new_tags[key] != value:
changed = True
new_tags[key] = value
if isinstance(tags, dict):
for key, value in tags.iteritems():
for key, value in tags.items():
if not self.module.params['tags'].get(key):
new_tags.pop(key)
changed = True
@ -319,7 +330,7 @@ class AzureRMModuleBase(object):
def _get_env_credentials(self):
env_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.iteritems():
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
env_credentials[attribute] = os.environ.get(env_variable, None)
if env_credentials['profile']:
@ -338,7 +349,7 @@ class AzureRMModuleBase(object):
self.log('Getting credentials')
arg_credentials = dict()
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.iteritems():
for attribute, env_variable in AZURE_CREDENTIAL_ENV_MAPPING.items():
arg_credentials[attribute] = params.get(attribute, None)
# try module params
@ -574,7 +585,7 @@ class AzureRMModuleBase(object):
def storage_client(self):
self.log('Getting storage client...')
if not self._storage_client:
check_client_version('storage', storage_client_version, AZURE_EXPECTED_VERSIONS['storage_client_version'])
self.check_client_version('storage', storage_client_version, AZURE_EXPECTED_VERSIONS['storage_client_version'])
self._storage_client = StorageManagementClient(self.azure_credentials, self.subscription_id)
self._register('Microsoft.Storage')
return self._storage_client
@ -583,7 +594,7 @@ class AzureRMModuleBase(object):
def network_client(self):
self.log('Getting network client')
if not self._network_client:
check_client_version('network', network_client_version, AZURE_EXPECTED_VERSIONS['network_client_version'])
self.check_client_version('network', network_client_version, AZURE_EXPECTED_VERSIONS['network_client_version'])
self._network_client = NetworkManagementClient(self.azure_credentials, self.subscription_id)
self._register('Microsoft.Network')
return self._network_client
@ -592,7 +603,7 @@ class AzureRMModuleBase(object):
def rm_client(self):
self.log('Getting resource manager client')
if not self._resource_client:
check_client_version('resource', resource_client_version, AZURE_EXPECTED_VERSIONS['resource_client_version'])
self.check_client_version('resource', resource_client_version, AZURE_EXPECTED_VERSIONS['resource_client_version'])
self._resource_client = ResourceManagementClient(self.azure_credentials, self.subscription_id)
return self._resource_client
@ -600,7 +611,7 @@ class AzureRMModuleBase(object):
def compute_client(self):
self.log('Getting compute client')
if not self._compute_client:
check_client_version('compute', compute_client_version, AZURE_EXPECTED_VERSIONS['compute_client_version'])
self.check_client_version('compute', compute_client_version, AZURE_EXPECTED_VERSIONS['compute_client_version'])
self._compute_client = ComputeManagementClient(self.azure_credentials, self.subscription_id)
self._register('Microsoft.Compute')
return self._compute_client
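packaging.version understands PEP 440 pre-release tags, so '2.0.0rc5' correctly sorts before '2.0.0', which LooseVersion does not guarantee; the comparison also relaxes from exact pinning (!=) to a minimum bound (<). A hedged sketch of the check outside the module class, with RuntimeError standing in for self.fail():

from packaging.version import Version

AZURE_MIN_RELEASE = '2.0.0rc5'

def check_client_version(client_name, client_version, expected_version):
    if Version(client_version) < Version(expected_version):
        raise RuntimeError(
            "Installed %s client version is %s. The supported version is %s. "
            "Try `pip install azure>=%s --upgrade`"
            % (client_name, client_version, expected_version, AZURE_MIN_RELEASE))

check_client_version('compute', '2.0.0', '2.0.0rc5')  # passes: 2.0.0 > 2.0.0rc5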

View file

@ -5,6 +5,7 @@
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# Copyright (c), Toshio Kuratomi <tkuratomi@ansible.com> 2016
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
@ -143,6 +144,8 @@ from ansible.module_utils.six import (PY2, PY3, b, binary_type, integer_types,
from ansible.module_utils.six.moves import map, reduce
from ansible.module_utils._text import to_native, to_bytes, to_text
PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)
_NUMBERTYPES = tuple(list(integer_types) + [float])
# Deprecated compat. Only kept in case another module used these names. Using ansible.module_utils.six is preferred.
@ -634,6 +637,7 @@ class AnsibleModule(object):
see library/* for examples
'''
self._name = os.path.basename(__file__) #initialize name until we can parse from options
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
@ -713,7 +717,7 @@ class AnsibleModule(object):
self._set_defaults(pre=False)
if not self.no_log and self._verbosity >= 3:
if not self.no_log:
self._log_invocation()
# finally, make sure we're in a sane working dir
@ -1664,16 +1668,17 @@ class AnsibleModule(object):
# TODO: generalize a separate log function and make log_invocation use it
# Sanitize possible password argument when logging.
log_args = dict()
passwd_keys = ['password', 'login_password']
for param in self.params:
canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', False)
arg_type = arg_opts.get('type', 'str')
if self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
elif param in passwd_keys:
# try to capture all passwords/passphrase named fields
elif arg_type != 'bool' and PASSWORD_MATCH.search(param):
log_args[param] = 'NOT_LOGGING_PASSWORD'
else:
param_val = self.params[param]
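The regex widens sanitization from the old two-entry passwd_keys list to any parameter whose name merely looks password-like. A quick check of what it does and does not match (pattern copied from the hunk above):

import re

PASSWORD_MATCH = re.compile(r'^(?:.+[-_\s])?pass(?:[-_\s]?(?:word|phrase|wrd|wd)?)(?:[-_\s].+)?$', re.I)

for name in ('password', 'login_password', 'passphrase', 'db_passwd', 'PASS'):
    assert PASSWORD_MATCH.search(name), name   # sanitized in the log

for name in ('passenger', 'compass', 'bypass_checks'):
    assert not PASSWORD_MATCH.search(name), name  # ordinary params, logged as-is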
@ -1843,7 +1848,7 @@ class AnsibleModule(object):
(filename, algorithm, ', '.join(AVAILABLE_HASH_ALGORITHMS)))
blocksize = 64 * 1024
infile = open(filename, 'rb')
infile = open(os.path.realpath(filename), 'rb')
block = infile.read(blocksize)
while block:
digest_method.update(block)
@ -1924,18 +1929,6 @@ class AnsibleModule(object):
creating = not os.path.exists(b_dest)
try:
login_name = os.getlogin()
except OSError:
# not having a tty can cause the above to fail, so
# just get the LOGNAME environment variable instead
login_name = os.environ.get('LOGNAME', None)
# if the original login_name doesn't match the currently
# logged-in user, or if the SUDO_USER environment variable
# is set, then this user has switched their credentials
switched_user = login_name and login_name != pwd.getpwuid(os.getuid())[0] or os.environ.get('SUDO_USER')
try:
# Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
os.rename(b_src, b_dest)
@ -1959,19 +1952,28 @@ class AnsibleModule(object):
except (OSError, IOError):
e = get_exception()
self.fail_json(msg='The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), e))
except TypeError:
# We expect that this is happening because python3.4.x and
# below can't handle byte strings in mkstemp(). Traceback
# would end in something like:
# file = _os.path.join(dir, pre + name + suf)
# TypeError: can't concat bytes to str
self.fail_json(msg='Failed creating temp file for atomic move. This usually happens when using Python3 less than Python3.5. Please use Python2.x or Python3.5 or greater.', exception=sys.exc_info())
b_tmp_dest_name = to_bytes(tmp_dest_name, errors='surrogate_or_strict')
try:
try:
# close tmp file handle before file operations to prevent text file busy errors on vboxfs synced folders (windows host)
os.close(tmp_dest_fd)
# leaves tmp file behind when sudo and not root
if switched_user and os.getuid() != 0:
# leaves tmp file behind when sudo and not root
try:
shutil.move(b_src, b_tmp_dest_name)
except OSError:
# cleanup will happen by 'rm' of tempdir
# copy2 will preserve some metadata
shutil.copy2(b_src, b_tmp_dest_name)
else:
shutil.move(b_src, b_tmp_dest_name)
if self.selinux_enabled():
self.set_context_if_different(
b_tmp_dest_name, context, False)
@ -1986,12 +1988,14 @@ class AnsibleModule(object):
try:
os.rename(b_tmp_dest_name, b_dest)
except (shutil.Error, OSError, IOError):
e = get_exception()
if unsafe_writes:
self._unsafe_writes(b_tmp_dest_name, b_dest, get_exception())
self._unsafe_writes(b_tmp_dest_name, b_dest, e)
else:
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, exception))
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
except (shutil.Error, OSError, IOError):
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, exception))
e = get_exception()
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
finally:
self.cleanup(b_tmp_dest_name)
@ -2001,8 +2005,12 @@ class AnsibleModule(object):
umask = os.umask(0)
os.umask(umask)
os.chmod(b_dest, DEFAULT_PERM & ~umask)
if switched_user:
os.chown(b_dest, os.getuid(), os.getgid())
try:
os.chown(b_dest, os.geteuid(), os.getegid())
except OSError:
# We're okay with trying our best here. If the user is not
# root (or old Unices) they won't be able to chown.
pass
if self.selinux_enabled():
# rename might not preserve context
@ -2030,7 +2038,7 @@ class AnsibleModule(object):
else:
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, exception))
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None):
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None, umask=None, encoding='utf-8', errors='surrogate_or_strict'):
'''
Execute a command, returns rc, stdout, and stderr.
@ -2052,8 +2060,27 @@ class AnsibleModule(object):
:kw prompt_regex: Regex string (not a compiled regex) which can be
used to detect prompts in the stdout which would otherwise cause
the execution to hang (especially if no input data is specified)
:kwarg environ_update: dictionary to *update* os.environ with
:kw environ_update: dictionary to *update* os.environ with
:kw umask: Umask to be used when running the command. Default None
:kw encoding: Since we return native strings, on python3 we need to
know the encoding to use to transform from bytes to text. If you
want to always get bytes back, use encoding=None. The default is
"utf-8". This does not affect transformation of strings given as
args.
:kw errors: Since we return native strings, on python3 we need to
transform stdout and stderr from bytes to text. If the bytes are
undecodable in the ``encoding`` specified, then use this error
handler to deal with them. The default is ``surrogate_or_strict``
which means that the bytes will be decoded using the
surrogateescape error handler if available (available on all
python3 versions we support) otherwise a UnicodeError traceback
will be raised. This does not affect transformations of strings
given as args.
:returns: A 3-tuple of return code (integer), stdout (native string),
and stderr (native string). On python2, stdout and stderr are both
byte strings. On python3, stdout and stderr are text strings converted
according to the encoding and errors parameters. If you want byte
strings on python3, use encoding=None to turn decoding to text off.
'''
shell = False
@ -2167,14 +2194,13 @@ class AnsibleModule(object):
stderr=subprocess.PIPE,
)
if cwd and os.path.isdir(cwd):
kwargs['cwd'] = cwd
# store the pwd
prev_dir = os.getcwd()
# make sure we're in the right working directory
if cwd and os.path.isdir(cwd):
cwd = os.path.abspath(os.path.expanduser(cwd))
kwargs['cwd'] = cwd
try:
os.chdir(cwd)
except (OSError, IOError):
@ -2186,13 +2212,8 @@ class AnsibleModule(object):
old_umask = os.umask(umask)
try:
if self._debug:
if isinstance(args, list):
running = ' '.join(args)
else:
running = args
self.log('Executing: ' + running)
self.log('Executing: ' + clean_args)
cmd = subprocess.Popen(args, **kwargs)
# the communication logic here is essentially taken from that
@ -2225,6 +2246,10 @@ class AnsibleModule(object):
# if we're checking for prompts, do it now
if prompt_re:
if prompt_re.search(stdout) and not data:
if encoding:
stdout = to_native(stdout, encoding=encoding, errors=errors)
else:
stdout = stdout
return (257, stdout, "A prompt was encountered while running a command, but no input data was specified")
# only break out if no pipes are left to read or
# the pipes are completely read and
@ -2246,9 +2271,11 @@ class AnsibleModule(object):
rc = cmd.returncode
except (OSError, IOError):
e = get_exception()
self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(e)))
self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
except Exception:
e = get_exception()
self.log("Error Executing CMD:%s Exception:%s" % (clean_args,to_native(traceback.format_exc())))
self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
# Restore env settings
@ -2268,6 +2295,9 @@ class AnsibleModule(object):
# reset the pwd
os.chdir(prev_dir)
if encoding is not None:
return (rc, to_native(stdout, encoding=encoding, errors=errors),
to_native(stderr, encoding=encoding, errors=errors))
return (rc, stdout, stderr)
def append_to_file(self, filename, str):
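With the new encoding/errors parameters a caller decides between text and raw bytes on Python 3. A usage sketch, assuming module is an already-initialized AnsibleModule:

# Default: stdout/stderr come back as native strings, decoded as utf-8,
# with surrogateescape covering undecodable bytes where available
rc, out, err = module.run_command(['uname', '-a'])

# encoding=None turns decoding off entirely; callers get raw bytes back,
# useful when the output is binary or of unknown encoding
rc, b_out, b_err = module.run_command(['cat', '/tmp/blob'], encoding=None)

# A lenient decode for callers that prefer replacement chars to a traceback
rc, out, err = module.run_command(['lsb_release', '-a'], errors='surrogate_or_replace')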

View file

@ -28,6 +28,7 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import time
from ansible.module_utils.six import iteritems
try:
from cs import CloudStack, CloudStackException, read_config
@ -148,7 +149,7 @@ class AnsibleCloudStack(object):
def has_changed(self, want_dict, current_dict, only_keys=None):
for key, value in want_dict.iteritems():
for key, value in want_dict.items():
# Optionally limit by a list of keys
if only_keys and key not in only_keys:
@ -343,6 +344,9 @@ class AnsibleCloudStack(object):
zone = self.module.params.get('zone')
zones = self.cs.listZones()
if not zones:
self.module.fail_json(msg="No zones available. Please create a zone first")
# use the first zone if no zone param given
if not zone:
self.zone = zones['zone'][0]
@ -510,12 +514,12 @@ class AnsibleCloudStack(object):
if resource:
returns = self.common_returns.copy()
returns.update(self.returns)
for search_key, return_key in returns.iteritems():
for search_key, return_key in returns.items():
if search_key in resource:
self.result[return_key] = resource[search_key]
# Bad bad API does not always return int when it should.
for search_key, return_key in self.returns_to_int.iteritems():
for search_key, return_key in self.returns_to_int.items():
if search_key in resource:
self.result[return_key] = int(resource[search_key])
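dict.iteritems() no longer exists on Python 3, which is why the same mechanical swap to .items() repeats across these utility files; .items() returns a list on py2 and a cheap view on py3. Where the py2 list copy matters, six offers an iterator version:

from ansible.module_utils.six import iteritems

want = {'state': 'present', 'zone': 'ch-gva-2'}

# Portable and simple: .items() exists on both major versions
for key, value in want.items():
    print(key, value)

# Equivalent via six, avoiding the intermediate list on py2
for key, value in iteritems(want):
    print(key, value)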

View file

@ -42,8 +42,9 @@ def get_config(module):
if not contents:
contents = module.config.get_config()
module.params['config'] = contents
return NetworkConfig(indent=1, contents=contents[0])
return NetworkConfig(indent=1, contents=contents[0])
else:
return NetworkConfig(indent=1, contents=contents)
def get_sublevel_config(running_config, module):
@ -54,11 +55,13 @@ def get_sublevel_config(running_config, module):
contents = obj.children
contents[:0] = module.params['parents']
indent = 0
for c in contents:
if isinstance(c, str):
current_config_contents.append(c)
current_config_contents.append(c.rjust(len(c) + indent, ' '))
if isinstance(c, ConfigLine):
current_config_contents.append(c.raw)
indent = indent + 1
sublevel_config = '\n'.join(current_config_contents)
return sublevel_config

View file

@ -38,28 +38,25 @@ from ansible.module_utils.netcfg import NetworkConfig, ConfigLine, ignore_line,
def get_config(module):
contents = module.params['config']
if not contents:
contents = module.config.get_config()
module.params['config'] = contents
return Dellos6NetworkConfig(indent=0, contents=contents[0])
return Dellos6NetworkConfig(indent=0, contents=contents[0])
else:
return Dellos6NetworkConfig(indent=0, contents=contents)
def get_sublevel_config(running_config, module):
contents = list()
current_config_contents = list()
sublevel_config = Dellos6NetworkConfig(indent=0)
obj = running_config.get_object(module.params['parents'])
if obj:
contents = obj.children
for c in contents:
if isinstance(c, ConfigLine):
current_config_contents.append(c.raw)
sublevel_config.add(current_config_contents, module.params['parents'])
return sublevel_config
@ -68,6 +65,7 @@ def os6_parse(lines, indent=None, comment_tokens=None):
re.compile(r'^vlan.*$'),
re.compile(r'^stack.*$'),
re.compile(r'^interface.*$'),
re.compile(r'datacenter-bridging.*$'),
re.compile(r'line (console|telnet|ssh).*$'),
re.compile(r'ip ssh !(server).*$'),
re.compile(r'ip (dhcp|vrf).*$'),
@ -85,54 +83,66 @@ def os6_parse(lines, indent=None, comment_tokens=None):
re.compile(r'banner motd.*$'),
re.compile(r'openflow.*$'),
re.compile(r'support-assist.*$'),
re.compile(r'template.*$'),
re.compile(r'address-family.*$'),
re.compile(r'spanning-tree mst configuration.*$'),
re.compile(r'logging.*$'),
re.compile(r'(radius-server|tacacs-server) host.*$')]
childline = re.compile(r'^exit$')
config = list()
inSubLevel = False
parent = None
children = list()
subcommandcount = 0
parent = list()
children = []
parent_match = False
for line in str(lines).split('\n'):
text = str(re.sub(r'([{};])', '', line)).strip()
cfg = ConfigLine(text)
cfg.raw = line
if not text or ignore_line(text, comment_tokens):
parent = None
children = list()
inSubLevel = False
parent = list()
children = []
continue
if inSubLevel is False:
else:
parent_match = False
# handle sublevel parent
for pr in sublevel_cmds:
if pr.match(line):
parent = cfg
config.append(parent)
inSubLevel = True
if len(parent) != 0:
cfg.parents.extend(parent)
parent.append(cfg)
config.append(cfg)
if children:
children.insert(len(parent) - 1, [])
children[len(parent) - 2].append(cfg)
parent_match = True
continue
if parent is None:
# handle exit
if childline.match(line):
if children:
parent[len(children) - 1].children.extend(children[len(children) - 1])
if len(children) > 1:
parent[len(children) - 2].children.extend(parent[len(children) - 1].children)
cfg.parents.extend(parent)
children.pop()
parent.pop()
if not children:
children = list()
if parent:
cfg.parents.extend(parent)
parent = list()
config.append(cfg)
# handle sublevel children
elif parent_match is False and len(parent) > 0:
if not children:
cfglist = [cfg]
children.append(cfglist)
else:
children[len(parent) - 1].append(cfg)
cfg.parents.extend(parent)
config.append(cfg)
# handle global commands
elif not parent:
config.append(cfg)
# handle sub level commands
elif inSubLevel and childline.match(line):
parent.children = children
inSubLevel = False
children = list()
parent = None
# handle top level commands
elif inSubLevel:
children.append(cfg)
cfg.parents = [parent]
config.append(cfg)
else: # global level
config.append(cfg)
return config
@ -141,6 +151,18 @@ class Dellos6NetworkConfig(NetworkConfig):
def load(self, contents):
self._config = os6_parse(contents, self.indent, DEFAULT_COMMENT_TOKENS)
def diff_line(self, other, path=None):
diff = list()
for item in self.items:
if str(item) == "exit":
for diff_item in diff:
if item.parents == diff_item.parents:
diff.append(item)
break
elif item not in other:
diff.append(item)
return diff
class Cli(CliBase):
@ -160,11 +182,9 @@ class Cli(CliBase):
re.compile(r"[^\r\n]+ not found", re.I),
re.compile(r"'[^']' +returned error code: ?\d+")]
def connect(self, params, **kwargs):
super(Cli, self).connect(params, kickstart=False, **kwargs)
def authorize(self, params, **kwargs):
passwd = params['auth_pass']
self.run_commands(
@ -172,7 +192,6 @@ class Cli(CliBase):
)
self.run_commands('terminal length 0')
def configure(self, commands, **kwargs):
cmds = ['configure terminal']
cmds.extend(to_list(commands))
@ -181,17 +200,13 @@ class Cli(CliBase):
responses.pop(0)
return responses
def get_config(self, **kwargs):
return self.execute(['show running-config'])
def load_config(self, commands, **kwargs):
return self.configure(commands)
def save_config(self):
self.execute(['copy running-config startup-config'])
Cli = register_transport('cli', default=True)(Cli)

View file

@ -42,8 +42,9 @@ def get_config(module):
if not contents:
contents = module.config.get_config()
module.params['config'] = contents
return NetworkConfig(indent=1, contents=contents[0])
return NetworkConfig(indent=1, contents=contents[0])
else:
return NetworkConfig(indent=1, contents=contents)
def get_sublevel_config(running_config, module):
@ -54,11 +55,13 @@ def get_sublevel_config(running_config, module):
contents = obj.children
contents[:0] = module.params['parents']
indent = 0
for c in contents:
if isinstance(c, str):
current_config_contents.append(c)
current_config_contents.append(c.rjust(len(c) + indent, ' '))
if isinstance(c, ConfigLine):
current_config_contents.append(c.raw)
indent = indent + 1
sublevel_config = '\n'.join(current_config_contents)
return sublevel_config

View file

@ -21,10 +21,10 @@ import re
import json
import sys
import copy
from distutils.version import LooseVersion
from urlparse import urlparse
from ansible.module_utils.basic import *
from ansible.module_utils.basic import AnsibleModule, BOOLEANS_TRUE, BOOLEANS_FALSE
from ansible.module_utils.six.moves.urllib.parse import urlparse
HAS_DOCKER_PY = True
HAS_DOCKER_ERROR = None
@ -446,8 +446,7 @@ class AnsibleDockerClient(Client):
'''
self.log("Pulling image %s:%s" % (name, tag))
try:
for line in self.pull(name, tag=tag, stream=True):
line = json.loads(line)
for line in self.pull(name, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('error'):
if line.get('errorDetail'):
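docker-py can decode the JSON progress stream itself: with decode=True every yielded line is already a dict, so the per-line json.loads() disappears. A usage sketch, assuming client is a connected docker-py Client:

for line in client.pull('busybox', tag='latest', stream=True, decode=True):
    if line.get('error'):
        raise RuntimeError(line.get('errorDetail', {}).get('message', line['error']))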

View file

@ -52,7 +52,7 @@ try:
except:
HAS_LOOSE_VERSION = False
from ansible.module_utils.six import string_types
from ansible.module_utils.six import string_types, binary_type, text_type
class AnsibleAWSError(Exception):
pass
@ -232,8 +232,8 @@ def get_aws_connection_info(module, boto3=False):
boto_params['validate_certs'] = validate_certs
for param, value in boto_params.items():
if isinstance(value, str):
boto_params[param] = unicode(value, 'utf-8', 'strict')
if isinstance(value, binary_type):
boto_params[param] = text_type(value, 'utf-8', 'strict')
return region, ec2_url, boto_params
@ -343,7 +343,7 @@ def camel_dict_to_snake_dict(camel_dict):
snake_dict = {}
for k, v in camel_dict.iteritems():
for k, v in camel_dict.items():
if isinstance(v, dict):
snake_dict[camel_to_snake(k)] = camel_dict_to_snake_dict(v)
elif isinstance(v, list):
@ -378,7 +378,7 @@ def ansible_dict_to_boto3_filter_list(filters_dict):
"""
filters_list = []
for k,v in filters_dict.iteritems():
for k,v in filters_dict.items():
filter_dict = {'Name': k}
if isinstance(v, string_types):
filter_dict['Values'] = [v]
@ -443,7 +443,7 @@ def ansible_dict_to_boto3_tag_list(tags_dict):
"""
tags_list = []
for k,v in tags_dict.iteritems():
for k,v in tags_dict.items():
tags_list.append({'Key': k, 'Value': v})
return tags_list

View file

@ -35,6 +35,7 @@ from ansible.module_utils.network import add_argument, register_transport, to_li
from ansible.module_utils.netcli import Command
from ansible.module_utils.shell import CliBase
from ansible.module_utils.urls import fetch_url, url_argument_spec
from ansible.module_utils._text import to_native
EAPI_FORMATS = ['json', 'text']
@ -50,7 +51,7 @@ class EosConfigMixin(object):
cmds = ['configure terminal']
cmds.extend(to_list(commands))
cmds.append('end')
responses = self.execute(commands)
responses = self.execute(cmds)
return responses[1:-1]
def get_config(self, include_defaults=False, **kwargs):
@ -60,6 +61,12 @@ class EosConfigMixin(object):
return self.execute([cmd])[0]
def load_config(self, config, commit=False, replace=False):
if self.supports_sessions():
return self.load_config_session(config, commit, replace)
else:
return self.configure(config)
def load_config_session(self, config, commit=False, replace=False):
""" Loads the configuration into the remote device
"""
session = 'ansible_%s' % int(time.time())
@ -82,7 +89,7 @@ class EosConfigMixin(object):
self.execute(['no configure session %s' % session])
except NetworkError:
exc = get_exception()
if 'timeout trying to send command' in exc.message:
if 'timeout trying to send command' in to_native(exc):
# try to get control back and get out of config mode
if isinstance(self, Cli):
self.execute(['\x03', 'end'])
@ -116,6 +123,17 @@ class EosConfigMixin(object):
commands = ['configure session %s' % session, 'abort']
self.execute(commands)
def supports_sessions(self):
try:
if isinstance(self, Eapi):
self.execute(['show configuration sessions'], output='text')
else:
self.execute('show configuration sessions')
return True
except NetworkError:
return False
class Eapi(EosConfigMixin):
@ -148,6 +166,7 @@ class Eapi(EosConfigMixin):
self.url_args.params['url_username'] = params['username']
self.url_args.params['url_password'] = params['password']
self.url_args.params['validate_certs'] = params['validate_certs']
self.url_args.params['timeout'] = params['timeout']
if params['use_ssl']:
proto = 'https'
@ -187,10 +206,11 @@ class Eapi(EosConfigMixin):
data = json.dumps(body)
headers = {'Content-Type': 'application/json-rpc'}
timeout = self.url_args.params['timeout']
response, headers = fetch_url(
self.url_args, self.url, data=data, headers=headers,
method='POST'
method='POST', timeout=timeout
)
if headers['status'] != 200:

View file

@ -143,7 +143,7 @@ class ExoDns(object):
def has_changed(self, want_dict, current_dict, only_keys=None):
changed = False
for key, value in want_dict.iteritems():
for key, value in want_dict.items():
# Optionally limit by a list of keys
if only_keys and key not in only_keys:
continue

View file

@ -34,29 +34,8 @@ import pwd
from ansible.module_utils.basic import get_all_subclasses
from ansible.module_utils.six import PY3, iteritems
from ansible.module_utils._text import to_native
# py2 vs py3; replace with six via ansiballz
try:
# python2
import ConfigParser as configparser
except ImportError:
# python3
import configparser
try:
# python2
from StringIO import StringIO
except ImportError:
# python3
from io import StringIO
try:
# python2
from string import maketrans
except ImportError:
# python3
maketrans = str.maketrans # TODO: is this really identical?
from ansible.module_utils.six.moves import configparser, StringIO, reduce
from ansible.module_utils._text import to_native, to_text
try:
import selinux
@ -261,9 +240,8 @@ class Facts(object):
# try to read it as json first
# if that fails read it with ConfigParser
# if that fails, skip it
rc, out, err = self.module.run_command(fn)
try:
out = out.decode('utf-8', 'strict')
rc, out, err = self.module.run_command(fn)
except UnicodeError:
fact = 'error loading fact - output of running %s was not utf-8' % fn
local[fact_base] = fact
@ -350,11 +328,15 @@ class Facts(object):
else:
proc_1 = os.path.basename(proc_1)
# The ps command above may return "COMMAND" if the user cannot read /proc, e.g. with grsecurity
if proc_1 == "COMMAND\n":
proc_1 = None
if proc_1 is not None:
proc_1 = to_native(proc_1)
proc_1 = proc_1.strip()
if proc_1 == 'init' or proc_1.endswith('sh'):
if proc_1 is not None and (proc_1 == 'init' or proc_1.endswith('sh')):
# many systems return init, so this cannot be trusted; if it ends in 'sh' it probably is a shell in a container
proc_1 = None
@ -394,9 +376,8 @@ class Facts(object):
def get_lsb_facts(self):
lsb_path = self.module.get_bin_path('lsb_release')
if lsb_path:
rc, out, err = self.module.run_command([lsb_path, "-a"])
rc, out, err = self.module.run_command([lsb_path, "-a"], errors='surrogate_or_replace')
if rc == 0:
out = out.decode('utf-8', 'replace')
self.facts['lsb'] = {}
for line in out.split('\n'):
if len(line) < 1 or ':' not in line:
@ -466,8 +447,7 @@ class Facts(object):
def get_caps_facts(self):
capsh_path = self.module.get_bin_path('capsh')
if capsh_path:
rc, out, err = self.module.run_command([capsh_path, "--print"])
out = out.decode('utf-8', 'replace')
rc, out, err = self.module.run_command([capsh_path, "--print"], errors='surrogate_or_replace')
enforced_caps = []
enforced = 'NA'
for line in out.split('\n'):
@ -648,13 +628,13 @@ class Distribution(object):
OS_FAMILY = dict(
RedHat = 'RedHat', Fedora = 'RedHat', CentOS = 'RedHat', Scientific = 'RedHat',
SLC = 'RedHat', Ascendos = 'RedHat', CloudLinux = 'RedHat', PSBM = 'RedHat',
OracleLinux = 'RedHat', OVS = 'RedHat', OEL = 'RedHat', Amazon = 'RedHat',
OracleLinux = 'RedHat', OVS = 'RedHat', OEL = 'RedHat', Amazon = 'RedHat', Virtuozzo = 'RedHat',
XenServer = 'RedHat', Ubuntu = 'Debian', Debian = 'Debian', Raspbian = 'Debian', Slackware = 'Slackware', SLES = 'Suse',
SLED = 'Suse', openSUSE = 'Suse', SuSE = 'Suse', SLES_SAP = 'Suse', Gentoo = 'Gentoo', Funtoo = 'Gentoo',
SLED = 'Suse', openSUSE = 'Suse', openSUSE_Tumbleweed = 'Suse', SuSE = 'Suse', SLES_SAP = 'Suse', SUSE_LINUX = 'Suse', Gentoo = 'Gentoo', Funtoo = 'Gentoo',
Archlinux = 'Archlinux', Manjaro = 'Archlinux', Mandriva = 'Mandrake', Mandrake = 'Mandrake', Altlinux = 'Altlinux',
Solaris = 'Solaris', Nexenta = 'Solaris', OmniOS = 'Solaris', OpenIndiana = 'Solaris',
SmartOS = 'Solaris', AIX = 'AIX', Alpine = 'Alpine', MacOSX = 'Darwin',
FreeBSD = 'FreeBSD', HPUX = 'HP-UX', openSUSE_Leap = 'Suse'
FreeBSD = 'FreeBSD', HPUX = 'HP-UX', openSUSE_Leap = 'Suse', Neon = 'Debian'
)
def __init__(self, module):
@ -1026,11 +1006,11 @@ class LinuxHardware(Hardware):
key = data[0]
if key in self.ORIGINAL_MEMORY_FACTS:
val = data[1].strip().split(' ')[0]
self.facts["%s_mb" % key.lower()] = int(val) / 1024
self.facts["%s_mb" % key.lower()] = int(val) // 1024
if key in self.MEMORY_FACTS:
val = data[1].strip().split(' ')[0]
memstats[key.lower()] = int(val) / 1024
memstats[key.lower()] = int(val) // 1024
if None not in (memstats.get('memtotal'), memstats.get('memfree')):
memstats['real:used'] = memstats['memtotal'] - memstats['memfree']
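On Python 3 the / operator always yields a float, so these memory facts would have silently changed type; // keeps the truncating integer arithmetic py2 had:

kib = 2049
print(kib / 1024)   # py3: 2.0009765625 (float); py2: 2 (int)
print(kib // 1024)  # 2 on both: floor division keeps the facts integral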
@ -1230,7 +1210,11 @@ class LinuxHardware(Hardware):
self.facts[k] = 'NA'
def _run_lsblk(self, lsblk_path):
args = ['--list', '--noheadings', '--paths', '--output', 'NAME,UUID']
# call lsblk and collect all uuids
# --exclude 2 makes lsblk ignore floppy disks, which are slower to answer than typical timeouts
# this uses the linux major device number
# for details see https://www.kernel.org/doc/Documentation/devices.txt
args = ['--list', '--noheadings', '--paths', '--output', 'NAME,UUID', '--exclude', '2']
cmd = [lsblk_path] + args
rc, out, err = self.module.run_command(cmd)
return rc, out, err
@ -1268,7 +1252,7 @@ class LinuxHardware(Hardware):
def _run_findmnt(self, findmnt_path):
args = ['--list', '--noheadings', '--notruncate']
cmd = [findmnt_path] + args
rc, out, err = self.module.run_command(cmd)
rc, out, err = self.module.run_command(cmd, errors='surrogate_or_replace')
return rc, out, err
def _find_bind_mounts(self):
@ -1280,7 +1264,6 @@ class LinuxHardware(Hardware):
rc, out, err = self._run_findmnt(findmnt_path)
if rc != 0:
return bind_mounts
out = out.decode('utf-8', 'replace')
# find bind mounts, in case /etc/mtab is a symlink to /proc/mounts
for line in out.splitlines():
@ -1359,8 +1342,7 @@ class LinuxHardware(Hardware):
self.facts['devices'] = {}
lspci = self.module.get_bin_path('lspci')
if lspci:
rc, pcidata, err = self.module.run_command([lspci, '-D'])
pcidata = pcidata.decode('utf-8', 'replace')
rc, pcidata, err = self.module.run_command([lspci, '-D'], errors='surrogate_or_replace')
else:
pcidata = None
@ -1557,10 +1539,10 @@ class SunOSHardware(Hardware):
reserved = int(out.split()[5][:-1])
used = int(out.split()[8][:-1])
free = int(out.split()[10][:-1])
self.facts['swapfree_mb'] = free / 1024
self.facts['swaptotal_mb'] = (free + used) / 1024
self.facts['swap_allocated_mb'] = allocated / 1024
self.facts['swap_reserved_mb'] = reserved / 1024
self.facts['swapfree_mb'] = free // 1024
self.facts['swaptotal_mb'] = (free + used) // 1024
self.facts['swap_allocated_mb'] = allocated // 1024
self.facts['swap_reserved_mb'] = reserved // 1024
@timeout(10)
def get_mount_facts(self):
@ -1589,7 +1571,6 @@ class OpenBSDHardware(Hardware):
- devices
"""
platform = 'OpenBSD'
DMESG_BOOT = '/var/run/dmesg.boot'
def populate(self):
self.sysctl = self.get_sysctl()
@ -1631,8 +1612,8 @@ class OpenBSDHardware(Hardware):
# 0 0 0 47512 28160 51 0 0 0 0 0 1 0 116 89 17 0 1 99
rc, out, err = self.module.run_command("/usr/bin/vmstat")
if rc == 0:
self.facts['memfree_mb'] = int(out.splitlines()[-1].split()[4]) / 1024
self.facts['memtotal_mb'] = int(self.sysctl['hw.usermem']) / 1024 / 1024
self.facts['memfree_mb'] = int(out.splitlines()[-1].split()[4]) // 1024
self.facts['memtotal_mb'] = int(self.sysctl['hw.usermem']) // 1024 // 1024
# Get swapctl info. swapctl output looks like:
# total: 69268 1K-blocks allocated, 0 used, 69268 available
@ -1640,26 +1621,26 @@ class OpenBSDHardware(Hardware):
# total: 69268k bytes allocated = 0k used, 69268k available
rc, out, err = self.module.run_command("/sbin/swapctl -sk")
if rc == 0:
swaptrans = maketrans(' ', ' ')
data = out.split()
self.facts['swapfree_mb'] = int(data[-2].translate(swaptrans, "kmg")) / 1024
self.facts['swaptotal_mb'] = int(data[1].translate(swaptrans, "kmg")) / 1024
swaptrans = { ord(u'k'): None, ord(u'm'): None, ord(u'g'): None}
data = to_text(out, errors='surrogate_or_strict').split()
self.facts['swapfree_mb'] = int(data[-2].translate(swaptrans)) // 1024
self.facts['swaptotal_mb'] = int(data[1].translate(swaptrans)) // 1024
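Python 2's two-argument str.translate(table, deletechars) is gone in Python 3; once the input is text, a mapping of code points to None deletes characters on both. The swapctl parsing in isolation, fed a canned output line:

swaptrans = {ord(u'k'): None, ord(u'm'): None, ord(u'g'): None}

data = u'total: 69268k bytes allocated = 0k used, 69268k available'.split()
swapfree_mb = int(data[-2].translate(swaptrans)) // 1024
swaptotal_mb = int(data[1].translate(swaptrans)) // 1024
print(swapfree_mb, swaptotal_mb)  # 67 67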
def get_processor_facts(self):
processor = []
dmesg_boot = get_file_content(OpenBSDHardware.DMESG_BOOT)
if not dmesg_boot:
rc, dmesg_boot, err = self.module.run_command("/sbin/dmesg")
i = 0
for line in dmesg_boot.splitlines():
if line.split(' ', 1)[0] == 'cpu%i:' % i:
processor.append(line.split(' ', 1)[1])
i = i + 1
processor_count = i
for i in range(int(self.sysctl['hw.ncpu'])):
processor.append(self.sysctl['hw.model'])
self.facts['processor'] = processor
self.facts['processor_count'] = processor_count
# I found no way to figure out the number of Cores per CPU in OpenBSD
self.facts['processor_cores'] = 'NA'
# The following is partly a lie because there is no reliable way to
# determine the number of physical CPUs in the system. We can only
# query the number of logical CPUs, which hides the number of cores.
# On amd64/i386 we could try to inspect the smt/core/package lines in
# dmesg, however even those have proven to be unreliable.
# So take a shortcut and report the logical number of processors in
# 'processor_count' and 'processor_cores' and leave it at that.
self.facts['processor_count'] = self.sysctl['hw.ncpu']
self.facts['processor_cores'] = self.sysctl['hw.ncpu']
def get_device_facts(self):
devices = []
@ -1718,8 +1699,8 @@ class FreeBSDHardware(Hardware):
pagecount = int(data[1])
if 'vm.stats.vm.v_free_count' in line:
freecount = int(data[1])
self.facts['memtotal_mb'] = pagesize * pagecount / 1024 / 1024
self.facts['memfree_mb'] = pagesize * freecount / 1024 / 1024
self.facts['memtotal_mb'] = pagesize * pagecount // 1024 // 1024
self.facts['memfree_mb'] = pagesize * freecount // 1024 // 1024
# Get swapinfo. swapinfo output looks like:
# Device 1M-blocks Used Avail Capacity
# /dev/ada0p3 314368 0 314368 0%
@ -1730,8 +1711,8 @@ class FreeBSDHardware(Hardware):
lines.pop()
data = lines[-1].split()
if data[0] != 'Device':
self.facts['swaptotal_mb'] = int(data[1]) / 1024
self.facts['swapfree_mb'] = int(data[3]) / 1024
self.facts['swaptotal_mb'] = int(data[1]) // 1024
self.facts['swapfree_mb'] = int(data[3]) // 1024
@timeout(10)
def get_mount_facts(self):
@ -1860,7 +1841,7 @@ class NetBSDHardware(Hardware):
key = data[0]
if key in NetBSDHardware.MEMORY_FACTS:
val = data[1].strip().split(' ')[0]
self.facts["%s_mb" % key.lower()] = int(val) / 1024
self.facts["%s_mb" % key.lower()] = int(val) // 1024
@timeout(10)
def get_mount_facts(self):
@ -1930,8 +1911,8 @@ class AIX(Hardware):
pagecount = int(data[0])
if 'free pages' in line:
freecount = int(data[0])
self.facts['memtotal_mb'] = pagesize * pagecount / 1024 / 1024
self.facts['memfree_mb'] = pagesize * freecount / 1024 / 1024
self.facts['memtotal_mb'] = pagesize * pagecount // 1024 // 1024
self.facts['memfree_mb'] = pagesize * freecount // 1024 // 1024
# Get swapinfo. swapinfo output looks like:
# Device 1M-blocks Used Avail Capacity
# /dev/ada0p3 314368 0 314368 0%
@ -2072,12 +2053,12 @@ class HPUX(Hardware):
pagesize = 4096
rc, out, err = self.module.run_command("/usr/bin/vmstat | tail -1", use_unsafe_shell=True)
data = int(re.sub(' +',' ',out).split(' ')[5].strip())
self.facts['memfree_mb'] = pagesize * data / 1024 / 1024
self.facts['memfree_mb'] = pagesize * data // 1024 // 1024
if self.facts['architecture'] == '9000/800':
try:
rc, out, err = self.module.run_command("grep Physical /var/adm/syslog/syslog.log")
data = re.search('.*Physical: ([0-9]*) Kbytes.*',out).groups()[0].strip()
self.facts['memtotal_mb'] = int(data) / 1024
self.facts['memtotal_mb'] = int(data) // 1024
except AttributeError:
#For systems where memory details aren't sent to syslog or the log has rotated, use parsed
#adb output. Unfortunately /dev/kmem doesn't have world-read, so this only works as root.
@ -2168,11 +2149,11 @@ class Darwin(Hardware):
self.facts['processor_cores'] = self.sysctl['hw.physicalcpu']
def get_memory_facts(self):
self.facts['memtotal_mb'] = int(self.sysctl['hw.memsize']) / 1024 / 1024
self.facts['memtotal_mb'] = int(self.sysctl['hw.memsize']) // 1024 // 1024
rc, out, err = self.module.run_command("sysctl hw.usermem")
if rc == 0:
self.facts['memfree_mb'] = int(out.splitlines()[-1].split()[1]) / 1024 / 1024
self.facts['memfree_mb'] = int(out.splitlines()[-1].split()[1]) // 1024 // 1024
class Network(Facts):
@ -2254,8 +2235,7 @@ class LinuxNetwork(Network):
continue
if v == 'v6' and not socket.has_ipv6:
continue
rc, out, err = self.module.run_command(command[v])
out = out.decode('utf-8', 'replace')
rc, out, err = self.module.run_command(command[v], errors='surrogate_or_replace')
if not out:
# v6 routing may result in
# RTNETLINK answers: Invalid argument
@ -2425,12 +2405,10 @@ class LinuxNetwork(Network):
ip_path = self.module.get_bin_path("ip")
args = [ip_path, 'addr', 'show', 'primary', device]
rc, stdout, stderr = self.module.run_command(args)
primary_data = stdout.decode('utf-8', 'replace')
rc, primary_data, stderr = self.module.run_command(args, errors='surrogate_or_replace')
args = [ip_path, 'addr', 'show', 'secondary', device]
rc, stdout, stderr = self.module.run_command(args)
secondary_data = stdout.decode('utf-8', 'decode')
rc, secondary_data, stderr = self.module.run_command(args, errors='surrogate_or_replace')
parse_ip_output(primary_data)
parse_ip_output(secondary_data, secondary=True)
@ -2452,8 +2430,7 @@ class LinuxNetwork(Network):
ethtool_path = self.module.get_bin_path("ethtool")
if ethtool_path:
args = [ethtool_path, '-k', device]
rc, stdout, stderr = self.module.run_command(args)
stdout = stdout.decode('utf-8', 'replace')
rc, stdout, stderr = self.module.run_command(args, errors='surrogate_or_replace')
if rc == 0:
for line in stdout.strip().split('\n'):
if not line or line.endswith(":"):
@ -3365,21 +3342,22 @@ class SunOSVirtual(Virtual):
else:
smbios = self.module.get_bin_path('smbios')
rc, out, err = self.module.run_command(smbios)
if rc == 0:
for line in out.split('\n'):
if 'VMware' in line:
self.facts['virtualization_type'] = 'vmware'
self.facts['virtualization_role'] = 'guest'
elif 'Parallels' in line:
self.facts['virtualization_type'] = 'parallels'
self.facts['virtualization_role'] = 'guest'
elif 'VirtualBox' in line:
self.facts['virtualization_type'] = 'virtualbox'
self.facts['virtualization_role'] = 'guest'
elif 'HVM domU' in line:
self.facts['virtualization_type'] = 'xen'
self.facts['virtualization_role'] = 'guest'
if smbios:
rc, out, err = self.module.run_command(smbios)
if rc == 0:
for line in out.split('\n'):
if 'VMware' in line:
self.facts['virtualization_type'] = 'vmware'
self.facts['virtualization_role'] = 'guest'
elif 'Parallels' in line:
self.facts['virtualization_type'] = 'parallels'
self.facts['virtualization_role'] = 'guest'
elif 'VirtualBox' in line:
self.facts['virtualization_type'] = 'virtualbox'
self.facts['virtualization_role'] = 'guest'
elif 'HVM domU' in line:
self.facts['virtualization_type'] = 'xen'
self.facts['virtualization_role'] = 'guest'
class Ohai(Facts):
"""

View file

@ -111,6 +111,7 @@ class Netconf(object):
try:
self.device = Device(host, **kwargs)
self.device.open()
self.device.timeout = params['timeout']
except ConnectError:
exc = get_exception()
self.raise_exc('unable to connect to %s: %s' % (host, str(exc)))
@ -183,8 +184,8 @@ class Netconf(object):
merge = False
overwrite = False
elif overwrite:
merge = True
overwrite = False
merge = False
overwrite = True
else:
merge = True
overwrite = False
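The fix stops an explicit overwrite from being silently downgraded to a merge. A sketch of the corrected mapping from one action name to the (merge, overwrite) flag pair, assuming the three actions this code distinguishes:

def load_action(action='merge'):
    if action == 'replace':
        return False, False
    if action == 'overwrite':
        # the old code returned (True, False) here, i.e. a plain merge
        return False, True
    return True, False  # default: merge

assert load_action('overwrite') == (False, True)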

View file

@ -28,6 +28,7 @@
import os
import hmac
import re
try:
import urlparse
@ -41,23 +42,26 @@ except ImportError:
HASHED_KEY_MAGIC = "|1|"
def add_git_host_key(module, url, accept_hostkey=True, create_dir=True):
""" idempotently add a git url hostkey """
if is_ssh_url(url):
fqdn = get_fqdn(url)
fqdn, port = get_fqdn_and_port(url)
if fqdn:
known_host = check_hostkey(module, fqdn)
if not known_host:
if accept_hostkey:
rc, out, err = add_host_key(module, fqdn, create_dir=create_dir)
rc, out, err = add_host_key(module, fqdn, port=port, create_dir=create_dir)
if rc != 0:
module.fail_json(msg="failed to add %s hostkey: %s" % (fqdn, out + err))
else:
module.fail_json(msg="%s has an unknown hostkey. Set accept_hostkey to True or manually add the hostkey prior to running the git module" % fqdn)
module.fail_json(msg="%s has an unknown hostkey. Set accept_hostkey to True "
"or manually add the hostkey prior to running the git module" % fqdn)
def is_ssh_url(url):
@ -70,45 +74,51 @@ def is_ssh_url(url):
return True
return False
def get_fqdn(repo_url):
""" chop the hostname out of a url """
def get_fqdn_and_port(repo_url):
result = None
""" chop the hostname and port out of a url """
fqdn = None
port = None
ipv6_re = re.compile(r'(\[[^]]*\])(?::([0-9]+))?')
if "@" in repo_url and "://" not in repo_url:
# most likely an user@host:path or user@host/path type URL
repo_url = repo_url.split("@", 1)[1]
if repo_url.startswith('['):
result = repo_url.split(']', 1)[0] + ']'
match = ipv6_re.match(repo_url)
# For this type of URL, colon specifies the path, not the port
if match:
fqdn, path = match.groups()
elif ":" in repo_url:
result = repo_url.split(":")[0]
fqdn = repo_url.split(":")[0]
elif "/" in repo_url:
result = repo_url.split("/")[0]
fqdn = repo_url.split("/")[0]
elif "://" in repo_url:
# this should be something we can parse with urlparse
parts = urlparse.urlparse(repo_url)
# parts[1] will be empty on python2.4 on ssh:// or git:// urls, so
# ensure we actually have a parts[1] before continuing.
if parts[1] != '':
result = parts[1]
if "@" in result:
result = result.split("@", 1)[1]
fqdn = parts[1]
if "@" in fqdn:
fqdn = fqdn.split("@", 1)[1]
match = ipv6_re.match(fqdn)
if match:
fqdn, port = match.groups()
elif ":" in fqdn:
fqdn, port = fqdn.split(":")[0:2]
return fqdn, port
if result[0].startswith('['):
result = result.split(']', 1)[0] + ']'
elif ":" in result:
result = result.split(":")[0]
return result
def check_hostkey(module, fqdn):
return not not_in_host_file(module, fqdn)
# this is a variant of code found in connection_plugins/paramiko.py and we should modify
# the paramiko code to import and use this.
def not_in_host_file(self, host):
if 'USER' in os.environ:
user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts")
else:
@ -159,7 +169,7 @@ def not_in_host_file(self, host):
return True
def add_host_key(module, fqdn, key_type="rsa", create_dir=False):
def add_host_key(module, fqdn, port=22, key_type="rsa", create_dir=False):
""" use ssh-keyscan to add the hostkey """
@ -184,10 +194,15 @@ def add_host_key(module, fqdn, key_type="rsa", create_dir=False):
elif not os.path.isdir(user_ssh_dir):
module.fail_json(msg="%s is not a directory" % user_ssh_dir)
this_cmd = "%s -t %s %s" % (keyscan_cmd, key_type, fqdn)
if port:
this_cmd = "%s -t %s -p %s %s" % (keyscan_cmd, key_type, port, fqdn)
else:
this_cmd = "%s -t %s %s" % (keyscan_cmd, key_type, fqdn)
rc, out, err = module.run_command(this_cmd)
# ssh-keyscan gives a 0 exit code and prints nothing on timeout
if rc != 0 or not out:
module.fail_json(msg='failed to get the hostkey for %s' % fqdn)
module.append_to_file(user_host_file, out)
return rc, out, err
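Returning the port alongside the host lets ssh-keyscan be pointed at non-default ports, and the regex handles bracketed IPv6 literals. A simplified, self-contained sketch of the parsing rules for the common git URL shapes (in scp-style user@host:path URLs the colon separates the path, never a port):

import re

ipv6_re = re.compile(r'(\[[^]]*\])(?::([0-9]+))?')

def fqdn_and_port(repo_url):
    fqdn, port = None, None
    if '@' in repo_url and '://' not in repo_url:
        # scp-like syntax: user@host:path -- colon is a path separator here
        rest = repo_url.split('@', 1)[1]
        match = ipv6_re.match(rest)
        if match:
            fqdn = match.group(1)
        else:
            fqdn = re.split(r'[:/]', rest, maxsplit=1)[0]
    elif '://' in repo_url:
        netloc = repo_url.split('://', 1)[1].split('/', 1)[0]
        netloc = netloc.split('@', 1)[-1]
        match = ipv6_re.match(netloc)
        if match:
            fqdn, port = match.groups()
        elif ':' in netloc:
            fqdn, port = netloc.split(':', 1)
        else:
            fqdn = netloc
    return fqdn, port

assert fqdn_and_port('git@github.com:ansible/ansible.git') == ('github.com', None)
assert fqdn_and_port('ssh://git@example.com:2222/repo.git') == ('example.com', '2222')
assert fqdn_and_port('ssh://git@[2001:db8::1]:2222/repo.git') == ('[2001:db8::1]', '2222')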

View file

@ -121,6 +121,8 @@ class LXDClient(object):
if resp_type == 'error':
if ok_error_codes is not None and resp_json['error_code'] in ok_error_codes:
return resp_json
if resp_json['error'] == "Certificate already in trust store":
return resp_json
self._raise_err_from_json(resp_json)
return resp_json
except socket.error as e:

View file

@ -32,6 +32,7 @@ from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback, get_exception
from ansible.module_utils.netcli import Cli, Command
from ansible.module_utils.netcfg import Config
from ansible.module_utils._text import to_native
NET_TRANSPORT_ARGS = dict(
host=dict(required=True),
@ -105,7 +106,7 @@ class NetworkModule(AnsibleModule):
self.fail_json(msg='Unknown transport or no default transport specified')
except (TypeError, NetworkError):
exc = get_exception()
self.fail_json(msg=exc.message)
self.fail_json(msg=to_native(exc))
if connect_on_load:
self.connect()
@ -147,17 +148,20 @@ class NetworkModule(AnsibleModule):
self.connection.connect(self.params)
if self.params['authorize']:
self.connection.authorize(self.params)
self.log('connected to %s:%s using %s' % (self.params['host'],
self.params['port'], self.params['transport']))
except NetworkError:
exc = get_exception()
self.fail_json(msg=exc.message)
self.fail_json(msg=to_native(exc))
def disconnect(self):
try:
if self.connected:
self.connection.disconnect()
self.log('disconnected from %s' % self.params['host'])
except NetworkError:
exc = get_exception()
self.fail_json(msg=exc.message)
self.fail_json(msg=to_native(exc))
def register_transport(transport, default=False):
def register(cls):

View file

@ -28,6 +28,7 @@
import os
from ansible.module_utils.six import iteritems
def openstack_argument_spec():
# DEPRECATED: This argument spec is only used for the deprecated old
@ -61,7 +62,7 @@ def openstack_argument_spec():
def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
ret = []
for (k, v) in addresses.iteritems():
for (k, v) in iteritems(addresses):
if key_name and k == key_name:
ret.extend([addrs['addr'] for addrs in v])
else:

View file

@ -27,4 +27,24 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This file is a placeholder for common code for the future split 'service' modules.
import os
import glob
def sysv_is_enabled(name):
return bool(glob.glob('/etc/rc?.d/S??%s' % name))
def get_sysv_script(name):
if name.startswith('/'):
result = name
else:
result = '/etc/init.d/%s' % name
return result
def sysv_exists(name):
return os.path.exists(get_sysv_script(name))
def fail_if_missing(module, found, service, msg=''):
if not found:
module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))

View file

@ -21,11 +21,6 @@ import re
import socket
import time
# py2 vs py3; replace with six via ansiballz
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
import paramiko
@ -36,6 +31,8 @@ except ImportError:
from ansible.module_utils.basic import get_exception
from ansible.module_utils.network import NetworkError
from ansible.module_utils.six.moves import StringIO
from ansible.module_utils._text import to_native
ANSI_RE = [
re.compile(r'(\x1b\[\?1h\x1b=)'),
@ -55,7 +52,6 @@ class ShellError(Exception):
def __init__(self, msg, command=None):
super(ShellError, self).__init__(msg)
self.message = msg
self.command = command
@ -106,6 +102,8 @@ class Shell(object):
raise ShellError("unable to resolve host name")
except AuthenticationException:
raise ShellError('Unable to authenticate to remote device')
except socket.timeout:
raise ShellError("timeout trying to connect to remote device")
except socket.error:
exc = get_exception()
if exc.errno == 60:
@ -157,7 +155,7 @@ class Shell(object):
raise ShellError("timeout trying to send command: %s" % cmd)
except socket.error:
exc = get_exception()
raise ShellError("problem sending command to host: %s" % exc.message)
raise ShellError("problem sending command to host: %s" % to_native(exc))
return responses
def close(self):
@ -230,7 +228,7 @@ class CliBase(object):
except ShellError:
exc = get_exception()
raise NetworkError(
msg='failed to connect to %s:%s' % (host, port), exc=str(exc)
msg='failed to connect to %s:%s' % (host, port), exc=to_native(exc)
)
self._connected = True
@ -249,7 +247,7 @@ class CliBase(object):
return self.shell.send(commands)
except ShellError:
exc = get_exception()
raise NetworkError(exc.message, commands=commands)
raise NetworkError(to_native(exc), commands=commands)
def run_commands(self, commands):
return self.execute(to_list(commands))

View file

@ -38,6 +38,7 @@ if PY3:
class_types = type,
text_type = str
binary_type = bytes
cmp = lambda a, b: (a > b) - (a < b)
MAXSIZE = sys.maxsize
else:
@ -46,6 +47,7 @@ else:
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
cmp = cmp
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
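cmp() was removed in Python 3; the shim above rebuilds it from the standard trick that (a > b) - (a < b) evaluates to -1, 0, or 1:

cmp = lambda a, b: (a > b) - (a < b)

assert cmp(1, 2) == -1
assert cmp('b', 'b') == 0
assert cmp((2, 0), (1, 9)) == 1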

View file

@ -105,8 +105,6 @@ import platform
import tempfile
import base64
from ansible.module_utils.basic import get_distribution, get_exception
try:
import httplib
except ImportError:
@ -115,7 +113,9 @@ except ImportError:
import ansible.module_utils.six.moves.urllib.request as urllib_request
import ansible.module_utils.six.moves.urllib.error as urllib_error
from ansible.module_utils.basic import get_distribution, get_exception
from ansible.module_utils.six import b
from ansible.module_utils._text import to_bytes, to_text
try:
# python3
@ -182,6 +182,8 @@ if not HAS_SSLCONTEXT and HAS_SSL:
del libssl
LOADED_VERIFY_LOCATIONS = set()
HAS_MATCH_HOSTNAME = True
try:
from ssl import match_hostname, CertificateError
@ -307,7 +309,7 @@ if not HAS_MATCH_HOSTNAME:
# ca cert, regardless of validity, for Python on Mac OS to use the
# keychain functionality in OpenSSL for validating SSL certificates.
# See: http://mercurial.selenic.com/wiki/CACertificates#Mac_OS_X_10.6_and_higher
DUMMY_CA_CERT = """-----BEGIN CERTIFICATE-----
b_DUMMY_CA_CERT = b("""-----BEGIN CERTIFICATE-----
MIICvDCCAiWgAwIBAgIJAO8E12S7/qEpMA0GCSqGSIb3DQEBBQUAMEkxCzAJBgNV
BAYTAlVTMRcwFQYDVQQIEw5Ob3J0aCBDYXJvbGluYTEPMA0GA1UEBxMGRHVyaGFt
MRAwDgYDVQQKEwdBbnNpYmxlMB4XDTE0MDMxODIyMDAyMloXDTI0MDMxNTIyMDAy
@ -324,7 +326,7 @@ MUB80IR6knq9K/tY+hvPsZer6eFMzO3JGkRFBh2kn6JdMDnhYGX7AXVHGflrwNQH
qFy+aenWXsC0ZvrikFxbQnX8GVtDADtVznxOi7XzFw7JOxdsVrpXgSN0eh0aMzvV
zKPZsZ2miVGclicJHzm5q080b1p/sZtuKIEZk6vZqEg=
-----END CERTIFICATE-----
"""
""")
#
# Exceptions
@ -510,9 +512,15 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True):
newheaders = dict((k,v) for k,v in req.headers.items()
if k.lower() not in ("content-length", "content-type")
)
try:
# Python 2-3.3
origin_req_host = req.get_origin_req_host()
except AttributeError:
# Python 3.4+
origin_req_host = req.origin_req_host
return urllib_request.Request(newurl,
headers=newheaders,
origin_req_host=req.get_origin_req_host(),
origin_req_host=origin_req_host,
unverifiable=True)
else:
raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
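The try/except above probes the Python 2 accessor first and falls back to the plain attribute that Python 3.4+ exposes; an equivalent idiom (a sketch over the same urllib Request object) is:

# prefer the old accessor when it exists, otherwise read the 3.4+ attribute
get_host = getattr(req, 'get_origin_req_host', None)
origin_req_host = get_host() if get_host else req.origin_req_host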
@ -568,21 +576,21 @@ class SSLValidationHandler(urllib_request.BaseHandler):
ca_certs = []
paths_checked = []
system = platform.system()
system = to_text(platform.system(), errors='surrogate_or_strict')
# build a list of paths to check for .crt/.pem files
# based on the platform type
paths_checked.append('/etc/ssl/certs')
if system == 'Linux':
if system == u'Linux':
paths_checked.append('/etc/pki/ca-trust/extracted/pem')
paths_checked.append('/etc/pki/tls/certs')
paths_checked.append('/usr/share/ca-certificates/cacert.org')
elif system == 'FreeBSD':
elif system == u'FreeBSD':
paths_checked.append('/usr/local/share/certs')
elif system == 'OpenBSD':
elif system == u'OpenBSD':
paths_checked.append('/etc/ssl')
elif system == 'NetBSD':
elif system == u'NetBSD':
ca_certs.append('/etc/openssl/certs')
elif system == 'SunOS':
elif system == u'SunOS':
paths_checked.append('/opt/local/etc/openssl/certs')
# fall back to a user-deployed cert in a standard
@ -590,10 +598,12 @@ class SSLValidationHandler(urllib_request.BaseHandler):
paths_checked.append('/etc/ansible')
tmp_fd, tmp_path = tempfile.mkstemp()
to_add_fd, to_add_path = tempfile.mkstemp()
to_add = False
# Write the dummy ca cert if we are running on Mac OS X
if system == 'Darwin':
os.write(tmp_fd, DUMMY_CA_CERT)
if system == u'Darwin':
os.write(tmp_fd, b_DUMMY_CA_CERT)
# Default Homebrew path for OpenSSL certs
paths_checked.append('/usr/local/etc/openssl')
@ -608,13 +618,21 @@ class SSLValidationHandler(urllib_request.BaseHandler):
if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt','.pem'):
try:
cert_file = open(full_path, 'rb')
os.write(tmp_fd, cert_file.read())
os.write(tmp_fd, b('\n'))
cert = cert_file.read()
cert_file.close()
os.write(tmp_fd, cert)
os.write(tmp_fd, b('\n'))
if full_path not in LOADED_VERIFY_LOCATIONS:
to_add = True
os.write(to_add_fd, cert)
os.write(to_add_fd, b('\n'))
LOADED_VERIFY_LOCATIONS.add(full_path)
except (OSError, IOError):
pass
return (tmp_path, paths_checked)
if not to_add:
to_add_path = None
return (tmp_path, to_add_path, paths_checked)
def validate_proxy_response(self, response, valid_codes=[200]):
'''
@ -643,17 +661,18 @@ class SSLValidationHandler(urllib_request.BaseHandler):
return False
return True
def _make_context(self, tmp_ca_cert_path):
def _make_context(self, to_add_ca_cert_path):
context = create_default_context()
context.load_verify_locations(tmp_ca_cert_path)
if to_add_ca_cert_path:
context.load_verify_locations(to_add_ca_cert_path)
return context
def http_request(self, req):
tmp_ca_cert_path, paths_checked = self.get_ca_certs()
tmp_ca_cert_path, to_add_ca_cert_path, paths_checked = self.get_ca_certs()
https_proxy = os.environ.get('https_proxy')
context = None
if HAS_SSLCONTEXT:
context = self._make_context(tmp_ca_cert_path)
context = self._make_context(to_add_ca_cert_path)
# Detect if 'no_proxy' environment variable is set and if our URL is included
use_proxy = self.detect_no_proxy(req.get_full_url())
@ -672,9 +691,14 @@ class SSLValidationHandler(urllib_request.BaseHandler):
s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
if proxy_parts.get('username'):
credentials = "%s:%s" % (proxy_parts.get('username',''), proxy_parts.get('password',''))
s.sendall('Proxy-Authorization: Basic %s\r\n' % credentials.encode('base64').strip())
s.sendall('\r\n')
connect_result = s.recv(4096)
s.sendall(b('Proxy-Authorization: Basic %s\r\n') % base64.b64encode(to_bytes(credentials, errors='surrogate_or_strict')).strip())
s.sendall(b('\r\n'))
connect_result = b("")
while connect_result.find(b("\r\n\r\n")) <= 0:
connect_result += s.recv(4096)
# 128 kilobytes of headers should be enough for everyone.
if len(connect_result) > 131072:
raise ProxyError('Proxy sent too verbose headers. Only 128KiB allowed.')
self.validate_proxy_response(connect_result)
if context:
ssl_s = context.wrap_socket(s, server_hostname=self.hostname)
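The loop above keeps reading until the blank line that terminates the proxy's response headers, bailing out if they exceed 128 KiB; the same pattern as a standalone helper (a minimal sketch, not from the source):

def read_headers(sock, limit=131072, bufsize=4096):
    # accumulate until the end-of-headers marker, enforcing a size cap
    data = b""
    while data.find(b"\r\n\r\n") == -1:
        chunk = sock.recv(bufsize)
        if not chunk:  # peer closed the connection before finishing headers
            break
        data += chunk
        if len(data) > limit:
            raise ValueError("proxy sent too many header bytes")
    return data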
@ -714,6 +738,14 @@ class SSLValidationHandler(urllib_request.BaseHandler):
except:
pass
try:
# clean up the temp file we created; don't worry
# if removal fails for some reason
if to_add_ca_cert_path:
os.remove(to_add_ca_cert_path)
except:
pass
return req
https_request = http_request
@ -793,9 +825,11 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
# use this username/password combination for urls
# for which `theurl` is a super-url
authhandler = urllib_request.HTTPBasicAuthHandler(passman)
digest_authhandler = urllib_request.HTTPDigestAuthHandler(passman)
# create the AuthHandler
handlers.append(authhandler)
handlers.append(digest_authhandler)
elif username and force_basic_auth:
headers["Authorization"] = basic_auth_header(username, password)
@ -836,6 +870,7 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
opener = urllib_request.build_opener(*handlers)
urllib_request.install_opener(opener)
data = to_bytes(data, nonstring='passthru')
if method:
if method.upper() not in ('OPTIONS','GET','HEAD','POST','PUT','DELETE','TRACE','CONNECT','PATCH'):
raise ConnectionError('invalid HTTP request method; %s' % method.upper())
@ -848,13 +883,14 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
if http_agent:
request.add_header('User-agent', http_agent)
# if we're ok with getting a 304, set the timestamp in the
# header, otherwise make sure we don't get a cached copy
if last_mod_time and not force:
# Cache control
# Either we directly force a cache refresh
if force:
request.add_header('cache-control', 'no-cache')
# or we do it if the original is more recent than our copy
elif last_mod_time:
tstamp = last_mod_time.strftime('%a, %d %b %Y %H:%M:%S +0000')
request.add_header('If-Modified-Since', tstamp)
else:
request.add_header('cache-control', 'no-cache')
# user defined headers now, which may override things we've set above
if headers:
@ -878,7 +914,10 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
def basic_auth_header(username, password):
return "Basic %s" % base64.b64encode("%s:%s" % (username, password))
"""Takes a username and password and returns a byte string suitable for
using as value of an Authorization header to do basic auth.
"""
return b("Basic %s") % base64.b64encode(to_bytes("%s:%s" % (username, password), errors='surrogate_or_strict'))
def url_argument_spec():
@ -893,7 +932,7 @@ def url_argument_spec():
use_proxy=dict(default='yes', type='bool'),
validate_certs=dict(default='yes', type='bool'),
url_username=dict(required=False),
url_password=dict(required=False),
url_password=dict(required=False, no_log=True),
force_basic_auth=dict(required=False, type='bool', default='no'),
)

@ -1 +1 @@
Subproject commit 6c4d71a7fab60601846363506bc8eebe9c52c240
Subproject commit ebdd66c2b6cb15e2364b2ecc41a9da9c1d02d64f

@ -1 +1 @@
Subproject commit df35d324d62e6034ab86db0fb4a56d3ca122d4b2
Subproject commit 0cfb1c4c3492045d891cdaa2bbb9636ec683636f


@ -31,7 +31,7 @@ from ansible.errors import AnsibleFileNotFound, AnsibleParserError, AnsibleError
from ansible.errors.yaml_strings import YAML_SYNTAX_ERROR
from ansible.module_utils.basic import is_executable
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.vault import VaultLib, is_encrypted, is_encrypted_file
from ansible.parsing.vault import VaultLib, b_HEADER, is_encrypted, is_encrypted_file
from ansible.parsing.quoting import unquote
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
@ -71,9 +71,9 @@ class DataLoader():
# initialize the vault stuff with an empty password
self.set_vault_password(None)
def set_vault_password(self, vault_password):
self._vault_password = vault_password
self._vault = VaultLib(password=vault_password)
def set_vault_password(self, b_vault_password):
self._b_vault_password = b_vault_password
self._vault = VaultLib(b_password=b_vault_password)
def load(self, data, file_name='<string>', show_content=True):
'''
@ -116,7 +116,9 @@ class DataLoader():
parsed_data = self._FILE_CACHE[file_name]
else:
# read the file contents and load the data structure from them
(file_data, show_content) = self._get_file_contents(file_name)
(b_file_data, show_content) = self._get_file_contents(file_name)
file_data = to_text(b_file_data, errors='surrogate_or_strict')
parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)
# cache the file contents for next time
@ -149,7 +151,7 @@ class DataLoader():
def _safe_load(self, stream, file_name=None):
''' Implements yaml.safe_load(), except using our custom loader class. '''
loader = AnsibleLoader(stream, file_name, self._vault_password)
loader = AnsibleLoader(stream, file_name, self._b_vault_password)
try:
return loader.get_single_data()
finally:
@ -178,7 +180,6 @@ class DataLoader():
data = self._vault.decrypt(data, filename=b_file_name)
show_content = False
data = to_text(data, errors='surrogate_or_strict')
return (data, show_content)
except (IOError, OSError) as e:
@ -300,6 +301,7 @@ class DataLoader():
result = test_path
else:
search = []
display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
for path in paths:
upath = unfrackpath(path)
b_upath = to_bytes(upath, errors='surrogate_or_strict')
@ -313,15 +315,21 @@ class DataLoader():
search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
search.append(os.path.join(b_mydir, b_source))
else:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b'tasks', b_source))
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != b_dirname:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b_source))
elif b_dirname not in b_source.split(b'/'):
# don't add dirname if user already is using it in source
search.append(os.path.join(b_upath, b_dirname, b_source))
if b_source.split(b'/')[0] != b_dirname:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b_source))
# always append basedir as last resort
search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != b_dirname:
search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
search.append(os.path.join(to_bytes(self.get_basedir()), b_source))
display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
@ -351,7 +359,7 @@ class DataLoader():
raise AnsibleError("Problem running vault password script %s (%s)."
" If this is not a script, remove the executable bit from the file." % (' '.join(this_path), to_native(e)))
stdout, stderr = p.communicate()
self.set_vault_password(stdout.strip('\r\n'))
self.set_vault_password(stdout.strip(b'\r\n'))
else:
try:
f = open(this_path, "rb")
@ -389,18 +397,21 @@ class DataLoader():
raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_native(file_path))
if not self._vault:
self._vault = VaultLib(password="")
self._vault = VaultLib(b_password="")
real_path = self.path_dwim(file_path)
try:
with open(to_bytes(real_path), 'rb') as f:
if is_encrypted_file(f):
# Limit how much of the file is read since we do not know
# whether this is a vault file and therefore it could be very
# large.
if is_encrypted_file(f, count=len(b_HEADER)):
# if the file is encrypted and no password was specified,
# the decrypt call would throw an error, but we check first
# since the decrypt function doesn't know the file name
data = f.read()
if not self._vault_password:
if not self._b_vault_password:
raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)
data = self._vault.decrypt(data, filename=real_path)


@ -83,9 +83,9 @@ try:
except ImportError:
pass
except Exception as e:
display.warning("Optional dependency 'cryptography' raised an exception, falling back to 'Crypto'")
display.vvvv("Optional dependency 'cryptography' raised an exception, falling back to 'Crypto'.")
import traceback
display.debug("Traceback from import of cryptography was {0}".format(traceback.format_exc()))
display.vvvv("Traceback from import of cryptography was {0}".format(traceback.format_exc()))
HAS_ANY_PBKDF2HMAC = HAS_PBKDF2 or HAS_PBKDF2HMAC
@ -164,8 +164,8 @@ def is_encrypted_file(file_obj, start_pos=0, count=-1):
class VaultLib:
def __init__(self, password):
self.b_password = to_bytes(password, errors='strict', encoding='utf-8')
def __init__(self, b_password):
self.b_password = to_bytes(b_password, errors='strict', encoding='utf-8')
self.cipher_name = None
self.b_version = b'1.1'
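VaultLib now takes the password as bytes, per the b_ naming convention used throughout this changeset, so callers encode first; a usage sketch:

from ansible.module_utils._text import to_bytes
from ansible.parsing.vault import VaultLib

vault = VaultLib(b_password=to_bytes(u'secret', errors='strict', encoding='utf-8'))
b_vaulttext = vault.encrypt(b'some plaintext')  # bytes in, vaulted bytes out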
@ -311,8 +311,8 @@ class VaultLib:
class VaultEditor:
def __init__(self, password):
self.vault = VaultLib(password)
def __init__(self, b_password):
self.vault = VaultLib(b_password)
# TODO: mv shred file stuff to its own class
def _shred_file_custom(self, tmp_path):
@ -398,18 +398,18 @@ class VaultEditor:
self._shred_file(tmp_path)
raise
tmpdata = self.read_data(tmp_path)
b_tmpdata = self.read_data(tmp_path)
# Do nothing if the content has not changed
if existing_data == tmpdata and not force_save:
if existing_data == b_tmpdata and not force_save:
self._shred_file(tmp_path)
return
# encrypt new data and write out to tmp
# An existing vaultfile will always be UTF-8,
# so decode to unicode here
enc_data = self.vault.encrypt(tmpdata.decode())
self.write_data(enc_data, tmp_path)
b_ciphertext = self.vault.encrypt(b_tmpdata)
self.write_data(b_ciphertext, tmp_path)
# shuffle tmp file into place
self.shuffle_files(tmp_path, filename)
@ -420,9 +420,9 @@ class VaultEditor:
# A file to be encrypted into a vaultfile could be any encoding
# so treat the contents as a byte string.
plaintext = self.read_data(filename)
ciphertext = self.vault.encrypt(plaintext)
self.write_data(ciphertext, output_file or filename)
b_plaintext = self.read_data(filename)
b_ciphertext = self.vault.encrypt(b_plaintext)
self.write_data(b_ciphertext, output_file or filename)
def decrypt_file(self, filename, output_file=None):
@ -475,7 +475,7 @@ class VaultEditor:
return plaintext
def rekey_file(self, filename, new_password):
def rekey_file(self, filename, b_new_password):
check_prereqs()
@ -486,7 +486,11 @@ class VaultEditor:
except AnsibleError as e:
raise AnsibleError("%s for %s" % (to_bytes(e),to_bytes(filename)))
new_vault = VaultLib(new_password)
# This is more or less an assert, see #18247
if b_new_password is None:
raise AnsibleError('The value for the new_password to rekey %s with is not valid' % filename)
new_vault = VaultLib(b_new_password)
new_ciphertext = new_vault.encrypt(plaintext)
self.write_data(new_ciphertext, filename)


@ -19,7 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from yaml.constructor import Constructor, ConstructorError
from yaml.constructor import SafeConstructor, ConstructorError
from yaml.nodes import MappingNode
from ansible.module_utils._text import to_bytes
@ -35,13 +35,13 @@ except ImportError:
display = Display()
class AnsibleConstructor(Constructor):
def __init__(self, file_name=None, vault_password=None):
self._vault_password = vault_password
class AnsibleConstructor(SafeConstructor):
def __init__(self, file_name=None, b_vault_password=None):
self._b_vault_password = b_vault_password
self._ansible_file_name = file_name
super(AnsibleConstructor, self).__init__()
self._vaults = {}
self._vaults['default'] = VaultLib(password=self._vault_password)
self._vaults['default'] = VaultLib(b_password=self._b_vault_password)
def construct_yaml_map(self, node):
data = AnsibleMapping()
@ -98,7 +98,7 @@ class AnsibleConstructor(Constructor):
value = self.construct_scalar(node)
ciphertext_data = to_bytes(value)
if self._vault_password is None:
if self._b_vault_password is None:
raise ConstructorError(None, None,
"found vault but no vault password provided", node.start_mark)


@ -25,6 +25,7 @@ from ansible.compat.six import PY3
from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
from ansible.vars.hostvars import HostVars
from ansible.vars.unsafe_proxy import AnsibleUnsafeText
class AnsibleDumper(yaml.SafeDumper):
@ -51,6 +52,11 @@ AnsibleDumper.add_representer(
represent_unicode,
)
AnsibleDumper.add_representer(
AnsibleUnsafeText,
represent_unicode,
)
AnsibleDumper.add_representer(
HostVars,
represent_hostvars,


@ -34,7 +34,7 @@ if HAVE_PYYAML_C:
class AnsibleLoader(CParser, AnsibleConstructor, Resolver):
def __init__(self, stream, file_name=None, vault_password=None):
CParser.__init__(self, stream)
AnsibleConstructor.__init__(self, file_name=file_name, vault_password=vault_password)
AnsibleConstructor.__init__(self, file_name=file_name, b_vault_password=vault_password)
Resolver.__init__(self)
else:
from yaml.composer import Composer
@ -48,5 +48,5 @@ else:
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
AnsibleConstructor.__init__(self, file_name=file_name, vault_password=vault_password)
AnsibleConstructor.__init__(self, file_name=file_name, b_vault_password=vault_password)
Resolver.__init__(self)


@ -132,3 +132,6 @@ class AnsibleVaultEncryptedUnicode(yaml.YAMLObject, AnsibleUnicode):
def __unicode__(self):
return unicode(self.data)
def encode(self, encoding=None, errors=None):
return self.data.encode(encoding, errors)


@ -21,11 +21,12 @@ __metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleParserError
from ansible.module_utils._text import to_text
from ansible.playbook.play import Play
from ansible.playbook.playbook_include import PlaybookInclude
from ansible.plugins import get_all_plugin_loaders
from ansible import constants as C
try:
from __main__ import display
@ -43,7 +44,7 @@ class Playbook:
# Entries in the datastructure of a playbook may
# be either a play or an include statement
self._entries = []
self._basedir = os.getcwd()
self._basedir = to_text(os.getcwd(), errors='surrogate_or_strict')
self._loader = loader
self._file_name = None


@ -111,7 +111,7 @@ class BaseMeta(type):
method = "_get_attr_%s" % attr_name
if method in src_dict or method in dst_dict:
getter = partial(_generic_g_method, attr_name)
elif '_get_parent_attribute' in dst_dict and value.inherit:
elif ('_get_parent_attribute' in dst_dict or '_get_parent_attribute' in src_dict) and value.inherit:
getter = partial(_generic_g_parent, attr_name)
else:
getter = partial(_generic_g, attr_name)
@ -131,7 +131,9 @@ class BaseMeta(type):
for parent in parents:
if hasattr(parent, '__dict__'):
_create_attrs(parent.__dict__, dst_dict)
_process_parents(parent.__bases__, dst_dict)
new_dst_dict = parent.__dict__.copy()
new_dst_dict.update(dst_dict)
_process_parents(parent.__bases__, new_dst_dict)
# create some additional class attributes
dct['_attributes'] = dict()
@ -480,7 +482,7 @@ class Base(with_metaclass(BaseMeta, object)):
except TypeError as e:
raise AnsibleParserError("Invalid variable name in vars specified for %s: %s" % (self.__class__.__name__, e), obj=ds)
def _extend_value(self, value, new_value):
def _extend_value(self, value, new_value, prepend=False):
'''
Will extend the value given with new_value (and will turn both
into lists if they are not so already). The values are run through
@ -492,7 +494,12 @@ class Base(with_metaclass(BaseMeta, object)):
if not isinstance(new_value, list):
new_value = [ new_value ]
return [i for i,_ in itertools.groupby(value + new_value) if i is not None]
if prepend:
combined = new_value + value
else:
combined = value + new_value
return [i for i,_ in itertools.groupby(combined) if i is not None]
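With prepend=True the new value is placed in front of the existing one, so in _get_parent_attribute the parent's entries now come first; the groupby pass still drops None and consecutive duplicates:

# value=['child'], new_value=['parent', 'parent', None]
# prepend=False -> ['child', 'parent']
# prepend=True  -> ['parent', 'child']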
def serialize(self):
'''


@ -49,6 +49,9 @@ class Block(Base, Become, Conditional, Taggable):
self._use_handlers = use_handlers
self._implicit = implicit
# end of role flag
self._eor = False
if task_include:
self._parent = task_include
elif parent_block:
@ -56,6 +59,9 @@ class Block(Base, Become, Conditional, Taggable):
super(Block, self).__init__()
def __repr__(self):
return "BLOCK(uuid=%s)(id=%s)(parent=%s)" % (self._uuid, id(self), self._parent)
def get_vars(self):
'''
Blocks do not store variables directly, however they may be a member
@ -175,6 +181,7 @@ class Block(Base, Become, Conditional, Taggable):
new_me = super(Block, self).copy()
new_me._play = self._play
new_me._use_handlers = self._use_handlers
new_me._eor = self._eor
if self._dep_chain is not None:
new_me._dep_chain = self._dep_chain[:]
@ -207,6 +214,7 @@ class Block(Base, Become, Conditional, Taggable):
data[attr] = getattr(self, attr)
data['dep_chain'] = self.get_dep_chain()
data['eor'] = self._eor
if self._role is not None:
data['role'] = self._role.serialize()
@ -234,6 +242,7 @@ class Block(Base, Become, Conditional, Taggable):
setattr(self, attr, data.get(attr))
self._dep_chain = data.get('dep_chain', None)
self._eor = data.get('eor', False)
# if there was a serialized role, unpack it too
role_data = data.get('role')
@ -255,17 +264,6 @@ class Block(Base, Become, Conditional, Taggable):
self._parent = p
self._dep_chain = self._parent.get_dep_chain()
def evaluate_conditional(self, templar, all_vars):
dep_chain = self.get_dep_chain()
if dep_chain:
for dep in dep_chain:
if not dep.evaluate_conditional(templar, all_vars):
return False
if self._parent is not None:
if not self._parent.evaluate_conditional(templar, all_vars):
return False
return super(Block, self).evaluate_conditional(templar, all_vars)
def set_loader(self, loader):
self._loader = loader
if self._parent:
@ -279,9 +277,9 @@ class Block(Base, Become, Conditional, Taggable):
dep.set_loader(loader)
def _get_attr_environment(self):
return self._get_parent_attribute('environment', extend=True)
return self._get_parent_attribute('environment', extend=True, prepend=True)
def _get_parent_attribute(self, attr, extend=False):
def _get_parent_attribute(self, attr, extend=False, prepend=False):
'''
Generic logic to get the attribute or parent attribute for a block value.
'''
@ -294,7 +292,7 @@ class Block(Base, Become, Conditional, Taggable):
try:
parent_value = getattr(self._parent, attr, None)
if extend:
value = self._extend_value(value, parent_value)
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
except AttributeError:
@ -303,7 +301,7 @@ class Block(Base, Become, Conditional, Taggable):
try:
parent_value = getattr(self._role, attr, None)
if extend:
value = self._extend_value(value, parent_value)
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
@ -313,7 +311,7 @@ class Block(Base, Become, Conditional, Taggable):
for dep in dep_chain:
dep_value = getattr(dep, attr, None)
if extend:
value = self._extend_value(value, dep_value)
value = self._extend_value(value, dep_value, prepend)
else:
value = dep_value
@ -325,7 +323,7 @@ class Block(Base, Become, Conditional, Taggable):
try:
parent_value = getattr(self._play, attr, None)
if extend:
value = self._extend_value(value, parent_value)
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
except AttributeError:


@ -19,14 +19,23 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import re
from jinja2.compiler import generate
from jinja2.exceptions import UndefinedError
from ansible.compat.six import text_type
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.playbook.attribute import FieldAttribute
from ansible.template import Templar
from ansible.template.safe_eval import safe_eval
from ansible.module_utils._text import to_native
LOOKUP_REGEX = re.compile(r'lookup\s*\(')
VALID_VAR_REGEX = re.compile("^[_A-Za-z][_a-zA-Z0-9]*$")
DEFINED_REGEX = re.compile(r'(hostvars\[.+\]|[\w_]+)\s+(not\s+is|is|is\s+not)\s+(defined|undefined)')
class Conditional:
'''
@ -51,6 +60,29 @@ class Conditional:
if not isinstance(value, list):
setattr(self, name, [ value ])
def _get_attr_when(self):
'''
Override for the 'when' getattr fetcher, used from Base.
'''
when = self._attributes['when']
if when is None:
when = []
if hasattr(self, '_get_parent_attribute'):
when = self._get_parent_attribute('when', extend=True, prepend=True)
return when
def extract_defined_undefined(self, conditional):
results = []
cond = conditional
m = DEFINED_REGEX.search(cond)
while m:
results.append(m.groups())
cond = cond[m.end():]
m = DEFINED_REGEX.search(cond)
return results
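extract_defined_undefined() repeatedly applies DEFINED_REGEX, collecting a (variable, logic, state) triple for every defined/undefined test in the conditional string; for instance (a sketch, assuming a bare Conditional instance can be constructed):

>>> Conditional().extract_defined_undefined("foo is defined and bar is not defined")
[('foo', 'is', 'defined'), ('bar', 'is not', 'defined')]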
def evaluate_conditional(self, templar, all_vars):
'''
Loops through the conditionals set on this object, returning
@ -73,7 +105,9 @@ class Conditional:
if not self._check_conditional(conditional, templar, all_vars):
return False
except Exception as e:
raise AnsibleError("The conditional check '%s' failed. The error was: %s" % (to_native(conditional), to_native(e)), obj=ds)
raise AnsibleError(
"The conditional check '%s' failed. The error was: %s" % (to_native(conditional), to_native(e)), obj=ds
)
return True
@ -88,21 +122,75 @@ class Conditional:
if conditional is None or conditional == '':
return True
if conditional in all_vars and '-' not in text_type(all_vars[conditional]):
# pull the "bare" var out, which allows for nested conditionals
# and things like:
# - assert:
# that:
# - item
# with_items:
# - 1 == 1
if conditional in all_vars and VALID_VAR_REGEX.match(conditional):
conditional = all_vars[conditional]
# make sure the templar is using the variables specified with this method
templar.set_available_variables(variables=all_vars)
try:
conditional = templar.template(conditional)
# if the conditional is "unsafe", disable lookups
disable_lookups = hasattr(conditional, '__UNSAFE__')
conditional = templar.template(conditional, disable_lookups=disable_lookups)
if not isinstance(conditional, text_type) or conditional == "":
return conditional
# a Jinja2 evaluation that results in something Python can eval!
# update the lookups flag, as the string returned above may now be unsafe
# and we don't want future templating calls to do unsafe things
disable_lookups |= hasattr(conditional, '__UNSAFE__')
# First, we do some low-level jinja2 parsing involving the AST format of the
# statement to ensure we don't do anything unsafe (using the disable_lookup flag above)
class CleansingNodeVisitor(ast.NodeVisitor):
def generic_visit(self, node, inside_call=False, inside_yield=False):
if isinstance(node, ast.Call):
inside_call = True
elif isinstance(node, ast.Yield):
inside_yield = True
elif isinstance(node, ast.Str):
if disable_lookups:
if inside_call and node.s.startswith("__"):
# calling things with a dunder is generally bad at this point...
raise AnsibleError(
"Invalid access found in the conditional: '%s'" % conditional
)
elif inside_yield:
# we're inside a yield, so recursively parse and traverse the AST
# of the result to catch forbidden syntax from executing
parsed = ast.parse(node.s, mode='exec')
cnv = CleansingNodeVisitor()
cnv.visit(parsed)
# iterate over all child nodes
for child_node in ast.iter_child_nodes(node):
self.generic_visit(
child_node,
inside_call=inside_call,
inside_yield=inside_yield
)
try:
e = templar.environment.overlay()
e.filters.update(templar._get_filters())
e.tests.update(templar._get_tests())
res = e._parse(conditional, None, None)
res = generate(res, e, None, None)
parsed = ast.parse(res, mode='exec')
cnv = CleansingNodeVisitor()
cnv.visit(parsed)
except Exception as e:
raise AnsibleError("Invalid conditional detected: %s" % to_native(e))
# and finally we generate and template the presented string and look at the resulting string
presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
conditional = templar.template(presented)
val = conditional.strip()
val = templar.template(presented, disable_lookups=disable_lookups).strip()
if val == "True":
return True
elif val == "False":
@ -110,14 +198,33 @@ class Conditional:
else:
raise AnsibleError("unable to evaluate conditional: %s" % original)
except (AnsibleUndefinedVariable, UndefinedError) as e:
# the templating failed, meaning most likely a
# variable was undefined. If we happened to be
# looking for an undefined variable, return True,
# otherwise fail
if "is undefined" in original:
return True
elif "is defined" in original:
return False
else:
raise AnsibleError("error while evaluating conditional (%s): %s" % (original, e))
# the templating failed, meaning most likely a variable was undefined. If we happened
# to be looking for an undefined variable, return True, otherwise fail
try:
# first we extract the variable name from the error message
var_name = re.compile(r"'(hostvars\[.+\]|[\w_]+)' is undefined").search(str(e)).groups()[0]
# next we extract all defined/undefined tests from the conditional string
def_undef = self.extract_defined_undefined(conditional)
# then we loop through these, comparing the error variable name against
# each def/undef test we found above. If there is a match, we determine
# whether the logic/state mean the variable should exist or not and return
# the corresponding True/False
for (du_var, logic, state) in def_undef:
# when we compare the var names, normalize quotes because something
# like hostvars['foo'] may be tested against hostvars["foo"]
if var_name.replace("'", '"') == du_var.replace("'", '"'):
# 'should exist' is an XOR test between a negation in the logic portion
# and the state (defined or undefined)
should_exist = ('not' in logic) != (state == 'defined')
if should_exist:
return False
else:
return True
# as nothing above matched the failed var name, re-raise here to
# trigger the AnsibleUndefinedVariable exception again below
raise
except Exception as new_e:
raise AnsibleUndefinedVariable(
"error while evaluating conditional (%s): %s" % (original, e)
)


@ -40,6 +40,8 @@ def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=Non
# we import here to prevent a circular dependency with imports
from ansible.playbook.block import Block
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
assert isinstance(ds, (list, type(None)))
@ -54,14 +56,17 @@ def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=Non
task_include=task_include,
use_handlers=use_handlers,
variable_manager=variable_manager,
loader=loader
loader=loader,
)
# Implicit blocks are created by bare tasks listed in a play without
# an explicit block statement. If we have two implicit blocks in a row,
# squash them down to a single block to save processing time later.
if b._implicit and len(block_list) > 0 and block_list[-1]._implicit:
for t in b.block:
t._block = block_list[-1]
if isinstance(t._parent, (TaskInclude, IncludeRole)):
t._parent._parent = block_list[-1]
else:
t._parent = block_list[-1]
block_list[-1].block.extend(b.block)
else:
block_list.append(b)
@ -197,7 +202,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
# the same fashion used by the on_include callback. We also do it here,
# because the recursive nature of helper methods means we may be loading
# nested includes, and we want the include order printed correctly
display.display("statically included: %s" % include_file, color=C.COLOR_SKIP)
display.vv("statically included: %s" % include_file)
except AnsibleFileNotFound:
if t.static or \
C.DEFAULT_TASK_INCLUDES_STATIC or \
@ -214,11 +219,13 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
task_list.append(t)
continue
ti_copy = t.copy(exclude_parent=True)
ti_copy._parent = block
included_blocks = load_list_of_blocks(
data,
play=play,
parent_block=None,
task_include=t.copy(),
task_include=ti_copy,
role=role,
use_handlers=use_handlers,
loader=loader,
@ -228,12 +235,12 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
# pop tags out of the include args, if they were specified there, and assign
# them to the include. If the include already had tags specified, we raise an
# error so that users know not to specify them both ways
tags = t.vars.pop('tags', [])
tags = ti_copy.vars.pop('tags', [])
if isinstance(tags, string_types):
tags = tags.split(',')
if len(tags) > 0:
if len(t.tags) > 0:
if len(ti_copy.tags) > 0:
raise AnsibleParserError(
"Include tasks should not specify tags in more than one way (both via args and directly on the task). " \
"Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
@ -242,7 +249,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
)
display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
else:
tags = t.tags[:]
tags = ti_copy.tags[:]
# now we extend the tags on each of the included blocks
for b in included_blocks:
@ -289,6 +296,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
(use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
(not needs_templating and ir.all_parents_static() and not ir.loop)
display.debug('Determined that if include_role static is %s' % str(is_static))
if is_static:
# uses compiled list from object
t = task_list.extend(ir.get_block_list(variable_manager=variable_manager, loader=loader))


@ -95,7 +95,9 @@ class Play(Base, Taggable, Become):
def __init__(self):
super(Play, self).__init__()
self._included_conditional = None
self._included_path = None
self._removed_hosts = []
self.ROLE_CACHE = {}
def __repr__(self):
@ -202,7 +204,7 @@ class Play(Base, Taggable, Become):
for prompt_data in new_ds:
if 'name' not in prompt_data:
display.deprecated("Using the 'short form' for vars_prompt has been deprecated")
for vname, prompt in prompt_data.iteritems():
for vname, prompt in prompt_data.items():
vars_prompts.append(dict(
name = vname,
prompt = prompt,
@ -327,5 +329,6 @@ class Play(Base, Taggable, Become):
def copy(self):
new_me = super(Play, self).copy()
new_me.ROLE_CACHE = self.ROLE_CACHE.copy()
new_me._included_conditional = self._included_conditional
new_me._included_path = self._included_path
return new_me


@ -31,6 +31,7 @@ import string
from ansible.compat.six import iteritems, string_types
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.utils.boolean import boolean
@ -84,38 +85,39 @@ MAGIC_VARIABLE_MAPPING = dict(
module_compression = ('ansible_module_compression',),
)
SU_PROMPT_LOCALIZATIONS = [
'Password',
'암호',
'パスワード',
'Adgangskode',
'Contraseña',
'Contrasenya',
'Hasło',
'Heslo',
'Jelszó',
'Lösenord',
'Mật khẩu',
'Mot de passe',
'Parola',
'Parool',
'Pasahitza',
'Passord',
'Passwort',
'Salasana',
'Sandi',
'Senha',
'Wachtwoord',
'ססמה',
'Лозинка',
'Парола',
'Пароль',
'गुप्तशब्द',
'शब्दकूट',
'సంకేతపదము',
'හස්පදය',
'密码',
'密碼',
b_SU_PROMPT_LOCALIZATIONS = [
to_bytes('Password'),
to_bytes('암호'),
to_bytes('パスワード'),
to_bytes('Adgangskode'),
to_bytes('Contraseña'),
to_bytes('Contrasenya'),
to_bytes('Hasło'),
to_bytes('Heslo'),
to_bytes('Jelszó'),
to_bytes('Lösenord'),
to_bytes('Mật khẩu'),
to_bytes('Mot de passe'),
to_bytes('Parola'),
to_bytes('Parool'),
to_bytes('Pasahitza'),
to_bytes('Passord'),
to_bytes('Passwort'),
to_bytes('Salasana'),
to_bytes('Sandi'),
to_bytes('Senha'),
to_bytes('Wachtwoord'),
to_bytes('ססמה'),
to_bytes('Лозинка'),
to_bytes('Парола'),
to_bytes('Пароль'),
to_bytes('गुप्तशब्द'),
to_bytes('शब्दकूट'),
to_bytes('సంకేతపదము'),
to_bytes('හස්පදය'),
to_bytes('密码'),
to_bytes('密碼'),
to_bytes('口令'),
]
TASK_ATTRIBUTE_OVERRIDES = (
@ -515,21 +517,24 @@ class PlayContext(Base):
elif self.become_method == 'su':
# passing a code ref to examine the prompt, as simple string comparison isn't good enough with su
def detect_su_prompt(data):
SU_PROMPT_LOCALIZATIONS_RE = re.compile("|".join(['(\w+\'s )?' + x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]), flags=re.IGNORECASE)
return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))
def detect_su_prompt(b_data):
b_password_string = b"|".join([b'(\w+\'s )?' + x for x in b_SU_PROMPT_LOCALIZATIONS])
# Colon or unicode fullwidth colon
b_password_string = b_password_string + to_bytes(u' ?(:|：) ?')
b_SU_PROMPT_LOCALIZATIONS_RE = re.compile(b_password_string, flags=re.IGNORECASE)
return bool(b_SU_PROMPT_LOCALIZATIONS_RE.match(b_data))
prompt = detect_su_prompt
becomecmd = '%s %s %s -c %s' % (exe, flags, self.become_user, pipes.quote(command))
elif self.become_method == 'pbrun':
prompt='assword:'
becomecmd = '%s -b %s -u %s %s' % (exe, flags, self.become_user, success_cmd)
prompt='Password:'
becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, success_cmd)
elif self.become_method == 'ksu':
def detect_ksu_prompt(data):
return re.match("Kerberos password for .*@.*:", data)
def detect_ksu_prompt(b_data):
return re.match(b"Kerberos password for .*@.*:", b_data)
prompt = detect_ksu_prompt
becomecmd = '%s %s %s -e %s' % (exe, self.become_user, flags, command)
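Both detectors now receive raw bytes from the connection, so the patterns are compiled as byte regexes; a quick check against the su detector defined above (a sketch, since detect_su_prompt is a closure inside this method):

assert detect_su_prompt(b'Password: ')
assert detect_su_prompt(to_bytes(u'Contrase\xf1a: '))      # localized prompts match too
assert not detect_su_prompt(b'not a password prompt')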


@ -49,6 +49,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
# import here to avoid a dependency loop
from ansible.playbook import Playbook
from ansible.playbook.play import Play
# first, we use the original parent method to correctly load the object
# via the load_data/preprocess_data system we normally use for other
@ -61,15 +62,6 @@ class PlaybookInclude(Base, Conditional, Taggable):
templar = Templar(loader=loader, variables=all_vars)
try:
forward_conditional = False
if not new_obj.evaluate_conditional(templar=templar, all_vars=all_vars):
return None
except AnsibleError:
# conditional evaluation raised an error, so we set a flag to indicate
# we need to forward the conditionals on to the included play(s)
forward_conditional = True
# then we use the object to load a Playbook
pb = Playbook(loader=loader)
@ -82,6 +74,11 @@ class PlaybookInclude(Base, Conditional, Taggable):
# finally, update each loaded playbook entry with any variables specified
# on the included playbook and/or any tags which may have been set
for entry in pb._entries:
# conditional includes on a playbook need a marker to skip gathering
if new_obj.when and isinstance(entry, Play):
entry._included_conditional = new_obj.when[:]
temp_vars = entry.vars.copy()
temp_vars.update(new_obj.vars)
param_tags = temp_vars.pop('tags', None)
@ -95,9 +92,9 @@ class PlaybookInclude(Base, Conditional, Taggable):
# Check to see if we need to forward the conditionals on to the included
# plays. If so, we can take a shortcut here and simply prepend them to
# those attached to each block (if any)
if forward_conditional:
for task_block in entry.pre_tasks + entry.roles + entry.tasks + entry.post_tasks:
task_block.when = self.when[:] + task_block.when
if new_obj.when:
for task_block in (entry.pre_tasks + entry.roles + entry.tasks + entry.post_tasks):
task_block._attributes['when'] = new_obj.when[:] + task_block.when[:]
return pb


@ -19,10 +19,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.six import iteritems
import collections
import os
from ansible.compat.six import iteritems, binary_type, text_type
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
@ -41,25 +41,54 @@ __all__ = ['Role', 'hash_params']
# the role due to the fact that it would require the use of self
# in a static method. This is also used in the base class for
# strategies (ansible/plugins/strategy/__init__.py)
def hash_params(params):
if not isinstance(params, dict):
if isinstance(params, list):
return frozenset(params)
"""
Construct a data structure of parameters that is hashable.
This requires changing any mutable data structures into immutable ones.
We chose a frozenset because role parameters have to be unique.
.. warning:: this does not handle unhashable scalars. Two things
mitigate that limitation:
1) There shouldn't be any unhashable scalars specified in the yaml
2) Our only choice would be to return an error anyway.
"""
# Any container is unhashable if it contains unhashable items (for
# instance, tuple() is a Hashable subclass but if it contains a dict, it
# cannot be hashed)
if isinstance(params, collections.Container) and not isinstance(params, (text_type, binary_type)):
if isinstance(params, collections.Mapping):
try:
# Optimistically hope the contents are all hashable
new_params = frozenset(params.items())
except TypeError:
new_params = set()
for k, v in params.items():
# Hash each entry individually
new_params.update((k, hash_params(v)))
new_params = frozenset(new_params)
elif isinstance(params, (collections.Set, collections.Sequence)):
try:
# Optimistically hope the contents are all hashable
new_params = frozenset(params)
except TypeError:
new_params = set()
for v in params:
# Hash each entry individually
new_params.update(hash_params(v))
new_params = frozenset(new_params)
else:
return params
else:
s = set()
for k,v in iteritems(params):
if isinstance(v, dict):
s.update((k, hash_params(v)))
elif isinstance(v, list):
things = []
for item in v:
things.append(hash_params(item))
s.update((k, tuple(things)))
else:
s.update((k, v))
return frozenset(s)
# This is just a guess.
new_params = frozenset(params)
return new_params
# Note: We do not handle unhashable scalars but our only choice would be
# to raise an error there anyway.
return frozenset((params,))
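hash_params() now normalizes any mapping or sequence into frozensets, so role parameters stay hashable and order-insensitive; e.g. (set ordering in the repr may vary):

>>> hash_params({'name': 'web', 'port': 80})
frozenset({('name', 'web'), ('port', 80)})
>>> hash_params(['a', 'b'])
frozenset({'a', 'b'})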
class Role(Base, Become, Conditional, Taggable):
@ -207,6 +236,8 @@ class Role(Base, Become, Conditional, Taggable):
main_file = self._resolve_main(file_path, main)
if self._loader.path_exists(main_file):
return self._loader.load_from_file(main_file)
elif main is not None:
raise AnsibleParserError("Could not find specified file in role: %s" % main)
return None
def _resolve_main(self, basepath, main=None):
@ -377,12 +408,14 @@ class Role(Base, Become, Conditional, Taggable):
dep_blocks = dep.compile(play=play, dep_chain=new_dep_chain)
block_list.extend(dep_blocks)
for task_block in self._task_blocks:
for idx, task_block in enumerate(self._task_blocks):
new_task_block = task_block.copy(exclude_parent=True)
if task_block._parent:
new_task_block._parent = task_block._parent.copy()
new_task_block._dep_chain = new_dep_chain
new_task_block._play = play
if idx == len(self._task_blocks) - 1:
new_task_block._eor = True
block_list.append(new_task_block)
return block_list

View file

@ -34,6 +34,12 @@ from ansible.playbook.taggable import Taggable
from ansible.template import Templar
from ansible.utils.path import unfrackpath
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['RoleDefinition']
@ -138,18 +144,22 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
# we always start the search for roles in the base directory of the playbook
role_search_paths = [
os.path.join(self._loader.get_basedir(), u'roles'),
self._loader.get_basedir(),
]
# also search in the configured roles path
if C.DEFAULT_ROLES_PATH:
role_search_paths.extend(C.DEFAULT_ROLES_PATH)
# finally, append the roles basedir, if it was set, so we can
# next, append the roles basedir, if it was set, so we can
# search relative to that directory for dependent roles
if self._role_basedir:
role_search_paths.append(self._role_basedir)
# finally as a last resort we look in the current basedir as set
# in the loader (which should be the playbook dir itself) but without
# the roles/ dir appended
role_search_paths.append(self._loader.get_basedir())
# create a templar class to template the dependency names, in
# case they contain variables
if self._variable_manager is not None:
@ -193,6 +203,11 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
# or make this list more automatic in some way so we don't have to
# remember to update it manually.
if key not in base_attribute_names or key in ('connection', 'port', 'remote_user'):
if key in ('connection', 'port', 'remote_user'):
display.deprecated("Using '%s' as a role param has been deprecated. " % key + \
"In the future, these values should be entered in the `vars:` " + \
"section for roles, but for now we'll store it as both a param and an attribute.")
role_def[key] = value
# this key does not match a field attribute, so it must be a role param
role_params[key] = value
else:


@ -28,6 +28,7 @@ from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.role.definition import RoleDefinition
from ansible.playbook.role.requirement import RoleRequirement
from ansible.module_utils._text import to_native
__all__ = ['RoleInclude']
@ -49,7 +50,9 @@ class RoleInclude(RoleDefinition):
@staticmethod
def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None):
assert isinstance(data, string_types) or isinstance(data, dict) or isinstance(data, AnsibleBaseYAMLObject)
if not (isinstance(data, string_types) or isinstance(data, dict) or isinstance(data, AnsibleBaseYAMLObject)):
raise AnsibleParserError("Invalid role definition: %s" % to_native(data))
if isinstance(data, string_types) and ',' in data:
data = RoleRequirement.role_spec_parse(data)


@ -21,6 +21,7 @@ __metaclass__ = type
from os.path import basename
from ansible.errors import AnsibleParserError
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.task import Task
from ansible.playbook.role import Role
@ -46,8 +47,9 @@ class IncludeRole(Task):
# ATTRIBUTES
# private as this is a 'module options' vs a task property
_static = FieldAttribute(isa='bool', default=None, private=True)
_allow_duplicates = FieldAttribute(isa='bool', default=True, private=True)
_private = FieldAttribute(isa='bool', default=None, private=True)
_static = FieldAttribute(isa='bool', default=None)
def __init__(self, block=None, role=None, task_include=None):
@ -56,6 +58,7 @@ class IncludeRole(Task):
self.statically_loaded = False
self._from_files = {}
self._parent_role = role
self._role_name = None
def get_block_list(self, play=None, variable_manager=None, loader=None):
@ -66,16 +69,22 @@ class IncludeRole(Task):
else:
myplay = play
ri = RoleInclude.load(self.name, play=myplay, variable_manager=variable_manager, loader=loader)
ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader)
ri.vars.update(self.vars)
# build role
actual_role = Role.load(ri, myplay, parent_role=self._parent_role, from_files=self._from_files)
actual_role._metadata.allow_duplicates = self.allow_duplicates
# compile role
blocks = actual_role.compile(play=myplay)
# compile role with parent roles as dependencies to ensure they inherit
# variables
if not self._parent_role:
dep_chain = []
else:
dep_chain = list(self._parent_role._parents)
dep_chain.append(self._parent_role)
# set parent to ensure proper inheritance
blocks = actual_role.compile(play=myplay, dep_chain=dep_chain)
for b in blocks:
b._parent = self
@ -89,19 +98,25 @@ class IncludeRole(Task):
ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader)
# set built in's
attributes = frozenset(ir._valid_attrs.keys())
for builtin in attributes:
if ir.args.get(builtin):
setattr(ir, builtin, ir.args.get(builtin))
### Process options
# name is needed, or use role as alias
ir._role_name = ir.args.get('name', ir.args.get('role'))
if ir._role_name is None:
raise AnsibleParserError("'name' is a required field for include_role.")
# build options for role includes
for key in ['tasks', 'vars', 'defaults']:
from_key = key + '_from'
from_key = '%s_from' % key
if ir.args.get(from_key):
ir._from_files[key] = basename(ir.args.get(from_key))
return ir.load_data(data, variable_manager=variable_manager, loader=loader)
# FIXME: find a way to make this list come from the object (attributes does not work, as per below)
# manual list, as otherwise the options would set other task parameters we don't want.
for option in ['private', 'allow_duplicates']:
if option in ir.args:
setattr(ir, option, ir.args.get(option))
return ir
def copy(self, exclude_parent=False, exclude_tasks=False):
@ -109,6 +124,7 @@ class IncludeRole(Task):
new_me.statically_loaded = self.statically_loaded
new_me._from_files = self._from_files.copy()
new_me._parent_role = self._parent_role
new_me._role_name = self._role_name
return new_me


@ -21,8 +21,8 @@ __metaclass__ = type
import os
from ansible.compat.six import iteritems, string_types
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping, AnsibleUnicode
@ -259,25 +259,39 @@ class Task(Base, Conditional, Taggable, Become):
Override post validation of the environment setting, as we don't want to
template these values too early.
'''
if value is None:
return dict()
env = {}
if value is not None:
elif isinstance(value, list):
if len(value) == 1:
return templar.template(value[0], convert_bare=True)
else:
env = []
def _parse_env_kv(k, v):
try:
env[k] = templar.template(v, convert_bare=False)
except AnsibleUndefinedVariable as e:
if self.action in ('setup', 'gather_facts') and 'ansible_env' in to_native(e):
# ignore as fact gathering sets ansible_env
pass
if isinstance(value, list):
for env_item in value:
if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
env[env_item] = templar.template(env_item, convert_bare=False)
elif isinstance(value, dict):
env = dict()
for env_item in value:
if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
env[env_item] = templar.template(value[env_item], convert_bare=False)
if isinstance(env_item, dict):
for k in env_item:
_parse_env_kv(k, env_item[k])
else:
isdict = templar.template(env_item, convert_bare=False)
if isinstance(isdict, dict):
env.update(isdict)
else:
display.warning("could not parse environment value, skipping: %s" % value)
# at this point it should be a simple string
return templar.template(value, convert_bare=True)
elif isinstance(value, dict):
# should not really happen
env = dict()
for env_item in value:
_parse_env_kv(env_item, value[env_item])
else:
# at this point it should be a simple string, also should not happen
env = templar.template(value, convert_bare=False)
return env
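The rewritten validator accepts the task's environment as a dict, a list mixing dicts and bare variable names (each name must template to a dict), or a plain string, merging everything into one dict. A sketch of the accepted shapes (illustrative values, not from the source):

environment_forms = [
    {'LC_ALL': 'C'},                          # plain dict
    [{'LC_ALL': 'C'}, {'PATH': '/opt/bin'}],  # list of dicts, merged in order
    ['my_env_dict'],                          # bare var name templating to a dict
]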
def _post_validate_changed_when(self, attr, value, templar):
'''
@ -376,12 +390,6 @@ class Task(Base, Conditional, Taggable, Become):
super(Task, self).deserialize(data)
def evaluate_conditional(self, templar, all_vars):
if self._parent is not None:
if not self._parent.evaluate_conditional(templar, all_vars):
return False
return super(Task, self).evaluate_conditional(templar, all_vars)
def set_loader(self, loader):
'''
Sets the loader on this object and recursively on parent, child objects.
@ -394,7 +402,7 @@ class Task(Base, Conditional, Taggable, Become):
if self._parent:
self._parent.set_loader(loader)
def _get_parent_attribute(self, attr, extend=False):
def _get_parent_attribute(self, attr, extend=False, prepend=False):
'''
Generic logic to get the attribute or parent attribute for a task value.
'''
@ -405,7 +413,7 @@ class Task(Base, Conditional, Taggable, Become):
if self._parent and (value is None or extend):
parent_value = getattr(self._parent, attr, None)
if extend:
value = self._extend_value(value, parent_value)
value = self._extend_value(value, parent_value, prepend)
else:
value = parent_value
except KeyError:
@ -417,7 +425,7 @@ class Task(Base, Conditional, Taggable, Become):
'''
Override for the 'environment' getattr fetcher, used from Base.
'''
return self._get_parent_attribute('environment', extend=True)
return self._get_parent_attribute('environment', extend=True, prepend=True)
def get_dep_chain(self):
if self._parent:


@ -221,7 +221,7 @@ class PluginLoader:
self._extra_dirs.append(directory)
self._paths = None
def find_plugin(self, name, mod_type=''):
def find_plugin(self, name, mod_type='', ignore_deprecated=False):
''' Find a plugin named name '''
if mod_type:
@ -297,7 +297,7 @@ class PluginLoader:
alias_name = '_' + name
# We've already cached all the paths at this point
if alias_name in pull_cache:
if not os.path.islink(pull_cache[alias_name]):
if not ignore_deprecated and not os.path.islink(pull_cache[alias_name]):
display.deprecated('%s is kept for backwards compatibility '
'but usage is discouraged. The module '
'documentation details page may explain '
@ -373,6 +373,7 @@ class PluginLoader:
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
path_only = kwargs.pop('path_only', False)
class_only = kwargs.pop('class_only', False)
all_matches = []
found_in_cache = True
@ -385,6 +386,10 @@ class PluginLoader:
if '__init__' in name:
continue
if path_only:
yield path
continue
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source(name, path)
found_in_cache = False
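path_only short-circuits before any module import, so callers can enumerate plugin files cheaply; this is how the fact-cleaning code later in this changeset walks the connection plugins. A hedged usage sketch:

from ansible.plugins import connection_loader

# yields paths like .../plugins/connection/ssh.py without loading the modules
for conn_path in connection_loader.all(path_only=True):
    print(conn_path)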


@ -30,15 +30,16 @@ import tempfile
import time
from abc import ABCMeta, abstractmethod
from ansible.compat.six import binary_type, text_type, iteritems, with_metaclass
from ansible import constants as C
from ansible.compat.six import binary_type, string_types, text_type, iteritems, with_metaclass
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.executor.module_common import modify_module
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.parsing.utils.jsonify import jsonify
from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
from ansible.release import __version__
from ansible.vars.unsafe_proxy import wrap_var
try:
@ -217,7 +218,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
tmp_mode = 0o700
cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode)
if use_system_tmp:
tmpdir = None
else:
tmpdir = self._remote_expand_user(C.DEFAULT_REMOTE_TMP, sudoable=False)
cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode, tmpdir)
result = self._low_level_execute_command(cmd, sudoable=False)
# error handling on this seems a little aggressive?
@ -358,11 +364,16 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# Try to use file system acls to make the files readable for sudo'd
# user
if execute:
mode = 'rx'
chmod_mode = 'rx'
setfacl_mode = 'r-x'
else:
mode = 'rX'
chmod_mode = 'rX'
### Note: this form fails silently on freebsd. We currently
# never call _fixup_perms2() with execute=False but if we
# start to we'll have to fix this.
setfacl_mode = 'r-X'
res = self._remote_set_user_facl(remote_paths, self._play_context.become_user, mode)
res = self._remote_set_user_facl(remote_paths, self._play_context.become_user, setfacl_mode)
if res['rc'] != 0:
# File system acls failed; let's try to use chown next
# Set executable bit first as on some systems an
@ -370,7 +381,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if execute:
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote temporary files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))
raise AnsibleError('Failed to set file mode on remote temporary files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
res = self._remote_chown(remote_paths, self._play_context.become_user)
if res['rc'] != 0 and remote_user == 'root':
@ -384,20 +395,20 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user.'
' This may be insecure. For information on securing this, see'
' https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
res = self._remote_chmod(remote_paths, 'a+%s' % mode)
res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
else:
raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user'
' (rc: {0}, err: {1}). For information on working around this,'
' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], res['stderr']))
' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], to_native(res['stderr'])))
elif execute:
# Can't depend on the file being transferred with execute
# permissions. Only need user perms because no become was
# used here
res = self._remote_chmod(remote_paths, 'u+x')
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
return remote_paths
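Splitting `mode` into `chmod_mode` and `setfacl_mode` above reflects that the two tools spell permissions differently ('rx' vs 'r-x'). For orientation, a rough local sketch of the whole fallback chain (ACLs, then chown, then world-readable chmod); `make_readable_for` and `_run` are illustrative names, and the commands are assumed to exist on PATH:

import shutil
import subprocess

def _run(argv):
    """Return True when the command exists and exits 0."""
    if shutil.which(argv[0]) is None:
        return False
    return subprocess.call(argv) == 0

def make_readable_for(user, paths, execute=True):
    setfacl_mode = 'r-x' if execute else 'r-X'
    chmod_mode = 'rx' if execute else 'rX'
    if _run(['setfacl', '-m', 'u:%s:%s' % (user, setfacl_mode)] + list(paths)):
        return paths  # ACLs grant access without changing ownership
    if _run(['chown', user] + list(paths)):
        return paths  # fall back to handing the files to the user
    if _run(['chmod', 'a+%s' % chmod_mode] + list(paths)):
        return paths  # world-readable: the insecure last resort
    raise RuntimeError('could not make files readable for %r' % user)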
@@ -448,6 +459,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# happens sometimes when it is a dir and not on bsd
if 'checksum' not in mystat['stat']:
mystat['stat']['checksum'] = ''
elif not isinstance(mystat['stat']['checksum'], string_types):
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
return mystat['stat']
@@ -477,7 +490,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
finally:
return x
def _remote_expand_user(self, path):
def _remote_expand_user(self, path, sudoable=True):
''' takes a remote path and performs tilde expansion on the remote host '''
if not path.startswith('~'): # FIXME: Windows paths may start with "~ instead of just ~
return path
@@ -485,13 +498,11 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# FIXME: Can't use os.path.sep for Windows paths.
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
if self._play_context.become and self._play_context.become_user:
expand_path = '~%s' % self._play_context.become_user
if sudoable and expand_path == '~' and self._play_context.become and self._play_context.become_user:
expand_path = '~%s' % self._play_context.become_user
cmd = self._connection._shell.expand_user(expand_path)
data = self._low_level_execute_command(cmd, sudoable=False)
#initial_fragment = utils.last_non_blank_line(data['stdout'])
initial_fragment = data['stdout'].strip().splitlines()[-1]
if not initial_fragment:
@@ -663,6 +674,39 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
def _clean_returned_data(self, data):
remove_keys = set()
fact_keys = set(data.keys())
# first we add all of our magic variable names to the set of
# keys we want to remove from facts
for magic_var in MAGIC_VARIABLE_MAPPING:
remove_keys.update(fact_keys.intersection(MAGIC_VARIABLE_MAPPING[magic_var]))
# next we remove any connection plugin specific vars
for conn_path in self._shared_loader_obj.connection_loader.all(path_only=True):
try:
conn_name = os.path.splitext(os.path.basename(conn_path))[0]
re_key = re.compile('^ansible_%s_' % conn_name)
for fact_key in fact_keys:
if re_key.match(fact_key):
remove_keys.add(fact_key)
except AttributeError:
pass
# remove some KNOWN keys
for hard in ['ansible_rsync_path', 'ansible_playbook_python']:
if hard in fact_keys:
remove_keys.add(hard)
# finally, we search for interpreter keys to remove
re_interp = re.compile('^ansible_.*_interpreter$')
for fact_key in fact_keys:
if re_interp.match(fact_key):
remove_keys.add(fact_key)
# then we remove them (except for ssh host keys)
for r_key in remove_keys:
if not r_key.startswith('ansible_ssh_host_key_'):
del data[r_key]
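To make the effect of the new `_clean_returned_data()` concrete, here is a standalone approximation with an abbreviated list of connection plugin names (`clean_returned_facts` is an illustrative name; the real method also strips every MAGIC_VARIABLE_MAPPING key):

import re

def clean_returned_facts(facts, conn_names=('ssh', 'local', 'docker')):
    """Drop keys a module could use to redirect later connections or
    swap interpreters, but keep recorded ssh host keys."""
    remove = set()
    # connection-plugin specific vars, e.g. ansible_ssh_host
    for name in conn_names:
        re_key = re.compile(r'^ansible_%s_' % re.escape(name))
        remove.update(k for k in facts if re_key.match(k))
    # a couple of known dangerous keys
    remove.update(k for k in ('ansible_rsync_path', 'ansible_playbook_python') if k in facts)
    # interpreter overrides such as ansible_python_interpreter
    re_interp = re.compile(r'^ansible_.*_interpreter$')
    remove.update(k for k in facts if re_interp.match(k))
    for key in remove:
        if not key.startswith('ansible_ssh_host_key_'):
            del facts[key]
    return facts

facts = {'ansible_python_interpreter': '/tmp/evil', 'os_family': 'Debian'}
print(clean_returned_facts(facts))  # {'os_family': 'Debian'}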
def _parse_returned_data(self, res):
try:
filtered_output, warnings = _filter_non_json_lines(res.get('stdout', u''))
@@ -670,6 +714,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.warning(w)
data = json.loads(filtered_output)
data['_ansible_parsed'] = True
if 'ansible_facts' in data and isinstance(data['ansible_facts'], dict):
self._clean_returned_data(data['ansible_facts'])
data['ansible_facts'] = wrap_var(data['ansible_facts'])
if 'add_host' in data and isinstance(data['add_host'].get('host_vars', None), dict):
self._clean_returned_data(data['add_host']['host_vars'])
data['add_host'] = wrap_var(data['add_host'])
except ValueError:
# not valid json, lets try to capture error
data = dict(failed=True, _ansible_parsed=False)
@@ -721,8 +771,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# Change directory to basedir of task for command execution when connection is local
if self._connection.transport == 'local':
os.chdir(self._loader.get_basedir())
cwd = os.getcwd()
os.chdir(self._loader.get_basedir())
try:
rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
finally:
@@ -817,6 +867,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
to get back the first existing file found.
'''
# dwim already deals with playbook basedirs
path_stack = self._task.get_search_path()
result = self._loader.path_dwim_relative_stack(path_stack, dirname, needle)


@@ -103,13 +103,8 @@ class ActionModule(ActionBase):
return result
remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
if not tmp:
tmp = self._make_tmp_path(remote_user)
self._cleanup_remote_tmp = True
if boolean(remote_src):
result.update(self._execute_module(tmp=tmp, task_vars=task_vars, delete_remote_tmp=False))
self._remove_tmp_path(tmp)
result.update(self._execute_module(tmp=tmp, task_vars=task_vars))
return result
else:
try:
@@ -119,6 +114,10 @@ class ActionModule(ActionBase):
result['msg'] = to_native(e)
return result
if not tmp:
tmp = self._make_tmp_path(remote_user)
self._cleanup_remote_tmp = True
if not os.path.isdir(src):
result['failed'] = True
result['msg'] = u"Source (%s) is not a directory" % src


@@ -91,7 +91,15 @@ class ActionModule(ActionBase):
async_limit = self._task.async
async_jid = str(random.randint(0, 999999999999))
async_cmd = [env_string, remote_async_module_path, async_jid, async_limit, remote_module_path]
# call the interpreter for async_wrapper directly
# this permits use of a script for an interpreter on non-Linux platforms
# TODO: re-implement async_wrapper as a regular module to avoid this special case
interpreter = shebang.replace('#!', '').strip()
async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
if env_string:
async_cmd.insert(0, env_string)
if argsfile:
async_cmd.append(argsfile)
else:


@@ -86,7 +86,7 @@ class ActionModule(ActionBase):
# if we have first_available_file in our vars
# look up the files and use the first one we find as src
elif remote_src:
result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars))
return result
else: # find in expected paths
try:


@@ -22,7 +22,7 @@ from os import path, walk
import re
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_native
from ansible.module_utils._text import to_native, to_text
from ansible.plugins.action import ActionBase
@@ -137,8 +137,8 @@ class ActionModule(ActionBase):
results.update(updated_results)
except AnsibleError as e:
failed = True
err_msg = to_native(e)
raise AnsibleError(err_msg)
if self.return_results_as_name:
scope = dict()
@@ -226,7 +226,7 @@ class ActionModule(ActionBase):
return success
return success
def _load_files(self, filename):
def _load_files(self, filename, validate_extensions=False):
""" Loads a file and converts the output into a valid Python dict.
Args:
filename (str): The source file.
@@ -237,7 +237,7 @@ class ActionModule(ActionBase):
results = dict()
failed = False
err_msg = ''
if not self._is_valid_file_ext(filename):
if validate_extensions and not self._is_valid_file_ext(filename):
failed = True
err_msg = (
'{0} does not have a valid extension: {1}'
@@ -245,7 +245,9 @@ class ActionModule(ActionBase):
)
return failed, err_msg, results
data, show_content = self._loader._get_file_contents(filename)
b_data, show_content = self._loader._get_file_contents(filename)
data = to_text(b_data, errors='surrogate_or_strict')
self.show_content = show_content
data = self._loader.load(data, show_content)
if not data:
@@ -287,7 +289,7 @@ class ActionModule(ActionBase):
if not stop_iter and not failed:
if path.exists(filepath) and not self._ignore_file(filename):
failed, err_msg, loaded_data = self._load_files(filepath)
failed, err_msg, loaded_data = self._load_files(filepath, validate_extensions=True)
if not failed:
results.update(loaded_data)


@@ -125,8 +125,9 @@ class ActionModule(ActionBase):
fd = None
try:
fd = self._connection._new_stdin.fileno()
except ValueError:
# someone is using a closed file descriptor as stdin
except (ValueError, AttributeError):
# ValueError: someone is using a closed file descriptor as stdin
# AttributeError: someone is using a null file descriptor as stdin on Windows
pass
if fd is not None:
if isatty(fd):


@@ -26,7 +26,7 @@ class ActionModule(ActionBase):
TRANSFERS_FILES = False
UNUSED_PARAMS = {
'systemd': ['pattern', 'runlevels', 'sleep', 'arguments'],
'systemd': ['pattern', 'runlevel', 'sleep', 'arguments', 'args'],
}
def run(self, tmp=None, task_vars=None):


@@ -0,0 +1,73 @@
# Copyright 2016 Ansible (RedHat, Inc)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.six import iteritems, string_types
from ansible.constants import mk_boolean as boolean
from ansible.plugins.action import ActionBase
from ansible.utils.vars import isidentifier
class ActionModule(ActionBase):
TRANSFERS_FILES = False
#TODO: document this in non-empty set_stats.py module
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
stats = {'data': {}, 'per_host': False, 'aggregate': True}
if self._task.args:
data = self._task.args.get('data', {})
if not isinstance(data, dict):
data = self._templar.template(data, convert_bare=False, fail_on_undefined=True)
if not isinstance(data, dict):
result['failed'] = True
result['msg'] = "The 'data' option needs to be a dictionary/hash"
return result
# set boolean options, defaults are set above in stats init
for opt in ['per_host', 'aggregate']:
val = self._task.args.get(opt, None)
if val is not None:
if not isinstance(val, bool):
stats[opt] = boolean(self._templar.template(val))
else:
stats[opt] = val
for (k, v) in iteritems(data):
k = self._templar.template(k)
if not isidentifier(k):
result['failed'] = True
result['msg'] = "The variable name '%s' is not valid. Variables must start with a letter or underscore character, and contain only letters, numbers and underscores." % k
return result
stats['data'][k] = self._templar.template(v)
result['changed'] = False
result['ansible_stats'] = stats
return result
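The `isidentifier()` gate in this new module is what produces the 'variable name is not valid' error above. A rough Python 3 approximation of that check (Ansible's own helper also handles Python 2 identifier rules):

import keyword

def is_valid_var_name(name):
    """Approximation of the isidentifier() helper used above."""
    return (isinstance(name, str)
            and name.isidentifier()
            and not keyword.iskeyword(name))

assert is_valid_var_name('my_stat_1')
assert not is_valid_var_name('1bad')     # cannot start with a digit
assert not is_valid_var_name('for')      # reserved word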


@@ -114,7 +114,7 @@ class ActionModule(ActionBase):
# connection to the remote host
if 'ansible_syslog_facility' in task_vars:
del task_vars['ansible_syslog_facility']
for key in task_vars.keys():
for key in list(task_vars.keys()):
if key.startswith("ansible_") and key.endswith("_interpreter"):
del task_vars[key]
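The `list()` wrapper above matters on Python 3, where deleting from a dict while iterating its key view raises RuntimeError; a minimal reproduction:

task_vars = {'ansible_python_interpreter': '/usr/bin/python',
             'ansible_host': 'web1'}

# Python 3 raises here: RuntimeError: dictionary changed size during iteration
# for key in task_vars.keys():
#     if key.endswith('_interpreter'):
#         del task_vars[key]

# materializing the keys first makes the deletion safe on both 2 and 3
for key in list(task_vars.keys()):
    if key.startswith("ansible_") and key.endswith("_interpreter"):
        del task_vars[key]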


@@ -23,6 +23,7 @@ import pwd
import time
from ansible import constants as C
from ansible.compat.six import string_types
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.plugins.action import ActionBase
@@ -115,19 +116,28 @@ class ActionModule(ActionBase):
time.localtime(os.path.getmtime(b_source))
)
# Create a new searchpath list to assign to the templar environment's file
# loader, so that it knows about the other paths to find template files
searchpath = [self._loader._basedir, os.path.dirname(source)]
if self._task._role is not None:
if C.DEFAULT_ROLES_PATH:
searchpath[:0] = C.DEFAULT_ROLES_PATH
searchpath.insert(1, self._task._role._role_path)
searchpath = []
# set jinja2 internal search path for includes
if 'ansible_search_path' in task_vars:
searchpath = task_vars['ansible_search_path']
# our search paths aren't actually the proper ones for jinja includes.
searchpath.extend([self._loader._basedir, os.path.dirname(source)])
# We want to search into the 'templates' subdir of each search path in
# addition to our original search paths.
newsearchpath = []
for p in searchpath:
newsearchpath.append(os.path.join(p, 'templates'))
newsearchpath.append(p)
searchpath = newsearchpath
self._templar.environment.loader.searchpath = searchpath
old_vars = self._templar._available_variables
self._templar.set_available_variables(temp_vars)
resultant = self._templar.template(template_data, preserve_trailing_newlines=True, escape_backslashes=False, convert_data=False)
resultant = self._templar.do_template(template_data, preserve_trailing_newlines=True, escape_backslashes=False)
self._templar.set_available_variables(old_vars)
except Exception as e:
result['failed'] = True
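The search-path rewrite above makes Jinja2 `{% include %}` statements look in each search path's templates/ subdirectory before the path itself; the interleaving is easy to verify in isolation:

import os

def with_templates_subdirs(searchpath):
    """Interleave each path's templates/ subdir ahead of the path itself,
    as the template action now does for Jinja2 includes."""
    new = []
    for p in searchpath:
        new.append(os.path.join(p, 'templates'))
        new.append(p)
    return new

print(with_templates_subdirs(['/play', '/role']))
# ['/play/templates', '/play', '/role/templates', '/role']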


@@ -63,8 +63,11 @@ class CacheModule(BaseCacheModule):
try:
os.makedirs(self._cache_dir)
except (OSError,IOError) as e:
display.warning("error in 'jsonfile' cache plugin while trying to create cache dir %s : %s" % (self._cache_dir, to_bytes(e)))
return None
raise AnsibleError("error in 'jsonfile' cache plugin while trying to create cache dir %s : %s" % (self._cache_dir, to_bytes(e)))
else:
for x in (os.R_OK, os.W_OK, os.X_OK):
if not os.access(self._cache_dir, x):
raise AnsibleError("error in '%s' cache, configured path (%s) does not have necessary permissions (rwx), disabling plugin" % (self.plugin_name, self._cache_dir))
def get(self, key):
""" This checks the in memory cache first as the fact was not expired at 'gather time'
@@ -124,7 +127,7 @@ class CacheModule(BaseCacheModule):
return False
else:
display.warning("error in 'jsonfile' cache plugin while trying to stat %s : %s" % (cachefile, to_bytes(e)))
pass
return False
if time.time() - st.st_mtime <= self._timeout:
return False
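With the first hunk in this file, a cache directory that cannot be created or accessed becomes a hard error rather than a warning. A small Python 3 sketch of the same fail-fast validation (`ensure_cache_dir` is an illustrative name):

import os

def ensure_cache_dir(cache_dir):
    """Create the cache dir, then verify rwx access before trusting it,
    mirroring the fail-fast checks added to the jsonfile plugin."""
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)  # let OSError propagate, as the plugin now does
    for flag in (os.R_OK, os.W_OK, os.X_OK):
        if not os.access(cache_dir, flag):
            raise PermissionError(
                'cache dir %s lacks necessary permissions (rwx)' % cache_dir)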


@@ -65,7 +65,7 @@ class CallbackModule(CallbackBase):
else:
self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_ERROR)
if result._task.ignore_errors:
if ignore_errors:
self._display.display("...ignoring", color=C.COLOR_SKIP)
def v2_runner_on_ok(self, result):

Some files were not shown because too many files have changed in this diff.