re-enable non-pipelined mode for Powershell (#26124)

* fixes #23986
* fixes 3rd-party Windows connection plugins that don't support pipelining (eg awsrun)

(cherry picked from commit 6677559c698f15c582fba80d866f46458848f974)
Matt Davis 2017-06-26 22:57:59 -07:00 committed by GitHub
parent 940a78c5ba
commit 6cca156fde
7 changed files with 128 additions and 122 deletions
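
The behavioral core of this change sits in the action-plugin hunk below: previously, a connection that set always_pipeline_modules (winrm) short-circuited the pipelining check to True, so a non-pipelined fallback was impossible; now that flag is just one more input to the normal condition list. The sketch that follows is a rough reconstruction of the restored decision, not the actual Ansible method — the function name and the keep_remote_files parameter are illustrative, and the real code checks more conditions than the ones visible in this diff.

    # Illustrative sketch only: mirrors the condition list in the hunk below.
    def is_pipelining_enabled(connection, play_context, module_style, wrap_async,
                              keep_remote_files=False):
        conditions = [
            connection.has_pipelining,                                      # transport supports pipelining at all
            play_context.pipelining or connection.always_pipeline_modules,  # enabled for the play, or required by the connection (eg winrm)
            module_style == "new",                                          # old-style modules do not support pipelining
            not keep_remote_files,                                          # keeping remote files needs a real file on the target
            not wrap_async,                                                 # async does not support pipelining
            # (the real method checks further conditions not shown in this diff)
        ]
        return all(conditions)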

@@ -37,7 +37,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import module_utils_loader
from ansible.plugins.shell.powershell import async_watchdog, async_wrapper, become_wrapper, leaf_exec
from ansible.plugins.shell.powershell import async_watchdog, async_wrapper, become_wrapper, leaf_exec, exec_wrapper
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
@@ -582,7 +582,8 @@ def _is_binary(b_module_data):
return bool(start.translate(None, textchars))
def _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, module_compression):
def _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, module_compression, async_timeout, become,
become_method, become_user, become_password, environment):
"""
Given the source of the module, convert it to a Jinja2 template to insert
module code and return whether it's a new or old style module.
@@ -742,8 +743,55 @@ def _find_module_utils(module_name, b_module_data, module_path, module_args, tas
# Windows text editors
shebang = u'#!powershell'
# powershell wrapper build is currently handled in build_windows_module_payload, called in action
# _configure_module after this function returns.
exec_manifest = dict(
module_entry=to_text(base64.b64encode(b_module_data)),
powershell_modules=dict(),
module_args=module_args,
actions=['exec'],
environment=environment
)
exec_manifest['exec'] = to_text(base64.b64encode(to_bytes(leaf_exec)))
if async_timeout > 0:
exec_manifest["actions"].insert(0, 'async_watchdog')
exec_manifest["async_watchdog"] = to_text(base64.b64encode(to_bytes(async_watchdog)))
exec_manifest["actions"].insert(0, 'async_wrapper')
exec_manifest["async_wrapper"] = to_text(base64.b64encode(to_bytes(async_wrapper)))
exec_manifest["async_jid"] = str(random.randint(0, 999999999999))
exec_manifest["async_timeout_sec"] = async_timeout
if become and become_method == 'runas':
exec_manifest["actions"].insert(0, 'become')
exec_manifest["become_user"] = become_user
exec_manifest["become_password"] = become_password
exec_manifest["become"] = to_text(base64.b64encode(to_bytes(become_wrapper)))
lines = b_module_data.split(b'\n')
module_names = set()
requires_module_list = re.compile(r'(?i)^#requires \-module(?:s?) (.+)')
for line in lines:
# legacy, equivalent to #Requires -Modules powershell
if REPLACER_WINDOWS in line:
module_names.add(b'powershell')
# TODO: add #Requires checks for Ansible.ModuleUtils.X
for m in module_names:
m = to_text(m)
exec_manifest["powershell_modules"][m] = to_text(
base64.b64encode(
to_bytes(
_slurp(os.path.join(_MODULE_UTILS_PATH, m + ".ps1"))
)
)
)
# FUTURE: smuggle this back as a dict instead of serializing here; the connection plugin may need to modify it
module_json = json.dumps(exec_manifest)
b_module_data = exec_wrapper.replace(b"$json_raw = ''", b"$json_raw = @'\r\n%s\r\n'@" % to_bytes(module_json))
elif module_substyle == 'jsonargs':
module_args_json = to_bytes(json.dumps(module_args))
@@ -767,7 +815,8 @@ def _find_module_utils(module_name, b_module_data, module_path, module_args, tas
return (b_module_data, module_style, shebang)
def modify_module(module_name, module_path, module_args, task_vars=dict(), module_compression='ZIP_STORED'):
def modify_module(module_name, module_path, module_args, task_vars=dict(), module_compression='ZIP_STORED', async_timeout=0, become=False,
become_method=None, become_user=None, become_password=None, environment=dict()):
"""
Used to insert chunks of code into modules before transfer rather than
doing regular python imports. This allows for more efficient transfer in
@@ -793,7 +842,10 @@ def modify_module(module_name, module_path, module_args, task_vars=dict(), modul
# read in the module source
b_module_data = f.read()
(b_module_data, module_style, shebang) = _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, module_compression)
(b_module_data, module_style, shebang) = _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, module_compression,
async_timeout=async_timeout, become=become, become_method=become_method,
become_user=become_user, become_password=become_password,
environment=environment)
if module_style == 'binary':
return (b_module_data, module_style, to_text(shebang, nonstring='passthru'))
@@ -820,54 +872,3 @@ def modify_module(module_name, module_path, module_args, task_vars=dict(), modul
shebang = to_bytes(shebang, errors='surrogate_or_strict')
return (b_module_data, module_style, to_text(shebang, nonstring='passthru'))
def build_windows_module_payload(module_name, module_path, b_module_data, module_args, task_vars, task, play_context, environment):
exec_manifest = dict(
module_entry=to_text(base64.b64encode(b_module_data)),
powershell_modules=dict(),
module_args=module_args,
actions=['exec'],
environment=environment
)
exec_manifest['exec'] = to_text(base64.b64encode(to_bytes(leaf_exec)))
if task.async > 0:
exec_manifest["actions"].insert(0, 'async_watchdog')
exec_manifest["async_watchdog"] = to_text(base64.b64encode(to_bytes(async_watchdog)))
exec_manifest["actions"].insert(0, 'async_wrapper')
exec_manifest["async_wrapper"] = to_text(base64.b64encode(to_bytes(async_wrapper)))
exec_manifest["async_jid"] = str(random.randint(0, 999999999999))
exec_manifest["async_timeout_sec"] = task.async
if play_context.become and play_context.become_method=='runas':
exec_manifest["actions"].insert(0, 'become')
exec_manifest["become_user"] = play_context.become_user
exec_manifest["become_password"] = play_context.become_pass
exec_manifest["become"] = to_text(base64.b64encode(to_bytes(become_wrapper)))
lines = b_module_data.split(b'\n')
module_names = set()
requires_module_list = re.compile(r'(?i)^#requires \-module(?:s?) (.+)')
for line in lines:
# legacy, equivalent to #Requires -Modules powershell
if REPLACER_WINDOWS in line:
module_names.add(b'powershell')
# TODO: add #Requires checks for Ansible.ModuleUtils.X
for m in module_names:
m = to_text(m)
exec_manifest["powershell_modules"][m] = to_text(
base64.b64encode(
to_bytes(
_slurp(os.path.join(_MODULE_UTILS_PATH, m + ".ps1"))
)
)
)
# FUTURE: smuggle this back as a dict instead of serializing here; the connection plugin may need to modify it
b_module_data = json.dumps(exec_manifest)
return b_module_data
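
With build_windows_module_payload removed, the PowerShell exec-wrapper manifest is now assembled inside _find_module_utils, and callers reach it through the new keyword arguments on modify_module. Below is a minimal illustration of the new call shape, mirroring the action-plugin call site later in this diff; the module name, path, and argument values are placeholders, not a working invocation.

    # Placeholder values; shows the new keyword arguments only.
    (b_module_data, module_style, shebang) = modify_module(
        'win_ping',                                  # module_name
        '/path/to/modules/windows/win_ping.ps1',     # module_path (placeholder)
        {'data': 'pong'},                            # module_args
        task_vars={},
        module_compression='ZIP_STORED',
        async_timeout=0,        # >0 prepends the async_wrapper/async_watchdog actions
        become=False,           # become with become_method='runas' prepends the become wrapper
        become_method=None,
        become_user=None,
        become_password=None,
        environment={},
    )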

@@ -33,7 +33,7 @@ from ansible import constants as C
from ansible.compat.six import binary_type, string_types, text_type, iteritems, with_metaclass
from ansible.compat.six.moves import shlex_quote
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleActionSkip, AnsibleActionFail
from ansible.executor.module_common import modify_module, build_windows_module_payload
from ansible.executor.module_common import modify_module
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.json_utils import _filter_non_json_lines
from ansible.parsing.utils.jsonify import jsonify
@@ -153,18 +153,15 @@ class ActionBase(with_metaclass(ABCMeta, object)):
"run 'git pull --rebase' to correct this problem." % (module_name))
# insert shared code and arguments into the module
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args,
task_vars=task_vars, module_compression=self._play_context.module_compression)
final_environment = dict()
self._compute_environment_string(final_environment)
# FUTURE: we'll have to get fancier about this to support powershell over SSH on Windows...
if self._connection.transport == "winrm":
# WinRM always pipelines, so we need to build up a fancier module payload...
final_environment = dict()
self._compute_environment_string(final_environment)
module_data = build_windows_module_payload(module_name=module_name, module_path=module_path,
b_module_data=module_data, module_args=module_args,
task_vars=task_vars, task=self._task,
play_context=self._play_context, environment=final_environment)
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args,
task_vars=task_vars, module_compression=self._play_context.module_compression,
async_timeout=self._task.async, become=self._play_context.become,
become_method=self._play_context.become_method, become_user=self._play_context.become_user,
become_password=self._play_context.become_pass,
environment=final_environment)
return (module_style, module_shebang, module_data, module_path)
@@ -184,7 +181,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# block then task 'win' in precedence
environments.reverse()
for environment in environments:
if environment is None:
if environment is None or len(environment) == 0:
continue
temp_environment = self._templar.template(environment)
if not isinstance(temp_environment, dict):
@@ -193,7 +190,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# these environment settings should not need to merge sub-dicts
final_environment.update(temp_environment)
final_environment = self._templar.template(final_environment)
if len(final_environment) > 0:
final_environment = self._templar.template(final_environment)
if isinstance(raw_environment_out, dict):
raw_environment_out.clear()
@@ -212,13 +210,11 @@ class ActionBase(with_metaclass(ABCMeta, object)):
'''
Determines if we are required and can do pipelining
'''
if self._connection.always_pipeline_modules:
return True #eg, winrm
# any of these require a true
for condition in [
self._connection.has_pipelining,
self._play_context.pipelining,
self._play_context.pipelining or self._connection.always_pipeline_modules, # pipelining enabled for play or connection requires it (eg winrm)
module_style == "new", # old style modules do not support pipelining
not C.DEFAULT_KEEP_REMOTE_FILES, # user wants remote files
not wrap_async, # async does not support pipelining

@@ -326,17 +326,17 @@ class Connection(ConnectionBase):
def exec_command(self, cmd, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
cmd_parts = self._shell._encode_script(exec_wrapper, as_list=True, strict_mode=False, preserve_rc=False)
cmd_parts = self._shell._encode_script(cmd, as_list=True, strict_mode=False, preserve_rc=False)
# TODO: display something meaningful here
display.vvv("EXEC (via pipeline wrapper)")
if not in_data:
payload = self._create_raw_wrapper_payload(cmd)
else:
payload = in_data
stdin_iterator = None
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], from_exec=True, stdin_iterator=self._wrapper_payload_stream(payload))
if in_data:
stdin_iterator = self._wrapper_payload_stream(in_data)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], from_exec=True, stdin_iterator=stdin_iterator)
result.std_out = to_bytes(result.std_out)
result.std_err = to_bytes(result.std_err)

@@ -55,7 +55,8 @@ begin {
# stream JSON including become_pw, ps_module_payload, bin_module_payload, become_payload, write_payload_path, preserve directives
# exec runspace, capture output, cleanup, return module output
$json_raw = ""
# NB: do not adjust the following line- it is replaced when doing non-streamed module output
$json_raw = ''
}
process {
$input_as_string = [string]$input
@@ -332,7 +333,6 @@ Function Run($payload) {
$username = $payload.become_user
$password = $payload.become_password
Add-Type -TypeDefinition $helper_def
Add-Type -TypeDefinition $helper_def -Debug:$false
$exec_args = $null
@@ -726,7 +726,6 @@ Function Run($payload) {
[System.IO.Directory]::CreateDirectory([System.IO.Path]::GetDirectoryName($results_path)) | Out-Null
Add-Type -TypeDefinition $native_process_util
Add-Type -TypeDefinition $native_process_util -Debug:$false
# FUTURE: create under new job to ensure all children die on exit?
@@ -1103,7 +1102,7 @@ class ShellModule(object):
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
# pipelining bypass
if cmd == '':
return ''
return '-'
# non-pipelining
@@ -1192,15 +1191,22 @@ class ShellModule(object):
def _encode_script(self, script, as_list=False, strict_mode=True, preserve_rc=True):
'''Convert a PowerShell script to a single base64-encoded command.'''
script = to_text(script)
if strict_mode:
script = u'Set-StrictMode -Version Latest\r\n%s' % script
# try to propagate exit code if present- won't work with begin/process/end-style scripts (ala put_file)
# NB: the exit code returned may be incorrect in the case of a successful command followed by an invalid command
if preserve_rc:
script = u'%s\r\nIf (-not $?) { If (Get-Variable LASTEXITCODE -ErrorAction SilentlyContinue) { exit $LASTEXITCODE } Else { exit 1 } }\r\n' % script
script = '\n'.join([x.strip() for x in script.splitlines() if x.strip()])
encoded_script = base64.b64encode(script.encode('utf-16-le'))
cmd_parts = _common_args + ['-EncodedCommand', encoded_script]
if script == u'-':
cmd_parts = _common_args + ['-']
else:
if strict_mode:
script = u'Set-StrictMode -Version Latest\r\n%s' % script
# try to propagate exit code if present- won't work with begin/process/end-style scripts (ala put_file)
# NB: the exit code returned may be incorrect in the case of a successful command followed by an invalid command
if preserve_rc:
script = u'%s\r\nIf (-not $?) { If (Get-Variable LASTEXITCODE -ErrorAction SilentlyContinue) { exit $LASTEXITCODE } Else { exit 1 } }\r\n'\
% script
script = '\n'.join([x.strip() for x in script.splitlines() if x.strip()])
encoded_script = base64.b64encode(script.encode('utf-16-le'))
cmd_parts = _common_args + ['-EncodedCommand', encoded_script]
if as_list:
return cmd_parts
return ' '.join(cmd_parts)
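
To make the pipelining bypass concrete: build_module_command now returns '-' instead of an empty string when the module is pipelined, and _encode_script passes that through as a literal '-' argument so powershell.exe reads the (already wrapped) module payload from stdin rather than decoding an -EncodedCommand. The snippet below is a self-contained restatement of that branch, not the module's code; _common_args is an illustrative stand-in for the real PowerShell argument list, and the base64 result is decoded for Python 3.

    import base64

    _common_args = ['powershell.exe', '-NoProfile', '-NonInteractive']  # illustrative stand-in

    def encode_script(script, as_list=False, strict_mode=True, preserve_rc=True):
        if script == u'-':
            # pipelining bypass: powershell.exe reads the script from stdin,
            # so the connection can stream the wrapped module payload to it
            cmd_parts = _common_args + ['-']
        else:
            if strict_mode:
                script = u'Set-StrictMode -Version Latest\r\n%s' % script
            if preserve_rc:
                script = u'%s\r\nIf (-not $?) { If (Get-Variable LASTEXITCODE -ErrorAction SilentlyContinue) { exit $LASTEXITCODE } Else { exit 1 } }\r\n' % script
            script = '\n'.join([x.strip() for x in script.splitlines() if x.strip()])
            encoded = base64.b64encode(script.encode('utf-16-le')).decode('ascii')
            cmd_parts = _common_args + ['-EncodedCommand', encoded]
        return cmd_parts if as_list else ' '.join(cmd_parts)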

@@ -94,14 +94,14 @@
- "raw_result.stdout_lines[0] == 'wwe=raw'"
# TODO: this test doesn't work anymore since we had to internally map Write-Host to Write-Output
#- name: run a raw command with unicode chars and quoted args (from https://github.com/ansible/ansible-modules-core/issues/1929)
# raw: Write-Host --% icacls D:\somedir\ /grant "! ЗАО. Руководство":F
# register: raw_result2
#
#- name: make sure raw passes command as-is and doesn't split/rejoin args
# assert:
# that:
# - "raw_result2.stdout_lines[0] == '--% icacls D:\\\\somedir\\\\ /grant \"! ЗАО. Руководство\":F'"
- name: run a raw command with unicode chars and quoted args (from https://github.com/ansible/ansible-modules-core/issues/1929)
raw: Write-Host --% icacls D:\somedir\ /grant "! ЗАО. Руководство":F
register: raw_result2
- name: make sure raw passes command as-is and doesn't split/rejoin args
assert:
that:
- "raw_result2.stdout_lines[0] == '--% icacls D:\\\\somedir\\\\ /grant \"! ЗАО. Руководство\":F'"
# Assumes MaxShellsPerUser == 30 (the default)
@@ -116,12 +116,13 @@
- "not raw_with_items_result|failed"
- "raw_with_items_result.results|length == 32"
- name: test raw with job to ensure that preamble-free InputEncoding is working
raw: Start-Job { echo yo } | Receive-Job -Wait
register: raw_job_result
- name: check raw with job result
assert:
that:
- raw_job_result | succeeded
- raw_job_result.stdout_lines[0] == 'yo'
# TODO: this test fails, since we're back to passing raw commands without modification
#- name: test raw with job to ensure that preamble-free InputEncoding is working
# raw: Start-Job { echo yo } | Receive-Job -Wait
# register: raw_job_result
#
#- name: check raw with job result
# assert:
# that:
# - raw_job_result | succeeded
# - raw_job_result.stdout_lines[0] == 'yo'

@@ -193,15 +193,16 @@
- "test_script_bool_result.stdout_lines[0] == 'System.Boolean'"
- "test_script_bool_result.stdout_lines[1] == 'True'"
- name: run test script that uses envvars
script: test_script_with_env.ps1
environment:
taskenv: task
register: test_script_env_result
- name: ensure that script ran and that environment var was passed
assert:
that:
- test_script_env_result | succeeded
- test_script_env_result.stdout_lines[0] == 'task'
# FIXME: re-enable this test once script can run under the wrapper with powershell
#- name: run test script that uses envvars
# script: test_script_with_env.ps1
# environment:
# taskenv: task
# register: test_script_env_result
#
#- name: ensure that script ran and that environment var was passed
# assert:
# that:
# - test_script_env_result | succeeded
# - test_script_env_result.stdout_lines[0] == 'task'
#

@@ -90,6 +90,7 @@ class TestActionBase(unittest.TestCase):
# create our fake task
mock_task = MagicMock()
mock_task.action = "copy"
mock_task.async = 0
# create a mock connection, so we don't actually try and connect to things
mock_connection = MagicMock()