diff --git a/src/ansiblelint/schemas/__store__.json b/src/ansiblelint/schemas/__store__.json
index bbe922699c..414085dc6d 100644
--- a/src/ansiblelint/schemas/__store__.json
+++ b/src/ansiblelint/schemas/__store__.json
@@ -36,7 +36,7 @@
     "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/molecule.json"
   },
   "playbook": {
-    "etag": "152dbc4407b1538dc940c58159135ea608ce18c9de7832d4351782ae644a5970",
+    "etag": "4f8cbba62fcf8a1fa6e8ef5e42696aec5b0876487478df83a7ffdf8bdbb4abcf",
     "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/playbook.json"
   },
   "requirements": {
diff --git a/src/ansiblelint/utils.py b/src/ansiblelint/utils.py
index 2b98571ea2..3859004afc 100644
--- a/src/ansiblelint/utils.py
+++ b/src/ansiblelint/utils.py
@@ -41,6 +41,7 @@
 from ansible.parsing.dataloader import DataLoader
 from ansible.parsing.mod_args import ModuleArgsParser
 from ansible.parsing.plugin_docs import read_docstring
+from ansible.parsing.splitter import split_args
 from ansible.parsing.yaml.constructor import AnsibleConstructor, AnsibleMapping
 from ansible.parsing.yaml.loader import AnsibleLoader
 from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleSequence
@@ -217,26 +218,18 @@ def ansible_template(
 }
 
 
-def tokenize(line: str) -> tuple[str, list[str], dict[str, str]]:
+def tokenize(value: str) -> tuple[list[str], dict[str, str]]:
     """Parse a string task invocation."""
-    tokens = line.lstrip().split(" ")
-    if tokens[0] == "-":
-        tokens = tokens[1:]
-    if tokens[0] == "action:" or tokens[0] == "local_action:":
-        tokens = tokens[1:]
-    command = tokens[0].replace(":", "")
-
-    args = []
-    kwargs = {}
-    non_kv_found = False
-    for arg in tokens[1:]:
-        if "=" in arg and not non_kv_found:
-            key_value = arg.split("=", 1)
-            kwargs[key_value[0]] = key_value[1]
+    parts = split_args(value)
+    args: list[str] = []
+    kwargs: dict[str, str] = {}
+    for part in parts:
+        if "=" not in part:
+            args.append(part)
         else:
-            non_kv_found = True
-            args.append(arg)
-    return (command, args, kwargs)
+            k, v = part.split("=", 1)
+            kwargs[k] = v
+    return (args, kwargs)
 
 
 def playbook_items(pb_data: AnsibleBaseYAMLObject) -> ItemsView:  # type: ignore[type-arg]
@@ -326,8 +319,7 @@ def include_children(
         return []
 
     # handle include: filename.yml tags=blah
-    # pylint: disable=unused-variable
-    (command, args, kwargs) = tokenize(f"{k}: {v}")
+    (args, _) = tokenize(v)
     result = path_dwim(basedir, args[0])
 
     while basedir not in ["", "/"]:
diff --git a/test/test_utils.py b/test/test_utils.py
index 1554b15eed..0d8afcdc19 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -51,37 +51,38 @@
 
 
 @pytest.mark.parametrize(
-    ("string", "expected_cmd", "expected_args", "expected_kwargs"),
+    ("string", "expected_args", "expected_kwargs"),
     (
-        pytest.param("", "", [], {}, id="blank"),
-        pytest.param("vars:", "vars", [], {}, id="single_word"),
-        pytest.param("hello: a=1", "hello", [], {"a": "1"}, id="string_module_and_arg"),
-        pytest.param("action: hello a=1", "hello", [], {"a": "1"}, id="strips_action"),
+        pytest.param("", [], {}, id="a"),
+        pytest.param("a=1", [], {"a": "1"}, id="b"),
+        pytest.param("hello a=1", ["hello"], {"a": "1"}, id="c"),
         pytest.param(
-            "action: whatever bobbins x=y z=x c=3",
-            "whatever",
-            ["bobbins", "x=y", "z=x", "c=3"],
-            {},
+            "whatever bobbins x=y z=x c=3",
+            ["whatever", "bobbins"],
+            {"x": "y", "z": "x", "c": "3"},
             id="more_than_one_arg",
         ),
         pytest.param(
-            "action: command chdir=wxy creates=zyx tar xzf zyx.tgz",
-            "command",
-            ["tar", "xzf", "zyx.tgz"],
+            "command chdir=wxy creates=zyx tar xzf zyx.tgz",
+            ["command", "tar", "xzf", "zyx.tgz"],
             {"chdir": "wxy", "creates": "zyx"},
             id="command_with_args",
         ),
+        pytest.param(
+            "{{ varset }}.yml",
+            ["{{ varset }}.yml"],
+            {},
+            id="x",
+        ),
     ),
 )
 def test_tokenize(
     string: str,
-    expected_cmd: str,
     expected_args: Sequence[str],
     expected_kwargs: dict[str, Any],
 ) -> None:
     """Test that tokenize works for different input types."""
-    (cmd, args, kwargs) = utils.tokenize(string)
-    assert cmd == expected_cmd
+    (args, kwargs) = utils.tokenize(string)
     assert args == expected_args
     assert kwargs == expected_kwargs