summary | refs | log | tree | commit | diff | stats
path: root/lib/ansible/parsing/dataloader.py
diff options
context:
space:
mode:
author	Daniel Baumann <daniel.baumann@progress-linux.org>	2024-06-05 16:16:49 +0000
committer	Daniel Baumann <daniel.baumann@progress-linux.org>	2024-06-05 16:16:49 +0000
commit	48e387c5c12026a567eb7b293a3a590241c0cecb (patch)
tree	80f2573be2d7d534b8ac4d2a852fe43f7ac35324	/lib/ansible/parsing/dataloader.py
parent	Releasing progress-linux version 2.16.6-1~progress7.99u1. (diff)
downloadansible-core-48e387c5c12026a567eb7b293a3a590241c0cecb.tar.xz
ansible-core-48e387c5c12026a567eb7b293a3a590241c0cecb.zip
Merging upstream version 2.17.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'lib/ansible/parsing/dataloader.py')
-rw-r--r--	lib/ansible/parsing/dataloader.py	35
1 file changed, 23 insertions, 12 deletions
diff --git a/lib/ansible/parsing/dataloader.py b/lib/ansible/parsing/dataloader.py
index 13a57e4..17fc534 100644
--- a/lib/ansible/parsing/dataloader.py
+++ b/lib/ansible/parsing/dataloader.py
@@ -2,9 +2,7 @@
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import copy
import os
@@ -79,30 +77,43 @@ class DataLoader:
'''Backwards compat for now'''
return from_yaml(data, file_name, show_content, self._vault.secrets, json_only=json_only)
- def load_from_file(self, file_name: str, cache: bool = True, unsafe: bool = False, json_only: bool = False) -> t.Any:
- ''' Loads data from a file, which can contain either JSON or YAML. '''
+ def load_from_file(self, file_name: str, cache: str = 'all', unsafe: bool = False, json_only: bool = False) -> t.Any:
+ '''
+ Loads data from a file, which can contain either JSON or YAML.
+
+ :param file_name: The name of the file to load data from.
+ :param cache: Options for caching: none|all|vaulted
+ :param unsafe: If True, returns the parsed data as-is without deep copying.
+ :param json_only: If True, only loads JSON data from the file.
+ :return: The loaded data, optionally deep-copied for safety.
+ '''
+ # Resolve the file name
file_name = self.path_dwim(file_name)
+
+ # Log the file being loaded
display.debug("Loading data from %s" % file_name)
- # if the file has already been read in and cached, we'll
- # return those results to avoid more file/vault operations
- if cache and file_name in self._FILE_CACHE:
+ # Check if the file has been cached and use the cached data if available
+ if cache != 'none' and file_name in self._FILE_CACHE:
parsed_data = self._FILE_CACHE[file_name]
else:
- # read the file contents and load the data structure from them
+ # Read the file contents and load the data structure from them
(b_file_data, show_content) = self._get_file_contents(file_name)
file_data = to_text(b_file_data, errors='surrogate_or_strict')
parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content, json_only=json_only)
- # cache the file contents for next time
- self._FILE_CACHE[file_name] = parsed_data
+ # Cache the file contents for next time based on the cache option
+ if cache == 'all':
+ self._FILE_CACHE[file_name] = parsed_data
+ elif cache == 'vaulted' and not show_content:
+ self._FILE_CACHE[file_name] = parsed_data
+ # Return the parsed data, optionally deep-copied for safety
if unsafe:
return parsed_data
else:
- # return a deep copy here, so the cache is not affected
return copy.deepcopy(parsed_data)
def path_exists(self, path: str) -> bool: