Initial project commit
241 lines  collections/ansible_collections/community/general/plugins/cache/memcached.py  (vendored, new file)
@@ -0,0 +1,241 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Brian Coca, Josh Drake, et al
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    author: Unknown (!UNKNOWN)
    name: memcached
    short_description: Use memcached DB for cache
    description:
        - This cache uses JSON formatted, per host records saved in memcached.
    requirements:
        - memcache (python lib)
    options:
      _uri:
        description:
          - List of connection information for the memcached DBs
        default: ['127.0.0.1:11211']
        type: list
        elements: string
        env:
          - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
        ini:
          - key: fact_caching_connection
            section: defaults
      _prefix:
        description: User defined prefix to use when creating the DB entries
        default: ansible_facts
        env:
          - name: ANSIBLE_CACHE_PLUGIN_PREFIX
        ini:
          - key: fact_caching_prefix
            section: defaults
      _timeout:
        default: 86400
        description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
        env:
          - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
        ini:
          - key: fact_caching_timeout
            section: defaults
        type: integer
'''

import collections
import os
import time
from multiprocessing import Lock
from itertools import chain

from ansible.errors import AnsibleError
from ansible.module_utils.common._collections_compat import MutableSet
from ansible.plugins.cache import BaseCacheModule
from ansible.utils.display import Display

try:
    import memcache
    HAS_MEMCACHE = True
except ImportError:
    HAS_MEMCACHE = False

display = Display()


class ProxyClientPool(object):
    """
    Memcached connection pooling for thread/fork safety. Inspired by py-redis
    connection pool.

    Available connections are maintained in a deque and released in a FIFO manner.
    """

    def __init__(self, *args, **kwargs):
        self.max_connections = kwargs.pop('max_connections', 1024)
        self.connection_args = args
        self.connection_kwargs = kwargs
        self.reset()

    def reset(self):
        self.pid = os.getpid()
        self._num_connections = 0
        self._available_connections = collections.deque(maxlen=self.max_connections)
        self._locked_connections = set()
        self._lock = Lock()

    def _check_safe(self):
        if self.pid != os.getpid():
            with self._lock:
                if self.pid == os.getpid():
                    # bail out - another thread already acquired the lock
                    return
                self.disconnect_all()
                self.reset()

    def get_connection(self):
        self._check_safe()
        try:
            connection = self._available_connections.popleft()
        except IndexError:
            connection = self.create_connection()
        self._locked_connections.add(connection)
        return connection

    def create_connection(self):
        if self._num_connections >= self.max_connections:
            raise RuntimeError("Too many memcached connections")
        self._num_connections += 1
        return memcache.Client(*self.connection_args, **self.connection_kwargs)

    def release_connection(self, connection):
        self._check_safe()
        self._locked_connections.remove(connection)
        self._available_connections.append(connection)

    def disconnect_all(self):
        for conn in chain(self._available_connections, self._locked_connections):
            conn.disconnect_all()

    def __getattr__(self, name):
        def wrapped(*args, **kwargs):
            return self._proxy_client(name, *args, **kwargs)
        return wrapped

    def _proxy_client(self, name, *args, **kwargs):
        conn = self.get_connection()

        try:
            return getattr(conn, name)(*args, **kwargs)
        finally:
            self.release_connection(conn)


class CacheModuleKeys(MutableSet):
    """
    A set subclass that keeps track of insertion time and persists
    the set in memcached.
    """
    PREFIX = 'ansible_cache_keys'

    def __init__(self, cache, *args, **kwargs):
        self._cache = cache
        self._keyset = dict(*args, **kwargs)

    def __contains__(self, key):
        return key in self._keyset

    def __iter__(self):
        return iter(self._keyset)

    def __len__(self):
        return len(self._keyset)

    def add(self, value):
        self._keyset[value] = time.time()
        self._cache.set(self.PREFIX, self._keyset)

    def discard(self, value):
        del self._keyset[value]
        self._cache.set(self.PREFIX, self._keyset)

    def remove_by_timerange(self, s_min, s_max):
        for k in list(self._keyset.keys()):
            t = self._keyset[k]
            if s_min < t < s_max:
                del self._keyset[k]
        self._cache.set(self.PREFIX, self._keyset)


class CacheModule(BaseCacheModule):

    def __init__(self, *args, **kwargs):
        connection = ['127.0.0.1:11211']

        super(CacheModule, self).__init__(*args, **kwargs)
        if self.get_option('_uri'):
            connection = self.get_option('_uri')
        self._timeout = self.get_option('_timeout')
        self._prefix = self.get_option('_prefix')

        if not HAS_MEMCACHE:
            raise AnsibleError("python-memcached is required for the memcached fact cache")

        self._cache = {}
        self._db = ProxyClientPool(connection, debug=0)
        self._keys = CacheModuleKeys(self._db, self._db.get(CacheModuleKeys.PREFIX) or [])

    def _make_key(self, key):
        return "{0}{1}".format(self._prefix, key)

    def _expire_keys(self):
        if self._timeout > 0:
            expiry_age = time.time() - self._timeout
            self._keys.remove_by_timerange(0, expiry_age)

    def get(self, key):
        if key not in self._cache:
            value = self._db.get(self._make_key(key))
            # guard against the key not being removed from the keyset;
            # this could happen in cases where the timeout value is changed
            # between invocations
            if value is None:
                self.delete(key)
                raise KeyError
            self._cache[key] = value

        return self._cache.get(key)

    def set(self, key, value):
        self._db.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
        self._cache[key] = value
        self._keys.add(key)

    def keys(self):
        self._expire_keys()
        return list(iter(self._keys))

    def contains(self, key):
        self._expire_keys()
        return key in self._keys

    def delete(self, key):
        del self._cache[key]
        self._db.delete(self._make_key(key))
        self._keys.discard(key)

    def flush(self):
        for key in self.keys():
            self.delete(key)

    def copy(self):
        return self._keys.copy()

    def __getstate__(self):
        return dict()

    def __setstate__(self, data):
        self.__init__()
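Note: ProxyClientPool proxies any method call through a pooled connection via __getattr__. A minimal sketch of that checkout/release pattern follows; MiniPool and DummyClient are illustrative names (DummyClient stands in for memcache.Client so the sketch runs without a memcached server). The plugin itself would presumably be selected via fact_caching = community.general.memcached in the [defaults] section of ansible.cfg, going by the collection path.

# Minimal sketch of the ProxyClientPool checkout/release pattern.
# DummyClient stands in for memcache.Client; not part of the plugin.
import collections


class DummyClient(object):
    def get(self, key):
        return "value-for-%s" % key

    def disconnect_all(self):
        pass


class MiniPool(object):
    def __init__(self, factory, max_connections=4):
        self._factory = factory
        self._available = collections.deque(maxlen=max_connections)
        self._in_use = set()

    def get_connection(self):
        # reuse an idle connection (FIFO) or create a new one
        try:
            conn = self._available.popleft()
        except IndexError:
            conn = self._factory()
        self._in_use.add(conn)
        return conn

    def release_connection(self, conn):
        self._in_use.remove(conn)
        self._available.append(conn)

    def __getattr__(self, name):
        # called only for attributes MiniPool itself lacks, e.g. 'get';
        # proxies the call through a pooled connection, then releases it
        def wrapped(*args, **kwargs):
            conn = self.get_connection()
            try:
                return getattr(conn, name)(*args, **kwargs)
            finally:
                self.release_connection(conn)
        return wrapped


pool = MiniPool(DummyClient)
print(pool.get("host1"))  # checked out, called, released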
69 lines  collections/ansible_collections/community/general/plugins/cache/pickle.py  (vendored, new file)
@@ -0,0 +1,69 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Brian Coca
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    name: pickle
    short_description: Pickle formatted files.
    description:
        - This cache uses Python's pickle serialization format, in per host files, saved to the filesystem.
    author: Brian Coca (@bcoca)
    options:
      _uri:
        required: true
        description:
          - Path in which the cache plugin will save the files
        env:
          - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
        ini:
          - key: fact_caching_connection
            section: defaults
      _prefix:
        description: User defined prefix to use when creating the files
        env:
          - name: ANSIBLE_CACHE_PLUGIN_PREFIX
        ini:
          - key: fact_caching_prefix
            section: defaults
      _timeout:
        default: 86400
        description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
        env:
          - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
        ini:
          - key: fact_caching_timeout
            section: defaults
'''

try:
    import cPickle as pickle
except ImportError:
    import pickle

from ansible.module_utils.six import PY3
from ansible.plugins.cache import BaseFileCacheModule


class CacheModule(BaseFileCacheModule):
    """
    A caching module backed by pickle files.
    """

    def _load(self, filepath):
        # Pickle is a binary format
        with open(filepath, 'rb') as f:
            if PY3:
                return pickle.load(f, encoding='bytes')
            else:
                return pickle.load(f)

    def _dump(self, value, filepath):
        with open(filepath, 'wb') as f:
            # Use pickle protocol 2 which is compatible with Python 2.3+.
            pickle.dump(value, f, protocol=2)
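Note: the _load/_dump pair above is the entire backend-specific surface; BaseFileCacheModule supplies the path handling, prefixing, and timeout logic. A standalone round-trip of the same serialization choices (binary mode, protocol 2 for Python 2/3 compatibility, encoding='bytes' on load as in the PY3 branch); the file path and fact values are illustrative.

# Round-trip mirroring the plugin's _dump/_load choices (Python 3).
import os
import pickle
import tempfile

facts = {'ansible_distribution': 'Debian', 'ansible_processor_count': 4}

path = os.path.join(tempfile.mkdtemp(), 'ansible_factshost1')  # illustrative path

# dump exactly as the plugin does: binary mode, protocol 2
with open(path, 'wb') as f:
    pickle.dump(facts, f, protocol=2)

# load it back; encoding='bytes' matters only for Python 2-era byte strings
with open(path, 'rb') as f:
    restored = pickle.load(f, encoding='bytes')

assert restored == facts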
232 lines  collections/ansible_collections/community/general/plugins/cache/redis.py  (vendored, new file)
@@ -0,0 +1,232 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Brian Coca, Josh Drake, et al
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    author: Unknown (!UNKNOWN)
    name: redis
    short_description: Use Redis DB for cache
    description:
        - This cache uses JSON formatted, per host records saved in Redis.
    requirements:
      - redis>=2.4.5 (python lib)
    options:
      _uri:
        description:
          - A colon separated string of connection information for Redis.
          - The format is V(host:port:db:password), for example V(localhost:6379:0:changeme).
          - To use encryption in transit, prefix the connection with V(tls://), as in V(tls://localhost:6379:0:changeme).
          - To use redis sentinel, use separator V(;), for example V(localhost:26379;localhost:26379;0:changeme). Requires redis>=2.9.0.
        required: true
        env:
          - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
        ini:
          - key: fact_caching_connection
            section: defaults
      _prefix:
        description: User defined prefix to use when creating the DB entries
        default: ansible_facts
        env:
          - name: ANSIBLE_CACHE_PLUGIN_PREFIX
        ini:
          - key: fact_caching_prefix
            section: defaults
      _keyset_name:
        description: User defined name for cache keyset name.
        default: ansible_cache_keys
        env:
          - name: ANSIBLE_CACHE_REDIS_KEYSET_NAME
        ini:
          - key: fact_caching_redis_keyset_name
            section: defaults
        version_added: 1.3.0
      _sentinel_service_name:
        description: The redis sentinel service name (or referenced as cluster name).
        env:
          - name: ANSIBLE_CACHE_REDIS_SENTINEL
        ini:
          - key: fact_caching_redis_sentinel
            section: defaults
        version_added: 1.3.0
      _timeout:
        default: 86400
        description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
        env:
          - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
        ini:
          - key: fact_caching_timeout
            section: defaults
        type: integer
'''

import re
import time
import json

from ansible.errors import AnsibleError
from ansible.module_utils.common.text.converters import to_native
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
from ansible.plugins.cache import BaseCacheModule
from ansible.utils.display import Display

try:
    from redis import StrictRedis, VERSION
    HAS_REDIS = True
except ImportError:
    HAS_REDIS = False

display = Display()


class CacheModule(BaseCacheModule):
    """
    A caching module backed by redis.

    Keys are maintained in a zset with their score being the timestamp
    when they are inserted. This allows for the usage of 'zremrangebyscore'
    to expire keys. This mechanism is used instead of a pattern-matched
    'scan' for performance.
    """
    _sentinel_service_name = None
    re_url_conn = re.compile(r'^([^:]+|\[[^]]+\]):(\d+):(\d+)(?::(.*))?$')
    re_sent_conn = re.compile(r'^(.*):(\d+)$')

    def __init__(self, *args, **kwargs):
        uri = ''

        super(CacheModule, self).__init__(*args, **kwargs)
        if self.get_option('_uri'):
            uri = self.get_option('_uri')
        self._timeout = float(self.get_option('_timeout'))
        self._prefix = self.get_option('_prefix')
        self._keys_set = self.get_option('_keyset_name')
        self._sentinel_service_name = self.get_option('_sentinel_service_name')

        if not HAS_REDIS:
            raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'")

        self._cache = {}
        kw = {}

        # tls connection
        tlsprefix = 'tls://'
        if uri.startswith(tlsprefix):
            kw['ssl'] = True
            uri = uri[len(tlsprefix):]

        # redis sentinel connection
        if self._sentinel_service_name:
            self._db = self._get_sentinel_connection(uri, kw)
        # normal connection
        else:
            connection = self._parse_connection(self.re_url_conn, uri)
            self._db = StrictRedis(*connection, **kw)

        display.vv('Redis connection: %s' % self._db)

    @staticmethod
    def _parse_connection(re_patt, uri):
        match = re_patt.match(uri)
        if not match:
            raise AnsibleError("Unable to parse connection string")
        return match.groups()

    def _get_sentinel_connection(self, uri, kw):
        """
        get sentinel connection details from _uri
        """
        try:
            from redis.sentinel import Sentinel
        except ImportError:
            raise AnsibleError("The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel.")

        if ';' not in uri:
            raise AnsibleError('_uri does not have sentinel syntax.')

        # format: "localhost:26379;localhost2:26379;0:changeme"
        connections = uri.split(';')
        connection_args = connections.pop(-1)
        if len(connection_args) > 0:  # handle if no db nr is given
            connection_args = connection_args.split(':')
            kw['db'] = connection_args.pop(0)
            try:
                kw['password'] = connection_args.pop(0)
            except IndexError:
                pass  # password is optional

        sentinels = [self._parse_connection(self.re_sent_conn, shost) for shost in connections]
        display.vv('\nUsing redis sentinels: %s' % sentinels)
        scon = Sentinel(sentinels, **kw)
        try:
            return scon.master_for(self._sentinel_service_name, socket_timeout=0.2)
        except Exception as exc:
            raise AnsibleError('Could not connect to redis sentinel: %s' % to_native(exc))

    def _make_key(self, key):
        return self._prefix + key

    def get(self, key):

        if key not in self._cache:
            value = self._db.get(self._make_key(key))
            # guard against the key not being removed from the zset;
            # this could happen in cases where the timeout value is changed
            # between invocations
            if value is None:
                self.delete(key)
                raise KeyError
            self._cache[key] = json.loads(value, cls=AnsibleJSONDecoder)

        return self._cache.get(key)

    def set(self, key, value):

        value2 = json.dumps(value, cls=AnsibleJSONEncoder, sort_keys=True, indent=4)
        if self._timeout > 0:  # a timeout of 0 is handled as meaning 'never expire'
            self._db.setex(self._make_key(key), int(self._timeout), value2)
        else:
            self._db.set(self._make_key(key), value2)

        if VERSION[0] == 2:
            self._db.zadd(self._keys_set, time.time(), key)
        else:
            self._db.zadd(self._keys_set, {key: time.time()})
        self._cache[key] = value

    def _expire_keys(self):
        if self._timeout > 0:
            expiry_age = time.time() - self._timeout
            self._db.zremrangebyscore(self._keys_set, 0, expiry_age)

    def keys(self):
        self._expire_keys()
        return self._db.zrange(self._keys_set, 0, -1)

    def contains(self, key):
        self._expire_keys()
        return (self._db.zrank(self._keys_set, key) is not None)

    def delete(self, key):
        if key in self._cache:
            del self._cache[key]
        self._db.delete(self._make_key(key))
        self._db.zrem(self._keys_set, key)

    def flush(self):
        for key in list(self.keys()):
            self.delete(key)

    def copy(self):
        # TODO: there is probably a better way to do this in redis
        ret = dict([(k, self.get(k)) for k in self.keys()])
        return ret

    def __getstate__(self):
        return dict()

    def __setstate__(self, data):
        self.__init__()
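Note: all non-sentinel _uri parsing above is driven by the re_url_conn regex, which also accepts bracketed IPv6 hosts and makes the trailing password optional. A quick standalone check of what the regex (copied verbatim from the plugin) yields for the documented formats; the example URIs are illustrative.

# Exercise the plugin's connection-string regex on the documented formats.
import re

re_url_conn = re.compile(r'^([^:]+|\[[^]]+\]):(\d+):(\d+)(?::(.*))?$')

for uri in ('localhost:6379:0:changeme',   # host:port:db:password
            'localhost:6379:0',            # password omitted
            '[::1]:6379:0:changeme'):      # bracketed IPv6 host
    host, port, db, password = re_url_conn.match(uri).groups()
    print(host, port, db, password)
# localhost 6379 0 changeme
# localhost 6379 0 None
# [::1] 6379 0 changeme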
66 lines  collections/ansible_collections/community/general/plugins/cache/yaml.py  (vendored, new file)
@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Brian Coca
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    name: yaml
    short_description: YAML formatted files.
    description:
        - This cache uses YAML formatted, per host, files saved to the filesystem.
    author: Brian Coca (@bcoca)
    options:
      _uri:
        required: true
        description:
          - Path in which the cache plugin will save the files
        env:
          - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
        ini:
          - key: fact_caching_connection
            section: defaults
      _prefix:
        description: User defined prefix to use when creating the files
        env:
          - name: ANSIBLE_CACHE_PLUGIN_PREFIX
        ini:
          - key: fact_caching_prefix
            section: defaults
      _timeout:
        default: 86400
        description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
        env:
          - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
        ini:
          - key: fact_caching_timeout
            section: defaults
        type: integer
'''


import codecs

import yaml

from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.plugins.cache import BaseFileCacheModule


class CacheModule(BaseFileCacheModule):
    """
    A caching module backed by yaml files.
    """

    def _load(self, filepath):
        with codecs.open(filepath, 'r', encoding='utf-8') as f:
            return AnsibleLoader(f).get_single_data()

    def _dump(self, value, filepath):
        with codecs.open(filepath, 'w', encoding='utf-8') as f:
            yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)
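Note: as with the pickle plugin, only _load and _dump differ per backend. A standalone round-trip with plain PyYAML, substituting yaml.safe_load for AnsibleLoader/AnsibleDumper (which would require an ansible installation); the file path and fact values are illustrative.

# Round-trip mirroring the yaml plugin's _dump/_load, with plain PyYAML.
import codecs
import os
import tempfile

import yaml  # PyYAML

facts = {'ansible_distribution': 'Debian', 'ansible_processor_count': 4}

path = os.path.join(tempfile.mkdtemp(), 'ansible_factshost1')  # illustrative path

# dump as the plugin does: utf-8 text, block style (default_flow_style=False)
with codecs.open(path, 'w', encoding='utf-8') as f:
    yaml.dump(facts, f, default_flow_style=False)

# the plugin loads via AnsibleLoader; safe_load is the plain-PyYAML stand-in
with codecs.open(path, 'r', encoding='utf-8') as f:
    assert yaml.safe_load(f) == facts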