initial project commit

2024-02-05 16:28:05 -05:00
commit 5861055c15
3299 changed files with 458518 additions and 0 deletions


@ -0,0 +1,9 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->
## Azure Pipelines Configuration
Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.


@ -0,0 +1,421 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

trigger:
  batch: true
  branches:
    include:
      - main
      - stable-*

pr:
  autoCancel: true
  branches:
    include:
      - main
      - stable-*

schedules:
  - cron: 0 8 * * *
    displayName: Nightly (main)
    always: true
    branches:
      include:
        - main
  - cron: 0 10 * * *
    displayName: Nightly (active stable branches)
    always: true
    branches:
      include:
        - stable-8
        - stable-7
  - cron: 0 11 * * 0
    displayName: Weekly (old stable branches)
    always: true
    branches:
      include:
        - stable-6

variables:
  - name: checkoutPath
    value: ansible_collections/community/general
  - name: coverageBranches
    value: main
  - name: pipelinesCoverage
    value: coverage
  - name: entryPoint
    value: tests/utils/shippable/shippable.sh
  - name: fetchDepth
    value: 0

resources:
  containers:
    - container: default
      image: quay.io/ansible/azure-pipelines-test-container:4.0.1

pool: Standard

stages:
### Sanity
  - stage: Sanity_devel
    displayName: Sanity devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: devel/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
            - test: extra
  - stage: Sanity_2_16
    displayName: Sanity 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.16/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_15
    displayName: Sanity 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.15/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_14
    displayName: Sanity 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.14/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
### Units
  - stage: Units_devel
    displayName: Units devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: devel/units/{0}/1
          targets:
            - test: 3.7
            - test: 3.8
            - test: 3.9
            - test: '3.10'
            - test: '3.11'
            - test: '3.12'
  - stage: Units_2_16
    displayName: Units 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.16/units/{0}/1
          targets:
            - test: 2.7
            - test: 3.6
            - test: "3.11"
  - stage: Units_2_15
    displayName: Units 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.15/units/{0}/1
          targets:
            - test: 3.5
            - test: "3.10"
  - stage: Units_2_14
    displayName: Units 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.14/units/{0}/1
          targets:
            - test: 3.9
## Remote
  - stage: Remote_devel_extra_vms
    displayName: Remote devel extra VMs
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/{0}
          targets:
            - name: Alpine 3.18
              test: alpine/3.18
            # - name: Fedora 39
            #   test: fedora/39
            - name: Ubuntu 22.04
              test: ubuntu/22.04
          groups:
            - vm
  - stage: Remote_devel
    displayName: Remote devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/{0}
          targets:
            - name: macOS 13.2
              test: macos/13.2
            - name: RHEL 9.3
              test: rhel/9.3
            - name: FreeBSD 13.2
              test: freebsd/13.2
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_16
    displayName: Remote 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/{0}
          targets:
            #- name: macOS 13.2
            #  test: macos/13.2
            - name: RHEL 9.2
              test: rhel/9.2
            - name: RHEL 8.8
              test: rhel/8.8
            #- name: FreeBSD 13.2
            #  test: freebsd/13.2
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_15
    displayName: Remote 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.15/{0}
          targets:
            - name: RHEL 9.1
              test: rhel/9.1
            - name: RHEL 8.7
              test: rhel/8.7
            - name: RHEL 7.9
              test: rhel/7.9
            # - name: FreeBSD 13.1
            #   test: freebsd/13.1
            # - name: FreeBSD 12.4
            #   test: freebsd/12.4
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_14
    displayName: Remote 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.14/{0}
          targets:
            #- name: macOS 12.0
            #  test: macos/12.0
            - name: RHEL 9.0
              test: rhel/9.0
            #- name: FreeBSD 12.4
            #  test: freebsd/12.4
          groups:
            - 1
            - 2
            - 3
### Docker
  - stage: Docker_devel
    displayName: Docker devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/linux/{0}
          targets:
            - name: Fedora 39
              test: fedora39
            - name: Ubuntu 20.04
              test: ubuntu2004
            - name: Ubuntu 22.04
              test: ubuntu2204
            - name: Alpine 3
              test: alpine3
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_16
    displayName: Docker 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/linux/{0}
          targets:
            - name: Fedora 38
              test: fedora38
            - name: openSUSE 15
              test: opensuse15
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_15
    displayName: Docker 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.15/linux/{0}
          targets:
            - name: Fedora 37
              test: fedora37
            - name: CentOS 7
              test: centos7
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_14
    displayName: Docker 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.14/linux/{0}
          targets:
            - name: Alpine 3
              test: alpine3
          groups:
            - 1
            - 2
            - 3
### Community Docker
  - stage: Docker_community_devel
    displayName: Docker (community images) devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: devel/linux-community/{0}
          targets:
            - name: Debian Bullseye
              test: debian-bullseye/3.9
            - name: Debian Bookworm
              test: debian-bookworm/3.11
            - name: ArchLinux
              test: archlinux/3.11
          groups:
            - 1
            - 2
            - 3
### Generic
  - stage: Generic_devel
    displayName: Generic devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: devel/generic/{0}/1
          targets:
            - test: '3.7'
            - test: '3.12'
  - stage: Generic_2_16
    displayName: Generic 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.16/generic/{0}/1
          targets:
            - test: '2.7'
            - test: '3.6'
            - test: '3.11'
  - stage: Generic_2_15
    displayName: Generic 2.15
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.15/generic/{0}/1
          targets:
            - test: '3.9'
  - stage: Generic_2_14
    displayName: Generic 2.14
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.14/generic/{0}/1
          targets:
            - test: '3.10'
  - stage: Summary
    condition: succeededOrFailed()
    dependsOn:
      - Sanity_devel
      - Sanity_2_16
      - Sanity_2_15
      - Sanity_2_14
      - Units_devel
      - Units_2_16
      - Units_2_15
      - Units_2_14
      - Remote_devel_extra_vms
      - Remote_devel
      - Remote_2_16
      - Remote_2_15
      - Remote_2_14
      - Docker_devel
      - Docker_2_16
      - Docker_2_15
      - Docker_2_14
      - Docker_community_devel
      # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
      # - Generic_devel
      # - Generic_2_16
      # - Generic_2_15
      # - Generic_2_14
    jobs:
      - template: templates/coverage.yml


@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# Aggregate code coverage results for later processing.
set -o pipefail -eu
agent_temp_directory="$1"
PATH="${PWD}/bin:${PATH}"
mkdir "${agent_temp_directory}/coverage/"
options=(--venv --venv-system-site-packages --color -v)
ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"
if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
    # Only analyze coverage if the installed version of ansible-test supports it.
    # Doing so allows this script to work unmodified for multiple Ansible versions.
    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi


@ -0,0 +1,64 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import shutil
import sys
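
# For example, with the stage/job names used in azure-pipelines.yml above, artifacts
# named "Coverage 1 Units devel Python 3.11" and "Coverage 2 Units devel Python 3.11"
# belong to the same job; only the files from attempt 2 are copied, with the label
# "Units devel Python 3.11" appended to each copied file name.
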
def main():
    """Main program entry point."""
    source_directory = sys.argv[1]

    if '/ansible_collections/' in os.getcwd():
        output_path = "tests/output"
    else:
        output_path = "test/results"

    destination_directory = os.path.join(output_path, 'coverage')

    if not os.path.exists(destination_directory):
        os.makedirs(destination_directory)

    jobs = {}
    count = 0

    for name in os.listdir(source_directory):
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
        label = match.group('label')
        attempt = int(match.group('attempt'))
        jobs[label] = max(attempt, jobs.get(label, 0))

    for label, attempt in jobs.items():
        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
        source = os.path.join(source_directory, name)
        source_files = os.listdir(source)

        for source_file in source_files:
            source_path = os.path.join(source, source_file)
            destination_path = os.path.join(destination_directory, source_file + '.' + label)
            print('"%s" -> "%s"' % (source_path, destination_path))
            shutil.copyfile(source_path, destination_path)
            count += 1

    print('Coverage file count: %d' % count)
    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
    print('##vso[task.setVariable variable=outputPath]%s' % output_path)


if __name__ == '__main__':
    main()


@ -0,0 +1,28 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# Check the test results and set variables for use in later steps.
set -o pipefail -eu
if [[ "$PWD" =~ /ansible_collections/ ]]; then
    output_path="tests/output"
else
    output_path="test/results"
fi

echo "##vso[task.setVariable variable=outputPath]${output_path}"

if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
    echo "##vso[task.setVariable variable=haveTestResults]true"
fi

if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveBotResults]true"
fi

if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveCoverageData]true"
fi


@ -0,0 +1,105 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""
Upload code coverage reports to codecov.io.
Multiple coverage files from multiple languages are accepted and aggregated after upload.
Python coverage, as well as PowerShell and Python stubs can all be uploaded.
"""
import argparse
import dataclasses
import pathlib
import shutil
import subprocess
import tempfile
import typing as t
import urllib.request
@dataclasses.dataclass(frozen=True)
class CoverageFile:
    name: str
    path: pathlib.Path
    flags: t.List[str]


@dataclasses.dataclass(frozen=True)
class Args:
    dry_run: bool
    path: pathlib.Path


def parse_args() -> Args:
    parser = argparse.ArgumentParser()
    parser.add_argument('-n', '--dry-run', action='store_true')
    parser.add_argument('path', type=pathlib.Path)

    args = parser.parse_args()

    # Store arguments in a typed dataclass
    fields = dataclasses.fields(Args)
    kwargs = {field.name: getattr(args, field.name) for field in fields}

    return Args(**kwargs)


def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
    processed = []
    for file in directory.joinpath('reports').glob('coverage*.xml'):
        name = file.stem.replace('coverage=', '')

        # Get flags from name
        flags = name.replace('-powershell', '').split('=')  # Drop '-powershell' suffix
        flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags]  # Remove "-01" from stub files

        processed.append(CoverageFile(name, file, flags))
    return tuple(processed)
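
# Illustrative examples of the naming scheme handled above (these file names are hypothetical):
#   "coverage=sanity=stub-01.xml"   -> name "sanity=stub-01",   flags ['sanity', 'stub']
#   "coverage=units-powershell.xml" -> name "units-powershell", flags ['units']
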
def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
    for file in files:
        cmd = [
            str(codecov_bin),
            '--name', file.name,
            '--file', str(file.path),
        ]
        for flag in file.flags:
            cmd.extend(['--flags', flag])

        if dry_run:
            print(f'DRY-RUN: Would run command: {cmd}')
            continue

        subprocess.run(cmd, check=True)


def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
    if dry_run:
        print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
        return

    with urllib.request.urlopen(url) as resp:
        with dest.open('w+b') as f:
            # Read data in chunks rather than all at once
            shutil.copyfileobj(resp, f, 64 * 1024)

    dest.chmod(flags)


def main():
    args = parse_args()
    url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'

    with tempfile.TemporaryDirectory(prefix='codecov-') as tmpdir:
        codecov_bin = pathlib.Path(tmpdir) / 'codecov'
        download_file(url, codecov_bin, 0o755, args.dry_run)

        files = process_files(args.path)
        upload_files(codecov_bin, files, args.dry_run)


if __name__ == '__main__':
    main()


@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.
set -o pipefail -eu
PATH="${PWD}/bin:${PATH}"
if ! ansible-test --help >/dev/null 2>&1; then
    # Install the devel version of ansible-test for generating code coverage reports.
    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
    # Since a version of ansible-test is required that can work with the output from multiple older releases, the devel version is used.
    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi
ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v


@ -0,0 +1,38 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# Configure the test environment and run the tests.
set -o pipefail -eu
entry_point="$1"
test="$2"
read -r -a coverage_branches <<< "$3" # space separated list of branches to run code coverage on for scheduled builds
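# In this commit the list comes from templates/test.yml as "$(coverageBranches)", which azure-pipelines.yml sets to "main".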
export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST
if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
    IS_PULL_REQUEST=true
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
    IS_PULL_REQUEST=
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi

COMPLETE=
COVERAGE=

if [ "${BUILD_REASON}" = "Schedule" ]; then
    COMPLETE=yes

    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
        COVERAGE=yes
    fi
fi
"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"


@ -0,0 +1,29 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import time
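
# run-tests.sh above pipes all test output through this script:
#   "${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"
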
def main():
    """Main program entry point."""
    start = time.time()

    sys.stdin.reconfigure(errors='surrogateescape')
    sys.stdout.reconfigure(errors='surrogateescape')

    for line in sys.stdin:
        seconds = time.time() - start
        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
        sys.stdout.flush()


if __name__ == '__main__':
    main()


@ -0,0 +1,44 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# This template adds a job for processing code coverage data.
# It will upload results to Azure Pipelines and codecov.io.
# Use it from a job stage that completes after all other jobs have completed.
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.
jobs:
  - job: Coverage
    displayName: Code Coverage
    container: default
    workspace:
      clean: all
    steps:
      - checkout: self
        fetchDepth: $(fetchDepth)
        path: $(checkoutPath)
      - task: DownloadPipelineArtifact@2
        displayName: Download Coverage Data
        inputs:
          path: coverage/
          patterns: "Coverage */*=coverage.combined"
      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
        displayName: Combine Coverage Data
      - bash: .azure-pipelines/scripts/report-coverage.sh
        displayName: Generate Coverage Report
        condition: gt(variables.coverageFileCount, 0)
      - task: PublishCodeCoverageResults@1
        inputs:
          codeCoverageTool: Cobertura
          # Azure Pipelines only accepts a single coverage data file.
          # That means only Python or PowerShell coverage can be uploaded, but not both.
          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
        displayName: Publish to Azure Pipelines
        condition: gt(variables.coverageFileCount, 0)
      - bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
        displayName: Publish to codecov.io
        condition: gt(variables.coverageFileCount, 0)
        continueOnError: true


@ -0,0 +1,60 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
# If this matrix template does not provide the required functionality, consider using the test template directly instead.
parameters:
  # A required list of dictionaries, one per test target.
  # Each item in the list must contain a "test" or "name" key.
  # Both may be provided. If one is omitted, the other will be used.
  - name: targets
    type: object

  # An optional list of values which will be used to multiply the targets list into a matrix.
  # Values can be strings or numbers.
  - name: groups
    type: object
    default: []

  # An optional format string used to generate the job name.
  # - {0} is the name of an item in the targets list.
  - name: nameFormat
    type: string
    default: "{0}"

  # An optional format string used to generate the test name.
  # - {0} is the name of an item in the targets list.
  - name: testFormat
    type: string
    default: "{0}"

  # An optional format string used to add the group to the job name.
  # {0} is the formatted name of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: nameGroupFormat
    type: string
    default: "{0} - {{1}}"

  # An optional format string used to add the group to the test name.
  # {0} is the formatted test of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: testGroupFormat
    type: string
    default: "{0}/{{1}}"

jobs:
  - template: test.yml
    parameters:
      jobs:
        - ${{ if eq(length(parameters.groups), 0) }}:
          - ${{ each target in parameters.targets }}:
            - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
              test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}
        - ${{ if not(eq(length(parameters.groups), 0)) }}:
          - ${{ each group in parameters.groups }}:
            - ${{ each target in parameters.targets }}:
              - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
                test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
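
For orientation, here is a condensed sketch of how this template is consumed. The caller is abridged from the Remote devel stage in azure-pipelines.yml above; the expanded job list is inferred from the default group format strings rather than copied from an actual pipeline run:

```yaml
jobs:
  - template: templates/matrix.yml
    parameters:
      testFormat: devel/{0}
      targets:
        - name: FreeBSD 13.2
          test: freebsd/13.2
      groups:
        - 1
        - 2

# Because "groups" is non-empty, the second branch above multiplies targets by groups
# and passes test.yml a job list roughly equivalent to:
#   - name: FreeBSD 13.2 - 1
#     test: devel/freebsd/13.2/1
#   - name: FreeBSD 13.2 - 2
#     test: devel/freebsd/13.2/2
```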


@ -0,0 +1,50 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# This template uses the provided list of jobs to create one or more test jobs.
# It can be used directly if needed, or through the matrix template.

parameters:
  # A required list of dictionaries, one per test job.
  # Each item in the list must contain a "test" and "name" key.
  - name: jobs
    type: object
jobs:
  - ${{ each job in parameters.jobs }}:
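    # Azure Pipelines job identifiers may only contain alphanumerics and underscores, hence the
    # replace() chain: a test like "devel/freebsd/13.2/1" becomes the job id "test_devel_freebsd_13_2_1".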
    - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
      displayName: ${{ job.name }}
      container: default
      workspace:
        clean: all
      steps:
        - checkout: self
          fetchDepth: $(fetchDepth)
          path: $(checkoutPath)
        - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
          displayName: Run Tests
        - bash: .azure-pipelines/scripts/process-results.sh
          condition: succeededOrFailed()
          displayName: Process Results
        - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
          condition: eq(variables.haveCoverageData, 'true')
          displayName: Aggregate Coverage Data
        - task: PublishTestResults@2
          condition: eq(variables.haveTestResults, 'true')
          inputs:
            testResultsFiles: "$(outputPath)/junit/*.xml"
          displayName: Publish Test Results
        - task: PublishPipelineArtifact@1
          condition: eq(variables.haveBotResults, 'true')
          displayName: Publish Bot Results
          inputs:
            targetPath: "$(outputPath)/bot/"
            artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
        - task: PublishPipelineArtifact@1
          condition: eq(variables.haveCoverageData, 'true')
          displayName: Publish Coverage Data
          inputs:
            targetPath: "$(Agent.TempDirectory)/coverage/"
            artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"

File diff suppressed because it is too large


@ -0,0 +1,153 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: Bug report
description: Create a report to help us improve
body:
- type: markdown
attributes:
value: |
Verify first that your issue is not [already reported on GitHub][issue search].
Also test if the latest release and devel branch are affected too.
*Complete **all** sections as described, this form is processed automatically.*
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
- type: textarea
attributes:
label: Summary
description: Explain the problem briefly below.
placeholder: >-
When I try to do X with the collection from the main branch on GitHub, Y
breaks in a way Z under the env E. Here are all the details I know
about this problem...
validations:
required: true
- type: dropdown
attributes:
label: Issue Type
# FIXME: Once GitHub allows defining the default choice, update this
options:
- Bug Report
validations:
required: true
- type: textarea
attributes:
# For smaller collections we could use a multi-select and hardcode the list
# May generate this list via GitHub action and walking files under https://github.com/ansible-collections/community.general/tree/main/plugins
# Select from list, filter as you type (`mysql` would only show the 3 mysql components)
# OR freeform - doesn't seem to be supported in adaptivecards
label: Component Name
description: >-
Write the short name of the module, plugin, task or feature below,
*use your best guess if unsure*. Do not include `community.general.`!
placeholder: dnf, apt, yum, pip, user etc.
validations:
required: true
- type: textarea
attributes:
label: Ansible Version
description: >-
Paste verbatim output from `ansible --version` between
triple backticks.
value: |
```console (paste below)
$ ansible --version
```
validations:
required: true
- type: textarea
attributes:
label: Community.general Version
description: >-
Paste verbatim output from "ansible-galaxy collection list community.general"
between triple backticks.
value: |
```console (paste below)
$ ansible-galaxy collection list community.general
```
validations:
required: true
- type: textarea
attributes:
label: Configuration
description: >-
If this issue has an example piece of YAML that can help to reproduce this problem, please provide it.
This can be a piece of YAML from, e.g., an automation, script, scene or configuration.
Paste verbatim output from `ansible-config dump --only-changed` between quotes
value: |
```console (paste below)
$ ansible-config dump --only-changed
```
- type: textarea
attributes:
label: OS / Environment
description: >-
Provide all relevant information below, e.g. target OS versions,
network device firmware, etc.
placeholder: RHEL 8, CentOS Stream etc.
validations:
required: false
- type: textarea
attributes:
label: Steps to Reproduce
description: |
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also paste any playbooks, configs and commands you used.
**HINT:** You can paste https://gist.github.com links for larger files.
value: |
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
validations:
required: true
- type: textarea
attributes:
label: Expected Results
description: >-
Describe what you expected to happen when running the steps above.
placeholder: >-
I expected X to happen because I assumed Y, but it did not.
validations:
required: true
- type: textarea
attributes:
label: Actual Results
description: |
Describe what actually happened. If possible run with extra verbosity (`-vvvv`).
Paste verbatim command output between quotes.
value: |
```console (paste below)
```
- type: checkboxes
attributes:
label: Code of Conduct
description: |
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
options:
- label: I agree to follow the Ansible Code of Conduct
required: true
...


@ -0,0 +1,31 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
blank_issues_enabled: false # default: true
contact_links:
- name: Security bug report
url: https://docs.ansible.com/ansible-core/devel/community/reporting_bugs_and_features.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
about: |
Please learn how to report security vulnerabilities here.
For all security related bugs, email security@ansible.com
instead of using this issue tracker and you will receive
a prompt response.
For more information, see
https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html
- name: Ansible Code of Conduct
url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
about: Be nice to other members of the community.
- name: Talks to the community
url: https://docs.ansible.com/ansible/latest/community/communication.html?utm_medium=github&utm_source=issue_template_chooser#mailing-list-information
about: Please ask and answer usage questions here
- name: Working groups
url: https://github.com/ansible/community/wiki
about: Interested in improving a specific area? Become a part of a working group!
- name: For Enterprise
url: https://www.ansible.com/products/engine?utm_medium=github&utm_source=issue_template_chooser_ansible_collections
about: Red Hat offers support for the Ansible Automation Platform


@ -0,0 +1,129 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: Documentation Report
description: Ask us about docs
# NOTE: issue body is enabled to allow screenshots
body:
- type: markdown
attributes:
value: |
Verify first that your issue is not [already reported on GitHub][issue search].
Also test if the latest release and devel branch are affected too.
*Complete **all** sections as described, this form is processed automatically.*
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
- type: textarea
attributes:
label: Summary
description: |
Explain the problem briefly below, add suggestions to wording or structure.
**HINT:** Did you know the documentation has an `Edit on GitHub` link on every page?
placeholder: >-
I was reading the Collection documentation of version X and I'm having
problems understanding Y. It would be very helpful if that got
rephrased as Z.
validations:
required: true
- type: dropdown
attributes:
label: Issue Type
# FIXME: Once GitHub allows defining the default choice, update this
options:
- Documentation Report
validations:
required: true
- type: input
attributes:
label: Component Name
description: >-
Write the short name of the file, module, plugin, task or feature below,
*use your best guess if unsure*. Do not include `community.general.`!
placeholder: mysql_user
validations:
required: true
- type: textarea
attributes:
label: Ansible Version
description: >-
Paste verbatim output from `ansible --version` between
triple backticks.
value: |
```console (paste below)
$ ansible --version
```
validations:
required: false
- type: textarea
attributes:
label: Community.general Version
description: >-
Paste verbatim output from "ansible-galaxy collection list community.general"
between triple backticks.
value: |
```console (paste below)
$ ansible-galaxy collection list community.general
```
validations:
required: true
- type: textarea
attributes:
label: Configuration
description: >-
Paste verbatim output from `ansible-config dump --only-changed` between quotes.
value: |
```console (paste below)
$ ansible-config dump --only-changed
```
validations:
required: false
- type: textarea
attributes:
label: OS / Environment
description: >-
Provide all relevant information below, e.g. OS version,
browser, etc.
placeholder: Fedora 33, Firefox etc.
validations:
required: false
- type: textarea
attributes:
label: Additional Information
description: |
Describe how this improves the documentation, e.g. before/after situation or screenshots.
**Tip:** It's not possible to upload screenshots via this field directly, but you can use the last textarea in this form to attach them.
**HINT:** You can paste https://gist.github.com links for larger files.
placeholder: >-
When the improvement is applied, it makes it more straightforward
to understand X.
validations:
required: false
- type: checkboxes
attributes:
label: Code of Conduct
description: |
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
options:
- label: I agree to follow the Ansible Code of Conduct
required: true
...


@ -0,0 +1,73 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: Feature request
description: Suggest an idea for this project
body:
- type: markdown
attributes:
value: |
Verify first that your issue is not [already reported on GitHub][issue search].
Also test if the latest release and devel branch are affected too.
*Complete **all** sections as described, this form is processed automatically.*
[issue search]: https://github.com/ansible-collections/community.general/search?q=is%3Aissue&type=issues
- type: textarea
attributes:
label: Summary
description: Describe the new feature/improvement briefly below.
placeholder: >-
I am trying to do X with the collection from the main branch on GitHub and
I think that implementing a feature Y would be very helpful for me and
every other user of community.general because of Z.
validations:
required: true
- type: dropdown
attributes:
label: Issue Type
# FIXME: Once GitHub allows defining the default choice, update this
options:
- Feature Idea
validations:
required: true
- type: input
attributes:
label: Component Name
description: >-
Write the short name of the module or plugin, or which other part(s) of the collection this feature affects.
*use your best guess if unsure*. Do not include `community.general.`!
placeholder: dnf, apt, yum, pip, user etc.
validations:
required: true
- type: textarea
attributes:
label: Additional Information
description: |
Describe how the feature would be used, why it is needed and what it would solve.
**HINT:** You can paste https://gist.github.com links for larger files.
value: |
<!--- Paste example playbooks or commands between quotes below -->
```yaml (paste below)
```
validations:
required: false
- type: checkboxes
attributes:
label: Code of Conduct
description: |
Read the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html?utm_medium=github&utm_source=issue_form--ansible-collections) first.
options:
- label: I agree to follow the Ansible Code of Conduct
required: true
...


@ -0,0 +1,11 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"


@ -0,0 +1,9 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
backport_branch_prefix: patchback/backports/
backport_label_prefix: backport-
target_branch_prefix: stable-
...


@ -0,0 +1,32 @@
##### SUMMARY
<!--- Describe the change below, including rationale and design decisions -->
<!--- HINT: Include "Fixes #nnn" if you are fixing an existing issue -->
<!--- Please do not forget to include a changelog fragment:
https://docs.ansible.com/ansible/devel/community/collection_development_process.html#creating-changelog-fragments
No need to include one for docs-only or test-only PRs, or for new plugin/module PRs.
Read about more details in CONTRIBUTING.md.
-->
##### ISSUE TYPE
<!--- Pick one or more below and delete the rest.
'Test Pull Request' is for PRs that add/extend tests without code changes. -->
- Bugfix Pull Request
- Docs Pull Request
- Feature Pull Request
- New Module/Plugin Pull Request
- Refactoring Pull Request
- Test Pull Request
##### COMPONENT NAME
<!--- Write the SHORT NAME of the module, plugin, task or feature below. -->
##### ADDITIONAL INFORMATION
<!--- Include additional information to help people understand the change here -->
<!--- A step-by-step reproduction of the problem is helpful if there is no related issue -->
<!--- Paste verbatim command output below, e.g. before and after your change -->
```paste below
```


@ -0,0 +1,3 @@
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later


@ -0,0 +1,11 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# DO NOT MODIFY
# Settings: https://probot.github.io/apps/settings/
# Pull settings from https://github.com/ansible-collections/.github/blob/master/.github/settings.yml
_extends: ".github"


@ -0,0 +1,181 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# For the comprehensive list of the inputs supported by the ansible-community/ansible-test-gh-action GitHub Action, see
# https://github.com/marketplace/actions/ansible-test
name: EOL CI
on:
# Run EOL CI against all pushes (direct commits, also merged PRs), Pull Requests
push:
branches:
- main
- stable-*
pull_request:
# Run EOL CI once per day (at 08:00 UTC)
schedule:
- cron: '0 8 * * *'
concurrency:
# Make sure there is at most one active run per PR, but do not cancel any non-PR runs
group: ${{ github.workflow }}-${{ (github.head_ref && github.event.number) || github.run_id }}
cancel-in-progress: true
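# github.head_ref and github.event.number are only set for pull request events, so PR runs share a per-PR group while pushes and scheduled runs fall back to the unique run_id.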
jobs:
sanity:
name: EOL Sanity (Ⓐ${{ matrix.ansible }})
strategy:
matrix:
ansible:
- '2.13'
# Ansible-test on various stable branches does not yet work well with cgroups v2.
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
# image for these stable branches. The list of branches where this is necessary will
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
# for the latest list.
runs-on: ubuntu-latest
steps:
- name: Perform sanity testing
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-version: stable-${{ matrix.ansible }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
pull-request-change-detection: 'true'
testing-type: sanity
units:
# Ansible-test on various stable branches does not yet work well with cgroups v2.
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
# image for these stable branches. The list of branches where this is necessary will
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
# for the latest list.
runs-on: ubuntu-latest
name: EOL Units (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
strategy:
# As soon as the first unit test fails, cancel the others to free up the CI queue
fail-fast: true
matrix:
ansible:
- ''
python:
- ''
exclude:
- ansible: ''
include:
- ansible: '2.13'
python: '2.7'
- ansible: '2.13'
python: '3.8'
steps:
- name: >-
Perform unit testing against
Ansible version ${{ matrix.ansible }}
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-version: stable-${{ matrix.ansible }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
pre-test-cmd: >-
mkdir -p ../../ansible
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
pull-request-change-detection: 'true'
target-python-version: ${{ matrix.python }}
testing-type: units
integration:
# Ansible-test on various stable branches does not yet work well with cgroups v2.
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
# image for these stable branches. The list of branches where this is necessary will
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
# for the latest list.
runs-on: ubuntu-latest
name: EOL I (Ⓐ${{ matrix.ansible }}+${{ matrix.docker }}+py${{ matrix.python }}:${{ matrix.target }})
strategy:
fail-fast: false
matrix:
ansible:
- ''
docker:
- ''
python:
- ''
target:
- ''
exclude:
- ansible: ''
include:
# 2.13
- ansible: '2.13'
docker: fedora35
python: ''
target: azp/posix/1/
- ansible: '2.13'
docker: fedora35
python: ''
target: azp/posix/2/
- ansible: '2.13'
docker: fedora35
python: ''
target: azp/posix/3/
- ansible: '2.13'
docker: opensuse15py2
python: ''
target: azp/posix/1/
- ansible: '2.13'
docker: opensuse15py2
python: ''
target: azp/posix/2/
- ansible: '2.13'
docker: opensuse15py2
python: ''
target: azp/posix/3/
- ansible: '2.13'
docker: alpine3
python: ''
target: azp/posix/1/
- ansible: '2.13'
docker: alpine3
python: ''
target: azp/posix/2/
- ansible: '2.13'
docker: alpine3
python: ''
target: azp/posix/3/
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
# - ansible: '2.13'
# docker: default
# python: '3.9'
# target: azp/generic/1/
steps:
- name: >-
Perform integration testing against
Ansible version ${{ matrix.ansible }}
under Python ${{ matrix.python }}
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-version: stable-${{ matrix.ansible }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
docker-image: ${{ matrix.docker }}
integration-continue-on-error: 'false'
integration-diff: 'false'
integration-retry-on-error: 'true'
pre-test-cmd: >-
mkdir -p ../../ansible
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.crypto.git ../../community/crypto
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
pull-request-change-detection: 'true'
target: ${{ matrix.target }}
target-python-version: ${{ matrix.python }}
testing-type: integration


@ -0,0 +1,36 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: "Code scanning - action"
on:
schedule:
- cron: '26 19 * * 1'
workflow_dispatch:
permissions:
contents: read
jobs:
CodeQL-Build:
permissions:
actions: read # for github/codeql-action/init to get workflow details
contents: read # for actions/checkout to fetch code
security-events: write # for github/codeql-action/autobuild to send a status report
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3


@ -0,0 +1,35 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
name: Verify REUSE
on:
push:
branches: [main]
pull_request_target:
types: [opened, synchronize, reopened]
branches: [main]
# Run CI once per day (at 07:30 UTC)
schedule:
- cron: '30 7 * * *'
jobs:
check:
permissions:
contents: read
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || '' }}
- name: Install dependencies
run: |
pip install reuse
- name: Check REUSE compliance
run: |
reuse lint


@ -0,0 +1,514 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# Created by https://www.toptal.com/developers/gitignore/api/vim,git,macos,linux,pydev,emacs,dotenv,python,windows,webstorm,pycharm+all,jupyternotebooks
# Edit at https://www.toptal.com/developers/gitignore?templates=vim,git,macos,linux,pydev,emacs,dotenv,python,windows,webstorm,pycharm+all,jupyternotebooks
### dotenv ###
.env
### Emacs ###
# -*- mode: gitignore; -*-
*~
\#*\#
/.emacs.desktop
/.emacs.desktop.lock
*.elc
auto-save-list
tramp
.\#*
# Org-mode
.org-id-locations
*_archive
# flymake-mode
*_flymake.*
# eshell files
/eshell/history
/eshell/lastdir
# elpa packages
/elpa/
# reftex files
*.rel
# AUCTeX auto folder
/auto/
# cask packages
.cask/
dist/
# Flycheck
flycheck_*.el
# server auth directory
/server/
# projectiles files
.projectile
# directory configuration
.dir-locals.el
# network security
/network-security.data
### Git ###
# Created by git for backups. To disable backups in Git:
# $ git config --global mergetool.keepBackup false
*.orig
# Created by git when using merge tools for conflicts
*.BACKUP.*
*.BASE.*
*.LOCAL.*
*.REMOTE.*
*_BACKUP_*.txt
*_BASE_*.txt
*_LOCAL_*.txt
*_REMOTE_*.txt
### JupyterNotebooks ###
# gitignore template for Jupyter Notebooks
# website: http://jupyter.org/
.ipynb_checkpoints
*/.ipynb_checkpoints/*
# IPython
profile_default/
ipython_config.py
# Remove previous ipynb_checkpoints
# git rm -r .ipynb_checkpoints/
### Linux ###
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### macOS Patch ###
# iCloud generated files
*.icloud
### PyCharm+all ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# SonarLint plugin
.idea/sonarlint/
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### PyCharm+all Patch ###
# Ignore everything but code style settings and run configurations
# that are supposed to be shared within teams.
.idea/*
!.idea/codeStyles
!.idea/runConfigurations
### pydev ###
.pydevproject
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
# IPython
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Vim ###
# Swap
[._]*.s[a-v][a-z]
!*.svg # comment out if you don't need vector files
[._]*.sw[a-p]
[._]s[a-rt-v][a-z]
[._]ss[a-gi-z]
[._]sw[a-p]
# Session
Session.vim
Sessionx.vim
# Temporary
.netrwhist
# Auto-generated tag files
tags
# Persistent undo
[._]*.un~
### WebStorm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
# AWS User-specific
# Generated files
# Sensitive or high-churn files
# Gradle
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
# Mongo Explorer plugin
# File-based project format
# IntelliJ
# mpeltonen/sbt-idea plugin
# JIRA plugin
# Cursive Clojure plugin
# SonarLint plugin
# Crashlytics plugin (for Android Studio and IntelliJ)
# Editor-based Rest Client
# Android studio 3.1+ serialized cache file
### WebStorm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
.idea/**/sonarlint/
# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
.idea/**/sonarIssues.xml
# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
.idea/**/markdown-navigator.xml
.idea/**/markdown-navigator-enh.xml
.idea/**/markdown-navigator/
# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
.idea/$CACHE_FILE$
# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
.idea/codestream.xml
### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/vim,git,macos,linux,pydev,emacs,dotenv,python,windows,webstorm,pycharm+all,jupyternotebooks
# Integration tests cloud configs
tests/integration/cloud-config-*.ini


@ -0,0 +1,5 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Files: changelogs/fragments/*
Copyright: Ansible Project
License: GPL-3.0-or-later


@ -0,0 +1,508 @@
===============================
Community General Release Notes
===============================
.. contents:: Topics
This changelog describes changes after version 7.0.0.
v8.2.0
======
Release Summary
---------------
Regular bugfix and feature release.
Minor Changes
-------------
- ipa_dnsrecord - adds ability to manage NS record types (https://github.com/ansible-collections/community.general/pull/7737).
- ipa_pwpolicy - refactor module and exchange a sequence ``if`` statements with a ``for`` loop (https://github.com/ansible-collections/community.general/pull/7723).
- ipa_pwpolicy - update module to support ``maxrepeat``, ``maxsequence``, ``dictcheck``, ``usercheck``, ``gracelimit`` parameters in FreeIPA password policies (https://github.com/ansible-collections/community.general/pull/7723).
- keycloak_realm_key - the ``config.algorithm`` option now supports 8 additional key algorithms (https://github.com/ansible-collections/community.general/pull/7698).
- keycloak_realm_key - the ``config.certificate`` option value is no longer defined with ``no_log=True`` (https://github.com/ansible-collections/community.general/pull/7698).
- keycloak_realm_key - the ``provider_id`` option now supports RSA encryption key usage (value ``rsa-enc``) (https://github.com/ansible-collections/community.general/pull/7698).
- keycloak_user_federation - allow custom user storage providers to be set through ``provider_id`` (https://github.com/ansible-collections/community.general/pull/7789).
- mail - add ``Message-ID`` header; which is required by some mail servers (https://github.com/ansible-collections/community.general/pull/7740).
- mail module, mail callback plugin - allow to configure the domain name of the Message-ID header with a new ``message_id_domain`` option (https://github.com/ansible-collections/community.general/pull/7765).
- ssh_config - new feature to set ``AddKeysToAgent`` option to ``yes`` or ``no`` (https://github.com/ansible-collections/community.general/pull/7703).
- ssh_config - new feature to set ``IdentitiesOnly`` option to ``yes`` or ``no`` (https://github.com/ansible-collections/community.general/pull/7704).
- xcc_redfish_command - added support for raw POSTs (``command=PostResource`` in ``category=Raw``) without a specific action info (https://github.com/ansible-collections/community.general/pull/7746).
Bugfixes
--------
- keycloak_identity_provider - ``mappers`` processing was not idempotent if the mappers configuration list had not been sorted by name (in ascending order). Fix resolves the issue by sorting mappers in the desired state using the same key which is used for obtaining existing state (https://github.com/ansible-collections/community.general/pull/7418).
- keycloak_identity_provider - it was not possible to reconfigure (add, remove) ``mappers`` once they were created initially. Removal was ignored, adding new ones resulted in dropping the pre-existing unmodified mappers. Fix resolves the issue by supplying correct input to the internal update call (https://github.com/ansible-collections/community.general/pull/7418).
- keycloak_user - when ``force`` is set, but user does not exist, do not try to delete it (https://github.com/ansible-collections/community.general/pull/7696).
- proxmox_kvm - running ``state=template`` will first check whether VM is already a template (https://github.com/ansible-collections/community.general/pull/7792).
- statusio_maintenance - fix error caused by incorrectly formed API data payload. Was raising "Failed to create maintenance HTTP Error 400 Bad Request" caused by bad data type for date/time and deprecated dict keys (https://github.com/ansible-collections/community.general/pull/7754).
New Plugins
-----------
Connection
~~~~~~~~~~
- incus - Run tasks in Incus instances via the Incus CLI.
Filter
~~~~~~
- from_ini - Converts INI text input into a dictionary
- to_ini - Converts a dictionary to the INI file format
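
As a quick illustration of the two new filters, the following sketch round-trips a small dictionary through the INI format; the variable name and values are illustrative only:

.. code-block:: yaml

    - name: Convert a dictionary to INI text and back (sketch)
      vars:
        demo_settings:            # illustrative data (assumption)
          general:
            hostname: web01
            port: "8080"
      ansible.builtin.debug:
        msg:
          - "{{ demo_settings | community.general.to_ini }}"
          - "{{ demo_settings | community.general.to_ini | community.general.from_ini }}"
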
Lookup
~~~~~~
- github_app_access_token - Obtain short-lived Github App Access tokens
New Modules
-----------
- dnf_config_manager - Enable or disable dnf repositories using config-manager
- keycloak_component_info - Retrieve component info in Keycloak
- keycloak_realm_rolemapping - Allows administration of Keycloak realm role mappings into groups with the Keycloak API
- proxmox_node_info - Retrieve information about one or more Proxmox VE nodes
- proxmox_storage_contents_info - List content from a Proxmox VE storage
v8.1.0
======
Release Summary
---------------
Regular bugfix and feature release.
Minor Changes
-------------
- bitwarden lookup plugin - when looking for items using an item ID, the item is now accessed directly with ``bw get item`` instead of searching through all items. This doubles the lookup speed (https://github.com/ansible-collections/community.general/pull/7468).
- elastic callback plugin - close elastic client to not leak resources (https://github.com/ansible-collections/community.general/pull/7517).
- git_config - allow multiple git configs for the same name with the new ``add_mode`` option (https://github.com/ansible-collections/community.general/pull/7260).
- git_config - the ``after`` and ``before`` fields in the ``diff`` of the return value can be a list instead of a string in case more configs with the same key are affected (https://github.com/ansible-collections/community.general/pull/7260).
- git_config - when a value is unset, all configs with the same key are unset (https://github.com/ansible-collections/community.general/pull/7260).
- gitlab modules - add ``ca_path`` option (https://github.com/ansible-collections/community.general/pull/7472).
- gitlab modules - remove duplicate ``gitlab`` package check (https://github.com/ansible-collections/community.general/pull/7486).
- gitlab_runner - add support for new runner creation workflow (https://github.com/ansible-collections/community.general/pull/7199).
- ipa_config - adds ``passkey`` choice to ``ipauserauthtype`` parameter's choices (https://github.com/ansible-collections/community.general/pull/7588).
- ipa_sudorule - adds options to include denied commands or command groups (https://github.com/ansible-collections/community.general/pull/7415).
- ipa_user - adds ``idp`` and ``passkey`` choice to ``ipauserauthtype`` parameter's choices (https://github.com/ansible-collections/community.general/pull/7589).
- irc - add ``validate_certs`` option, and rename ``use_ssl`` to ``use_tls``, while keeping ``use_ssl`` as an alias. The default value for ``validate_certs`` is ``false`` for backwards compatibility. We recommend that every user of this module explicitly set ``use_tls=true`` and ``validate_certs=true`` whenever possible, especially when communicating with IRC servers over the internet; see the example after this list (https://github.com/ansible-collections/community.general/pull/7550).
- keycloak module utils - expose error message from Keycloak server for HTTP errors in some specific situations (https://github.com/ansible-collections/community.general/pull/7645).
- keycloak_user_federation - add option for ``krbPrincipalAttribute`` (https://github.com/ansible-collections/community.general/pull/7538).
- lvol - change ``pvs`` argument type to list of strings (https://github.com/ansible-collections/community.general/pull/7676, https://github.com/ansible-collections/community.general/issues/7504).
- lxd connection plugin - tighten the detection logic for lxd ``Instance not found`` errors, to avoid false detection on unrelated errors such as ``/usr/bin/python3: not found`` (https://github.com/ansible-collections/community.general/pull/7521).
- netcup_dns - adds support for record types ``OPENPGPKEY``, ``SMIMEA``, and ``SSHFP`` (https://github.com/ansible-collections/community.general/pull/7489).
- nmcli - add support for new connection type ``loopback`` (https://github.com/ansible-collections/community.general/issues/6572).
- nmcli - allow for ``infiniband`` slaves of ``bond`` interface types (https://github.com/ansible-collections/community.general/pull/7569).
- nmcli - allow for the setting of ``MTU`` for ``infiniband`` and ``bond`` interface types (https://github.com/ansible-collections/community.general/pull/7499).
- onepassword lookup plugin - support 1Password Connect with the opv2 client by setting the ``connect_host`` and ``connect_token`` parameters (https://github.com/ansible-collections/community.general/pull/7116).
- onepassword_raw lookup plugin - support 1Password Connect with the opv2 client by setting the ``connect_host`` and ``connect_token`` parameters (https://github.com/ansible-collections/community.general/pull/7116).
- passwordstore - adds ``timestamp`` and ``preserve`` parameters to modify the stored password format (https://github.com/ansible-collections/community.general/pull/7426).
- proxmox - adds ``template`` value to the ``state`` parameter, allowing conversion of container to a template (https://github.com/ansible-collections/community.general/pull/7143).
- proxmox - adds ``update`` parameter, allowing update of an already existing container's configuration (https://github.com/ansible-collections/community.general/pull/7540).
- proxmox inventory plugin - adds an option to exclude nodes from the dynamic inventory generation. The new setting is optional; not using this option preserves the previous behavior (https://github.com/ansible-collections/community.general/issues/6714, https://github.com/ansible-collections/community.general/pull/7461).
- proxmox_disk - add ability to manipulate CD-ROM drive (https://github.com/ansible-collections/community.general/pull/7495).
- proxmox_kvm - adds ``template`` value to the ``state`` parameter, allowing conversion of a VM to a template (https://github.com/ansible-collections/community.general/pull/7143).
- proxmox_kvm - support the ``hookscript`` parameter (https://github.com/ansible-collections/community.general/issues/7600).
- proxmox_ostype - it is now possible to specify the ``ostype`` when creating an LXC container (https://github.com/ansible-collections/community.general/pull/7462).
- proxmox_vm_info - add ability to retrieve configuration info (https://github.com/ansible-collections/community.general/pull/7485).
- redfish_info - adding the ``BootProgress`` property when getting ``Systems`` info (https://github.com/ansible-collections/community.general/pull/7626).
- ssh_config - adds ``controlmaster``, ``controlpath`` and ``controlpersist`` parameters (https://github.com/ansible-collections/community.general/pull/7456).
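
For the ``irc`` change above, an explicit opt-in to TLS and certificate validation looks roughly like the following sketch; the server, channel, and nick values are placeholders:

.. code-block:: yaml

    - name: Send an IRC notification over TLS with certificate validation (sketch)
      community.general.irc:
        server: irc.example.org   # placeholder IRC server (assumption)
        port: 6697
        channel: "#deployments"   # placeholder channel
        nick: ansible-bot
        msg: Deployment finished successfully.
        use_tls: true
        validate_certs: true
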
Bugfixes
--------
- apt-rpm - the module did not upgrade packages if a newer version exists. Now the package will be reinstalled if the candidate is newer than the installed version (https://github.com/ansible-collections/community.general/issues/7414).
- cloudflare_dns - fix Cloudflare lookup of SHFP records (https://github.com/ansible-collections/community.general/issues/7652).
- interface_files - also consider ``address_family`` when changing ``option=method`` (https://github.com/ansible-collections/community.general/issues/7610, https://github.com/ansible-collections/community.general/pull/7612).
- irc - replace ``ssl.wrap_socket`` that was removed from Python 3.12 with code for creating a proper SSL context (https://github.com/ansible-collections/community.general/pull/7542).
- keycloak_* - fix Keycloak API client to quote ``/`` properly (https://github.com/ansible-collections/community.general/pull/7641).
- keycloak_authz_permission - resource payload variable for scope-based permission was constructed as a string, when it needs to be a list, even for a single item (https://github.com/ansible-collections/community.general/issues/7151).
- log_entries callback plugin - replace ``ssl.wrap_socket`` that was removed from Python 3.12 with code for creating a proper SSL context (https://github.com/ansible-collections/community.general/pull/7542).
- lvol - test for output messages in both ``stdout`` and ``stderr`` (https://github.com/ansible-collections/community.general/pull/7601, https://github.com/ansible-collections/community.general/issues/7182).
- onepassword lookup plugin - field and section titles are now case insensitive when using op CLI version two or later. This matches the behavior of version one (https://github.com/ansible-collections/community.general/pull/7564).
- redhat_subscription - use the D-Bus registration on RHEL 7 only on 7.4 and
greater; older versions of RHEL 7 do not have it
(https://github.com/ansible-collections/community.general/issues/7622,
https://github.com/ansible-collections/community.general/pull/7624).
- terraform - fix multiline string handling in complex variables (https://github.com/ansible-collections/community.general/pull/7535).
New Plugins
-----------
Lookup
~~~~~~
- onepassword_doc - Fetch documents stored in 1Password
Test
~~~~
- fqdn_valid - Validates fully-qualified domain names against RFC 1123
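
The new test plugin is used like any other Jinja2 test; a minimal sketch:

.. code-block:: yaml

    - name: Check that a hostname is a valid FQDN (sketch)
      ansible.builtin.assert:
        that:
          - "'host01.example.com' is community.general.fqdn_valid"   # placeholder hostname
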
New Modules
-----------
- git_config_info - Read git configuration
- gitlab_issue - Create, update, or delete GitLab issues
- nomad_token - Manage Nomad ACL tokens
v8.0.2
======
Release Summary
---------------
Bugfix release for inclusion in Ansible 9.0.0rc1.
Bugfixes
--------
- ocapi_utils, oci_utils, redfish_utils module utils - replace ``type()`` calls with ``isinstance()`` calls (https://github.com/ansible-collections/community.general/pull/7501).
- pipx module utils - change the CLI argument formatter for the ``pip_args`` parameter (https://github.com/ansible-collections/community.general/issues/7497, https://github.com/ansible-collections/community.general/pull/7506).
v8.0.1
======
Release Summary
---------------
Bugfix release for inclusion in Ansible 9.0.0b1.
Bugfixes
--------
- gitlab_group_members - fix gitlab constants call in ``gitlab_group_members`` module (https://github.com/ansible-collections/community.general/issues/7467).
- gitlab_project_members - fix gitlab constants call in ``gitlab_project_members`` module (https://github.com/ansible-collections/community.general/issues/7467).
- gitlab_protected_branches - fix gitlab constants call in ``gitlab_protected_branches`` module (https://github.com/ansible-collections/community.general/issues/7467).
- gitlab_user - fix gitlab constants call in ``gitlab_user`` module (https://github.com/ansible-collections/community.general/issues/7467).
- proxmox_pool_member - absent state for type VM did not delete VMs from the pools (https://github.com/ansible-collections/community.general/pull/7464).
- redfish_command - fix usage of message parsing in ``SimpleUpdate`` and ``MultipartHTTPPushUpdate`` commands to treat the lack of a ``MessageId`` as no message (https://github.com/ansible-collections/community.general/issues/7465, https://github.com/ansible-collections/community.general/pull/7471).
v8.0.0
======
Release Summary
---------------
This is release 8.0.0 of ``community.general``, released on 2023-11-01.
Minor Changes
-------------
- The collection will start using semantic markup (https://github.com/ansible-collections/community.general/pull/6539).
- VarDict module utils - add method ``VarDict.as_dict()`` to convert to a plain ``dict`` object (https://github.com/ansible-collections/community.general/pull/6602).
- apt_rpm - extract package name from local ``.rpm`` path when verifying
installation success. Allows installing packages from local ``.rpm`` files
(https://github.com/ansible-collections/community.general/pull/7396).
- cargo - add option ``executable``, which allows the user to specify the path to the cargo binary (https://github.com/ansible-collections/community.general/pull/7352).
- cargo - add option ``locked``, which allows the user to install the locked version of a dependency instead of the latest compatible version (https://github.com/ansible-collections/community.general/pull/6134).
- chroot connection plugin - add ``disable_root_check`` option (https://github.com/ansible-collections/community.general/pull/7099).
- cloudflare_dns - add CAA record support (https://github.com/ansible-collections/community.general/pull/7399).
- cobbler inventory plugin - add ``exclude_mgmt_classes`` and ``include_mgmt_classes`` options to exclude or include hosts based on management classes (https://github.com/ansible-collections/community.general/pull/7184).
- cobbler inventory plugin - add ``inventory_hostname`` option to allow using the system name for the inventory hostname (https://github.com/ansible-collections/community.general/pull/6502).
- cobbler inventory plugin - add ``want_ip_addresses`` option to collect all interface DNS name to IP address mapping (https://github.com/ansible-collections/community.general/pull/6711).
- cobbler inventory plugin - add primary IP address to ``cobbler_ipv4_address`` and IPv6 address to ``cobbler_ipv6_address`` host variable (https://github.com/ansible-collections/community.general/pull/6711).
- cobbler inventory plugin - add warning for systems with empty profiles (https://github.com/ansible-collections/community.general/pull/6502).
- cobbler inventory plugin - convert Ansible unicode strings to native Python unicode strings before passing user/password to XMLRPC client (https://github.com/ansible-collections/community.general/pull/6923).
- consul_session - drops requirement for the ``python-consul`` library to communicate with the Consul API, instead relying on the existing ``requests`` library requirement (https://github.com/ansible-collections/community.general/pull/6755).
- copr - respawn module to use the system python interpreter when the ``dnf`` python module is not available in ``ansible_python_interpreter`` (https://github.com/ansible-collections/community.general/pull/6522).
- cpanm - minor refactor when creating the ``CmdRunner`` object (https://github.com/ansible-collections/community.general/pull/7231).
- datadog_monitor - adds ``notification_preset_name``, ``renotify_occurrences`` and ``renotify_statuses`` parameters (https://github.com/ansible-collections/community.general/issues/6521, https://github.com/ansible-collections/community.general/issues/5823).
- dig lookup plugin - add TCP option to enable the use of TCP connection during DNS lookup (https://github.com/ansible-collections/community.general/pull/7343).
- ejabberd_user - module now using ``CmdRunner`` to execute external command (https://github.com/ansible-collections/community.general/pull/7075).
- filesystem - add ``uuid`` parameter for UUID change feature (https://github.com/ansible-collections/community.general/pull/6680).
- gitlab_group - add option ``force_delete`` (default: ``false``), which allows deleting a group even if it still contains projects (https://github.com/ansible-collections/community.general/pull/7364).
- gitlab_group_variable - add support for ``raw`` variables suboption (https://github.com/ansible-collections/community.general/pull/7132).
- gitlab_project_variable - add support for ``raw`` variables suboption (https://github.com/ansible-collections/community.general/pull/7132).
- gitlab_project_variable - minor refactor removing unnecessary code statements (https://github.com/ansible-collections/community.general/pull/6928).
- gitlab_runner - minor refactor removing unnecessary code statements (https://github.com/ansible-collections/community.general/pull/6927).
- htpasswd - minor code improvements in the module (https://github.com/ansible-collections/community.general/pull/6901).
- htpasswd - the parameter ``crypt_scheme`` is being renamed as ``hash_scheme`` and added as an alias to it (https://github.com/ansible-collections/community.general/pull/6841).
- icinga2_host - the ``ip`` option is no longer required, since Icinga 2 allows for an empty address attribute (https://github.com/ansible-collections/community.general/pull/7452).
- ini_file - add ``ignore_spaces`` option (https://github.com/ansible-collections/community.general/pull/7273).
- ini_file - add ``modify_inactive_option`` option (https://github.com/ansible-collections/community.general/pull/7401).
- ipa_config - add module parameters to manage FreeIPA user and group objectclasses (https://github.com/ansible-collections/community.general/pull/7019).
- ipa_config - adds ``idp`` choice to ``ipauserauthtype`` parameter's choices (https://github.com/ansible-collections/community.general/pull/7051).
- jenkins_build - add new ``detach`` option, which allows the module to exit successfully as long as the build is created (default functionality is still waiting for the build to end before exiting) (https://github.com/ansible-collections/community.general/pull/7204).
- jenkins_build - add new ``time_between_checks`` option, which allows configuring the wait time between requests to the Jenkins server (https://github.com/ansible-collections/community.general/pull/7204).
- keycloak_authentication - added provider ID choices, since Keycloak supports only those two specific ones (https://github.com/ansible-collections/community.general/pull/6763).
- keycloak_client_rolemapping - adds support for subgroups with additional parameter ``parents`` (https://github.com/ansible-collections/community.general/pull/6687).
- keycloak_role - add composite roles support for realm and client roles (https://github.com/ansible-collections/community.general/pull/6469).
- keyring - minor refactor removing unnecessary code statements (https://github.com/ansible-collections/community.general/pull/6927).
- ldap_* - add new arguments ``client_cert`` and ``client_key`` to the LDAP modules in order to allow certificate authentication (https://github.com/ansible-collections/community.general/pull/6668).
- ldap_search - add a new ``page_size`` option to enable paged searches (https://github.com/ansible-collections/community.general/pull/6648).
- locale_gen - module has been refactored to use ``ModuleHelper`` and ``CmdRunner`` (https://github.com/ansible-collections/community.general/pull/6903).
- locale_gen - module now using ``CmdRunner`` to execute external commands (https://github.com/ansible-collections/community.general/pull/6820).
- lvg - add ``active`` and ``inactive`` values to the ``state`` option for active state management feature (https://github.com/ansible-collections/community.general/pull/6682).
- lvg - add ``reset_vg_uuid``, ``reset_pv_uuid`` options for UUID reset feature (https://github.com/ansible-collections/community.general/pull/6682).
- lxc connection plugin - properly handle a change of the ``remote_addr`` option (https://github.com/ansible-collections/community.general/pull/7373).
- lxd connection plugin - automatically translate ``remote_addr`` from FQDN to (short) hostname (https://github.com/ansible-collections/community.general/pull/7360).
- lxd connection plugin - update error parsing to work with newer messages mentioning instances (https://github.com/ansible-collections/community.general/pull/7360).
- lxd inventory plugin - add ``server_cert`` option for trust anchor to use for TLS verification of server certificates (https://github.com/ansible-collections/community.general/pull/7392).
- lxd inventory plugin - add ``server_check_hostname`` option to disable hostname verification of server certificates (https://github.com/ansible-collections/community.general/pull/7392).
- make - add new ``targets`` parameter allowing multiple targets to be used with ``make`` (https://github.com/ansible-collections/community.general/pull/6882, https://github.com/ansible-collections/community.general/issues/4919).
- make - allows ``params`` to be used without a value (https://github.com/ansible-collections/community.general/pull/7180).
- mas - disable sign-in check for macOS 12+ as ``mas account`` is non-functional (https://github.com/ansible-collections/community.general/pull/6520).
- newrelic_deployment - add option ``app_name_exact_match``, which filters results for the exact app_name provided (https://github.com/ansible-collections/community.general/pull/7355).
- nmap inventory plugin - now has a ``use_arp_ping`` option to allow the user to disable the default ARP ping query for a more reliable form (https://github.com/ansible-collections/community.general/pull/7119).
- nmcli - add support for ``ipv4.dns-options`` and ``ipv6.dns-options`` (https://github.com/ansible-collections/community.general/pull/6902).
- nomad_job, nomad_job_info - add ``port`` parameter (https://github.com/ansible-collections/community.general/pull/7412).
- npm - minor improvement on parameter validation (https://github.com/ansible-collections/community.general/pull/6848).
- npm - module now using ``CmdRunner`` to execute external commands (https://github.com/ansible-collections/community.general/pull/6989).
- onepassword lookup plugin - add service account support (https://github.com/ansible-collections/community.general/issues/6635, https://github.com/ansible-collections/community.general/pull/6660).
- onepassword lookup plugin - introduce ``account_id`` option which allows specifying which account to use (https://github.com/ansible-collections/community.general/pull/7308).
- onepassword_raw lookup plugin - add service account support (https://github.com/ansible-collections/community.general/issues/6635, https://github.com/ansible-collections/community.general/pull/6660).
- onepassword_raw lookup plugin - introduce ``account_id`` option which allows specifying which account to use (https://github.com/ansible-collections/community.general/pull/7308).
- opentelemetry callback plugin - add span attributes in the span event (https://github.com/ansible-collections/community.general/pull/6531).
- opkg - add ``executable`` parameter allowing to specify the path of the ``opkg`` command (https://github.com/ansible-collections/community.general/pull/6862).
- opkg - remove default value ``""`` for parameter ``force``, as it results in the same behaviour as not setting the parameter (https://github.com/ansible-collections/community.general/pull/6513).
- pagerduty - adds an option to use the v2 API for creating PagerDuty incidents (https://github.com/ansible-collections/community.general/issues/6151).
- parted - on resize, use ``--fix`` option if available (https://github.com/ansible-collections/community.general/pull/7304).
- pnpm - set correct version when state is latest or version is not mentioned. Resolves previous idempotency problem (https://github.com/ansible-collections/community.general/pull/7339).
- pritunl module utils - ensure ``validate_certs`` parameter is honoured in all methods (https://github.com/ansible-collections/community.general/pull/7156).
- proxmox - add ``vmid`` (and ``taskid`` when possible) to return values (https://github.com/ansible-collections/community.general/pull/7263).
- proxmox - support ``timezone`` parameter at container creation (https://github.com/ansible-collections/community.general/pull/6510).
- proxmox inventory plugin - add composite variables support for Proxmox nodes (https://github.com/ansible-collections/community.general/issues/6640).
- proxmox_kvm - added support for ``tpmstate0`` parameter to configure TPM (Trusted Platform Module) disk. TPM is required for Windows 11 installations (https://github.com/ansible-collections/community.general/pull/6533).
- proxmox_kvm - enabled force restart of VM, bringing the ``force`` parameter functionality in line with what is described in the docs (https://github.com/ansible-collections/community.general/pull/6914).
- proxmox_kvm - re-use ``timeout`` module param to forcefully shutdown a virtual machine when ``state`` is ``stopped`` (https://github.com/ansible-collections/community.general/issues/6257).
- proxmox_snap - add ``retention`` parameter to delete old snapshots (https://github.com/ansible-collections/community.general/pull/6576).
- proxmox_vm_info - ``node`` parameter is no longer required. Information can be obtained for the whole cluster (https://github.com/ansible-collections/community.general/pull/6976).
- proxmox_vm_info - a non-existing VM (specified by name or vmid) now returns empty results instead of failing (https://github.com/ansible-collections/community.general/pull/7049).
- pubnub_blocks - minor refactor removing unnecessary code statements (https://github.com/ansible-collections/community.general/pull/6928).
- random_string - added new ``ignore_similar_chars`` and ``similar_chars`` options to ignore certain characters (https://github.com/ansible-collections/community.general/pull/7242).
- redfish_command - add ``MultipartHTTPPushUpdate`` command (https://github.com/ansible-collections/community.general/issues/6471, https://github.com/ansible-collections/community.general/pull/6612).
- redfish_command - add ``account_types`` and ``oem_account_types`` as optional inputs to ``AddUser`` (https://github.com/ansible-collections/community.general/issues/6823, https://github.com/ansible-collections/community.general/pull/6871).
- redfish_command - add new option ``update_oem_params`` for the ``MultipartHTTPPushUpdate`` command (https://github.com/ansible-collections/community.general/issues/7331).
- redfish_config - add ``CreateVolume`` command to allow creation of volumes on servers (https://github.com/ansible-collections/community.general/pull/6813).
- redfish_config - add ``DeleteAllVolumes`` command to allow deletion of all volumes on servers (https://github.com/ansible-collections/community.general/pull/6814).
- redfish_config - adding ``SetSecureBoot`` command (https://github.com/ansible-collections/community.general/pull/7129).
- redfish_info - add ``AccountTypes`` and ``OEMAccountTypes`` to the output of ``ListUsers`` (https://github.com/ansible-collections/community.general/issues/6823, https://github.com/ansible-collections/community.general/pull/6871).
- redfish_info - add support for ``GetBiosRegistries`` command (https://github.com/ansible-collections/community.general/pull/7144).
- redfish_info - adds ``LinkStatus`` to NIC inventory (https://github.com/ansible-collections/community.general/pull/7318).
- redfish_info - adds ``ProcessorArchitecture`` to CPU inventory (https://github.com/ansible-collections/community.general/pull/6864).
- redfish_info - fix for ``GetVolumeInventory``: the controller name was populated incorrectly and duplicates appeared in the retrieved volumes (https://github.com/ansible-collections/community.general/pull/6719).
- redfish_info - report ``Id`` in the output of ``GetManagerInventory`` (https://github.com/ansible-collections/community.general/pull/7140).
- redfish_utils - use ``Controllers`` key in redfish data to obtain Storage controllers properties (https://github.com/ansible-collections/community.general/pull/7081).
- redfish_utils module utils - add support for ``PowerCycle`` reset type for ``redfish_command`` responses feature (https://github.com/ansible-collections/community.general/issues/7083).
- redfish_utils module utils - add support for following ``@odata.nextLink`` pagination in ``software_inventory`` responses feature (https://github.com/ansible-collections/community.general/pull/7020).
- redfish_utils module utils - support ``Volumes`` in response for ``GetDiskInventory`` (https://github.com/ansible-collections/community.general/pull/6819).
- redhat_subscription - the internal ``RegistrationBase`` class was folded
into the other internal ``Rhsm`` class, as the separation had no purpose
anymore
(https://github.com/ansible-collections/community.general/pull/6658).
- redis_info - refactor the redis_info module to use the redis module_utils enabling to pass TLS parameters to the Redis client (https://github.com/ansible-collections/community.general/pull/7267).
- rhsm_release - improve/harden the way ``subscription-manager`` is run;
no behaviour change is expected
(https://github.com/ansible-collections/community.general/pull/6669).
- rhsm_repository - the interaction with ``subscription-manager`` was
refactored by grouping things together, removing unused bits, and hardening
the way it is run; also, the parsing of ``subscription-manager repos --list``
was improved and made slightly faster; no behaviour change is expected
(https://github.com/ansible-collections/community.general/pull/6783,
https://github.com/ansible-collections/community.general/pull/6837).
- scaleway_security_group_rule - minor refactor removing unnecessary code statements (https://github.com/ansible-collections/community.general/pull/6928).
- shutdown - use ``shutdown -p ...`` with FreeBSD to halt and power off machine (https://github.com/ansible-collections/community.general/pull/7102).
- snap - add option ``dangerous`` to the module, that will map into the command line argument ``--dangerous``, allowing unsigned snap files to be installed (https://github.com/ansible-collections/community.general/pull/6908, https://github.com/ansible-collections/community.general/issues/5715).
- snap - module is now aware of channel when deciding whether to install or refresh the snap (https://github.com/ansible-collections/community.general/pull/6435, https://github.com/ansible-collections/community.general/issues/1606).
- sorcery - add grimoire (repository) management support (https://github.com/ansible-collections/community.general/pull/7012).
- sorcery - minor refactor (https://github.com/ansible-collections/community.general/pull/6525).
- supervisorctl - allow stopping matching running processes before removing them with ``stop_before_removing=true`` (https://github.com/ansible-collections/community.general/pull/7284).
- tss lookup plugin - allow fetching secret IDs which are in a folder, based on the folder ID. Previously, secrets could not be fetched by folder ID; the new ``fetch_secret_ids_from_folder`` option enables this (https://github.com/ansible-collections/community.general/issues/6223).
- tss lookup plugin - allow fetching a secret by path. Previously, secrets could not be fetched by path; the new ``secret_path`` option enables this (https://github.com/ansible-collections/community.general/pull/6881).
- unixy callback plugin - add support for ``check_mode_markers`` option (https://github.com/ansible-collections/community.general/pull/7179).
- vardict module utils - added convenience methods to ``VarDict`` (https://github.com/ansible-collections/community.general/pull/6647).
- xenserver_guest_info - minor refactor removing unnecessary code statements (https://github.com/ansible-collections/community.general/pull/6928).
- xenserver_guest_powerstate - minor refactor removing unnecessary code statements (https://github.com/ansible-collections/community.general/pull/6928).
- yum_versionlock - add support to pin specific package versions instead of only the package itself (https://github.com/ansible-collections/community.general/pull/6861, https://github.com/ansible-collections/community.general/issues/4470).
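
For instance, with the extended ``yum_versionlock`` support described in the entry above, a specific package version can now be pinned; this is a sketch with a placeholder package and version string:

.. code-block:: yaml

    - name: Lock a specific package version (sketch)
      community.general.yum_versionlock:
        name: nginx-1.24.0   # placeholder package/version (assumption about the pin format)
        state: present
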
Breaking Changes / Porting Guide
--------------------------------
- collection_version lookup plugin - remove compatibility code for ansible-base 2.10 and ansible-core 2.11 (https://github.com/ansible-collections/community.general/pull/7269).
- gitlab_project - add ``default_branch`` support for project update. If you used the module so far with ``default_branch`` to update a project, the value of ``default_branch`` was ignored. To avoid unexpected breaking changes, do not pass a value unless you are sure it is the one you want (https://github.com/ansible-collections/community.general/pull/7158).
- selective callback plugin - remove compatibility code for Ansible 2.9 and ansible-core 2.10 (https://github.com/ansible-collections/community.general/pull/7269).
- vardict module utils - ``VarDict`` will no longer accept variables named ``_var``, ``get_meta``, and ``as_dict`` (https://github.com/ansible-collections/community.general/pull/6647).
- version module util - remove fallback for ansible-core 2.11. All modules and plugins that do version collections no longer work with ansible-core 2.11 (https://github.com/ansible-collections/community.general/pull/7269).
Deprecated Features
-------------------
- CmdRunner module utils - deprecate ``cmd_runner_fmt.as_default_type()`` formatter (https://github.com/ansible-collections/community.general/pull/6601).
- MH VarsMixin module utils - deprecates ``VarsMixin`` and supporting classes in favor of plain ``vardict`` module util (https://github.com/ansible-collections/community.general/pull/6649).
- ansible_galaxy_install - the ``ack_ansible29`` and ``ack_min_ansiblecore211`` options have been deprecated and will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/7358).
- consul - the ``ack_params_state_absent`` option has been deprecated and will be removed in community.general 10.0.0 (https://github.com/ansible-collections/community.general/pull/7358).
- cpanm - value ``compatibility`` is deprecated as default for parameter ``mode`` (https://github.com/ansible-collections/community.general/pull/6512).
- ejabberd_user - deprecate the parameter ``logging`` in favour of producing more detailed information in the module output (https://github.com/ansible-collections/community.general/pull/7043).
- flowdock - module relies entirely on no longer responsive API endpoints, and it will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/6930).
- proxmox - old feature flag ``proxmox_default_behavior`` will be removed in community.general 10.0.0 (https://github.com/ansible-collections/community.general/pull/6836).
- proxmox_kvm - deprecate the option ``proxmox_default_behavior`` (https://github.com/ansible-collections/community.general/pull/7377).
- redfish_info, redfish_config, redfish_command - the default value ``10`` for the ``timeout`` option is deprecated and will change to ``60`` in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/7295).
- redhat module utils - the ``module_utils.redhat`` module is deprecated, as
effectively unused: the ``Rhsm``, ``RhsmPool``, and ``RhsmPools`` classes
will be removed in community.general 9.0.0; the ``RegistrationBase`` class
will be removed in community.general 10.0.0 together with the
``rhn_register`` module, as it is the only user of this class; this means
that the whole ``module_utils.redhat`` module will be dropped in
community.general 10.0.0, so importing it without even using anything of it
will fail
(https://github.com/ansible-collections/community.general/pull/6663).
- redhat_subscription - the ``autosubscribe`` alias for the ``auto_attach`` option has been
deprecated for many years, although only in the documentation. Officially mark this alias
as deprecated, and it will be removed in community.general 9.0.0
(https://github.com/ansible-collections/community.general/pull/6646).
- redhat_subscription - the ``pool`` option is deprecated in favour of the
more precise and flexible ``pool_ids`` option
(https://github.com/ansible-collections/community.general/pull/6650).
- rhsm_repository - ``state=present`` has not been working as expected for many years,
and it seems it was not noticed so far; also, "presence" is not really a valid concept
for subscription repositories, which can only be enabled or disabled. Hence, mark the
``present`` and ``absent`` values of the ``state`` option as deprecated, slating them
for removal in community.general 10.0.0
(https://github.com/ansible-collections/community.general/pull/6673).
- stackdriver - module relies entirely on no longer existent API endpoints, and it will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/6887).
- webfaction_app - module relies entirely on no longer existent API endpoints, and it will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/6909).
- webfaction_db - module relies entirely on no longer existent API endpoints, and it will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/6909).
- webfaction_domain - module relies entirely on no longer existent API endpoints, and it will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/6909).
- webfaction_mailbox - module relies entirely on no longer existent API endpoints, and it will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/6909).
- webfaction_site - module relies entirely on no longer existent API endpoints, and it will be removed in community.general 9.0.0 (https://github.com/ansible-collections/community.general/pull/6909).
Removed Features (previously deprecated)
----------------------------------------
- The collection no longer supports ansible-core 2.11 and ansible-core 2.12. Parts of the collection might still work on these ansible-core versions, but others might not (https://github.com/ansible-collections/community.general/pull/7269).
- ansible_galaxy_install - support for Ansible 2.9 and ansible-base 2.10 has been removed (https://github.com/ansible-collections/community.general/pull/7358).
- consul - when ``state=absent``, the options ``script``, ``ttl``, ``tcp``, ``http``, and ``interval`` can no longer be specified (https://github.com/ansible-collections/community.general/pull/7358).
- gconftool2 - ``state=get`` has been removed. Use the module ``community.general.gconftool2_info`` instead (https://github.com/ansible-collections/community.general/pull/7358).
- gitlab_runner - remove the default value for the ``access_level`` option. To restore the previous behavior, explicitly set it to ``ref_protected`` (https://github.com/ansible-collections/community.general/pull/7358).
- htpasswd - removed code for passlib <1.6 (https://github.com/ansible-collections/community.general/pull/6901).
- manageiq_policies - ``state=list`` has been removed. Use the module ``community.general.manageiq_policies_info`` instead (https://github.com/ansible-collections/community.general/pull/7358).
- manageiq_tags - ``state=list`` has been removed. Use the module ``community.general.manageiq_tags_info`` instead (https://github.com/ansible-collections/community.general/pull/7358).
- mh.mixins.cmd module utils - the ``ArgFormat`` class has been removed (https://github.com/ansible-collections/community.general/pull/7358).
- mh.mixins.cmd module utils - the ``CmdMixin`` mixin has been removed. Use ``community.general.plugins.module_utils.cmd_runner.CmdRunner`` instead (https://github.com/ansible-collections/community.general/pull/7358).
- mh.mixins.cmd module utils - the mh.mixins.cmd module utils has been removed after all its contents were removed (https://github.com/ansible-collections/community.general/pull/7358).
- mh.module_helper module utils - the ``CmdModuleHelper`` and ``CmdStateModuleHelper`` classes have been removed. Use ``community.general.plugins.module_utils.cmd_runner.CmdRunner`` instead (https://github.com/ansible-collections/community.general/pull/7358).
- proxmox module utils - removed unused imports (https://github.com/ansible-collections/community.general/pull/6873).
- xfconf - the deprecated ``disable_facts`` option was removed (https://github.com/ansible-collections/community.general/pull/7358).
Bugfixes
--------
- CmdRunner module utils - does not attempt to resolve path if executable is a relative or absolute path (https://github.com/ansible-collections/community.general/pull/7200).
- MH DependencyMixin module utils - deprecation notice was popping up for modules not using dependencies (https://github.com/ansible-collections/community.general/pull/6644, https://github.com/ansible-collections/community.general/issues/6639).
- bitwarden lookup plugin - the plugin made assumptions about the structure of a Bitwarden JSON object which may have been broken by an update in the Bitwarden API. Remove assumptions, and allow queries for general fields such as ``notes`` (https://github.com/ansible-collections/community.general/pull/7061).
- cmd_runner module utils - when a parameter in ``argument_spec`` has no type, meaning it is implicitly a ``str``, ``CmdRunner`` would fail trying to find the ``type`` key in that dictionary (https://github.com/ansible-collections/community.general/pull/6968).
- cobbler inventory plugin - fix calculation of cobbler_ipv4/6_address (https://github.com/ansible-collections/community.general/pull/6925).
- composer - fix inability to run ``working_dir``-dependent commands. The module was throwing an error when trying to run a ``working_dir``-dependent command, because it tried to get the command help without passing the ``working_dir`` (https://github.com/ansible-collections/community.general/issues/3787).
- csv module utils - detect and remove Unicode BOM markers from incoming CSV content (https://github.com/ansible-collections/community.general/pull/6662).
- datadog_downtime - presence of the ``rrule`` parameter led to the Datadog API returning Bad Request due to a missing recurrence type (https://github.com/ansible-collections/community.general/pull/6811).
- ejabberd_user - module was failing to detect whether user was already created and/or password was changed (https://github.com/ansible-collections/community.general/pull/7033).
- ejabberd_user - provide meaningful error message when the ``ejabberdctl`` command is not found (https://github.com/ansible-collections/community.general/pull/7028, https://github.com/ansible-collections/community.general/issues/6949).
- github_deploy_key - fix pagination behaviour causing a crash when only a single page of deploy keys exist (https://github.com/ansible-collections/community.general/pull/7375).
- gitlab_group - the module passed parameters to the API call even when not set. The module is now filtering out ``None`` values to remediate this (https://github.com/ansible-collections/community.general/pull/6712).
- gitlab_group_variable - deleted all variables when used with ``purge=true`` due to missing ``raw`` property in KNOWN attributes (https://github.com/ansible-collections/community.general/issues/7250).
- gitlab_project_variable - deleted all variables when used with ``purge=true`` due to missing ``raw`` property in KNOWN attributes (https://github.com/ansible-collections/community.general/issues/7250).
- icinga2_host - fix a key error when updating an existing host (https://github.com/ansible-collections/community.general/pull/6748).
- ini_file - add the ``follow`` parameter to follow the symlinks instead of replacing them (https://github.com/ansible-collections/community.general/pull/6546).
- ini_file - fix a bug where the inactive options were not used when possible (https://github.com/ansible-collections/community.general/pull/6575).
- ipa_dnszone - fix 'idnsallowsyncptr' key error for reverse zone (https://github.com/ansible-collections/community.general/pull/6906, https://github.com/ansible-collections/community.general/issues/6905).
- kernel_blacklist - simplified the mechanism to update the file, fixing the error (https://github.com/ansible-collections/community.general/pull/7382, https://github.com/ansible-collections/community.general/issues/7362).
- keycloak module util - fix missing ``http_agent``, ``timeout``, and ``validate_certs`` ``open_url()`` parameters (https://github.com/ansible-collections/community.general/pull/7067).
- keycloak module utils - fix ``is_struct_included`` handling of lists of lists/dictionaries (https://github.com/ansible-collections/community.general/pull/6688).
- keycloak module utils - the function ``get_user_by_username`` now returns the user representation or ``None`` as stated in the documentation (https://github.com/ansible-collections/community.general/pull/6758).
- keycloak_authentication - fix Keycloak authentication flow (step or sub-flow) indexing during update, if not specified by the user (https://github.com/ansible-collections/community.general/pull/6734).
- keycloak_client inventory plugin - fix missing client secret (https://github.com/ansible-collections/community.general/pull/6931).
- ldap_search - fix string normalization and the ``base64_attributes`` option on Python 3 (https://github.com/ansible-collections/community.general/issues/5704, https://github.com/ansible-collections/community.general/pull/7264).
- locale_gen - now works for locales without the underscore character such as ``C.UTF-8`` (https://github.com/ansible-collections/community.general/pull/6774, https://github.com/ansible-collections/community.general/issues/5142, https://github.com/ansible-collections/community.general/issues/4305).
- lvol - add support for percentage of origin size specification when creating snapshot volumes (https://github.com/ansible-collections/community.general/issues/1630, https://github.com/ansible-collections/community.general/pull/7053).
- lxc connection plugin - now handles ``remote_addr`` defaulting to ``inventory_hostname`` correctly (https://github.com/ansible-collections/community.general/pull/7104).
- lxc connection plugin - properly evaluate options (https://github.com/ansible-collections/community.general/pull/7369).
- machinectl become plugin - mark plugin as ``require_tty`` to automatically disable pipelining, with which this plugin is not compatible (https://github.com/ansible-collections/community.general/issues/6932, https://github.com/ansible-collections/community.general/pull/6935).
- mail - skip headers containing equals characters due to missing ``maxsplit`` on header key/value parsing (https://github.com/ansible-collections/community.general/pull/7303).
- memset module utils - make compatible with ansible-core 2.17 (https://github.com/ansible-collections/community.general/pull/7379).
- nmap inventory plugin - fix ``get_option`` calls (https://github.com/ansible-collections/community.general/pull/7323).
- nmap inventory plugin - now uses ``get_option`` in all cases to get its configuration information (https://github.com/ansible-collections/community.general/pull/7119).
- nmcli - fix bond option ``xmit_hash_policy`` (https://github.com/ansible-collections/community.general/pull/6527).
- nmcli - fix support for empty list (in compare and scrape) (https://github.com/ansible-collections/community.general/pull/6769).
- nsupdate - fix a possible ``list index out of range`` exception (https://github.com/ansible-collections/community.general/issues/836).
- oci_utils module util - fix inappropriate logical comparison expressions and make them simpler. The previous checks had logical short circuits (https://github.com/ansible-collections/community.general/pull/7125).
- oci_utils module utils - avoid direct type comparisons (https://github.com/ansible-collections/community.general/pull/7085).
- onepassword - fix KeyError exception when trying to access value of a field that is not filled out in OnePassword item (https://github.com/ansible-collections/community.general/pull/7241).
- openbsd_pkg - the pkg_info(1) behavior has changed in OpenBSD >7.3. The error message ``Can't find`` should not lead to an error case (https://github.com/ansible-collections/community.general/pull/6785).
- pacman - module recognizes the output of ``yay`` running as ``root`` (https://github.com/ansible-collections/community.general/pull/6713).
- portage - fix ``changed_use`` and ``newuse`` not triggering rebuilds (https://github.com/ansible-collections/community.general/issues/6008, https://github.com/ansible-collections/community.general/pull/6548).
- pritunl module utils - fix incorrect URL parameter for the organization add method (https://github.com/ansible-collections/community.general/pull/7161).
- proxmox - fix error when a configuration had no ``template`` field (https://github.com/ansible-collections/community.general/pull/6838, https://github.com/ansible-collections/community.general/issues/5372).
- proxmox module utils - add logic to detect whether an old Proxmoxer complains about the ``token_name`` and ``token_value`` parameters and provide a better error message when that happens (https://github.com/ansible-collections/community.general/pull/6839, https://github.com/ansible-collections/community.general/issues/5371).
- proxmox module utils - fix proxmoxer library version check (https://github.com/ansible-collections/community.general/issues/6974, https://github.com/ansible-collections/community.general/issues/6975, https://github.com/ansible-collections/community.general/pull/6980).
- proxmox_disk - fix unable to create ``cdrom`` media due to ``size`` always being appended (https://github.com/ansible-collections/community.general/pull/6770).
- proxmox_kvm - ``absent`` state with ``force`` specified failed to stop the VM due to the ``timeout`` value not being passed to ``stop_vm`` (https://github.com/ansible-collections/community.general/pull/6827).
- proxmox_kvm - ``restarted`` state did not actually restart a VM in some VM configurations. The state now uses the Proxmox reboot endpoint instead of calling the ``stop_vm`` and ``start_vm`` functions (https://github.com/ansible-collections/community.general/pull/6773).
- proxmox_kvm - allow creation of VM with existing name but new vmid (https://github.com/ansible-collections/community.general/issues/6155, https://github.com/ansible-collections/community.general/pull/6709).
- proxmox_kvm - when the ``name`` option is provided without ``vmid`` and a VM with that name already exists, no new VM will be created (https://github.com/ansible-collections/community.general/issues/6911, https://github.com/ansible-collections/community.general/pull/6981).
- proxmox_tasks_info - remove the ``api_user`` + ``api_password`` constraint from ``required_together``, as it caused ``api_password`` to be required even when an API token parameter is used (https://github.com/ansible-collections/community.general/issues/6201).
- proxmox_template - require ``requests_toolbelt`` module to fix issue with uploading large templates (https://github.com/ansible-collections/community.general/issues/5579, https://github.com/ansible-collections/community.general/pull/6757).
- proxmox_user_info - avoid direct type comparisons (https://github.com/ansible-collections/community.general/pull/7085).
- redfish_info - fix ``ListUsers`` to not show empty account slots (https://github.com/ansible-collections/community.general/issues/6771, https://github.com/ansible-collections/community.general/pull/6772).
- redhat_subscription - use the right D-Bus options for the consumer type when
registering a RHEL system older than 9 or a RHEL 9 system older than 9.2
and using ``consumer_type``
(https://github.com/ansible-collections/community.general/pull/7378).
- redfish_utils module utils - change variable names to avoid issues occurring when fetching Volumes data (https://github.com/ansible-collections/community.general/pull/6883).
- rhsm_repository - when using the ``purge`` option, the ``repositories``
dictionary element in the returned JSON is now properly updated according
to the pruning operation
(https://github.com/ansible-collections/community.general/pull/6676).
- rundeck - fix ``TypeError`` on 404 API response (https://github.com/ansible-collections/community.general/pull/6983).
- selective callback plugin - fix length of task name lines in output always being 3 characters longer than desired (https://github.com/ansible-collections/community.general/pull/7374).
- snap - an exception was being raised when snap list was empty (https://github.com/ansible-collections/community.general/pull/7124, https://github.com/ansible-collections/community.general/issues/7120).
- snap - assume default track ``latest`` in parameter ``channel`` when not specified (https://github.com/ansible-collections/community.general/pull/6835, https://github.com/ansible-collections/community.general/issues/6821).
- snap - change the change detection mechanism from "parsing installation" to "comparing end state with initial state" (https://github.com/ansible-collections/community.general/pull/7340, https://github.com/ansible-collections/community.general/issues/7265).
- snap - fix crash when multiple snaps are specified and one has ``---`` in its description (https://github.com/ansible-collections/community.general/pull/7046).
- snap - fix the processing of the commands' output, stripping spaces and newlines from it (https://github.com/ansible-collections/community.general/pull/6826, https://github.com/ansible-collections/community.general/issues/6803).
- sorcery - fix interruption of the multi-stage process (https://github.com/ansible-collections/community.general/pull/7012).
- sorcery - fix queue generation before the whole system rebuild (https://github.com/ansible-collections/community.general/pull/7012).
- sorcery - latest state no longer triggers update_cache (https://github.com/ansible-collections/community.general/pull/7012).
- terraform - prevent the ``-backend-config`` option from being double-quoted with the ``shlex_quote`` function (https://github.com/ansible-collections/community.general/pull/7301).
- tss lookup plugin - fix multiple issues when using ``fetch_attachments=true`` (https://github.com/ansible-collections/community.general/pull/6720).
- zypper - added handling of zypper exitcode 102. Changed state is set correctly now and rc 102 is still preserved to be evaluated by the playbook (https://github.com/ansible-collections/community.general/pull/6534).
Known Issues
------------
- Ansible markup will show up in raw form on ansible-doc text output for ansible-core before 2.15. If you have trouble deciphering the documentation markup, please upgrade to ansible-core 2.15 (or newer), or read the HTML documentation on https://docs.ansible.com/ansible/devel/collections/community/general/ (https://github.com/ansible-collections/community.general/pull/6539).
New Plugins
-----------
Lookup
~~~~~~
- bitwarden_secrets_manager - Retrieve secrets from Bitwarden Secrets Manager
New Modules
-----------
- consul_policy - Manipulate Consul policies
- consul_role - Manipulate Consul roles
- facter_facts - Runs the discovery program C(facter) on the remote system and returns Ansible facts
- gio_mime - Set default handler for MIME type, for applications using Gnome GIO
- gitlab_instance_variable - Creates, updates, or deletes GitLab instance variables
- gitlab_merge_request - Create, update, or delete GitLab merge requests
- jenkins_build_info - Get information about Jenkins builds
- keycloak_authentication_required_actions - Allows administration of Keycloak authentication required actions
- keycloak_authz_custom_policy - Allows administration of Keycloak client custom Javascript policies via Keycloak API
- keycloak_authz_permission - Allows administration of Keycloak client authorization permissions via Keycloak API
- keycloak_authz_permission_info - Query Keycloak client authorization permissions information
- keycloak_realm_key - Allows administration of Keycloak realm keys via Keycloak API
- keycloak_user - Create and configure a user in Keycloak
- lvg_rename - Renames LVM volume groups
- pnpm - Manage node.js packages with pnpm
- proxmox_pool - Pool management for Proxmox VE cluster
- proxmox_pool_member - Add or delete members from Proxmox VE cluster pools
- proxmox_vm_info - Retrieve information about one or more Proxmox VE virtual machines
- simpleinit_msb - Manage services on Source Mage GNU/Linux

View File

@ -0,0 +1,3 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project

View File

@ -0,0 +1,123 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->
# Contributing
We follow the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) in all our contributions and interactions within this repository.
If you are a committer, also refer to the [collection's committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
## Issue tracker
Whether you are looking for an opportunity to contribute or you found a bug and already know how to solve it, please go to the [issue tracker](https://github.com/ansible-collections/community.general/issues).
There you can find feature ideas to implement and reports about bugs to solve, or you can submit an issue to discuss your idea before implementing it, which can help you choose the right direction at the start of your work and potentially save a lot of time and effort.
Also, somebody may already have started discussing or working on the same or a similar idea,
so you can cooperate to create a better solution together.
* If you are interested in starting with an easy issue, look for [issues with an `easyfix` label](https://github.com/ansible-collections/community.general/labels/easyfix).
* Often issues that are waiting for contributors to pick up have [the `waiting_on_contributor` label](https://github.com/ansible-collections/community.general/labels/waiting_on_contributor).
## Open pull requests
Look through currently [open pull requests](https://github.com/ansible-collections/community.general/pulls).
You can help by reviewing them. Reviews help move pull requests to a mergeable state. Some good pull requests cannot be merged only due to a lack of reviews. And it is always worth saying that good reviews are often more valuable than pull requests themselves.
Note that reviewing does not only mean code review, but also offering comments on new interfaces added to existing plugins/modules, interfaces of new plugins/modules, improving language (not everyone is a native English speaker), or testing bugfixes and new features!
Also, consider taking over a valuable, reviewed, but abandoned pull request; politely ask the original author whether you may complete it yourself.
* Try committing your changes with an informative but short commit message.
* Do not squash your commits and force-push to your branch if not needed. Individual commits make it much easier for reviewers to follow the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#creating-changelog-fragments); a minimal example is shown after this list. (You must not include a fragment for new modules or new plugins, and you should not include one for docs-only changes. If you are not sure, simply do not include one; we will tell you whether one is needed.)
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.
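For orientation, here is a minimal sketch of what a changelog fragment could look like. Fragments live under `changelogs/fragments/`; the file name, the chosen section (`bugfixes`, `minor_changes`, and so on), and the wording below are purely illustrative placeholders, not an actual fragment from this collection:
```yaml
# changelogs/fragments/1234-my-module-fix.yml - hypothetical example file name
bugfixes:
  - my_module - fix the behavior described in the linked pull request (https://github.com/ansible-collections/community.general/pull/1234).
```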
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
## Test pull requests
If you want to test a PR locally, refer to [our testing guide](https://github.com/ansible/community-docs/blob/main/test_pr_locally_guide.rst) for instructions on how to do it quickly.
If you find any inconsistencies or places in this document which can be improved, feel free to raise an issue or pull request to fix it.
## Run sanity, unit or integration tests locally
You have to check out the repository into a specific path structure to be able to run `ansible-test`. The path to the git checkout must end with `.../ansible_collections/community/general`. Please see [our testing guide](https://github.com/ansible/community-docs/blob/main/test_pr_locally_guide.rst) for instructions on how to check out the repository into a correct path structure. The short version of these instructions is:
```.bash
mkdir -p ~/dev/ansible_collections/community
git clone https://github.com/ansible-collections/community.general.git ~/dev/ansible_collections/community/general
cd ~/dev/ansible_collections/community/general
```
Then you can run `ansible-test` (which is a part of [ansible-core](https://pypi.org/project/ansible-core/)) inside the checkout. The following example commands expect that you have installed Docker or Podman. Note that Podman has only been supported by more recent ansible-core releases. If you are using Docker, the following will work with Ansible 2.9+.
The following commands show how to run sanity tests:
```.bash
# Run sanity tests for all files in the collection:
ansible-test sanity --docker -v
# Run sanity tests for the given files and directories:
ansible-test sanity --docker -v plugins/modules/system/pids.py tests/integration/targets/pids/
```
The following commands show how to run unit tests:
```.bash
# Run all unit tests:
ansible-test units --docker -v
# Run all unit tests for one Python version (a lot faster):
ansible-test units --docker -v --python 3.8
# Run a specific unit test (for the nmcli module) for one Python version:
ansible-test units --docker -v --python 3.8 tests/unit/plugins/modules/net_tools/test_nmcli.py
```
The following commands show how to run integration tests:
```.bash
# Run integration tests for the interfaces_files module in a Docker container using the
# fedora35 operating system image (the supported images depend on your ansible-core version):
ansible-test integration --docker fedora35 -v interfaces_file
# Run integration tests for the flattened lookup **without any isolation**:
ansible-test integration -v lookup_flattened
```
If you are unsure about the integration test target name for a module or plugin, you can take a look in `tests/integration/targets/`. Tests for plugins have the plugin type prepended.
## Creating new modules or plugins
Creating new modules and plugins requires a bit more work than other Pull Requests.
1. Please make sure that your new module or plugin is of interest to a larger audience. Very specialized modules or plugins that
can only be used by very few people are better added to more specialized collections.
2. Please do not add more than one plugin/module in one PR, especially if it is the first plugin/module you are contributing.
That makes it easier for reviewers, and increases the chance that your PR will get merged. If you plan to contribute a group
of plugins/modules (say, more than a module and a corresponding ``_info`` module), please mention that in the first PR. In
such cases, you also have to think whether it is better to publish the group of plugins/modules in a new collection.
3. When creating a new module or plugin, please make sure that you follow various guidelines:
- Follow [development conventions](https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_best_practices.html);
- Follow [documentation standards](https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html) and
the [Ansible style guide](https://docs.ansible.com/ansible/devel/dev_guide/style_guide/index.html#style-guide);
- Make sure your modules and plugins are [GPL-3.0-or-later](https://www.gnu.org/licenses/gpl-3.0-standalone.html) licensed
(new module_utils can also be [BSD-2-clause](https://opensource.org/licenses/BSD-2-Clause) licensed);
- Make sure that new plugins and modules have tests (unit tests, integration tests, or both); it is preferable to have some tests
which run in CI.
4. Action plugins need to be accompanied by a module, even if the module file only contains documentation
(`DOCUMENTATION`, `EXAMPLES` and `RETURN`). The module must have the same name and directory path in `plugins/modules/`
as the action plugin has in `plugins/action/`.
5. Make sure to add a BOTMETA entry for your new module/plugin in `.github/BOTMETA.yml`. Search for other plugins/modules in the
same directory to see how entries could look (a minimal sketch follows this list). You should list all authors either as `maintainers` or under `ignore`. People
listed as `maintainers` will be pinged for new issues and PRs that modify the module/plugin or its tests.
When you add a new plugin/module, we expect that you perform maintainer duty for at least some time after contributing it.
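The following is only a rough sketch of what such a BOTMETA entry could look like; the module path and GitHub handle are placeholders, and the exact keys and path conventions (the real file may use macros for common directories) should be copied from neighbouring entries in `.github/BOTMETA.yml`:
```yaml
# Hypothetical excerpt; copy the exact conventions from existing entries in .github/BOTMETA.yml.
files:
  plugins/modules/my_new_module.py:
    maintainers: your-github-handle
```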

View File

@ -0,0 +1,675 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

File diff suppressed because it is too large

View File

@ -0,0 +1,8 @@
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -0,0 +1 @@
../COPYING

View File

@ -0,0 +1,9 @@
MIT License
Copyright (c) <year> <copyright holders>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,48 @@
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Python Software Foundation;
All Rights Reserved" are retained in Python alone or in any derivative version
prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.

View File

@ -0,0 +1,30 @@
{
"collection_info": {
"namespace": "community",
"name": "general",
"version": "8.2.0",
"authors": [
"Ansible (https://github.com/ansible)"
],
"readme": "README.md",
"tags": [
"community"
],
"description": "The community.general collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.",
"license": [],
"license_file": "COPYING",
"dependencies": {},
"repository": "https://github.com/ansible-collections/community.general",
"documentation": "https://docs.ansible.com/ansible/latest/collections/community/general/",
"homepage": "https://github.com/ansible-collections/community.general",
"issues": "https://github.com/ansible-collections/community.general/issues"
},
"file_manifest_file": {
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "150ebe285b4e562a0ce4f4265188c406584ebb601b16846c65ecd47dcf436c1b",
"format": 1
},
"format": 1
}

View File

@ -0,0 +1,141 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->
# Community General Collection
[![Build Status](https://dev.azure.com/ansible/community.general/_apis/build/status/CI?branchName=stable-8)](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
[![EOL CI](https://github.com/ansible-collections/community.general/workflows/EOL%20CI/badge.svg?event=push)](https://github.com/ansible-collections/community.general/actions)
[![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.general)](https://codecov.io/gh/ansible-collections/community.general)
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by the Ansible community which are not part of more specialized community collections.
You can find [documentation for this collection on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
Please note that this collection does **not** support Windows targets. Only connection plugins included in this collection might support Windows targets, and will explicitly mention that in their documentation if they do so.
## Code of Conduct
We follow the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html) in all our interactions within this project.
If you encounter abusive behavior violating the [Ansible Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html), please refer to the [policy violations](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html#policy-violations) section of the Code of Conduct for information on how to raise a complaint.
## Tested with Ansible
Tested with the current ansible-core 2.13, ansible-core 2.14, ansible-core 2.15, and ansible-core 2.16 releases and the current development version of ansible-core. Ansible-core versions before 2.13.0 are not supported; this includes all ansible-base 2.10 and Ansible 2.9 releases.
## External requirements
Some modules and plugins require external libraries. Please check the requirements for each plugin or module you use in the documentation to find out which requirements are needed.
## Included content
Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/ui/repo/published/community/general/) or the [documentation on the Ansible docs site](https://docs.ansible.com/ansible/latest/collections/community/general/).
## Using this collection
This collection is shipped with the Ansible package. If you have the Ansible package installed, no further action is required.
If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole Ansible package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/ui/repo/published/community/general/) manually with the `ansible-galaxy` command-line tool:
```bash
ansible-galaxy collection install community.general
```
You can also include it in a `requirements.yml` file and install it via `ansible-galaxy collection install -r requirements.yml` using the format:
```yaml
collections:
- name: community.general
```
Note that if you install the collection manually, it will not be upgraded automatically when you upgrade the Ansible package. To upgrade the collection to the latest available version, run the following command:
```bash
ansible-galaxy collection install community.general --upgrade
```
You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/ui/repo/published/community/general/):
```bash
ansible-galaxy collection install community.general:==X.Y.Z
```
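If you prefer to manage collections through a `requirements.yml` file, you can pin a version there as well; the version range below is purely illustrative and should be adjusted to the release you need:
```yaml
collections:
  - name: community.general
    version: ">=8.2.0,<9.0.0"  # hypothetical version range
```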
See [Ansible Using collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html) for more details.
## Contributing to this collection
The content of this collection is made by good people just like you, a community of individuals collaborating on making the world better through developing automation software.
We are actively accepting new contributors.
All types of contributions are very welcome.
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md)!
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).
### Running tests
See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
## Collection maintenance
To learn how to maintain / become a maintainer of this collection, refer to:
* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
* [Maintainer guidelines](https://github.com/ansible/community-docs/blob/main/maintaining.rst).
It is necessary for maintainers of this collection to be subscribed to:
* The collection itself (the `Watch` button → `All Activity` in the upper right corner of the repository's homepage).
* The "Changes Impacting Collection Contributors and Maintainers" [issue](https://github.com/ansible-collections/overview/issues/45).
They also should be subscribed to Ansible's [The Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn).
## Communication
We announce important development changes and releases through Ansible's [The Bullhorn newsletter](https://eepurl.com/gZmiEP). If you are a collection developer, be sure you are subscribed.
Join us in the `#ansible` (general use questions and support), `#ansible-community` (community and collection development questions), and other [IRC channels](https://docs.ansible.com/ansible/devel/community/communication.html#irc-channels) on [Libera.chat](https://libera.chat).
We take part in the global quarterly [Ansible Contributor Summit](https://github.com/ansible/community/wiki/Contributor-Summit) virtually or in-person. Track [The Bullhorn newsletter](https://eepurl.com/gZmiEP) and join us.
For more information about communities, meetings and agendas see [Community Wiki](https://github.com/ansible/community/wiki/Community).
For more information about communication, refer to Ansible's [Communication guide](https://docs.ansible.com/ansible/devel/community/communication.html).
## Publishing New Version
See the [Releasing guidelines](https://github.com/ansible/community-docs/blob/main/releasing_collections.rst) to learn how to release this collection.
## Release notes
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-8/CHANGELOG.rst).
## Roadmap
In general, we plan to release a major version every six months, and minor versions every two months. Major versions can contain breaking changes, while minor versions only contain new features and bugfixes.
See [this issue](https://github.com/ansible-collections/community.general/issues/582) for information on releasing, versioning, and deprecation.
## More information
- [Ansible Collection overview](https://github.com/ansible-collections/overview)
- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
## Licensing
This collection is primarily licensed and distributed as a whole under the GNU General Public License v3.0 or later.
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/main/COPYING) for the full text.
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/PSF-2.0.txt).
All files have a machine-readable `SPDX-License-Identifier:` comment denoting their respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `.reuse/dep5`. This conforms to the [REUSE specification](https://reuse.software/spec/).

View File

@ -0,0 +1,5 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
/.plugin-cache.yaml

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,3 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project

View File

@ -0,0 +1,34 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
changelog_filename_template: ../CHANGELOG.rst
changelog_filename_version_depth: 0
changes_file: changelog.yaml
changes_format: combined
keep_fragments: false
mention_ancestor: true
flatmap: true
new_plugins_after_name: removed_features
notesdir: fragments
prelude_section_name: release_summary
prelude_section_title: Release Summary
sections:
- - major_changes
- Major Changes
- - minor_changes
- Minor Changes
- - breaking_changes
- Breaking Changes / Porting Guide
- - deprecated_features
- Deprecated Features
- - removed_features
- Removed Features (previously deprecated)
- - security_fixes
- Security Fixes
- - bugfixes
- Bugfixes
- - known_issues
- Known Issues
title: Community General

View File

@ -0,0 +1,80 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->
Committers Guidelines for community.general
===========================================
This document is based on the [Ansible committer guidelines](https://github.com/ansible/ansible/blob/b57444af14062ec96e0af75fdfc2098c74fe2d9a/docs/docsite/rst/community/committer_guidelines.rst) ([latest version](https://docs.ansible.com/ansible/devel/community/committer_guidelines.html)).
These are the guidelines for people with commit privileges on the Ansible Community General Collection GitHub repository. Please read the guidelines before you commit.
These guidelines apply to everyone. At the same time, this is NOT a process document. So just use good judgment. You have been given commit access because we trust your judgment.
That said, use the trust wisely.
If you abuse the trust and break components and builds, and so on, the trust level falls and you may be asked not to commit or you may lose your commit privileges.
Our workflow on GitHub
----------------------
As a committer, you may already know this, but our workflow forms a lot of our team policies. Please ensure you are aware of the following workflow steps:
* Fork the repository upon which you want to do some work to your own personal repository
* Work on the specific branch upon which you need to commit
* Create a Pull Request back to the collection repository and await reviews
* Adjust code as necessary based on the Comments provided
* Ask someone from the other committers to do a final review and merge
Sometimes, committers merge their own pull requests. This section is a set of guidelines. If you are changing a comma in a doc or making a very minor change, you can use your best judgement. This is another trust thing. The process is critical for any major change, but for little things or getting something done quickly, use your best judgement and make sure people on the team are aware of your work.
Roles
-----
* Release managers: Merge pull requests to `stable-X` branches, create tags to do releases.
* Committers: Fine to do PRs for most things, but we should have a timebox. Hanging PRs may merge on the judgement of these devs.
* Module maintainers: Module maintainers own specific modules and have indirect commit access through the current module PR mechanisms. This is primarily [ansibullbot](https://github.com/ansibullbot)'s `shipit` mechanism.
General rules
-------------
Individuals with direct commit access to this collection repository are entrusted with powers that allow them to do a broad variety of things, probably more than we can write down. Rather than rules, treat these as general *guidelines*; individuals with this power are expected to use their best judgement.
* Do NOTs:
- Do not commit directly.
- Do not merge your own PRs. Someone else should have a chance to review and approve the PR merge. You have a small amount of leeway here for very minor changes.
- Do not forget about non-standard / alternate environments. Consider the alternatives. Yes, people have bad/unusual/strange environments (like binaries from multiple init systems installed), but they are the ones who need us the most.
- Do not drag your community team members down. Discuss the technical merits of any pull requests you review. Avoid negativity and personal comments. For more guidance on being a good community member, read the [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
- Do not forget about the maintenance burden. High-maintenance features may not be worth adding.
- Do not break playbooks. Always keep backwards compatibility in mind.
- Do not forget to keep it simple. Complexity breeds all kinds of problems.
- Do not merge to branches other than `main`, especially not to `stable-X`, if you do not have explicit permission to do so.
- Do not create tags. Tags are used in the release process, and should only be created by the people responsible for managing the stable branches.
* Do:
- Squash commits and avoid merges whenever possible; use GitHub's squash commits or cherry-pick if needed (bisect thanks you).
- Be active. Committers who have no activity on the project (through merges, triage, commits, and so on) will have their permissions suspended.
- Consider backwards compatibility (goes back to "do not break existing playbooks").
- Write tests. PRs with tests are looked at with more priority than PRs without tests that should have them included. While not all changes require tests, be sure to add them for bug fixes or functionality changes.
- Discuss with other committers, especially when you are unsure of something.
- Document! If your PR is a new feature or a change to behavior, make sure you've updated all associated documentation or have notified the right people to do so.
- Consider scope; sometimes a fix can be generalized.
- Keep it simple, so things stay maintainable, debuggable, and intelligible.
Committers are expected to continue to follow the same community and contribution guidelines followed by the rest of the Ansible community.
People
------
Individuals who have been asked to become a part of this group have generally been contributing in significant ways to the community.general collection for some time. Should they agree, they are requested to add their names and GitHub IDs to this file, in the section below, through a pull request. Doing so indicates that these individuals agree to act in the ways that their fellow committers trust that they will act.
| Name | GitHub ID | IRC Nick | Other |
| ------------------- | -------------------- | ------------------ | -------------------- |
| Alexei Znamensky | russoz | russoz | |
| Andrew Klychkov | andersson007 | andersson007_ | |
| Andrew Pantuso | Ajpantuso | ajpantuso | |
| Felix Fontein | felixfontein | felixfontein | |
| John R Barker | gundalow | gundalow | |

View File

@ -0,0 +1,16 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
sections:
- title: Guides
toctree:
- filter_guide
- test_guide
- title: Cloud Guides
toctree:
- guide_alicloud
- guide_online
- guide_packet
- guide_scaleway

View File

@ -0,0 +1,18 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list1:
- name: foo
extra: true
- name: bar
extra: false
- name: meh
extra: true
list2:
- name: foo
path: /foo
- name: baz
path: /baz

View File

@ -0,0 +1,24 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list1:
- name: myname01
param01:
x: default_value
y: default_value
list:
- default_value
- name: myname02
param01: [1, 1, 2, 3]
list2:
- name: myname01
param01:
y: patch_value
z: patch_value
list:
- patch_value
- name: myname02
param01: [3, 4, 4, {key: value}]

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 1. Merge two lists by common attribute 'name'
include_vars:
dir: example-001_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-001.out

View File

@ -0,0 +1,7 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ list1|
community.general.lists_mergeby(list2, 'name') }}"

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 2. Merge two lists by common attribute 'name'
include_vars:
dir: example-002_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-002.out

View File

@ -0,0 +1,7 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name') }}"

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 3. Merge recursive by 'name', replace lists (default)
include_vars:
dir: example-003_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-003.out

View File

@ -0,0 +1,8 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true) }}"

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 4. Merge recursive by 'name', keep lists
include_vars:
dir: example-004_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-004.out

View File

@ -0,0 +1,9 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='keep') }}"

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 5. Merge recursive by 'name', append lists
include_vars:
dir: example-005_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-005.out

View File

@ -0,0 +1,9 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='append') }}"

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 6. Merge recursive by 'name', prepend lists
include_vars:
dir: example-006_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-006.out

View File

@ -0,0 +1,9 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend') }}"

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 7. Merge recursive by 'name', append lists 'remove present'
include_vars:
dir: example-007_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-007.out

View File

@ -0,0 +1,9 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='append_rp') }}"

View File

@ -0,0 +1,14 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
- name: 8. Merge recursive by 'name', prepend lists 'remove present'
include_vars:
dir: example-008_vars
- debug:
var: list3
when: debug|d(false)|bool
- template:
src: list3.out.j2
dest: example-008.out

View File

@ -0,0 +1,9 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend_rp') }}"

View File

@ -0,0 +1,54 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
examples:
- label: 'In the example below the lists are merged by the attribute ``name``:'
file: example-001_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces:'
file: example-001.out
lang: 'yaml'
- label: 'It is possible to use a list of lists as an input of the filter:'
file: example-002_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces the same result as in the previous example:'
file: example-002.out
lang: 'yaml'
- label: 'Example ``list_merge=replace`` (default):'
file: example-003_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces:'
file: example-003.out
lang: 'yaml'
- label: 'Example ``list_merge=keep``:'
file: example-004_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces:'
file: example-004.out
lang: 'yaml'
- label: 'Example ``list_merge=append``:'
file: example-005_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces:'
file: example-005.out
lang: 'yaml'
- label: 'Example ``list_merge=prepend``:'
file: example-006_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces:'
file: example-006.out
lang: 'yaml'
- label: 'Example ``list_merge=append_rp``:'
file: example-007_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces:'
file: example-007.out
lang: 'yaml'
- label: 'Example ``list_merge=prepend_rp``:'
file: example-008_vars/list3.yml
lang: 'yaml+jinja'
- label: 'This produces:'
file: example-008.out
lang: 'yaml'

View File

@ -0,0 +1,13 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
{% for i in examples %}
{{ i.label }}
.. code-block:: {{ i.lang }}
{{ lookup('file', i.file)|indent(2) }}
{% endfor %}

View File

@ -0,0 +1,62 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Merging lists of dictionaries
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the ``lists_mergeby`` filter.
.. note:: The output of the examples in this section uses the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
Let us use the lists below in the following examples:
.. code-block:: yaml
{{ lookup('file', 'default-common.yml')|indent(2) }}
{% for i in examples[0:2] %}
{{ i.label }}
.. code-block:: {{ i.lang }}
{{ lookup('file', i.file)|indent(2) }}
{% endfor %}
.. versionadded:: 2.0.0
{% for i in examples[2:4] %}
{{ i.label }}
.. code-block:: {{ i.lang }}
{{ lookup('file', i.file)|indent(2) }}
{% endfor %}
The filter also accepts two optional parameters: ``recursive`` and ``list_merge``. These parameters are only supported when used with ansible-base 2.10 or ansible-core, but not with Ansible 2.9. This is available since community.general 4.4.0.
**recursive**
A boolean, defaults to ``False``. Whether ``community.general.lists_mergeby`` should recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
**list_merge**
A string; its possible values are ``replace`` (default), ``keep``, ``append``, ``prepend``, ``append_rp`` or ``prepend_rp``. It modifies the behaviour of ``community.general.lists_mergeby`` when the hashes to merge contain arrays/lists.
The examples below set ``recursive=true`` and display the differences among all six options of ``list_merge``. Functionality of the parameters is exactly the same as in the filter ``combine``. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
Let us use the lists below in the following examples:
.. code-block:: yaml
{{ lookup('file', 'default-recursive-true.yml')|indent(2) }}
{% for i in examples[4:16] %}
{{ i.label }}
.. code-block:: {{ i.lang }}
{{ lookup('file', i.file)|indent(2) }}
{% endfor %}

View File

@ -0,0 +1,7 @@
{#
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
#}
list3:
{{ list3|to_nice_yaml(indent=0) }}

View File

@ -0,0 +1,62 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# 1) Run all examples and create example-XXX.out
# shell> ansible-playbook playbook.yml -e examples=true
#
# 2) Optionally, for testing, create examples_all.rst
# shell> ansible-playbook playbook.yml -e examples_all=true
#
# 3) Create docs RST files
# shell> ansible-playbook playbook.yml -e merging_lists_of_dictionaries=true
#
# Notes:
# * Use YAML callback, e.g. set ANSIBLE_STDOUT_CALLBACK=community.general.yaml
# * Use sphinx-view to render and review the RST files
# shell> sphinx-view <path_to_helper>/examples_all.rst
# * Proofread and copy completed docs *.rst files into the directory rst.
# * Then delete the *.rst and *.out files from this directory. Do not
# add *.rst and *.out in this directory to the version control.
#
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# community.general/docs/docsite/helper/lists_mergeby/playbook.yml
- hosts: localhost
gather_facts: false
tasks:
- block:
- import_tasks: example-001.yml
tags: t001
- import_tasks: example-002.yml
tags: t002
- import_tasks: example-003.yml
tags: t003
- import_tasks: example-004.yml
tags: t004
- import_tasks: example-005.yml
tags: t005
- import_tasks: example-006.yml
tags: t006
- import_tasks: example-007.yml
tags: t007
- import_tasks: example-008.yml
tags: t008
when: examples|d(false)|bool
- block:
- include_vars: examples.yml
- template:
src: examples_all.rst.j2
dest: examples_all.rst
when: examples_all|d(false)|bool
- block:
- include_vars: examples.yml
- template:
src: filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2
dest: filter_guide_abstract_informations_merging_lists_of_dictionaries.rst
when: merging_lists_of_dictionaries|d(false)|bool

View File

@ -0,0 +1,27 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
edit_on_github:
repository: ansible-collections/community.general
branch: main
path_prefix: ''
extra_links:
- description: Submit a bug report
url: https://github.com/ansible-collections/community.general/issues/new?assignees=&labels=&template=bug_report.yml
- description: Request a feature
url: https://github.com/ansible-collections/community.general/issues/new?assignees=&labels=&template=feature_request.yml
communication:
matrix_rooms:
- topic: General usage and support questions
room: '#users:ansible.im'
irc_channels:
- topic: General usage and support questions
network: Libera
channel: '#ansible'
mailing_lists:
- topic: Ansible Project List
url: https://groups.google.com/g/ansible-project

View File

@ -0,0 +1,23 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
.. _ansible_collections.community.general.docsite.filter_guide:
community.general Filter Guide
==============================
The :ref:`community.general collection <plugins_in_community.general>` offers several useful filter plugins.
.. toctree::
:maxdepth: 2
filter_guide_paths
filter_guide_abstract_informations
filter_guide_working_with_times
filter_guide_working_with_versions
filter_guide_creating_identifiers
filter_guide_conversions
filter_guide_selecting_json_data
filter_guide_working_with_unicode

View File

@ -0,0 +1,15 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Abstract transformations
------------------------
.. toctree::
:maxdepth: 1
filter_guide_abstract_informations_dictionaries
filter_guide_abstract_informations_grouping
filter_guide_abstract_informations_merging_lists_of_dictionaries
filter_guide_abstract_informations_counting_elements_in_sequence

View File

@ -0,0 +1,82 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Counting elements in a sequence
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The :ansplugin:`community.general.counter filter plugin <community.general.counter#filter>` allows you to count (hashable) elements in a sequence. Elements are returned as dictionary keys and their counts are stored as dictionary values.
.. code-block:: yaml+jinja
- name: Count character occurrences in a string
debug:
msg: "{{ 'abccbaabca' | community.general.counter }}"
- name: Count items in a list
debug:
msg: "{{ ['car', 'car', 'bike', 'plane', 'bike'] | community.general.counter }}"
This produces:
.. code-block:: ansible-output
TASK [Count character occurrences in a string] ********************************************
ok: [localhost] => {
"msg": {
"a": 4,
"b": 3,
"c": 3
}
}
TASK [Count items in a list] **************************************************************
ok: [localhost] => {
"msg": {
"bike": 2,
"car": 2,
"plane": 1
}
}
This plugin is useful for selecting resources based on current allocation:
.. code-block:: yaml+jinja
- name: Get ID of SCSI controller(s) with less than 4 disks attached and choose the one with the least disks
debug:
msg: >-
{{
( disks | dict2items | map(attribute='value.adapter') | list
| community.general.counter | dict2items
| rejectattr('value', '>=', 4) | sort(attribute='value') | first
).key
}}
vars:
disks:
sda:
adapter: scsi_1
sdb:
adapter: scsi_1
sdc:
adapter: scsi_1
sdd:
adapter: scsi_1
sde:
adapter: scsi_2
sdf:
adapter: scsi_3
sdg:
adapter: scsi_3
This produces:
.. code-block:: ansible-output
TASK [Get ID of SCSI controller(s) with less than 4 disks attached and choose the one with the least disks]
ok: [localhost] => {
"msg": "scsi_2"
}
.. versionadded:: 4.3.0

View File

@ -0,0 +1,124 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Dictionaries
^^^^^^^^^^^^
You can use the :ansplugin:`community.general.dict_kv filter <community.general.dict_kv#filter>` to create a single-entry dictionary with ``value | community.general.dict_kv(key)``:
.. code-block:: yaml+jinja
- name: Create a single-entry dictionary
debug:
msg: "{{ myvar | community.general.dict_kv('thatsmyvar') }}"
vars:
myvar: myvalue
- name: Create a list of dictionaries where the 'server' field is taken from a list
debug:
msg: >-
{{ myservers | map('community.general.dict_kv', 'server')
| map('combine', common_config) }}
vars:
common_config:
type: host
database: all
myservers:
- server1
- server2
This produces:
.. code-block:: ansible-output
TASK [Create a single-entry dictionary] **************************************************
ok: [localhost] => {
"msg": {
"thatsmyvar": "myvalue"
}
}
TASK [Create a list of dictionaries where the 'server' field is taken from a list] *******
ok: [localhost] => {
"msg": [
{
"database": "all",
"server": "server1",
"type": "host"
},
{
"database": "all",
"server": "server2",
"type": "host"
}
]
}
.. versionadded:: 2.0.0
If you need to convert a list of key-value pairs to a dictionary, you can use the ``dict`` function. Unfortunately, this function cannot be used with ``map``. For this, the :ansplugin:`community.general.dict filter <community.general.dict#filter>` can be used:
.. code-block:: yaml+jinja
- name: Create a dictionary with the dict function
debug:
msg: "{{ dict([[1, 2], ['a', 'b']]) }}"
- name: Create a dictionary with the community.general.dict filter
debug:
msg: "{{ [[1, 2], ['a', 'b']] | community.general.dict }}"
- name: Create a list of dictionaries with map and the community.general.dict filter
debug:
msg: >-
{{ values | map('zip', ['k1', 'k2', 'k3'])
| map('map', 'reverse')
| map('community.general.dict') }}
vars:
values:
- - foo
- 23
- a
- - bar
- 42
- b
This produces:
.. code-block:: ansible-output
TASK [Create a dictionary with the dict function] ****************************************
ok: [localhost] => {
"msg": {
"1": 2,
"a": "b"
}
}
TASK [Create a dictionary with the community.general.dict filter] ************************
ok: [localhost] => {
"msg": {
"1": 2,
"a": "b"
}
}
TASK [Create a list of dictionaries with map and the community.general.dict filter] ******
ok: [localhost] => {
"msg": [
{
"k1": "foo",
"k2": 23,
"k3": "a"
},
{
"k1": "bar",
"k2": 42,
"k3": "b"
}
]
}
.. versionadded:: 3.0.0

View File

@ -0,0 +1,103 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Grouping
^^^^^^^^
If you have a list of dictionaries, the Jinja2 ``groupby`` filter allows you to group the list by an attribute. This results in a list of ``(grouper, list)`` namedtuples, where ``list`` contains all dictionaries where the selected attribute equals ``grouper``. If you know that for every ``grouper``, there will be at most one entry in that list, you can use the :ansplugin:`community.general.groupby_as_dict filter <community.general.groupby_as_dict#filter>` to convert the original list into a dictionary which maps ``grouper`` to the corresponding dictionary.
One example is ``ansible_facts.mounts``, which is a list of dictionaries where each has one ``device`` element to indicate the device which is mounted. Therefore, ``ansible_facts.mounts | community.general.groupby_as_dict('device')`` is a dictionary mapping a device to the mount information:
.. code-block:: yaml+jinja
- name: Output mount facts grouped by device name
debug:
var: ansible_facts.mounts | community.general.groupby_as_dict('device')
- name: Output mount facts grouped by mount point
debug:
var: ansible_facts.mounts | community.general.groupby_as_dict('mount')
This produces:
.. code-block:: ansible-output
TASK [Output mount facts grouped by device name] ******************************************
ok: [localhost] => {
"ansible_facts.mounts | community.general.groupby_as_dict('device')": {
"/dev/sda1": {
"block_available": 2000,
"block_size": 4096,
"block_total": 2345,
"block_used": 345,
"device": "/dev/sda1",
"fstype": "ext4",
"inode_available": 500,
"inode_total": 512,
"inode_used": 12,
"mount": "/boot",
"options": "rw,relatime,data=ordered",
"size_available": 56821,
"size_total": 543210,
"uuid": "ab31cade-d9c1-484d-8482-8a4cbee5241a"
},
"/dev/sda2": {
"block_available": 1234,
"block_size": 4096,
"block_total": 12345,
"block_used": 11111,
"device": "/dev/sda2",
"fstype": "ext4",
"inode_available": 1111,
"inode_total": 1234,
"inode_used": 123,
"mount": "/",
"options": "rw,relatime",
"size_available": 42143,
"size_total": 543210,
"uuid": "abcdef01-2345-6789-0abc-def012345678"
}
}
}
TASK [Output mount facts grouped by mount point] ******************************************
ok: [localhost] => {
"ansible_facts.mounts | community.general.groupby_as_dict('mount')": {
"/": {
"block_available": 1234,
"block_size": 4096,
"block_total": 12345,
"block_used": 11111,
"device": "/dev/sda2",
"fstype": "ext4",
"inode_available": 1111,
"inode_total": 1234,
"inode_used": 123,
"mount": "/",
"options": "rw,relatime",
"size_available": 42143,
"size_total": 543210,
"uuid": "bdf50b7d-4859-40af-8665-c637ee7a7808"
},
"/boot": {
"block_available": 2000,
"block_size": 4096,
"block_total": 2345,
"block_used": 345,
"device": "/dev/sda1",
"fstype": "ext4",
"inode_available": 500,
"inode_total": 512,
"inode_used": 12,
"mount": "/boot",
"options": "rw,relatime,data=ordered",
"size_available": 56821,
"size_total": 543210,
"uuid": "ab31cade-d9c1-484d-8482-8a4cbee5241a"
}
}
}
.. versionadded:: 3.0.0

View File

@ -0,0 +1,297 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Merging lists of dictionaries
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the :ansplugin:`community.general.lists_mergeby filter <community.general.lists_mergeby#filter>`.
.. note:: The output of the examples in this section uses the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
Let us use the lists below in the following examples:
.. code-block:: yaml
list1:
- name: foo
extra: true
- name: bar
extra: false
- name: meh
extra: true
list2:
- name: foo
path: /foo
- name: baz
path: /baz
In the example below the lists are merged by the attribute ``name``:
.. code-block:: yaml+jinja
list3: "{{ list1|
community.general.lists_mergeby(list2, 'name') }}"
This produces:
.. code-block:: yaml
list3:
- extra: false
name: bar
- name: baz
path: /baz
- extra: true
name: foo
path: /foo
- extra: true
name: meh
.. versionadded:: 2.0.0
It is possible to use a list of lists as an input of the filter:
.. code-block:: yaml+jinja
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name') }}"
This produces the same result as in the previous example:
.. code-block:: yaml
list3:
- extra: false
name: bar
- name: baz
path: /baz
- extra: true
name: foo
path: /foo
- extra: true
name: meh
The filter also accepts two optional parameters: :ansopt:`community.general.lists_mergeby#filter:recursive` and :ansopt:`community.general.lists_mergeby#filter:list_merge`. This is available since community.general 4.4.0.
**recursive**
A boolean, defaults to ``false``. Whether the :ansplugin:`community.general.lists_mergeby#filter` filter should recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
**list_merge**
A string; its possible values are :ansval:`replace` (default), :ansval:`keep`, :ansval:`append`, :ansval:`prepend`, :ansval:`append_rp` or :ansval:`prepend_rp`. It modifies the behaviour of :ansplugin:`community.general.lists_mergeby#filter` when the hashes to merge contain arrays/lists.
The examples below set :ansopt:`community.general.lists_mergeby#filter:recursive=true` and display the differences among all six options of :ansopt:`community.general.lists_mergeby#filter:list_merge`. Functionality of the parameters is exactly the same as in the filter :ansplugin:`ansible.builtin.combine#filter`. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
Let us use the lists below in the following examples:
.. code-block:: yaml
list1:
- name: myname01
param01:
x: default_value
y: default_value
list:
- default_value
- name: myname02
param01: [1, 1, 2, 3]
list2:
- name: myname01
param01:
y: patch_value
z: patch_value
list:
- patch_value
- name: myname02
param01: [3, 4, 4, {key: value}]
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=replace` (default):
.. code-block:: yaml+jinja
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true) }}"
This produces:
.. code-block:: yaml
list3:
- name: myname01
param01:
list:
- patch_value
x: default_value
y: patch_value
z: patch_value
- name: myname02
param01:
- 3
- 4
- 4
- key: value
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=keep`:
.. code-block:: yaml+jinja
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='keep') }}"
This produces:
.. code-block:: yaml
list3:
- name: myname01
param01:
list:
- default_value
x: default_value
y: patch_value
z: patch_value
- name: myname02
param01:
- 1
- 1
- 2
- 3
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append`:
.. code-block:: yaml+jinja
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='append') }}"
This produces:
.. code-block:: yaml
list3:
- name: myname01
param01:
list:
- default_value
- patch_value
x: default_value
y: patch_value
z: patch_value
- name: myname02
param01:
- 1
- 1
- 2
- 3
- 3
- 4
- 4
- key: value
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend`:
.. code-block:: yaml+jinja
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend') }}"
This produces:
.. code-block:: yaml
list3:
- name: myname01
param01:
list:
- patch_value
- default_value
x: default_value
y: patch_value
z: patch_value
- name: myname02
param01:
- 3
- 4
- 4
- key: value
- 1
- 1
- 2
- 3
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append_rp`:
.. code-block:: yaml+jinja
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='append_rp') }}"
This produces:
.. code-block:: yaml
list3:
- name: myname01
param01:
list:
- default_value
- patch_value
x: default_value
y: patch_value
z: patch_value
- name: myname02
param01:
- 1
- 1
- 2
- 3
- 4
- 4
- key: value
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend_rp`:
.. code-block:: yaml+jinja
list3: "{{ [list1, list2]|
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend_rp') }}"
This produces:
.. code-block:: yaml
list3:
- name: myname01
param01:
list:
- patch_value
- default_value
x: default_value
y: patch_value
z: patch_value
- name: myname02
param01:
- 3
- 4
- 4
- key: value
- 1
- 1
- 2

View File

@ -0,0 +1,113 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Conversions
-----------
Parsing CSV files
^^^^^^^^^^^^^^^^^
Ansible offers the :ansplugin:`community.general.read_csv module <community.general.read_csv#module>` to read CSV files. Sometimes you need to parse CSV data from a string instead. For this, the :ansplugin:`community.general.from_csv filter <community.general.from_csv#filter>` exists.
.. code-block:: yaml+jinja
- name: "Parse CSV from string"
debug:
msg: "{{ csv_string | community.general.from_csv }}"
vars:
csv_string: |
foo,bar,baz
1,2,3
you,this,then
This produces:
.. code-block:: ansible-output
TASK [Parse CSV from string] **************************************************************
ok: [localhost] => {
"msg": [
{
"bar": "2",
"baz": "3",
"foo": "1"
},
{
"bar": "this",
"baz": "then",
"foo": "you"
}
]
}
The :ansplugin:`community.general.from_csv filter <community.general.from_csv#filter>` has several keyword arguments to control its behavior:
:dialect: Dialect of the CSV file. Default is ``excel``. Other possible choices are ``excel-tab`` and ``unix``. If one of ``delimiter``, ``skipinitialspace`` or ``strict`` is specified, ``dialect`` is ignored.
:fieldnames: A set of column names to use. If not provided, the first line of the CSV is assumed to contain the column names.
:delimiter: Sets the delimiter to use. Default depends on the dialect used.
:skipinitialspace: Set to ``true`` to ignore space directly after the delimiter. Default depends on the dialect used (usually ``false``).
:strict: Set to ``true`` to error out on invalid CSV input.
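For example, a minimal sketch that parses semicolon-separated data without a header row might look like this (the input string, the ``;`` delimiter, and the column names are made up for illustration):

.. code-block:: yaml+jinja

   - name: Parse semicolon-separated CSV data without a header row
     debug:
       msg: "{{ csv_string | community.general.from_csv(delimiter=';', fieldnames=['foo', 'bar', 'baz']) }}"
     vars:
       csv_string: |
         1;2;3
         you;this;then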
.. versionadded:: 3.0.0
Converting to JSON
^^^^^^^^^^^^^^^^^^
`JC <https://pypi.org/project/jc/>`_ is a CLI tool and Python library which allows you to interpret the output of various CLI programs as JSON. It is also available as a filter in community.general, called :ansplugin:`community.general.jc#filter`. This filter needs the `jc Python library <https://pypi.org/project/jc/>`_ installed on the controller.
.. code-block:: yaml+jinja
- name: Run 'ls' to list files in /
command: ls /
register: result
- name: Parse the ls output
debug:
msg: "{{ result.stdout | community.general.jc('ls') }}"
This produces:
.. code-block:: ansible-output
TASK [Run 'ls' to list files in /] ********************************************************
changed: [localhost]
TASK [Parse the ls output] ****************************************************************
ok: [localhost] => {
"msg": [
{
"filename": "bin"
},
{
"filename": "boot"
},
{
"filename": "dev"
},
{
"filename": "etc"
},
{
"filename": "home"
},
{
"filename": "lib"
},
{
"filename": "proc"
},
{
"filename": "root"
},
{
"filename": "run"
},
{
"filename": "tmp"
}
]
}
.. versionadded:: 2.0.0

View File

@ -0,0 +1,85 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Creating identifiers
--------------------
The following filters allow you to create identifiers.
Hashids
^^^^^^^
`Hashids <https://hashids.org/>`_ allow you to convert sequences of integers to short unique string identifiers. The :ansplugin:`community.general.hashids_encode#filter` and :ansplugin:`community.general.hashids_decode#filter` filters need the `hashids Python library <https://pypi.org/project/hashids/>`_ installed on the controller.
.. code-block:: yaml+jinja
- name: "Create hashid"
debug:
msg: "{{ [1234, 5, 6] | community.general.hashids_encode }}"
- name: "Decode hashid"
debug:
msg: "{{ 'jm2Cytn' | community.general.hashids_decode }}"
This produces:
.. code-block:: ansible-output
TASK [Create hashid] **********************************************************************
ok: [localhost] => {
"msg": "jm2Cytn"
}
TASK [Decode hashid] **********************************************************************
ok: [localhost] => {
"msg": [
1234,
5,
6
]
}
The hashids filters accept keyword arguments to allow fine-tuning the hashids generated:
:salt: String to use as salt when hashing.
:alphabet: String of 16 or more unique characters to produce a hash.
:min_length: Minimum length of hash produced.
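These keyword arguments can be combined; a minimal sketch might look like this (the salt value and the minimum length are made up for illustration):

.. code-block:: yaml+jinja

   - name: Create a hashid with a custom salt and a minimum length
     debug:
       msg: "{{ [1234, 5, 6] | community.general.hashids_encode(salt='my secret salt', min_length=12) }}"

Note that decoding such a hashid with :ansplugin:`community.general.hashids_decode#filter` requires passing the same keyword arguments that were used for encoding.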
.. versionadded:: 3.0.0
Random MACs
^^^^^^^^^^^
You can use the :ansplugin:`community.general.random_mac filter <community.general.random_mac#filter>` to complete a partial `MAC address <https://en.wikipedia.org/wiki/MAC_address>`_ to a random 6-byte MAC address.
.. code-block:: yaml+jinja
- name: "Create a random MAC starting with ff:"
debug:
msg: "{{ 'FF' | community.general.random_mac }}"
- name: "Create a random MAC starting with 00:11:22:"
debug:
msg: "{{ '00:11:22' | community.general.random_mac }}"
This produces:
.. code-block:: ansible-output
TASK [Create a random MAC starting with ff:] **********************************************
ok: [localhost] => {
"msg": "ff:69:d3:78:7f:b4"
}
TASK [Create a random MAC starting with 00:11:22:] ****************************************
ok: [localhost] => {
"msg": "00:11:22:71:5d:3b"
}
You can also initialize the random number generator from a seed to create random-but-idempotent MAC addresses:
.. code-block:: yaml+jinja
"{{ '52:54:00' | community.general.random_mac(seed=inventory_hostname) }}"

View File

@ -0,0 +1,9 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Paths
-----
The :ansplugin:`ansible.builtin.path_join filter <ansible.builtin.path_join#filter>` has been added in ansible-base 2.10. Community.general 3.0.0 and newer contains an alias ``community.general.path_join`` for this filter that could be used on Ansible 2.9 as well. Since community.general no longer supports Ansible 2.9, this is now a simple redirect to :ansplugin:`ansible.builtin.path_join filter <ansible.builtin.path_join#filter>`.
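A minimal usage sketch (the path components are made up for illustration):

.. code-block:: yaml+jinja

   - name: Join path components into a single path
     debug:
       msg: "{{ ['/etc', 'ssh', 'ssh_config'] | community.general.path_join }}"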

View File

@ -0,0 +1,149 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
.. _ansible_collections.community.general.docsite.json_query_filter:
Selecting JSON data: JSON queries
---------------------------------
To select a single element or a data subset from a complex data structure in JSON format (for example, Ansible facts), use the :ansplugin:`community.general.json_query filter <community.general.json_query#filter>`. The :ansplugin:`community.general.json_query#filter` filter lets you query a complex JSON structure and iterate over it using a loop structure.
.. note:: You must manually install the **jmespath** dependency on the Ansible controller before using this filter. This filter is built upon **jmespath**, and you can use the same syntax. For examples, see `jmespath examples <http://jmespath.org/examples.html>`_.
Consider this data structure:
.. code-block:: json
{
"domain_definition": {
"domain": {
"cluster": [
{
"name": "cluster1"
},
{
"name": "cluster2"
}
],
"server": [
{
"name": "server11",
"cluster": "cluster1",
"port": "8080"
},
{
"name": "server12",
"cluster": "cluster1",
"port": "8090"
},
{
"name": "server21",
"cluster": "cluster2",
"port": "9080"
},
{
"name": "server22",
"cluster": "cluster2",
"port": "9090"
}
],
"library": [
{
"name": "lib1",
"target": "cluster1"
},
{
"name": "lib2",
"target": "cluster2"
}
]
}
}
}
To extract all clusters from this structure, you can use the following query:
.. code-block:: yaml+jinja
- name: Display all cluster names
ansible.builtin.debug:
var: item
loop: "{{ domain_definition | community.general.json_query('domain.cluster[*].name') }}"
To extract all server names:
.. code-block:: yaml+jinja
- name: Display all server names
ansible.builtin.debug:
var: item
loop: "{{ domain_definition | community.general.json_query('domain.server[*].name') }}"
To extract ports from cluster1:
.. code-block:: yaml+jinja
- name: Display all ports from cluster1
ansible.builtin.debug:
var: item
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
vars:
server_name_cluster1_query: "domain.server[?cluster=='cluster1'].port"
.. note:: You can use a variable to make the query more readable.
To print out the ports from cluster1 in a comma separated string:
.. code-block:: yaml+jinja
- name: Display all ports from cluster1 as a string
ansible.builtin.debug:
msg: "{{ domain_definition | community.general.json_query('domain.server[?cluster==`cluster1`].port') | join(', ') }}"
.. note:: In the example above, quoting literals using backticks avoids escaping quotes and maintains readability.
You can use YAML `single quote escaping <https://yaml.org/spec/current.html#id2534365>`_:
.. code-block:: yaml+jinja
- name: Display all ports from cluster1
ansible.builtin.debug:
var: item
loop: "{{ domain_definition | community.general.json_query('domain.server[?cluster==''cluster1''].port') }}"
.. note:: Escaping single quotes within single quotes in YAML is done by doubling the single quote.
To get a hash map with all ports and names of a cluster:
.. code-block:: yaml+jinja
- name: Display all server ports and names from cluster1
ansible.builtin.debug:
var: item
loop: "{{ domain_definition | community.general.json_query(server_name_cluster1_query) }}"
vars:
server_name_cluster1_query: "domain.server[?cluster=='cluster1'].{name: name, port: port}"
To extract ports from all clusters with name starting with 'server1':
.. code-block:: yaml+jinja
- name: Display all ports from cluster1
ansible.builtin.debug:
msg: "{{ domain_definition | to_json | from_json | community.general.json_query(server_name_query) }}"
vars:
server_name_query: "domain.server[?starts_with(name,'server1')].port"
To extract ports from all clusters with name containing 'server1':
.. code-block:: yaml+jinja
- name: Display all ports from cluster1
ansible.builtin.debug:
msg: "{{ domain_definition | to_json | from_json | community.general.json_query(server_name_query) }}"
vars:
server_name_query: "domain.server[?contains(name,'server1')].port"
.. note:: While using ``starts_with`` and ``contains``, you have to use the ``to_json | from_json`` filter for correct parsing of the data structure.

View File

@ -0,0 +1,89 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Working with times
------------------
The :ansplugin:`community.general.to_time_unit filter <community.general.to_time_unit#filter>` allows you to convert times from a human-readable string to a unit. For example, ``'4h 30min 12second' | community.general.to_time_unit('hour')`` gives the number of hours that correspond to 4 hours, 30 minutes and 12 seconds.
There are shorthands to directly convert to various units, like :ansplugin:`community.general.to_hours#filter`, :ansplugin:`community.general.to_minutes#filter`, :ansplugin:`community.general.to_seconds#filter`, and so on. The following table lists all units that can be used:
.. list-table:: Units
:widths: 25 25 25 25
:header-rows: 1
* - Unit name
- Unit value in seconds
- Unit strings for filter
- Shorthand filter
* - Millisecond
- 1/1000 second
- ``ms``, ``millisecond``, ``milliseconds``, ``msec``, ``msecs``, ``msecond``, ``mseconds``
- :ansplugin:`community.general.to_milliseconds#filter`
* - Second
- 1 second
- ``s``, ``sec``, ``secs``, ``second``, ``seconds``
- :ansplugin:`community.general.to_seconds#filter`
* - Minute
- 60 seconds
- ``m``, ``min``, ``mins``, ``minute``, ``minutes``
- :ansplugin:`community.general.to_minutes#filter`
* - Hour
- 60*60 seconds
- ``h``, ``hour``, ``hours``
- :ansplugin:`community.general.to_hours#filter`
* - Day
- 24*60*60 seconds
- ``d``, ``day``, ``days``
- :ansplugin:`community.general.to_days#filter`
* - Week
- 7*24*60*60 seconds
- ``w``, ``week``, ``weeks``
- :ansplugin:`community.general.to_weeks#filter`
* - Month
- 30*24*60*60 seconds
- ``mo``, ``month``, ``months``
- :ansplugin:`community.general.to_months#filter`
* - Year
- 365*24*60*60 seconds
- ``y``, ``year``, ``years``
- :ansplugin:`community.general.to_years#filter`
Note that months and years use a simplified representation: a month is 30 days, and a year is 365 days. If you need different definitions of months or years, you can pass them as keyword arguments. For example, if you want a year to be 365.25 days, and a month to be 30.5 days, you can write ``'11months 4' | community.general.to_years(year=365.25, month=30.5)``. These keyword arguments can be specified to :ansplugin:`community.general.to_time_unit#filter` and to all shorthand filters.
.. code-block:: yaml+jinja
- name: Convert string to seconds
debug:
msg: "{{ '30h 20m 10s 123ms' | community.general.to_time_unit('seconds') }}"
- name: Convert string to hours
debug:
msg: "{{ '30h 20m 10s 123ms' | community.general.to_hours }}"
- name: Convert string to years (using 365.25 days == 1 year)
debug:
msg: "{{ '400d 15h' | community.general.to_years(year=365.25) }}"
This produces:
.. code-block:: ansible-output
TASK [Convert string to seconds] **********************************************************
ok: [localhost] => {
"msg": "109210.123"
}
TASK [Convert string to hours] ************************************************************
ok: [localhost] => {
"msg": "30.336145277778"
}
TASK [Convert string to years (using 365.25 days == 1 year)] ******************************
ok: [localhost] => {
"msg": "1.096851471595"
}
.. versionadded:: 0.2.0

View File

@ -0,0 +1,35 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Working with Unicode
---------------------
`Unicode <https://unicode.org/main.html>`_ makes it possible to produce two strings which may be visually equivalent but are composed of distinctly different characters/character sequences. To address this, Unicode defines `normalization forms <https://unicode.org/reports/tr15/>`_ which avoid these distinctions by choosing a unique character sequence for a given visual representation.
You can use the :ansplugin:`community.general.unicode_normalize filter <community.general.unicode_normalize#filter>` to normalize Unicode strings within your playbooks.
.. code-block:: yaml+jinja
- name: Compare Unicode representations
debug:
msg: "{{ with_combining_character | community.general.unicode_normalize == without_combining_character }}"
vars:
with_combining_character: "{{ 'Mayagu\u0308ez' }}"
without_combining_character: Mayagüez
This produces:
.. code-block:: ansible-output
TASK [Compare Unicode representations] ********************************************************
ok: [localhost] => {
"msg": true
}
The :ansplugin:`community.general.unicode_normalize filter <community.general.unicode_normalize#filter>` accepts a keyword argument :ansopt:`community.general.unicode_normalize#filter:form` to select the Unicode form used to normalize the input string.
:form: One of ``'NFC'`` (default), ``'NFD'``, ``'NFKC'``, or ``'NFKD'``. See the `Unicode reference <https://unicode.org/reports/tr15/>`_ for more information.
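For example, a minimal sketch that normalizes to the ``NFKD`` form might look like this (the input string reuses the example above and is purely illustrative):

.. code-block:: yaml+jinja

   - name: Normalize a string to its NFKD form
     debug:
       msg: "{{ 'Mayagüez' | community.general.unicode_normalize(form='NFKD') }}"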
.. versionadded:: 3.7.0

View File

@ -0,0 +1,39 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
Working with versions
---------------------
If you need to sort a list of version numbers, the Jinja ``sort`` filter is problematic. Since it sorts lexicographically, ``2.10`` will come before ``2.9``. To treat version numbers correctly, you can use the :ansplugin:`community.general.version_sort filter <community.general.version_sort#filter>`:
.. code-block:: yaml+jinja
- name: Sort list by version number
debug:
var: ansible_versions | community.general.version_sort
vars:
ansible_versions:
- '2.8.0'
- '2.11.0'
- '2.7.0'
- '2.10.0'
- '2.9.0'
This produces:
.. code-block:: ansible-output
TASK [Sort list by version number] ********************************************************
ok: [localhost] => {
"ansible_versions | community.general.version_sort": [
"2.7.0",
"2.8.0",
"2.9.0",
"2.10.0",
"2.11.0"
]
}
.. versionadded:: 2.2.0

View File

@ -0,0 +1,96 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
.. _ansible_collections.community.general.docsite.guide_alicloud:
Alibaba Cloud Compute Services Guide
====================================
Introduction
````````````
The community.general collection contains several modules for controlling and managing Alibaba Cloud Compute Services (Alicloud). This guide
explains how to use the Alicloud Ansible modules together.
All Alicloud modules require ``footmark`` - install it on your control machine with ``pip install footmark``.
Cloud modules, including Alicloud modules, are usually executed on your local machine (the control machine) with ``connection: local``, rather than on remote machines defined in your hosts.
Normally, you'll use the following pattern for plays that provision Alicloud resources:
.. code-block:: yaml
- hosts: localhost
connection: local
vars:
- ...
tasks:
- ...
Authentication
``````````````
You can specify your Alicloud authentication credentials (access key and secret key) by passing them as
environment variables or by storing them in a vars file.
To pass authentication credentials as environment variables:
.. code-block:: console
export ALICLOUD_ACCESS_KEY='Alicloud123'
export ALICLOUD_SECRET_KEY='AlicloudSecret123'
To store authentication credentials in a vars file, encrypt them with :ref:`Ansible Vault <vault>` to keep them secure, then list them:
.. code-block:: yaml
---
alicloud_access_key: "--REMOVED--"
alicloud_secret_key: "--REMOVED--"
Note that if you store your credentials in a vars file, you need to refer to them in each Alicloud module. For example:
.. code-block:: yaml+jinja
- community.general.ali_instance:
alicloud_access_key: "{{ alicloud_access_key }}"
alicloud_secret_key: "{{ alicloud_secret_key }}"
image_id: "..."
Provisioning
````````````
Alicloud modules create Alicloud ECS instances (:ansplugin:`community.general.ali_instance#module`) and retrieve information on these (:ansplugin:`community.general.ali_instance_info#module`).
You can use the ``count`` parameter to control the number of resources you create or terminate. For example, if you want exactly 5 instances tagged ``NewECS``, set the ``count`` of instances to 5 and the ``count_tag`` to ``NewECS``, as shown in the last task of the example playbook below. If there are no instances with the tag ``NewECS``, the task creates 5 new instances. If there are 2 instances with that tag, the task creates 3 more. If there are 8 instances with that tag, the task terminates 3 of those instances.
If you do not specify a ``count_tag``, the task creates the number of instances you specify in ``count`` with the ``instance_name`` you provide.
.. code-block:: yaml+jinja
# alicloud_setup.yml
- hosts: localhost
connection: local
tasks:
- name: Create a set of instances
community.general.ali_instance:
instance_type: ecs.n4.small
image_id: "{{ ami_id }}"
instance_name: "My-new-instance"
instance_tags:
Name: NewECS
Version: 0.0.1
count: 5
count_tag:
Name: NewECS
allocate_public_ip: true
max_bandwidth_out: 50
register: create_instance
In the example playbook above, data about the instances created by this playbook is saved in the variable defined by the ``register`` keyword in the task.
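For example, you could display what the creation task returned. This is a minimal sketch; the exact structure of the registered data (for instance, whether the created instances are exposed under an ``instances`` key) depends on the module's return values, so check the :ansplugin:`community.general.ali_instance#module` documentation:

.. code-block:: yaml+jinja

    - name: Show the data returned by the creation task
      ansible.builtin.debug:
        var: create_instance

    - name: Show only the created instances ('instances' is assumed here; check the module's return values)
      ansible.builtin.debug:
        var: create_instance.instances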
Each Alicloud module offers a variety of parameter options. Not all options are demonstrated in the above example. See each individual module for further details and examples.

View File

@ -0,0 +1,49 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
.. _ansible_collections.community.general.docsite.guide_online:
****************
Online.net Guide
****************
Introduction
============
Online is a French hosting company mainly known for providing bare-metal servers named Dedibox.
Check it out: `https://www.online.net/en <https://www.online.net/en>`_
Dynamic inventory for Online resources
--------------------------------------
The community.general collection includes a dynamic inventory plugin that can list your Online resources.
1. Create a YAML configuration such as ``online_inventory.yml`` with this content:
.. code-block:: yaml
plugin: community.general.online
2. Set your ``ONLINE_TOKEN`` environment variable with your token.
You need to open an account and log into it before you can get a token.
You can find your token at the following page: `https://console.online.net/en/api/access <https://console.online.net/en/api/access>`_
3. You can test that your inventory is working by running:
.. code-block:: console
$ ansible-inventory -v -i online_inventory.yml --list
4. Now you can run your playbook or any other module with this inventory:
.. code-block:: ansible-output
$ ansible all -i online_inventory.yml -m ping
sd-96735 | SUCCESS => {
"changed": false,
"ping": "pong"
}
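The same inventory file also works with ``ansible-playbook``. Here is a minimal sketch (it targets ``all``, since the exact group names depend on your Online resources and on the plugin's configuration):

.. code-block:: yaml+jinja

    # online_playbook.yml
    - hosts: all
      gather_facts: false
      tasks:
        - name: Check that every Dedibox host in the inventory is reachable
          ansible.builtin.ping:

Run it with ``ansible-playbook -i online_inventory.yml online_playbook.yml``.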

View File

@ -0,0 +1,214 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
.. _ansible_collections.community.general.docsite.guide_packet:
**********************************
Packet.net Guide
**********************************
Introduction
============
`Packet.net <https://packet.net>`_ is a bare metal infrastructure host that is supported by the community.general collection through six cloud modules. The six modules are:
- :ansplugin:`community.general.packet_device#module`: manages servers on Packet. You can use this module to create, restart and delete devices.
- :ansplugin:`community.general.packet_ip_subnet#module`: assign IP subnet to a bare metal server
- :ansplugin:`community.general.packet_project#module`: create/delete a project in Packet host
- :ansplugin:`community.general.packet_sshkey#module`: adds a public SSH key from a file or value to the Packet infrastructure. Every subsequently-created device will have this public key installed in ``~/.ssh/authorized_keys``.
- :ansplugin:`community.general.packet_volume#module`: create/delete a volume in Packet host
- :ansplugin:`community.general.packet_volume_attachment#module`: attach/detach a volume to a device in the Packet host
Note that this guide assumes you are familiar with Ansible and how it works. If you are not, have a look at the :ref:`Ansible documentation <ansible_documentation>` before getting started.
Requirements
============
The Packet modules connect to the Packet API using the `packet-python package <https://pypi.org/project/packet-python/>`_. You can install it with pip:
.. code-block:: console
$ pip install packet-python
In order to check the state of devices created by Ansible on Packet, it is a good idea to install one of the `Packet CLI clients <https://www.packet.net/developers/integrations/>`_. Otherwise you can check them through the `Packet portal <https://app.packet.net/portal>`_.
To use the modules you will need a Packet API token. You can generate an API token through the Packet portal `here <https://app.packet.net/portal#/api-keys>`__. The simplest way to authenticate yourself is to set the Packet API token in an environment variable:
.. code-block:: console
$ export PACKET_API_TOKEN=Bfse9F24SFtfs423Gsd3ifGsd43sSdfs
If you are not comfortable exporting your API token, you can pass it as a parameter to the modules.
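For example, here is a minimal sketch that passes the token through the ``auth_token`` option of :ansplugin:`community.general.packet_device#module` (the ``packet_api_token`` variable is a placeholder, typically loaded from an Ansible Vault encrypted vars file):

.. code-block:: yaml+jinja

    - community.general.packet_device:
        auth_token: "{{ packet_api_token }}"  # placeholder variable, for example defined in a vaulted vars file
        project_id: <your_project_id>
        hostnames: myserver
        operating_system: ubuntu_16_04
        plan: baremetal_0
        facility: ewr1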
On Packet, devices and reserved IP addresses belong to `projects <https://www.packet.com/developers/api/#projects>`_. In order to use the packet_device module, you need to specify the UUID of the project in which you want to create or manage devices. You can find a project's UUID in the Packet portal `here <https://app.packet.net/portal#/projects/list/table/>`_ (it is just under the project table) or through one of the available `CLIs <https://www.packet.net/developers/integrations/>`_.
If you want to use a new SSH key pair in this tutorial, you can generate it to ``./id_rsa`` and ``./id_rsa.pub`` as:
.. code-block:: console
$ ssh-keygen -t rsa -f ./id_rsa
If you want to use an existing key pair, just copy the private and public key over to the playbook directory.
Device Creation
===============
The following code block is a simple playbook that creates one `Type 0 <https://www.packet.com/cloud/servers/t1-small/>`_ server (the ``plan`` parameter). You have to supply ``plan`` and ``operating_system``. ``facility`` defaults to ``ewr1`` (Parsippany, NJ). You can find all the possible values for the parameters through a `CLI client <https://www.packet.net/developers/integrations/>`_.
.. code-block:: yaml+jinja
# playbook_create.yml
- name: Create Ubuntu device
hosts: localhost
tasks:
- community.general.packet_sshkey:
key_file: ./id_rsa.pub
label: tutorial key
- community.general.packet_device:
project_id: <your_project_id>
hostnames: myserver
operating_system: ubuntu_16_04
plan: baremetal_0
facility: sjc1
After running ``ansible-playbook playbook_create.yml``, you should have a server provisioned on Packet. You can verify through a CLI or in the `Packet portal <https://app.packet.net/portal#/projects/list/table>`__.
If you get an error with the message "failed to set machine state present, error: Error 404: Not Found", please verify your project UUID.
Updating Devices
================
The two parameters used to uniquely identify Packet devices are ``device_ids`` and ``hostnames``. Both parameters accept either a single string (later converted to a one-element list) or a list of strings.
The ``device_ids`` and ``hostnames`` parameters are mutually exclusive. The following values are all acceptable:
- device_ids: ``a27b7a83-fc93-435b-a128-47a5b04f2dcf``
- hostnames: ``mydev1``
- device_ids: ``[a27b7a83-fc93-435b-a128-47a5b04f2dcf, 4887130f-0ccd-49a0-99b0-323c1ceb527b]``
- hostnames: ``[mydev1, mydev2]``
In addition, hostnames can contain a special ``%d`` formatter along with a ``count`` parameter that lets you easily expand hostnames that follow a simple name-and-number pattern; in other words, ``hostnames: "mydev%d"`` with ``count: 2`` expands to ``[mydev1, mydev2]``.
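For example, a minimal sketch of that expansion:

.. code-block:: yaml+jinja

    - community.general.packet_device:
        project_id: <your_project_id>
        hostnames: "mydev%d"   # expands to mydev1 and mydev2
        count: 2
        operating_system: ubuntu_16_04
        plan: baremetal_0
        facility: ewr1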
If your playbook acts on existing Packet devices, you can only pass the ``hostnames`` and ``device_ids`` parameters. The following playbook shows how you can reboot a specific Packet device by setting the ``hostnames`` parameter:
.. code-block:: yaml+jinja
# playbook_reboot.yml
- name: reboot myserver
hosts: localhost
tasks:
- community.general.packet_device:
project_id: <your_project_id>
hostnames: myserver
state: rebooted
You can also identify specific Packet devices with the ``device_ids`` parameter. The device's UUID can be found in the `Packet Portal <https://app.packet.net/portal>`_ or by using a `CLI <https://www.packet.net/developers/integrations/>`_. The following playbook removes a Packet device using the ``device_ids`` field:
.. code-block:: yaml+jinja
# playbook_remove.yml
- name: remove a device
hosts: localhost
tasks:
- community.general.packet_device:
project_id: <your_project_id>
device_ids: <myserver_device_id>
state: absent
More Complex Playbooks
======================
In this example, we will create a CoreOS cluster with `user data <https://packet.com/developers/docs/servers/key-features/user-data/>`_.
The CoreOS cluster will use `etcd <https://etcd.io/>`_ for discovery of other servers in the cluster. Before provisioning your servers, you will need to generate a discovery token for your cluster:
.. code-block:: console
$ curl -w "\n" 'https://discovery.etcd.io/new?size=3'
The following playbook will create an SSH key, 3 Packet servers, and then wait until SSH is ready (or until 5 minutes have passed). Make sure to substitute the discovery token URL in ``user_data``, and the ``project_id`` before running ``ansible-playbook``. Also, feel free to change ``plan`` and ``facility``.
.. code-block:: yaml+jinja
# playbook_coreos.yml
- name: Start 3 CoreOS nodes in Packet and wait until SSH is ready
hosts: localhost
tasks:
- community.general.packet_sshkey:
key_file: ./id_rsa.pub
label: new
- community.general.packet_device:
hostnames: [coreos-one, coreos-two, coreos-three]
operating_system: coreos_beta
plan: baremetal_0
facility: ewr1
project_id: <your_project_id>
wait_for_public_IPv: 4
user_data: |
#cloud-config
coreos:
etcd2:
discovery: https://discovery.etcd.io/<token>
advertise-client-urls: http://$private_ipv4:2379,http://$private_ipv4:4001
initial-advertise-peer-urls: http://$private_ipv4:2380
listen-client-urls: http://0.0.0.0:2379,http://0.0.0.0:4001
listen-peer-urls: http://$private_ipv4:2380
fleet:
public-ip: $private_ipv4
units:
- name: etcd2.service
command: start
- name: fleet.service
command: start
register: newhosts
- name: wait for ssh
ansible.builtin.wait_for:
delay: 1
host: "{{ item.public_ipv4 }}"
port: 22
state: started
timeout: 500
loop: "{{ newhosts.results[0].devices }}"
As with most Ansible modules, the default states of the Packet modules are idempotent, meaning the resources in your project will remain the same after re-runs of a playbook. Thus, we can keep the ``packet_sshkey`` module call in our playbook. If the public key is already in your Packet account, the call will have no effect.
The second module call provisions 3 Packet Type 0 (specified using the ``plan`` parameter) servers in the project identified by the ``project_id`` parameter. The servers are all provisioned with CoreOS beta (the ``operating_system`` parameter) and are customized with cloud-config user data passed to the ``user_data`` parameter.
The ``packet_device`` module has a ``wait_for_public_IPv`` parameter that specifies the version of the IP address to wait for (valid values are ``4`` or ``6`` for IPv4 or IPv6). If specified, Ansible will wait until the GET API call for a device contains an Internet-routable IP address of the specified version. When referring to an IP address of a created device in subsequent module calls, it is wise to use the ``wait_for_public_IPv`` parameter, or ``state: active`` in the ``packet_device`` module call.
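For example, a minimal sketch of the ``state: active`` alternative:

.. code-block:: yaml+jinja

    - community.general.packet_device:
        project_id: <your_project_id>
        hostnames: myserver
        operating_system: ubuntu_16_04
        plan: baremetal_0
        facility: ewr1
        state: active   # do not return until the device is active
      register: created_device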
Now run the CoreOS playbook:
.. code-block:: console
$ ansible-playbook playbook_coreos.yml
Once the playbook quits, your new devices should be reachable through SSH. Try to connect to one and check if etcd has started properly:
.. code-block:: console
tomk@work $ ssh -i id_rsa core@$one_of_the_servers_ip
core@coreos-one ~ $ etcdctl cluster-health
If you have any questions or comments, let us know at help@packet.net.

View File

@ -0,0 +1,320 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
.. _ansible_collections.community.general.docsite.guide_scaleway:
**************
Scaleway Guide
**************
Introduction
============
`Scaleway <https://scaleway.com>`_ is a cloud provider supported by the community.general collection through a set of plugins and modules.
Those modules are:
- :ansplugin:`community.general.scaleway_compute#module`: manages servers on Scaleway. You can use this module to create, restart and delete servers.
- :ansplugin:`community.general.scaleway_compute_private_network#module`
- :ansplugin:`community.general.scaleway_container#module`
- :ansplugin:`community.general.scaleway_container_info#module`
- :ansplugin:`community.general.scaleway_container_namespace_info#module`
- :ansplugin:`community.general.scaleway_container_namespace#module`
- :ansplugin:`community.general.scaleway_container_registry_info#module`
- :ansplugin:`community.general.scaleway_container_registry#module`
- :ansplugin:`community.general.scaleway_database_backup#module`
- :ansplugin:`community.general.scaleway_function#module`
- :ansplugin:`community.general.scaleway_function_info#module`
- :ansplugin:`community.general.scaleway_function_namespace_info#module`
- :ansplugin:`community.general.scaleway_function_namespace#module`
- :ansplugin:`community.general.scaleway_image_info#module`
- :ansplugin:`community.general.scaleway_ip#module`
- :ansplugin:`community.general.scaleway_ip_info#module`
- :ansplugin:`community.general.scaleway_lb#module`
- :ansplugin:`community.general.scaleway_organization_info#module`
- :ansplugin:`community.general.scaleway_private_network#module`
- :ansplugin:`community.general.scaleway_security_group#module`
- :ansplugin:`community.general.scaleway_security_group_info#module`
- :ansplugin:`community.general.scaleway_security_group_rule#module`
- :ansplugin:`community.general.scaleway_server_info#module`
- :ansplugin:`community.general.scaleway_snapshot_info#module`
- :ansplugin:`community.general.scaleway_sshkey#module`: adds a public SSH key from a file or value to the Scaleway infrastructure. Every subsequently-created device will have this public key installed in ``~/.ssh/authorized_keys``.
- :ansplugin:`community.general.scaleway_user_data#module`
- :ansplugin:`community.general.scaleway_volume#module`: manages volumes on Scaleway.
- :ansplugin:`community.general.scaleway_volume_info#module`
The plugins are:
- :ansplugin:`community.general.scaleway#inventory`: inventory plugin
.. note::
This guide assumes you are familiar with Ansible and how it works.
If you are not, have a look at :ref:`ansible_documentation` before getting started.
Requirements
============
The Scaleway modules and inventory script connect to the Scaleway API using `Scaleway REST API <https://developer.scaleway.com>`_.
To use the modules and inventory script you will need a Scaleway API token.
You can generate an API token through the `Scaleway console's credential page <https://cloud.scaleway.com/#/credentials>`__.
The simplest way to authenticate yourself is to set the Scaleway API token in an environment variable:
.. code-block:: console
$ export SCW_TOKEN=00000000-1111-2222-3333-444444444444
If you are not comfortable exporting your API token, you can pass it as a parameter to the modules using the ``api_token`` argument.
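For example, a minimal sketch (``scw_api_token`` is a placeholder variable, typically loaded from an Ansible Vault encrypted vars file):

.. code-block:: yaml+jinja

    - community.general.scaleway_sshkey:
        api_token: "{{ scw_api_token }}"  # placeholder variable, for example defined in a vaulted vars file
        ssh_pub_key: "ssh-rsa AAAA..."
        state: present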
If you want to use a new SSH key pair in this tutorial, you can generate it to ``./id_rsa`` and ``./id_rsa.pub`` as:
.. code-block:: console
$ ssh-keygen -t rsa -f ./id_rsa
If you want to use an existing key pair, just copy the private and public key over to the playbook directory.
How to add an SSH key?
======================
Connections to Scaleway Compute nodes use Secure Shell (SSH).
SSH keys are stored at the account level, which means that you can reuse the same SSH key in multiple nodes.
The first step to configure Scaleway compute resources is to have at least one SSH key configured.
:ansplugin:`community.general.scaleway_sshkey#module` is a module that manages SSH keys on your Scaleway account.
You can add an SSH key to your account by including the following task in a playbook:
.. code-block:: yaml+jinja
- name: "Add SSH key"
community.general.scaleway_sshkey:
ssh_pub_key: "ssh-rsa AAAA..."
state: "present"
The ``ssh_pub_key`` parameter contains your SSH public key as a string. Here is an example inside a playbook:
.. code-block:: yaml+jinja
- name: Test SSH key lifecycle on a Scaleway account
hosts: localhost
gather_facts: false
environment:
SCW_API_KEY: ""
tasks:
- community.general.scaleway_sshkey:
ssh_pub_key: "ssh-rsa AAAAB...424242 developer@example.com"
state: present
register: result
- ansible.builtin.assert:
that:
- result is success and result is changed
How to create a compute instance?
=================================
Now that we have an SSH key configured, the next step is to spin up a server!
:ansplugin:`community.general.scaleway_compute#module` is a module that can create, update and delete Scaleway compute instances:
.. code-block:: yaml+jinja
- name: Create a server
community.general.scaleway_compute:
name: foobar
state: present
image: 00000000-1111-2222-3333-444444444444
organization: 00000000-1111-2222-3333-444444444444
region: ams1
commercial_type: START1-S
Here are the parameter details for the example shown above:
- ``name`` is the name of the instance (the one that will show up in your web console).
- ``image`` is the UUID of the system image you would like to use.
A list of all images is available for each availability zone.
- ``organization`` represents the organization that your account is attached to.
- ``region`` represents the Availability Zone in which your instance is located (in this example, ``par1`` or ``ams1``).
- ``commercial_type`` represents the name of the commercial offer.
You can check out the Scaleway pricing page to find which instance is right for you.
Take a look at this short playbook to see a working example using ``scaleway_compute``:
.. code-block:: yaml+jinja
- name: Test compute instance lifecycle on a Scaleway account
hosts: localhost
gather_facts: false
environment:
SCW_API_KEY: ""
tasks:
- name: Create a server
register: server_creation_task
community.general.scaleway_compute:
name: foobar
state: present
image: 00000000-1111-2222-3333-444444444444
organization: 00000000-1111-2222-3333-444444444444
region: ams1
commercial_type: START1-S
wait: true
- ansible.builtin.debug:
var: server_creation_task
- ansible.builtin.assert:
that:
- server_creation_task is success
- server_creation_task is changed
- name: Run it
community.general.scaleway_compute:
name: foobar
state: running
image: 00000000-1111-2222-3333-444444444444
organization: 00000000-1111-2222-3333-444444444444
region: ams1
commercial_type: START1-S
wait: true
tags:
- web_server
register: server_run_task
- ansible.builtin.debug:
var: server_run_task
- ansible.builtin.assert:
that:
- server_run_task is success
- server_run_task is changed
Dynamic Inventory Plugin
========================
The community.general collection includes :ansplugin:`community.general.scaleway#inventory`.
You can now get a complete inventory of your Scaleway resources through this plugin and filter it on
different parameters (``regions`` and ``tags`` are currently supported).
Let us create an example!
Suppose that we want to get all hosts that have the tag ``web_server``.
Create a file named ``scaleway_inventory.yml`` with the following content:
.. code-block:: yaml+jinja
plugin: community.general.scaleway
regions:
- ams1
- par1
tags:
- web_server
This inventory means that we want all hosts that have the tag ``web_server`` in the zones ``ams1`` and ``par1``.
Once you have configured this file, you can get the information using the following command:
.. code-block:: console
$ ansible-inventory --list -i scaleway_inventory.yml
The output will be:
.. code-block:: json
{
"_meta": {
"hostvars": {
"dd8e3ae9-0c7c-459e-bc7b-aba8bfa1bb8d": {
"ansible_verbosity": 6,
"arch": "x86_64",
"commercial_type": "START1-S",
"hostname": "foobar",
"ipv4": "192.0.2.1",
"organization": "00000000-1111-2222-3333-444444444444",
"state": "running",
"tags": [
"web_server"
]
}
}
},
"all": {
"children": [
"ams1",
"par1",
"ungrouped",
"web_server"
]
},
"ams1": {},
"par1": {
"hosts": [
"dd8e3ae9-0c7c-459e-bc7b-aba8bfa1bb8d"
]
},
"ungrouped": {},
"web_server": {
"hosts": [
"dd8e3ae9-0c7c-459e-bc7b-aba8bfa1bb8d"
]
}
}
As you can see, we get different groups of hosts.
``par1`` and ``ams1`` are groups based on location.
``web_server`` is a group based on a tag.
If a filter parameter is not defined, the plugin assumes all possible values are wanted.
This means that for each tag that exists on your Scaleway compute nodes, a group based on that tag will be created.
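These groups can be targeted like any other inventory group. As a minimal sketch, the following playbook only prints the public IPv4 address that the inventory plugin reports for each tagged host (host names are instance UUIDs by default, as in the output above, so connecting to them over SSH may require additional plugin configuration):

.. code-block:: yaml+jinja

    # web_servers_playbook.yml
    - hosts: web_server
      gather_facts: false
      tasks:
        - name: Show the public IPv4 address known to the inventory plugin
          ansible.builtin.debug:
            var: hostvars[inventory_hostname].ipv4

Run it with ``ansible-playbook -i scaleway_inventory.yml web_servers_playbook.yml``.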
Scaleway S3 object storage
==========================
`Object Storage <https://www.scaleway.com/object-storage>`_ allows you to store any kind of objects (documents, images, videos, and so on).
As the Scaleway API is S3 compatible, Ansible supports it natively through the amazon.aws modules: :ansplugin:`amazon.aws.s3_bucket#module`, :ansplugin:`amazon.aws.s3_object#module`.
You can find many examples in the `scaleway_s3 integration tests <https://github.com/ansible/ansible-legacy-tests/tree/devel/test/legacy/roles/scaleway_s3>`_.
.. code-block:: yaml+jinja
- hosts: myserver
vars:
scaleway_region: nl-ams
s3_url: https://s3.nl-ams.scw.cloud
environment:
# AWS_ACCESS_KEY matches your scaleway organization id available at https://cloud.scaleway.com/#/account
AWS_ACCESS_KEY: 00000000-1111-2222-3333-444444444444
# AWS_SECRET_KEY matches a secret token that you can retrieve at https://cloud.scaleway.com/#/credentials
AWS_SECRET_KEY: aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee
module_defaults:
group/amazon.aws.aws:
s3_url: '{{ s3_url }}'
region: '{{ scaleway_region }}'
tasks:
# use a fact instead of a variable, otherwise the template is evaluated each time the variable is used
- ansible.builtin.set_fact:
bucket_name: "{{ 99999999 | random | to_uuid }}"
# "requester_pays:" is mandatory because Scaleway does not implement the related API
# another way is to use amazon.aws.s3_object with "mode: create"
- amazon.aws.s3_bucket:
name: '{{ bucket_name }}'
requester_pays:
- name: Another way to create the bucket
amazon.aws.s3_object:
bucket: '{{ bucket_name }}'
mode: create
encrypt: false
register: bucket_creation_check
- name: add something in the bucket
amazon.aws.s3_object:
mode: put
bucket: '{{ bucket_name }}'
src: /tmp/test.txt # needs to be created before
object: test.txt
encrypt: false # server side encryption must be disabled

View File

@ -0,0 +1,33 @@
..
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
.. _ansible_collections.community.general.docsite.test_guide:
community.general Test (Plugin) Guide
=====================================
The :ref:`community.general collection <plugins_in_community.general>` currently offers one test plugin.
.. contents:: Topics
Feature Tests
-------------
The :ansplugin:`community.general.a_module test <community.general.a_module#test>` allows you to check whether a given string refers to an existing module or action plugin. This can be useful in roles, for example to ensure that required modules are present ahead of time.
.. code-block:: yaml+jinja
- name: Make sure that community.aws.route53 is available
assert:
that:
- >
'community.aws.route53' is community.general.a_module
- name: Make sure that community.general.does_not_exist is not a module or action plugin
assert:
that:
- "'community.general.does_not_exist' is not community.general.a_module"
.. versionadded:: 4.0.0

File diff suppressed because it is too large

View File

@ -0,0 +1,187 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, quidame <quidame@poivron.org>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import time
from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleActionFail, AnsibleConnectionFailure
from ansible.utils.vars import merge_hash
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
# Keep internal params away from user interactions
_VALID_ARGS = frozenset(('path', 'state', 'table', 'noflush', 'counters', 'modprobe', 'ip_version', 'wait'))
DEFAULT_SUDOABLE = True
MSG_ERROR__ASYNC_AND_POLL_NOT_ZERO = (
"This module doesn't support async>0 and poll>0 when its 'state' param "
"is set to 'restored'. To enable its rollback feature (that needs the "
"module to run asynchronously on the remote), please set task attribute "
"'poll' (=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
"'ansible_timeout' (=%s) (recommended).")
MSG_WARNING__NO_ASYNC_IS_NO_ROLLBACK = (
"Attempts to restore iptables state without rollback in case of mistake "
"may lead the ansible controller to lose access to the hosts and never "
"regain it before fixing firewall rules through a serial console, or any "
"other way except SSH. Please set task attribute 'poll' (=%s) to 0, and "
"'async' (=%s) to a value >2 and not greater than 'ansible_timeout' (=%s) "
"(recommended).")
MSG_WARNING__ASYNC_GREATER_THAN_TIMEOUT = (
"You attempt to restore iptables state with rollback in case of mistake, "
"but with settings that will lead this rollback to happen AFTER the "
"controller reaches its own timeout. Please set task attribute 'poll' "
"(=%s) to 0, and 'async' (=%s) to a value >2 and not greater than "
"'ansible_timeout' (=%s) (recommended).")
def _async_result(self, async_status_args, task_vars, timeout):
'''
Retrieve results of the asynchronous task, and display them in place of
the async wrapper results (those with the ansible_job_id key).
'''
async_status = self._task.copy()
async_status.args = async_status_args
async_status.action = 'ansible.builtin.async_status'
async_status.async_val = 0
async_action = self._shared_loader_obj.action_loader.get(
async_status.action, task=async_status, connection=self._connection,
play_context=self._play_context, loader=self._loader, templar=self._templar,
shared_loader_obj=self._shared_loader_obj)
if async_status.args['mode'] == 'cleanup':
return async_action.run(task_vars=task_vars)
# At least one iteration is required, even if timeout is 0.
for dummy in range(max(1, timeout)):
async_result = async_action.run(task_vars=task_vars)
if async_result.get('finished', 0) == 1:
break
time.sleep(min(1, timeout))
return async_result
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._supports_async = True
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
if not result.get('skipped'):
# FUTURE: better to let _execute_module calculate this internally?
wrap_async = self._task.async_val and not self._connection.has_native_async
# Set short names for values we'll have to compare or reuse
task_poll = self._task.poll
task_async = self._task.async_val
check_mode = self._play_context.check_mode
max_timeout = self._connection._play_context.timeout
module_args = self._task.args
if module_args.get('state', None) == 'restored':
if not wrap_async:
if not check_mode:
display.warning(self.MSG_WARNING__NO_ASYNC_IS_NO_ROLLBACK % (
task_poll,
task_async,
max_timeout))
elif task_poll:
raise AnsibleActionFail(self.MSG_ERROR__ASYNC_AND_POLL_NOT_ZERO % (
task_poll,
task_async,
max_timeout))
else:
if task_async > max_timeout and not check_mode:
display.warning(self.MSG_WARNING__ASYNC_GREATER_THAN_TIMEOUT % (
task_poll,
task_async,
max_timeout))
# inject the async directory based on the shell option into the
# module args
async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")
# Bind the loop max duration to consistent values on both
# remote and local sides (if not the same, make the loop
# longer on the controller); and set a backup file path.
module_args['_timeout'] = task_async
module_args['_back'] = '%s/iptables.state' % async_dir
async_status_args = dict(mode='status')
confirm_cmd = 'rm -f %s' % module_args['_back']
starter_cmd = 'touch %s.starter' % module_args['_back']
remaining_time = max(task_async, max_timeout)
# do work!
result = merge_hash(result, self._execute_module(module_args=module_args, task_vars=task_vars, wrap_async=wrap_async))
# Then the 3-steps "go ahead or rollback":
# 1. Catch early errors of the module (in asynchronous task) if any.
# Touch a file on the target to signal the module to process now.
# 2. Reset connection to ensure a persistent one will not be reused.
# 3. Confirm the restored state by removing the backup on the remote.
# Retrieve the results of the asynchronous task to return them.
if '_back' in module_args:
async_status_args['jid'] = result.get('ansible_job_id', None)
if async_status_args['jid'] is None:
raise AnsibleActionFail("Unable to get 'ansible_job_id'.")
# Catch early errors due to missing mandatory option, bad
# option type/value, missing required system command, etc.
result = merge_hash(result, self._async_result(async_status_args, task_vars, 0))
# The module knows not to process the main iptables-restore
# command before finding (and deleting) the 'starter' cookie on
# the host, so the previous query will not reach the SSH timeout.
dummy = self._low_level_execute_command(starter_cmd, sudoable=self.DEFAULT_SUDOABLE)
# As the main command is not yet executed on the target, here
# 'finished' means 'failed before the main command was executed'.
if not result['finished']:
try:
self._connection.reset()
except AttributeError:
pass
for dummy in range(max_timeout):
time.sleep(1)
remaining_time -= 1
# - AnsibleConnectionFailure covers rejected requests (i.e.
# by rules with '--jump REJECT')
# - ansible_timeout is able to cover dropped requests (due
# to a rule or policy DROP) if not lower than async_val.
try:
dummy = self._low_level_execute_command(confirm_cmd, sudoable=self.DEFAULT_SUDOABLE)
break
except AnsibleConnectionFailure:
continue
result = merge_hash(result, self._async_result(async_status_args, task_vars, remaining_time))
# Cleanup async related stuff and internal params
for key in ('ansible_job_id', 'results_file', 'started', 'finished'):
if result.get(key):
del result[key]
if result.get('invocation', {}).get('module_args'):
for key in ('_back', '_timeout', '_async_dir', 'jid'):
if result['invocation']['module_args'].get(key):
del result['invocation']['module_args'][key]
async_status_args['mode'] = 'cleanup'
dummy = self._async_result(async_status_args, task_vars, 0)
if not wrap_async:
# remove a temporary path we created
self._remove_tmp_path(self._connection._shell.tmpdir)
return result

View File

@ -0,0 +1,233 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Amin Vakil <info@aminvakil.com>
# Copyright (c) 2016-2018, Matt Davis <mdavis@ansible.com>
# Copyright (c) 2018, Sam Doran <sdoran@redhat.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.common.collections import is_string
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
display = Display()
class TimedOutException(Exception):
pass
class ActionModule(ActionBase):
TRANSFERS_FILES = False
_VALID_ARGS = frozenset((
'msg',
'delay',
'search_paths'
))
DEFAULT_CONNECT_TIMEOUT = None
DEFAULT_PRE_SHUTDOWN_DELAY = 0
DEFAULT_SHUTDOWN_MESSAGE = 'Shut down initiated by Ansible'
DEFAULT_SHUTDOWN_COMMAND = 'shutdown'
DEFAULT_SHUTDOWN_COMMAND_ARGS = '-h {delay_min} "{message}"'
DEFAULT_SUDOABLE = True
SHUTDOWN_COMMANDS = {
'alpine': 'poweroff',
'vmkernel': 'halt',
}
SHUTDOWN_COMMAND_ARGS = {
'alpine': '',
'void': '-h +{delay_min} "{message}"',
'freebsd': '-p +{delay_sec}s "{message}"',
'linux': DEFAULT_SHUTDOWN_COMMAND_ARGS,
'macosx': '-h +{delay_min} "{message}"',
'openbsd': '-h +{delay_min} "{message}"',
'solaris': '-y -g {delay_sec} -i 5 "{message}"',
'sunos': '-y -g {delay_sec} -i 5 "{message}"',
'vmkernel': '-d {delay_sec}',
'aix': '-Fh',
}
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
@property
def delay(self):
return self._check_delay('delay', self.DEFAULT_PRE_SHUTDOWN_DELAY)
def _check_delay(self, key, default):
"""Ensure that the value is positive or zero"""
value = int(self._task.args.get(key, default))
if value < 0:
value = 0
return value
def _get_value_from_facts(self, variable_name, distribution, default_value):
"""Get dist+version specific args first, then distribution, then family, lastly use default"""
attr = getattr(self, variable_name)
value = attr.get(
distribution['name'] + distribution['version'],
attr.get(
distribution['name'],
attr.get(
distribution['family'],
getattr(self, default_value))))
return value
def get_distribution(self, task_vars):
# FIXME: only execute the module if we don't already have the facts we need
distribution = {}
display.debug('{action}: running setup module to get distribution'.format(action=self._task.action))
module_output = self._execute_module(
task_vars=task_vars,
module_name='ansible.legacy.setup',
module_args={'gather_subset': 'min'})
try:
if module_output.get('failed', False):
raise AnsibleError('Failed to determine system distribution. {0}, {1}'.format(
to_native(module_output['module_stdout']).strip(),
to_native(module_output['module_stderr']).strip()))
distribution['name'] = module_output['ansible_facts']['ansible_distribution'].lower()
distribution['version'] = to_text(
module_output['ansible_facts']['ansible_distribution_version'].split('.')[0])
distribution['family'] = to_text(module_output['ansible_facts']['ansible_os_family'].lower())
display.debug("{action}: distribution: {dist}".format(action=self._task.action, dist=distribution))
return distribution
except KeyError as ke:
raise AnsibleError('Failed to get distribution information. Missing "{0}" in output.'.format(ke.args[0]))
def get_shutdown_command(self, task_vars, distribution):
def find_command(command, find_search_paths):
display.debug('{action}: running find module looking in {paths} to get path for "{command}"'.format(
action=self._task.action,
command=command,
paths=find_search_paths))
find_result = self._execute_module(
task_vars=task_vars,
# prevent collection search by calling with ansible.legacy (still allows library/ override of find)
module_name='ansible.legacy.find',
module_args={
'paths': find_search_paths,
'patterns': [command],
'file_type': 'any'
}
)
return [x['path'] for x in find_result['files']]
shutdown_bin = self._get_value_from_facts('SHUTDOWN_COMMANDS', distribution, 'DEFAULT_SHUTDOWN_COMMAND')
default_search_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin']
search_paths = self._task.args.get('search_paths', default_search_paths)
# FIXME: switch all this to user arg spec validation methods when they are available
# Convert bare strings to a list
if is_string(search_paths):
search_paths = [search_paths]
# Error if we didn't get a list
err_msg = "'search_paths' must be a string or flat list of strings, got {0}"
try:
incorrect_type = any(not is_string(x) for x in search_paths)
if not isinstance(search_paths, list) or incorrect_type:
raise TypeError
except TypeError:
raise AnsibleError(err_msg.format(search_paths))
full_path = find_command(shutdown_bin, search_paths) # find the path to the shutdown command
if not full_path: # if we could not find the shutdown command
display.vvv('Unable to find command "{0}" in search paths: {1}, will attempt a shutdown using systemd '
'directly.'.format(shutdown_bin, search_paths)) # tell the user we will try with systemd
systemctl_search_paths = ['/bin', '/usr/bin']
full_path = find_command('systemctl', systemctl_search_paths) # find the path to the systemctl command
if not full_path: # if we couldn't find systemctl
raise AnsibleError(
'Could not find command "{0}" in search paths: {1} or systemctl command in search paths: {2}, unable to shutdown.'.
format(shutdown_bin, search_paths, systemctl_search_paths)) # we give up here
else:
return "{0} poweroff".format(full_path[0]) # done, since we cannot use args with systemd shutdown
# systemd case taken care of, here we add args to the command
args = self._get_value_from_facts('SHUTDOWN_COMMAND_ARGS', distribution, 'DEFAULT_SHUTDOWN_COMMAND_ARGS')
# Convert seconds to minutes. If less than 60, the result is 0.
delay_sec = self.delay
shutdown_message = self._task.args.get('msg', self.DEFAULT_SHUTDOWN_MESSAGE)
return '{0} {1}'. \
format(
full_path[0],
args.format(
delay_sec=delay_sec,
delay_min=delay_sec // 60,
message=shutdown_message
)
)
def perform_shutdown(self, task_vars, distribution):
result = {}
shutdown_result = {}
shutdown_command_exec = self.get_shutdown_command(task_vars, distribution)
self.cleanup(force=True)
try:
display.vvv("{action}: shutting down server...".format(action=self._task.action))
display.debug("{action}: shutting down server with command '{command}'".
format(action=self._task.action, command=shutdown_command_exec))
if self._play_context.check_mode:
shutdown_result['rc'] = 0
else:
shutdown_result = self._low_level_execute_command(shutdown_command_exec, sudoable=self.DEFAULT_SUDOABLE)
except AnsibleConnectionFailure as e:
# If the connection is closed too quickly due to the system being shut down, carry on
display.debug(
'{action}: AnsibleConnectionFailure caught and handled: {error}'.format(action=self._task.action,
error=to_text(e)))
shutdown_result['rc'] = 0
if shutdown_result['rc'] != 0:
result['failed'] = True
result['shutdown'] = False
result['msg'] = "Shutdown command failed. Error was {stdout}, {stderr}".format(
stdout=to_native(shutdown_result['stdout'].strip()),
stderr=to_native(shutdown_result['stderr'].strip()))
return result
result['failed'] = False
result['shutdown_command'] = shutdown_command_exec
return result
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
self._supports_async = True
# If running with local connection, fail so we don't shut down ourselves
if self._connection.transport == 'local' and (not self._play_context.check_mode):
msg = 'Running {0} with local connection would shutdown the control node.'.format(self._task.action)
return {'changed': False, 'elapsed': 0, 'shutdown': False, 'failed': True, 'msg': msg}
if task_vars is None:
task_vars = {}
result = super(ActionModule, self).run(tmp, task_vars)
if result.get('skipped', False) or result.get('failed', False):
return result
distribution = self.get_distribution(task_vars)
# Initiate shutdown
shutdown_result = self.perform_shutdown(task_vars, distribution)
if shutdown_result['failed']:
result = shutdown_result
return result
result['shutdown'] = True
result['changed'] = True
result['shutdown_command'] = shutdown_result['shutdown_command']
return result

View File

@ -0,0 +1,127 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: doas
short_description: Do As user
description:
- This become plugin allows your remote/login user to execute commands as another user via the doas utility.
author: Ansible Core Team
options:
become_user:
description: User you 'become' to execute the task
ini:
- section: privilege_escalation
key: become_user
- section: doas_become_plugin
key: user
vars:
- name: ansible_become_user
- name: ansible_doas_user
env:
- name: ANSIBLE_BECOME_USER
- name: ANSIBLE_DOAS_USER
become_exe:
description: Doas executable
default: doas
ini:
- section: privilege_escalation
key: become_exe
- section: doas_become_plugin
key: executable
vars:
- name: ansible_become_exe
- name: ansible_doas_exe
env:
- name: ANSIBLE_BECOME_EXE
- name: ANSIBLE_DOAS_EXE
become_flags:
description: Options to pass to doas
default: ''
ini:
- section: privilege_escalation
key: become_flags
- section: doas_become_plugin
key: flags
vars:
- name: ansible_become_flags
- name: ansible_doas_flags
env:
- name: ANSIBLE_BECOME_FLAGS
- name: ANSIBLE_DOAS_FLAGS
become_pass:
description: password for doas prompt
required: false
vars:
- name: ansible_become_password
- name: ansible_become_pass
- name: ansible_doas_pass
env:
- name: ANSIBLE_BECOME_PASS
- name: ANSIBLE_DOAS_PASS
ini:
- section: doas_become_plugin
key: password
prompt_l10n:
description:
- List of localized strings to match for prompt detection
- If empty, the built-in prompts are used
default: []
ini:
- section: doas_become_plugin
key: localized_prompts
vars:
- name: ansible_doas_prompt_l10n
env:
- name: ANSIBLE_DOAS_PROMPT_L10N
'''
import re
from ansible.module_utils.common.text.converters import to_bytes
from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase):
name = 'community.general.doas'
# messages for detecting prompted password issues
fail = ('Permission denied',)
missing = ('Authorization required',)
def check_password_prompt(self, b_output):
''' checks if the expected password prompt exists in b_output '''
# FIXME: more accurate would be: 'doas (%s@' % remote_user
# however become plugins don't have that information currently
b_prompts = [to_bytes(p) for p in self.get_option('prompt_l10n')] or [br'doas \(', br'Password:']
b_prompt = b"|".join(b_prompts)
return bool(re.match(b_prompt, b_output))
def build_become_command(self, cmd, shell):
super(BecomeModule, self).build_become_command(cmd, shell)
if not cmd:
return cmd
self.prompt = True
become_exe = self.get_option('become_exe')
flags = self.get_option('become_flags')
if not self.get_option('become_pass') and '-n' not in flags:
flags += ' -n'
become_user = self.get_option('become_user')
user = '-u %s' % (become_user) if become_user else ''
success_cmd = self._build_success_command(cmd, shell, noexe=True)
executable = getattr(shell, 'executable', shell.SHELL_FAMILY)
return '%s %s %s %s -c %s' % (become_exe, flags, user, executable, success_cmd)

View File

@ -0,0 +1,97 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: dzdo
short_description: Centrify's Direct Authorize
description:
- This become plugin allows your remote/login user to execute commands as another user via the dzdo utility.
author: Ansible Core Team
options:
become_user:
description: User you 'become' to execute the task
ini:
- section: privilege_escalation
key: become_user
- section: dzdo_become_plugin
key: user
vars:
- name: ansible_become_user
- name: ansible_dzdo_user
env:
- name: ANSIBLE_BECOME_USER
- name: ANSIBLE_DZDO_USER
become_exe:
description: Dzdo executable
default: dzdo
ini:
- section: privilege_escalation
key: become_exe
- section: dzdo_become_plugin
key: executable
vars:
- name: ansible_become_exe
- name: ansible_dzdo_exe
env:
- name: ANSIBLE_BECOME_EXE
- name: ANSIBLE_DZDO_EXE
become_flags:
description: Options to pass to dzdo
default: -H -S -n
ini:
- section: privilege_escalation
key: become_flags
- section: dzdo_become_plugin
key: flags
vars:
- name: ansible_become_flags
- name: ansible_dzdo_flags
env:
- name: ANSIBLE_BECOME_FLAGS
- name: ANSIBLE_DZDO_FLAGS
become_pass:
description: Password for the dzdo prompt
required: false
vars:
- name: ansible_become_password
- name: ansible_become_pass
- name: ansible_dzdo_pass
env:
- name: ANSIBLE_BECOME_PASS
- name: ANSIBLE_DZDO_PASS
ini:
- section: dzdo_become_plugin
key: password
'''
from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase):
name = 'community.general.dzdo'
# messages for detecting prompted password issues
fail = ('Sorry, try again.',)
def build_become_command(self, cmd, shell):
super(BecomeModule, self).build_become_command(cmd, shell)
if not cmd:
return cmd
becomecmd = self.get_option('become_exe')
flags = self.get_option('become_flags')
if self.get_option('become_pass'):
self.prompt = '[dzdo via ansible, key=%s] password:' % self._id
flags = '%s -p "%s"' % (flags.replace('-n', ''), self.prompt)
become_user = self.get_option('become_user')
user = '-u %s' % (become_user) if become_user else ''
return ' '.join([becomecmd, flags, user, self._build_success_command(cmd, shell)])

Some files were not shown because too many files have changed in this diff.