diff --git a/.automation/build.py b/.automation/build.py
index 807d9a9a0e7..d7a0cf81b1d 100644
--- a/.automation/build.py
+++ b/.automation/build.py
@@ -42,10 +42,14 @@
DEFAULT_DOCKERFILE_RUST_ARGS,
DEFAULT_RELEASE,
DEFAULT_REPORT_FOLDER_NAME,
+ DOCKER_PACKAGES_ROOT_URL,
+ GHCR_PACKAGES_ROOT_URL,
ML_DOC_URL_BASE,
ML_DOCKER_IMAGE,
ML_DOCKER_IMAGE_LEGACY,
ML_DOCKER_IMAGE_LEGACY_V5,
+ ML_DOCKER_IMAGE_WITH_HOST,
+ ML_DOCKER_NAME,
ML_REPO,
ML_REPO_URL,
)
@@ -61,6 +65,7 @@
IS_LATEST = "--latest" in sys.argv
DELETE_DOCKERFILES = "--delete-dockerfiles" in sys.argv
DELETE_TEST_CLASSES = "--delete-test-classes" in sys.argv
+CUSTOM_FLAVOR = "--custom-flavor" in sys.argv
# Release args management
if RELEASE is True:
@@ -114,6 +119,7 @@
f"{REPO_HOME}/megalinter/descriptors/schemas/megalinter-descriptor.jsonschema.json"
)
CONFIG_JSON_SCHEMA = f"{REPO_HOME}/megalinter/descriptors/schemas/megalinter-configuration.jsonschema.json"
+CUSTOM_FLAVOR_JSON_SCHEMA = f"{REPO_HOME}/megalinter/descriptors/schemas/megalinter-custom-flavor.jsonschema.json"
OWN_MEGALINTER_CONFIG_FILE = f"{REPO_HOME}/.mega-linter.yml"
IDE_LIST = {
@@ -143,6 +149,7 @@
"CSS_SCSSLINT", # Removed in v8
"OPENAPI_SPECTRAL", # Removed in v8
"SQL_SQL_LINT", # Removed in v8
+ "MARKDOWN_MARKDOWN_LINK_CHECK", # Removed in v9
]
DESCRIPTORS_FOR_BUILD_CACHE = None
@@ -226,7 +233,7 @@ def generate_flavor(flavor, flavor_info):
description: "0 if no source file has been updated, 1 if source files has been updated"
runs:
using: "docker"
- image: "docker://{ML_DOCKER_IMAGE}:{image_release}"
+ image: "docker://{ML_DOCKER_IMAGE_WITH_HOST}:{image_release}"
args:
- "-v"
- "/var/run/docker.sock:/var/run/docker.sock:rw"
@@ -251,12 +258,13 @@ def generate_flavor(flavor, flavor_info):
json.dump(flavor_info, outfile, indent=4, sort_keys=True)
outfile.write("\n")
# Write in global flavors files
- with open(GLOBAL_FLAVORS_FILE, "r", encoding="utf-8") as json_file:
- global_flavors = json.load(json_file)
- global_flavors[flavor] = flavor_info
- with open(GLOBAL_FLAVORS_FILE, "w", encoding="utf-8") as outfile:
- json.dump(global_flavors, outfile, indent=4, sort_keys=True)
- outfile.write("\n")
+ if CUSTOM_FLAVOR is not True or os.path.isdir("/megalinter-builder"):
+ with open(GLOBAL_FLAVORS_FILE, "r", encoding="utf-8") as json_file:
+ global_flavors = json.load(json_file)
+ global_flavors[flavor] = flavor_info
+ with open(GLOBAL_FLAVORS_FILE, "w", encoding="utf-8") as outfile:
+ json.dump(global_flavors, outfile, indent=4, sort_keys=True)
+ outfile.write("\n")
# Flavored dockerfile
dockerfile = f"{FLAVORS_DIR}/{flavor}/Dockerfile"
if not os.path.isdir(os.path.dirname(dockerfile)):
@@ -289,7 +297,7 @@ def generate_flavor(flavor, flavor_info):
description: "0 if no source file has been updated, 1 if source files has been updated"
runs:
using: "docker"
- image: "docker://{ML_DOCKER_IMAGE}-{flavor}:{image_release}"
+ image: "docker://{ML_DOCKER_IMAGE_WITH_HOST}-{flavor}:{image_release}"
args:
- "-v"
- "/var/run/docker.sock:/var/run/docker.sock:rw"
@@ -301,7 +309,27 @@ def generate_flavor(flavor, flavor_info):
with open(flavor_action_yml, "w", encoding="utf-8") as file:
file.write(action_yml)
logging.info(f"Updated {flavor_action_yml}")
- extra_lines = [
+ extra_lines = []
+ if CUSTOM_FLAVOR is True:
+ current_date_time_iso = datetime.now().isoformat()
+ extra_lines += [
+ "ENV CUSTOM_FLAVOR=true \\",
+ f" BUILD_VERSION={os.getenv('BUILD_VERSION', 'local_build')} \\",
+ f" BUILD_DATE={os.getenv('BUILD_DATE', 'local_build')} \\",
+ f" BUILD_REVISION={os.getenv('BUILD_REVISION', 'local_build')} \\",
+ f" CUSTOM_FLAVOR_BUILD_DATE={current_date_time_iso} \\",
+ f" CUSTOM_FLAVOR_BUILD_REPO={os.getenv('CUSTOM_FLAVOR_BUILD_REPO', 'local_build')} \\",
+ f" CUSTOM_FLAVOR_BUILD_REPO_URL={os.getenv('CUSTOM_FLAVOR_BUILD_REPO_URL', 'local_build')} \\",
+ f" CUSTOM_FLAVOR_BUILD_USER={os.getenv('CUSTOM_FLAVOR_BUILD_USER', 'local_build')}",
+ "",
+ 'LABEL com.github.actions.name="MegaLinter Custom Flavor" \\',
+ f' maintainer="{os.getenv("CUSTOM_FLAVOR_BUILD_USER", "local_build")}" \\',
+ f' org.opencontainers.image.source="{os.getenv("CUSTOM_FLAVOR_BUILD_REPO_URL", "local_build")}" \\',
+ f' org.opencontainers.image.created="{os.getenv("BUILD_DATE", "local_build")}" \\',
+ f' org.opencontainers.image.revision="{os.getenv("BUILD_REVISION", "local_build")}" \\',
+ f' org.opencontainers.image.version="{os.getenv("BUILD_VERSION", "local_build")}"',
+ ]
+ extra_lines += [
"COPY entrypoint.sh /entrypoint.sh",
"RUN chmod +x entrypoint.sh",
'ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]',
@@ -315,6 +343,7 @@ def generate_flavor(flavor, flavor_info):
DEFAULT_DOCKERFILE_FLAVOR_ARGS.copy(),
{"cargo": DEFAULT_DOCKERFILE_FLAVOR_CARGO_PACKAGES.copy()},
)
+ return dockerfile
def build_dockerfile(
@@ -530,7 +559,8 @@ def build_dockerfile(
apk_install_command = ""
if len(apk_packages) > 0:
apk_install_command = (
- "RUN apk add --no-cache \\\n "
+ "RUN apk -U --no-cache upgrade"
+ + " \\\n && apk add --no-cache \\\n "
+ " \\\n ".join(list(dict.fromkeys(apk_packages)))
+ " \\\n && git config --global core.autocrlf true"
)
@@ -563,13 +593,14 @@ def build_dockerfile(
cargo_install_command = (
"RUN curl https://sh.rustup.rs -sSf |"
+ " sh -s -- -y --profile minimal --default-toolchain ${RUST_RUST_VERSION} \\\n"
- + ' && export PATH="/root/.cargo/bin:${PATH}" \\\n'
+ + ' && export PATH="/root/.cargo/bin:/root/.cargo/env:${PATH}" \\\n'
+ + " && rustup default stable \\\n"
+ f" && {rustup_cargo_cmd} \\\n"
+ " && rm -rf /root/.cargo/registry /root/.cargo/git "
+ "/root/.cache/sccache"
+ (" /root/.rustup" if keep_rustup is False else "")
+ "\n"
- + 'ENV PATH="/root/.cargo/bin:${PATH}"'
+ + 'ENV PATH="/root/.cargo/bin:/root/.cargo/env:${PATH}"'
)
replace_in_file(dockerfile, "#CARGO__START", "#CARGO__END", cargo_install_command)
# NPM packages
@@ -597,7 +628,7 @@ def build_dockerfile(
+ ' -o -iname "README.md"'
+ ' -o -iname ".package-lock.json"'
+ ' -o -iname "package-lock.json"'
- + " \\) -o -type d -name /root/.npm/_cacache \\) -delete \n"
+ + " \\) -o -type d -name /root/.npm/_cacache \\) -delete\n"
+ "WORKDIR /\n"
)
replace_in_file(dockerfile, "#NPM__START", "#NPM__END", npm_install_command)
@@ -605,8 +636,8 @@ def build_dockerfile(
pip_install_command = ""
if len(pip_packages) > 0:
pip_install_command = (
- "RUN PYTHONDONTWRITEBYTECODE=1 pip3 install --no-cache-dir pip==${PIP_PIP_VERSION} &&"
- + " PYTHONDONTWRITEBYTECODE=1 pip3 install --no-cache-dir \\\n '"
+ "RUN uv pip install --no-cache --system pip==${PIP_PIP_VERSION} &&"
+ + " uv pip install --no-cache --system \\\n '"
+ "' \\\n '".join(list(dict.fromkeys(pip_packages)))
+ "' && \\\n"
+ r"find . \( -type f \( -iname \*.pyc -o -iname \*.pyo \) -o -type d -iname __pycache__ \) -delete"
@@ -617,21 +648,16 @@ def build_dockerfile(
# Python packages in venv
if len(pipvenv_packages.items()) > 0:
pipenv_install_command = (
- "RUN PYTHONDONTWRITEBYTECODE=1 pip3 install"
- " --no-cache-dir pip==${PIP_PIP_VERSION} virtualenv==${PIP_VIRTUALENV_VERSION} \\\n"
+ "RUN uv pip install --system"
+ " --no-cache pip==${PIP_PIP_VERSION} virtualenv==${PIP_VIRTUALENV_VERSION} \\\n"
)
env_path_command = 'ENV PATH="${PATH}"'
for pip_linter, pip_linter_packages in pipvenv_packages.items():
pipenv_install_command += (
- f' && mkdir -p "/venvs/{pip_linter}" '
- + f'&& cd "/venvs/{pip_linter}" '
- + "&& virtualenv . "
- + "&& source bin/activate "
- + "&& PYTHONDONTWRITEBYTECODE=1 pip3 install --no-cache-dir "
+ f' && uv venv --seed --no-project --no-managed-python --no-cache "/venvs/{pip_linter}" '
+ + f'&& VIRTUAL_ENV="/venvs/{pip_linter}" uv pip install --no-cache '
+ (" ".join(pip_linter_packages))
- + " "
- + "&& deactivate "
- + "&& cd ./../.. \\\n"
+ + " \\\n"
)
env_path_command += f":/venvs/{pip_linter}/bin"
pipenv_install_command = pipenv_install_command[:-2] # remove last \
@@ -691,6 +717,20 @@ def match_flavor(item, flavor, flavor_info):
return True
else:
return False
+ # Custom flavor
+ elif flavor == "CUSTOM":
+ descriptors, linters_by_type = list_descriptors_for_build()
+ # Item is a linter: check if present in the flavor
+ if "linter_name" in item and item["name"] in flavor_info["linters"]:
+ return True
+ # Item is a descriptor and it contains one of the linters included in the flavor info
+ if "linters" in item:
+ for descriptor in descriptors:
+ if item["descriptor_id"] == descriptor["descriptor_id"]:
+ descriptor_linters = descriptor["linter_instances"]
+ for descriptor_linter in descriptor_linters:
+ if descriptor_linter.name in flavor_info["linters"]:
+ return True
# Other flavors
elif "descriptor_flavors" in item:
if flavor in item["descriptor_flavors"] or (
@@ -713,7 +753,7 @@ def generate_linter_dockerfiles():
linters_md += "| Linter key | Docker image | Size |\n"
linters_md += "| :----------| :----------- | :--: |\n"
descriptor_files = megalinter.linter_factory.list_descriptor_files()
- gha_workflow_yml = [" linter:", " ["]
+ active_linter_list_lower = []
for descriptor_file in descriptor_files:
descriptor_items = []
with open(descriptor_file, "r", encoding="utf-8") as f:
@@ -771,8 +811,9 @@ def generate_linter_dockerfiles():
build_dockerfile(
dockerfile, descriptor_and_linter, requires_docker, "none", extra_lines
)
- gha_workflow_yml += [f' "{linter_lower_name}",']
- docker_image = f"{ML_DOCKER_IMAGE}-only-{linter_lower_name}:{VERSION_V}"
+ docker_image = (
+ f"{ML_DOCKER_IMAGE_WITH_HOST}-only-{linter_lower_name}:{VERSION_V}"
+ )
docker_image_badge = (
f""
@@ -780,27 +821,16 @@ def generate_linter_dockerfiles():
linters_md += (
f"| {linter.name} | {docker_image} | {docker_image_badge} |\n"
)
+ if not (hasattr(linter, "disabled") and linter.disabled is True):
+ active_linter_list_lower += [linter_lower_name]
+
+    # Write active_linter_list_lower in .automation/generated/linters_matrix.json
+ linters_matrix_file = f"{REPO_HOME}/.automation/generated/linters_matrix.json"
+ with open(linters_matrix_file, "w", encoding="utf-8") as file:
+ json.dump(active_linter_list_lower, file, indent=2, sort_keys=True)
+ file.write("\n")
+ logging.info(f"Updated {linters_matrix_file}")
- # Update github action workflow
- gha_workflow_yml += [" ]"]
- replace_in_file(
- f"{REPO_HOME}/.github/workflows/deploy-DEV-linters.yml",
- "# linters-start",
- "# linters-end",
- "\n".join(gha_workflow_yml),
- )
- replace_in_file(
- f"{REPO_HOME}/.github/workflows/deploy-BETA-linters.yml",
- "# linters-start",
- "# linters-end",
- "\n".join(gha_workflow_yml),
- )
- replace_in_file(
- f"{REPO_HOME}/.github/workflows/deploy-RELEASE-linters.yml",
- "# linters-start",
- "# linters-end",
- "\n".join(gha_workflow_yml),
- )
# Write MD file
file = open(f"{REPO_HOME}/docs/standalone-linters.md", "w", encoding="utf-8")
file.write(linters_md + "\n")
@@ -862,10 +892,11 @@ def list_descriptors_for_build():
descriptors = []
for descriptor_file in descriptor_files:
descriptor = megalinter.linter_factory.build_descriptor_info(descriptor_file)
- descriptors += [descriptor]
descriptor_linters = megalinter.linter_factory.build_descriptor_linters(
descriptor_file, {"request_id": "build"}
)
+ descriptor["linter_instances"] = descriptor_linters
+ descriptors += [descriptor]
linters_by_type[descriptor_linters[0].descriptor_type] += descriptor_linters
DESCRIPTORS_FOR_BUILD_CACHE = descriptors, linters_by_type
return descriptors, linters_by_type
@@ -901,29 +932,48 @@ def generate_documentation():
)
# Update welcome phrase
welcome_phrase = (
- "MegaLinter is an **Open-Source** tool for **CI/CD workflows** "
+ "MegaLinter is an **open-source** tool for **CI/CD workflows** "
+ "that analyzes the **consistency of your "
- + "code**, **IAC**, **configuration**, and **scripts** in your repository "
- + "sources, to **ensure all your projects "
- + "sources are clean and formatted** whatever IDE/toolbox is used by "
- + "their developers, powered by [**OX Security**](https://www.ox.security/?ref=megalinter).\n\n"
- + f"Supporting [**{len(linters_by_type['language'])}** languages]"
+ + "code**, **IaC**, **configuration**, and **scripts** in your repository "
+ + "to **ensure all your project sources are clean and formatted**, no matter which IDE or toolbox is used by "
+ + "your developers. Powered by [**OX Security**](https://www.ox.security/?ref=megalinter).\n\n"
+ + f"Supports [**{len(linters_by_type['language'])}** languages]"
+ "(#languages), "
+ f"[**{len(linters_by_type['format'])}** formats](#formats), "
- + f"[**{len(linters_by_type['tooling_format'])}** tooling formats](#tooling-formats) "
- + "and **ready to use out of the box**, as a GitHub action or any CI system, "
- + "**highly configurable** and **free for all uses**.\n\n"
- + "MegaLinter has **native integrations** with many of the major CI/CD tools of the market.\n\n"
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/GitHubCommentReporter.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/GitlabCommentReporter.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/AzureCommentReporter.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/BitbucketCommentReporter.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/install-jenkins.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/install-drone.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/install-concourse.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/install-docker.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/SarifReporter.md)\n" # noqa: E501
- + "[](https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/ApiReporter.md)\n\n" # noqa: E501
+ + f"[**{len(linters_by_type['tooling_format'])}** tooling formats](#tooling-formats), "
+ + "and is **ready to use out of the box** as a GitHub Action or with any CI system. "
+ + "It is **highly configurable** and **free for all uses**.\n\n"
+ + "MegaLinter has **native integrations** with many major CI/CD tools.\n\n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/GitHubCommentReporter.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/GitlabCommentReporter.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/AzureCommentReporter.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/BitbucketCommentReporter.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/install-jenkins.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/install-drone.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/install-concourse.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/install-docker.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/SarifReporter.md)/n"
+ + "[]("
+ + "https://github.com/oxsecurity/megalinter/tree/main/docs/reporters/ApiReporter.md)/n/n"
)
# Update README.md file
replace_in_file(
@@ -989,7 +1039,7 @@ def generate_descriptor_documentation(descriptor):
]
# Title
descriptor_md += [
- f"# {descriptor.get('descriptor_label', descriptor.get('descriptor_id'))}",
+ f"# {descriptor.get('descriptor_label', descriptor.get('descriptor_id')).replace('#', '\\#')}",
"",
]
# List of linters
@@ -1124,13 +1174,16 @@ def generate_descriptor_documentation(descriptor):
def generate_flavor_documentation(flavor_id, flavor, linters_tables_md):
flavor_github_action = f"{ML_REPO}/flavors/{flavor_id}@{VERSION_V}"
- flavor_docker_image = f"{ML_DOCKER_IMAGE}-{flavor_id}:{VERSION_V}"
+ flavor_docker_image = f"{ML_DOCKER_IMAGE_WITH_HOST}-{flavor_id}:{VERSION_V}"
+ flavor_docker_image_dockerhub = (
+ f"docker.io/{ML_DOCKER_IMAGE}-{flavor_id}:{VERSION_V}"
+ )
docker_image_badge = (
f""
)
docker_pulls_badge = (
- f""
+ f""
)
flavor_doc_md = [
"---",
@@ -1150,7 +1203,12 @@ def generate_flavor_documentation(flavor_id, flavor, linters_tables_md):
"## Usage",
"",
f"- [GitHub Action]({MKDOCS_URL_ROOT}/installation/#github-action): **{flavor_github_action}**",
- f"- Docker image: **{flavor_docker_image}**",
+ "",
+ "- Docker images:",
+ "",
+ f" - GitHub Packages: **{flavor_docker_image}**",
+ f" - Docker Hub: **{flavor_docker_image_dockerhub}**",
+ "",
f"- [mega-linter-runner]({MKDOCS_URL_ROOT}/mega-linter-runner/): `mega-linter-runner --flavor {flavor_id}`",
"",
"## Embedded linters",
@@ -1966,9 +2024,7 @@ def build_flavors_md_table(filter_linter_name=None, replace_link=False):
+ +len(linters_by_type["other"])
)
docker_image_badge = f""
- docker_pulls_badge = (
- f""
- )
+ docker_pulls_badge = f""
md_line_all = (
f"| {icon_html} | [all]({MKDOCS_URL_ROOT}/supported-linters/) | "
f"Default MegaLinter Flavor | {str(linters_number)} | {docker_image_badge} {docker_pulls_badge} |"
@@ -2069,29 +2125,32 @@ def update_docker_pulls_counter():
now_str = datetime.now().replace(microsecond=0).isoformat()
for flavor_id in all_flavors_ids:
if flavor_id == "all":
- docker_image_url = (
- f"https://hub.docker.com/v2/repositories/%7BML_DOCKER_IMAGE%7D"
- )
+ ghcr_image_url = f"{GHCR_PACKAGES_ROOT_URL}/{ML_DOCKER_NAME}"
+ docker_image_url = f"{DOCKER_PACKAGES_ROOT_URL}/{ML_DOCKER_IMAGE}"
legacy_docker_image_url = (
- f"https://hub.docker.com/v2/repositories/%7BML_DOCKER_IMAGE_LEGACY%7D"
+ f"{DOCKER_PACKAGES_ROOT_URL}/{ML_DOCKER_IMAGE_LEGACY}"
)
legacy_v5_docker_image_url = (
- f"https://hub.docker.com/v2/repositories/%7BML_DOCKER_IMAGE_LEGACY_V5%7D"
+ f"{DOCKER_PACKAGES_ROOT_URL}/{ML_DOCKER_IMAGE_LEGACY_V5}"
)
else:
+ ghcr_image_url = f"{GHCR_PACKAGES_ROOT_URL}/{ML_DOCKER_NAME}-{flavor_id}"
docker_image_url = (
- f"https://hub.docker.com/v2/repositories/%7BML_DOCKER_IMAGE%7D-%7Bflavor_id%7D"
+ f"{DOCKER_PACKAGES_ROOT_URL}/{ML_DOCKER_IMAGE}-{flavor_id}"
+ )
+ legacy_docker_image_url = (
+ f"{DOCKER_PACKAGES_ROOT_URL}/{ML_DOCKER_IMAGE_LEGACY}-{flavor_id}"
)
- legacy_docker_image_url = f"https://hub.docker.com/v2/repositories/%7BML_DOCKER_IMAGE_LEGACY%7D-%7Bflavor_id%7D"
legacy_v5_docker_image_url = (
- "https://hub.docker.com/v2/repositories/"
+ f"{DOCKER_PACKAGES_ROOT_URL}/"
+ f"{ML_DOCKER_IMAGE_LEGACY_V5}-{flavor_id}"
)
+ flavor_count_0 = perform_count_request(ghcr_image_url)
flavor_count_1 = perform_count_request(docker_image_url)
flavor_count_2 = perform_count_request(legacy_docker_image_url)
flavor_count_3 = perform_count_request(legacy_v5_docker_image_url)
- flavor_count = flavor_count_1 + flavor_count_2 + flavor_count_3
+ flavor_count = flavor_count_0 + flavor_count_1 + flavor_count_2 + flavor_count_3
logging.info(f"- docker pulls for {flavor_id}: {flavor_count}")
total_count = total_count + flavor_count
flavor_stats = list(docker_stats.get(flavor_id, []))
@@ -2857,6 +2916,15 @@ def generate_json_schema_enums():
with open(CONFIG_JSON_SCHEMA, "w", encoding="utf-8") as outfile:
json.dump(json_schema, outfile, indent=2, sort_keys=True)
outfile.write("\n")
+ # Also update megalinter custom flavor schema
+ with open(CUSTOM_FLAVOR_JSON_SCHEMA, "r", encoding="utf-8") as json_flavor_file:
+ json_flavor_schema = json.load(json_flavor_file)
+ json_flavor_schema["definitions"]["enum_linter_keys"]["enum"] = json_schema[
+ "definitions"
+ ]["enum_linter_keys"]["enum"]
+ with open(CUSTOM_FLAVOR_JSON_SCHEMA, "w", encoding="utf-8") as outfile_flavor:
+ json.dump(json_flavor_schema, outfile_flavor, indent=2, sort_keys=True)
+ outfile_flavor.write("\n")
# Collect linters info from linter url, later used to build link preview card within linter documentation
@@ -2886,8 +2954,8 @@ def collect_linter_previews():
logging.error(str(e))
if title is not None:
item = {
- "title": megalinter.utils.decode_utf8(title),
- "description": megalinter.utils.decode_utf8(description),
+ "title": megalinter.utils.clean_string(title),
+ "description": megalinter.utils.clean_string(description),
"image": image,
}
data[linter.linter_name] = item
@@ -3377,7 +3445,7 @@ def reformat_markdown_tables():
shell=True,
executable=None if sys.platform == "win32" else which("bash"),
)
- stdout = utils.decode_utf8(process.stdout)
+ stdout = utils.clean_string(process.stdout)
logging.info(f"Format table results: ({process.returncode})\n" + stdout)
@@ -3457,29 +3525,6 @@ def update_dependents_info():
os.system(" ".join(command))
-def update_workflows_linters():
- descriptors, _ = list_descriptors_for_build()
-
- linters = ""
-
- for descriptor in descriptors:
- for linter in descriptor["linters"]:
- if "disabled" in linter and linter["disabled"] is True:
- continue
- if "name" in linter:
- name = linter["name"].lower()
- else:
- lang_lower = descriptor["descriptor_id"].lower()
- linter_name_lower = linter["linter_name"].lower().replace("-", "_")
- name = f"{lang_lower}_{linter_name_lower}"
-
- linters += f' "{name}",\n'
-
- update_workflow_linters(".github/workflows/deploy-DEV-linters.yml", linters)
- update_workflow_linters(".github/workflows/deploy-BETA-linters.yml", linters)
- update_workflow_linters(".github/workflows/deploy-RELEASE-linters.yml", linters)
-
-
def update_workflow_linters(file_path, linters):
with open(file_path, "r", encoding="utf-8") as f:
file_content = f.read()
@@ -3493,6 +3538,74 @@ def update_workflow_linters(file_path, linters):
f.write(file_content)
+def generate_custom_flavor():
+ megalinter_dir = (
+ "/megalinter-builder"
+ if os.path.isdir("/megalinter-builder")
+ else f"{REPO_HOME}/.automation/test"
+ )
+ work_dir = (
+ "/github/workspace" if os.path.isdir("/github/workspace") else megalinter_dir
+ )
+ reports_dir = (
+ f"{work_dir}/megalinter-reports"
+ if work_dir == "/github/workspace"
+ else f"{REPO_HOME}/megalinter-reports"
+ )
+ flavor_file = f"{work_dir}/megalinter-custom-flavor.yml"
+ with open(flavor_file, "r", encoding="utf-8") as f:
+ flavor_info = yaml.safe_load(f)
+ flavor_info["strict"] = True
+ logging.info(f"Generating custom flavor from {flavor_file} in {megalinter_dir}")
+ dockerfile_tmp = generate_flavor("CUSTOM", flavor_info)
+ dockerfile = f"{megalinter_dir}/Dockerfile-megalinter-custom"
+ copyfile(dockerfile_tmp, dockerfile)
+ # Copy to reports dir
+ if not os.path.isdir(reports_dir):
+ os.makedirs(reports_dir, exist_ok=True)
+ shutil.copyfile(dockerfile, f"{reports_dir}/Dockerfile-megalinter-custom")
+    # Delete folder containing dockerfile if run locally
+ dockerfile_tmp_dir = os.path.dirname(dockerfile_tmp)
+ if os.path.isdir(dockerfile_tmp_dir) and "/.automation/test" in work_dir:
+ logging.info(
+ f"Deleting folder {dockerfile_tmp_dir} containing custom flavor dockerfile"
+ )
+ shutil.rmtree(dockerfile_tmp_dir, ignore_errors=True)
+ # Display dockerfile content in log
+ with open(dockerfile, "r", encoding="utf-8") as f:
+ dockerfile_content = f.read()
+ logging.info(f"Generated custom flavor dockerfile:\n\n{dockerfile_content}\n")
+ return dockerfile
+
+
+def build_custom_flavor(dockerfile):
+ logging.info("Building custom flavor docker image…")
+ work_dir = (
+ "/megalinter-builder" if os.path.isdir("/megalinter-builder") else REPO_HOME
+ )
+ tag_id = os.getenv("CUSTOM_FLAVOR_BUILD_REPO", "megalinter-custom").replace(
+ "/", "_"
+ )
+ command = [
+ "docker",
+ "build",
+ "-t",
+ tag_id,
+ "-f",
+ dockerfile,
+ work_dir,
+ ]
+ logging.info("Running command: " + " ".join(command))
+ process = subprocess.run(
+ command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True,
+ )
+ stdout = utils.clean_string(process.stdout)
+ logging.info(f"Build custom flavor results: ({process.returncode})\n" + stdout)
+
+
if __name__ == "__main__":
logging_format = (
"[%(levelname)s] %(message)s"
@@ -3513,25 +3626,28 @@ def update_workflow_linters(file_path, linters):
handlers=[logging.StreamHandler(sys.stdout)],
)
config.init_config("build")
- # noinspection PyTypeChecker
- collect_linter_previews()
- generate_json_schema_enums()
- validate_descriptors()
- if UPDATE_DEPENDENTS is True:
- update_dependents_info()
- generate_all_flavors()
- generate_linter_dockerfiles()
- generate_linter_test_classes()
- update_workflows_linters()
- if UPDATE_DOC is True:
- logging.info("Running documentation generators…")
- # refresh_users_info() # deprecated since now we use github-dependents-info
- generate_documentation()
- generate_documentation_all_linters()
- # generate_documentation_all_users() # deprecated since now we use github-dependents-info
- generate_mkdocs_yml()
- validate_own_megalinter_config()
- manage_output_variables()
- reformat_markdown_tables()
- if RELEASE is True:
- generate_version()
+ if CUSTOM_FLAVOR is True:
+ dockerfile = generate_custom_flavor()
+ build_custom_flavor(dockerfile)
+ else:
+ # noinspection PyTypeChecker
+ collect_linter_previews()
+ generate_json_schema_enums()
+ validate_descriptors()
+ if UPDATE_DEPENDENTS is True:
+ update_dependents_info()
+ generate_all_flavors()
+ generate_linter_dockerfiles()
+ generate_linter_test_classes()
+ if UPDATE_DOC is True:
+ logging.info("Running documentation generators…")
+ # refresh_users_info() # deprecated since now we use github-dependents-info
+ generate_documentation()
+ generate_documentation_all_linters()
+ # generate_documentation_all_users() # deprecated since now we use github-dependents-info
+ generate_mkdocs_yml()
+ validate_own_megalinter_config()
+ manage_output_variables()
+ reformat_markdown_tables()
+ if RELEASE is True:
+ generate_version()
diff --git a/.automation/generated/linter-helps.json b/.automation/generated/linter-helps.json
index 6afa5b47a32..7533469eb25 100644
--- a/.automation/generated/linter-helps.json
+++ b/.automation/generated/linter-helps.json
@@ -25,9 +25,9 @@
"",
"Documents:",
"",
- " - List of checks: https://github.com/rhysd/actionlint/tree/v1.7.7/docs/checks.md",
- " - Usage: https://github.com/rhysd/actionlint/tree/v1.7.7/docs/usage.md",
- " - Configuration: https://github.com/rhysd/actionlint/tree/v1.7.7/docs/config.md",
+ " - List of checks: https://github.com/rhysd/actionlint/tree/v1.7.8/docs/checks.md",
+ " - Usage: https://github.com/rhysd/actionlint/tree/v1.7.8/docs/usage.md",
+ " - Configuration: https://github.com/rhysd/actionlint/tree/v1.7.8/docs/config.md",
"",
"Flags:",
" -color",
@@ -68,7 +68,9 @@
" [-x SKIP_LIST] [--generate-ignore] [-w WARN_LIST]",
" [--enable-list ENABLE_LIST] [--nocolor] [--force-color]",
" [--exclude EXCLUDE_PATHS [EXCLUDE_PATHS ...]]",
- " [-c CONFIG_FILE] [-i IGNORE_FILE] [--offline] [--version]",
+ " [-c CONFIG_FILE] [-i IGNORE_FILE]",
+ " [--yamllint-file YAMLLINT_FILE] [--offline | --no-offline]",
+ " [--version]",
" [lintables ...]",
"",
"positional arguments:",
@@ -79,7 +81,7 @@
" -P, --list-profiles List all profiles.",
" -L, --list-rules List all the rules.",
" -T, --list-tags List all the tags and the rules they cover.",
- " -f {brief,full,md,json,codeclimate,quiet,pep8,sarif}, --format {brief,full,md,json,codeclimate,quiet,pep8,sarif}",
+ " -f, --format {brief,full,md,json,codeclimate,quiet,pep8,sarif}",
" stdout formatting, json being an alias for codeclimate. (default: None)",
" --sarif-file SARIF_FILE",
" SARIF output file",
@@ -89,18 +91,18 @@
" -p, --parseable parseable output, same as '-f pep8'",
" --project-dir PROJECT_DIR",
" Location of project/repository, autodetected based on location of configuration file.",
- " -r RULESDIR, --rules-dir RULESDIR",
- " Specify custom rule directories. Add -R to keep using embedded rules from /venvs/ansible-lint/lib/python3.12/site-packages/ansiblelint/rules",
+ " -r, --rules-dir RULESDIR",
+ " Specify custom rule directories. Add -R to keep using embedded rules from /venvs/ansible-lint/lib/python3.13/site-packages/ansiblelint/rules",
" -R Keep default rules when using -r",
" -s, --strict Return non-zero exit code on warnings as well as errors",
" --fix [WRITE_LIST] Allow ansible-lint to perform auto-fixes, including YAML reformatting. You can limit the effective rule transforms (the 'write_list') by passing a keywords 'all' or 'none' or a comma separated list of rule ids or rule tags. YAML reformatting happens whenever '--fix' or '--fix=' is used. '--fix' and '--fix=all' are equivalent: they allow all transforms to run. Presence of --fix in command overrides config file value.",
" --show-relpath Display path relative to CWD",
- " -t TAGS, --tags TAGS only check rules whose id/tags match these values",
+ " -t, --tags TAGS only check rules whose id/tags match these values",
" -v Increase verbosity level (-vv for more)",
- " -x SKIP_LIST, --skip-list SKIP_LIST",
+ " -x, --skip-list SKIP_LIST",
" only check rules whose id/tags do not match these values. e.g: --skip-list=name,run-once",
" --generate-ignore Generate a text file '.ansible-lint-ignore' that ignores all found violations. Each line contains filename and rule id separated by a space.",
- " -w WARN_LIST, --warn-list WARN_LIST",
+ " -w, --warn-list WARN_LIST",
" only warn about these rules, unless overridden in config file. Current version default value is: experimental, jinja[spacing], fqcn[deep]",
" --enable-list ENABLE_LIST",
" activate optional rules by their tag name",
@@ -108,11 +110,14 @@
" --force-color Force colored output, same as FORCE_COLOR=1",
" --exclude EXCLUDE_PATHS [EXCLUDE_PATHS ...]",
" path to directories or files to skip. This option is repeatable.",
- " -c CONFIG_FILE, --config-file CONFIG_FILE",
+ " -c, --config-file CONFIG_FILE",
" Specify configuration file to use. By default it will look for '.ansible-lint', '.ansible-lint.yml', '.ansible-lint.yaml', '.config/ansible-lint.yml', or '.config/ansible-lint.yaml'",
- " -i IGNORE_FILE, --ignore-file IGNORE_FILE",
+ " -i, --ignore-file IGNORE_FILE",
" Specify ignore file to use. By default it will look for '.ansible-lint-ignore' or '.config/ansible-lint-ignore.txt'",
- " --offline Disable installation of requirements.yml and schema refreshing",
+ " --yamllint-file YAMLLINT_FILE",
+ " Specify yamllint config file to use. By default it will look for '.yamllint', '.yamllint.yaml', '.yamllint.yml', '~/.config/yamllint/config' or environment variables XDG_CONFIG_HOME and YAMLLINT_CONFIG_FILE.",
+ " --offline, --no-offline",
+ " Disable installation of requirements.yml and schema refreshing",
" --version",
"",
"The following environment variables are also recognized but there is no guarantee that they will work in future versions:",
@@ -142,9 +147,9 @@
],
"bandit": [
"usage: bandit [-h] [-r] [-a {file,vuln}] [-n CONTEXT_LINES] [-c CONFIG_FILE]",
- " [-p PROFILE] [-t TESTS] [-s SKIPS]",
- " [-l | --severity-level {all,low,medium,high}]",
- " [-i | --confidence-level {all,low,medium,high}]",
+ " [-p PROFILE] [-t TESTS] [-s SKIPS] [-l |",
+ " --severity-level {all,low,medium,high}] [-i |",
+ " --confidence-level {all,low,medium,high}]",
" [-f {csv,custom,html,json,sarif,sarif,screen,txt,xml,yaml}]",
" [--msg-template MSG_TEMPLATE] [-o [OUTPUT_FILE]] [-v] [-d] [-q]",
" [--ignore-nosec] [-x EXCLUDED_PATHS] [-b BASELINE]",
@@ -159,20 +164,18 @@
"options:",
" -h, --help show this help message and exit",
" -r, --recursive find and process files in subdirectories",
- " -a {file,vuln}, --aggregate {file,vuln}",
+ " -a, --aggregate {file,vuln}",
" aggregate output by vulnerability (default) or by",
" filename",
- " -n CONTEXT_LINES, --number CONTEXT_LINES",
+ " -n, --number CONTEXT_LINES",
" maximum number of code lines to output for each issue",
- " -c CONFIG_FILE, --configfile CONFIG_FILE",
+ " -c, --configfile CONFIG_FILE",
" optional config file to use for selecting plugins and",
" overriding defaults",
- " -p PROFILE, --profile PROFILE",
+ " -p, --profile PROFILE",
" profile to use (defaults to executing all tests)",
- " -t TESTS, --tests TESTS",
- " comma-separated list of test IDs to run",
- " -s SKIPS, --skip SKIPS",
- " comma-separated list of test IDs to skip",
+ " -t, --tests TESTS comma-separated list of test IDs to run",
+ " -s, --skip SKIPS comma-separated list of test IDs to skip",
" -l, --level report only issues of a given severity level or higher",
" (-l for LOW, -ll for MEDIUM, -lll for HIGH)",
" --severity-level {all,low,medium,high}",
@@ -187,13 +190,13 @@
" higher. \"all\" and \"low\" are likely to produce the same",
" results, but it is possible for rules to be undefined",
" which will not be listed in \"low\".",
- " -f {csv,custom,html,json,sarif,sarif,screen,txt,xml,yaml}, --format {csv,custom,html,json,sarif,sarif,screen,txt,xml,yaml}",
+ " -f, --format {csv,custom,html,json,sarif,sarif,screen,txt,xml,yaml}",
" specify output format",
" --msg-template MSG_TEMPLATE",
" specify output message template (only usable with",
" --format custom), see CUSTOM FORMAT section for list",
" of available values",
- " -o [OUTPUT_FILE], --output [OUTPUT_FILE]",
+ " -o, --output [OUTPUT_FILE]",
" write report to filename",
" -v, --verbose output extra information like excluded and included",
" files",
@@ -201,13 +204,13 @@
" -q, --quiet, --silent",
" only show output in the case of an error",
" --ignore-nosec do not skip lines with # nosec comments",
- " -x EXCLUDED_PATHS, --exclude EXCLUDED_PATHS",
+ " -x, --exclude EXCLUDED_PATHS",
" comma-separated list of paths (glob patterns",
" supported) to exclude from scan (note that these are",
" in addition to the excluded paths provided in the",
" config file) (default:",
" .svn,CVS,.bzr,.hg,.git,__pycache__,.tox,.eggs,*.egg)",
- " -b BASELINE, --baseline BASELINE",
+ " -b, --baseline BASELINE",
" path of a baseline report to compare against (only",
" JSON-formatted files are accepted)",
" --ini INI_PATH path to a .bandit file that supplies command line",
@@ -311,6 +314,7 @@
" B612 logging_config_insecure_listen",
" B613 trojansource",
" B614 pytorch_load",
+ " B615 huggingface_unsafe_download",
" B701 jinja2_autoescape_false",
" B702 use_of_mako_templates",
" B703 django_mark_safe",
@@ -348,10 +352,10 @@
"General help using GNU software: "
],
"bicep_linter": [
- "Bicep CLI version 0.33.93 (7a77c7f2a5)",
+ "Bicep CLI version 0.38.33 (6bb5d5f859)",
"",
"Usage:",
- " bicep build [options] ",
+ " bicep build [options] []",
" Builds a .bicep file.",
"",
" Arguments:",
@@ -363,6 +367,7 @@
" --stdout Prints the output to stdout.",
" --no-restore Builds the bicep file without restoring external modules.",
" --diagnostics-format Sets the format with which diagnostics are displayed. Valid values are ( Default | Sarif ).",
+ " --pattern Builds all files matching the specified glob pattern.",
"",
" Examples:",
" bicep build file.bicep",
@@ -371,21 +376,23 @@
" bicep build file.bicep --outfile file.json",
" bicep build file.bicep --no-restore",
" bicep build file.bicep --diagnostics-format sarif",
+ " bicep build --pattern './dir/**/*.bicep'",
"",
- " bicep format [options] ",
+ " bicep format [options] []",
" Formats a .bicep file.",
"",
" Arguments:",
" The input file",
"",
" Options:",
- " --outdir Saves the output at the specified directory.",
- " --outfile Saves the output as the specified file path.",
- " --stdout Prints the output to stdout.",
- " --newline Set newline char. Valid values are ( Auto | LF | CRLF | CR ).",
- " --indent-kind Set indentation kind. Valid values are ( Space | Tab ).",
- " --indent-size Number of spaces to indent with (Only valid with --indentKind set to Space).",
- " --insert-final-newline Insert a final newline.",
+ " --outdir Saves the output at the specified directory.",
+ " --outfile Saves the output as the specified file path.",
+ " --stdout Prints the output to stdout.",
+ " --newline Set newline char. Valid values are ( Auto | LF | CRLF | CR ).",
+ " --indent-kind Set indentation kind. Valid values are ( Space | Tab ).",
+ " --indent-size Number of spaces to indent with (Only valid with --indentKind set to Space).",
+ " --insert-final-newline Insert a final newline.",
+ " --pattern Formats all files matching the specified glob pattern.",
"",
" Examples:",
" bicep format file.bicep",
@@ -393,6 +400,7 @@
" bicep format file.bicep --outdir dir1",
" bicep format file.bicep --outfile file.json",
" bicep format file.bicep --indent-kind Tab",
+ " bicep format --pattern './dir/**/*.bicep'",
"",
" bicep decompile [options] ",
" Attempts to decompile a template .json file to .bicep.",
@@ -413,7 +421,7 @@
" bicep decompile file.json --force",
" bicep decompile file.json --outfile file.bicep",
"",
- " bicep lint [options] ",
+ " bicep lint [options] []",
" Lints a .bicep file.",
"",
" Arguments:",
@@ -422,11 +430,13 @@
" Options:",
" --no-restore Skips restoring external modules.",
" --diagnostics-format Sets the format with which diagnostics are displayed. Valid values are ( Default | Sarif ).",
+ " --pattern Lints all files matching the specified glob pattern.",
"",
" Examples:",
" bicep lint file.bicep",
" bicep lint file.bicep --no-restore",
" bicep lint file.bicep --diagnostics-format sarif",
+ " bicep lint --pattern './dir/**/*.bicep'",
"",
" bicep decompile-params [options] ",
" Attempts to decompile a parameters .json file to .bicepparam.",
@@ -489,12 +499,19 @@
" bicep publish file.bicep --target br:example.azurecr.io/hello/world:v1 --documentation-uri https://github.com/hello-world/README.md --with-source",
" bicep publish file.json --target br:example.azurecr.io/hello/world:v1 --documentation-uri https://github.com/hello-world/README.md",
"",
- " bicep restore ",
+ " bicep restore []",
" Restores external modules from the specified Bicep file to the local module cache.",
"",
" Arguments:",
" The input file",
"",
+ " Options:",
+ " --pattern Restores all files matching the specified glob pattern.",
+ "",
+ " Examples:",
+ " bicep restore main.bicep",
+ " bicep restore --pattern './dir/**/*.bicep'",
+ "",
" bicep [options]",
" Options:",
" --version -v Shows bicep version information",
@@ -502,7 +519,7 @@
" --license Prints license information",
" --third-party-notices Prints third-party notices",
"",
- " bicep build-params ",
+ " bicep build-params []",
" Builds a .json file from a .bicepparam file.",
"",
" Arguments:",
@@ -515,6 +532,7 @@
" --stdout Prints the output of building both the parameter file (.bicepparam) and the template it points to (.bicep) as json to stdout.",
" --no-restore Builds the bicep file (referenced in using declaration) without restoring external modules.",
" --diagnostics-format Sets the format with which diagnostics are displayed. Valid values are ( Default | Sarif ).",
+ " --pattern Builds all files matching the specified glob pattern.",
"",
" Examples:",
" bicep build-params params.bicepparam",
@@ -523,6 +541,7 @@
" bicep build-params params.bicepparam --outfile otherParams.json",
" bicep build-params params.bicepparam --no-restore",
" bicep build-params params.bicepparam --diagnostics-format sarif",
+ " bicep build-params --pattern './dir/**/*.bicepparam'",
"",
" bicep jsonrpc [options]",
" Runs a JSONRPC server for interacting with Bicep programmatically.",
@@ -546,7 +565,7 @@
" -c, --code TEXT Format the code passed in as a string.",
" -l, --line-length INTEGER How many characters per line to allow.",
" [default: 88]",
- " -t, --target-version [py33|py34|py35|py36|py37|py38|py39|py310|py311|py312|py313]",
+ " -t, --target-version [py33|py34|py35|py36|py37|py38|py39|py310|py311|py312|py313|py314]",
" Python versions that should be supported by",
" Black's output. You should include all",
" versions that your code supports. By",
@@ -580,7 +599,7 @@
" expected to make it into the stable style",
" Black's next major release. Implies",
" --preview.",
- " --enable-unstable-feature [string_processing|hug_parens_with_braces_and_square_brackets|wrap_long_dict_values_in_parens|multiline_string_handling|always_one_newline_after_import]",
+ " --enable-unstable-feature [string_processing|hug_parens_with_braces_and_square_brackets|wrap_long_dict_values_in_parens|multiline_string_handling|always_one_newline_after_import|fix_fmt_skip_in_one_liners|wrap_comprehension_in|remove_parens_around_except_types|normalize_cr_newlines]",
" Enable specific features included in the",
" `--unstable` style. Requires `--preview`. No",
" compatibility guarantees are provided on the",
@@ -688,32 +707,38 @@
"",
"Standard:",
" TEMPLATE The CloudFormation template to be linted",
- " -t TEMPLATE [TEMPLATE ...], --template TEMPLATE [TEMPLATE ...]",
+ " -t, --template TEMPLATE [TEMPLATE ...]",
" The CloudFormation template to be linted",
" -b, --ignore-bad-template",
" Ignore failures with Bad template",
" --ignore-templates IGNORE_TEMPLATES [IGNORE_TEMPLATES ...]",
" Ignore templates",
- " -f {quiet,parseable,json,junit,pretty,sarif}, --format {quiet,parseable,json,junit,pretty,sarif}",
+ " --deployment-files DEPLOYMENT_FILES [DEPLOYMENT_FILES ...]",
+ " Deployment files",
+ " --parameters PARAMETERS [PARAMETERS ...]",
+ " A list of parameters",
+ " --parameter-files PARAMETER_FILES [PARAMETER_FILES ...]",
+ " A list of parameter files",
+ " -f, --format {quiet,parseable,json,junit,pretty,sarif}",
" Output Format",
" -l, --list-rules list all the rules",
- " -r REGIONS [REGIONS ...], --regions REGIONS [REGIONS ...]",
+ " -r, --regions REGIONS [REGIONS ...]",
" list the regions to validate against.",
- " -i IGNORE_CHECKS [IGNORE_CHECKS ...], --ignore-checks IGNORE_CHECKS [IGNORE_CHECKS ...]",
+ " -i, --ignore-checks IGNORE_CHECKS [IGNORE_CHECKS ...]",
" only check rules whose id do not match these values",
- " -c INCLUDE_CHECKS [INCLUDE_CHECKS ...], --include-checks INCLUDE_CHECKS [INCLUDE_CHECKS ...]",
+ " -c, --include-checks INCLUDE_CHECKS [INCLUDE_CHECKS ...]",
" include rules whose id match these values",
- " -m MANDATORY_CHECKS [MANDATORY_CHECKS ...], --mandatory-checks MANDATORY_CHECKS [MANDATORY_CHECKS ...]",
+ " -m, --mandatory-checks MANDATORY_CHECKS [MANDATORY_CHECKS ...]",
" always check rules whose id match these values,",
" regardless of template exclusions",
" -e, --include-experimental",
" Include experimental rules",
- " -x CONFIGURE_RULES [CONFIGURE_RULES ...], --configure-rule CONFIGURE_RULES [CONFIGURE_RULES ...]",
+ " -x, --configure-rule CONFIGURE_RULES [CONFIGURE_RULES ...]",
" Provide configuration for a rule. Format",
" RuleId:key=value. Example: E3012:strict=true",
" --config-file CONFIG_FILE",
" Specify the cfnlintrc file to use",
- " -z CUSTOM_RULES, --custom-rules CUSTOM_RULES",
+ " -z, --custom-rules CUSTOM_RULES",
" Allows specification of a custom rule file.",
" -v, --version Version of cfn-lint",
" --output-file OUTPUT_FILE",
@@ -727,15 +752,16 @@
"Advanced / Debugging:",
" -D, --debug Enable debug logging",
" -I, --info Enable information logging",
- " -a APPEND_RULES [APPEND_RULES ...], --append-rules APPEND_RULES [APPEND_RULES ...]",
+ " -L, --list-templates List all the templates would have linted",
+ " -a, --append-rules APPEND_RULES [APPEND_RULES ...]",
" specify one or more rules directories using one or",
" more --append-rules arguments.",
- " -o OVERRIDE_SPEC, --override-spec OVERRIDE_SPEC",
+ " -o, --override-spec OVERRIDE_SPEC",
" A CloudFormation Spec override file that allows",
" customization",
" -g, --build-graph Creates a file in the same directory as the template",
" that models the template's resources in DOT format",
- " -s REGISTRY_SCHEMAS [REGISTRY_SCHEMAS ...], --registry-schemas REGISTRY_SCHEMAS [REGISTRY_SCHEMAS ...]",
+ " -s, --registry-schemas REGISTRY_SCHEMAS [REGISTRY_SCHEMAS ...]",
" one or more directories of CloudFormation Registry",
" Schemas",
" -u, --update-specs Update the CloudFormation Specs",
@@ -794,7 +820,6 @@
" [--skip-resources-without-violations] [--deep-analysis]",
" [--no-fail-on-crash] [--mask MASK] [--scan-secrets-history]",
" [--secrets-history-timeout SECRETS_HISTORY_TIMEOUT]",
- " [--openai-api-key OPENAI_API_KEY]",
" [--custom-tool-name CUSTOM_TOOL_NAME]",
"",
"Infrastructure as code static analysis",
@@ -804,11 +829,11 @@
" -v, --version version",
" --support Enable debug logs and upload the logs to the server.",
" Requires a Bridgecrew or Prisma Cloud API key.",
- " -d DIRECTORY, --directory DIRECTORY",
+ " -d, --directory DIRECTORY",
" IaC root directory (can not be used together with",
" --file).",
" --add-check Generate a new check via CLI prompt",
- " -f FILE [FILE ...], --file FILE [FILE ...]",
+ " -f, --file FILE [FILE ...]",
" File to scan (can not be used together with",
" --directory). With this option, Checkov will attempt",
" to filter the runners based on the file type. For",
@@ -838,7 +863,7 @@
" directory, so only use this option with trusted",
" repositories.",
" -l, --list List checks",
- " -o {cli,csv,cyclonedx,cyclonedx_json,json,junitxml,github_failed_only,gitlab_sast,sarif,spdx}, --output {cli,csv,cyclonedx,cyclonedx_json,json,junitxml,github_failed_only,gitlab_sast,sarif,spdx}",
+ " -o, --output {cli,csv,cyclonedx,cyclonedx_json,json,junitxml,github_failed_only,gitlab_sast,sarif,spdx}",
" Report output format. Add multiple outputs by using",
" the flag multiple times (-o sarif -o cli)",
" --output-file-path OUTPUT_FILE_PATH",
@@ -901,8 +926,7 @@
" serverless, terraform, terraform_json, terraform_plan,",
" sast, sast_python, sast_java, sast_javascript,",
" sast_typescript, sast_golang, 3d_policy",
- " -c CHECK, --check CHECK",
- " Checks to run; any other checks will be skipped. Enter",
+ " -c, --check CHECK Checks to run; any other checks will be skipped. Enter",
" one or more items separated by commas. Each item may",
" be either a Checkov check ID (CKV_AWS_123), a BC check",
" ID (BC_AWS_GENERAL_123), or a severity (LOW, MEDIUM,",
@@ -990,7 +1014,7 @@
" the console. Results are only available locally. If",
" you use the --support flag, logs will still get",
" uploaded.",
- " --docker-image DOCKER_IMAGE, --image DOCKER_IMAGE",
+ " --docker-image, --image DOCKER_IMAGE",
" Scan docker images by name or ID. Only works with",
" --bc-api-key flag",
" --dockerfile-path DOCKERFILE_PATH",
@@ -998,8 +1022,7 @@
" --repo-id REPO_ID Identity string of the repository, with form",
" /. Required when using the",
" platform integration (API key).",
- " -b BRANCH, --branch BRANCH",
- " Selected branch of the persisted repository. Only has",
+ " -b, --branch BRANCH Selected branch of the persisted repository. Only has",
" effect when using the --bc-api-key flag",
" --skip-download Do not download any data from Prisma Cloud. This will",
" omit doc links, severities, etc., as well as custom",
@@ -1042,7 +1065,7 @@
" --evaluate-variables EVALUATE_VARIABLES",
" evaluate the values of variables and locals [env var:",
" CKV_EVAL_VARS]",
- " -ca CA_CERTIFICATE, --ca-certificate CA_CERTIFICATE",
+ " -ca, --ca-certificate CA_CERTIFICATE",
" Custom CA certificate (bundle) file [env var:",
" BC_CA_BUNDLE]",
" --no-cert-verify Skip SSL certificate verification. Use this to bypass",
@@ -1121,20 +1144,13 @@
" Each entry in the list will be used formasking the",
" desired attribute for resource (or for all resources,",
" if no resource given).Notice: one entry can contain",
- " several variables, seperated with a comma. For",
+ " several variables, separated with a comma. For",
" example::, OR",
" ,",
" --scan-secrets-history",
" will scan the history of commits for secrets",
" --secrets-history-timeout SECRETS_HISTORY_TIMEOUT",
" maximum time to stop the scan",
- " --openai-api-key OPENAI_API_KEY",
- " Add an OpenAI API key to enhance finding guidelines by",
- " sending violated policies and resource code to OpenAI",
- " to request remediation guidance. This will use your",
- " OpenAI credits. Set your number of findings that will",
- " receive enhanced guidelines using",
- " CKV_OPENAI_MAX_FINDINGS [env var: CKV_OPENAI_API_KEY]",
" --custom-tool-name CUSTOM_TOOL_NAME",
" Add a tool name if you want your output to be tagged",
" with a specific tool name,this is useful when",
@@ -1342,7 +1358,7 @@
"Clang-format options:",
"",
" --Werror - If set, changes formatting warnings to errors",
- " --Wno-error= - If set don't error out on the specified warning type.",
+ " --Wno-error= - If set, don't error out on the specified warning type.",
" =unknown - If set, unknown format options are only warned about.",
" This can be used to enable formatting, even if the",
" configuration contains unknown (newer) options.",
@@ -1358,7 +1374,7 @@
" supported:",
" CSharp: .cs",
" Java: .java",
- " JavaScript: .mjs .js .ts",
+ " JavaScript: .js .mjs .cjs .ts",
" Json: .json",
" Objective-C: .m .mm",
" Proto: .proto .protodevel",
@@ -1461,7 +1477,7 @@
""
],
"clj-kondo": [
- "clj-kondo v2025.02.20",
+ "clj-kondo v2025.09.22",
"",
"Options:",
"",
@@ -1501,6 +1517,8 @@
" --report-level : minimum severity for which to report. Supported values:",
" info, warning, error. The default level if unspecified is info.",
"",
+ " --repro: ignore home dir configuration",
+ "",
" --debug: print debug information.",
""
],
@@ -1555,6 +1573,336 @@
" -c, --cache Cache linting results [boolean]",
" --ext Specify an additional file extension, separated by comma."
],
+ "cppcheck": [
+ "Cppcheck - A tool for static C/C++ code analysis",
+ "",
+ "Syntax:",
+ " cppcheck [OPTIONS] [files or paths]",
+ "",
+ "If a directory is given instead of a filename, *.cpp, *.cxx, *.cc, *.c++, *.c, *.ipp,",
+ "*.ixx, *.tpp, and *.txx files are checked recursively from the given directory.",
+ "",
+ "Options:",
+ " --addon=",
+ " Execute addon. i.e. --addon=misra. If options must be",
+ " provided a json configuration is needed.",
+ " --addon-python=",
+ " You can specify the python interpreter either in the",
+ " addon json files or through this command line option.",
+ " If not present, Cppcheck will try \"python3\" first and",
+ " then \"python\".",
+ " --cppcheck-build-dir=",
+ " Cppcheck work folder. Advantages:",
+ " * whole program analysis",
+ " * faster analysis; Cppcheck will reuse the results if",
+ " the hash for a file is unchanged.",
+ " * some useful debug information, i.e. commands used to",
+ " execute clang/clang-tidy/addons.",
+ " --check-config Check cppcheck configuration. The normal code",
+ " analysis is disabled by this flag.",
+ " --check-level=",
+ " Configure how much checking you want:",
+ " * normal: Cppcheck uses some compromises in the checking so",
+ " the checking will finish in reasonable time.",
+ " * exhaustive: deeper analysis that you choose when you can",
+ " wait.",
+ " The default choice is 'normal'.",
+ " --check-library Show information messages when library files have",
+ " incomplete info.",
+ " --checkers-report=",
+ " Write a report of all the active checkers to the given file.",
+ " --clang= Experimental: Use Clang parser instead of the builtin Cppcheck",
+ " parser. Takes the executable as optional parameter and",
+ " defaults to `clang`. Cppcheck will run the given Clang",
+ " executable, import the Clang AST and convert it into",
+ " Cppcheck data. After that the normal Cppcheck analysis is",
+ " used. You must have the executable in PATH if no path is",
+ " given.",
+ " --config-exclude=",
+ " Path (prefix) to be excluded from configuration",
+ " checking. Preprocessor configurations defined in",
+ " headers (but not sources) matching the prefix will not",
+ " be considered for evaluation.",
+ " --config-excludes-file=",
+ " A file that contains a list of config-excludes",
+ " --disable= Disable individual checks.",
+ " Please refer to the documentation of --enable=",
+ " for further details.",
+ " --dump Dump xml data for each translation unit. The dump",
+ " files have the extension .dump and contain ast,",
+ " tokenlist, symboldatabase, valueflow.",
+ " -D Define preprocessor symbol. Unless --max-configs or",
+ " --force is used, Cppcheck will only check the given",
+ " configuration when -D is used.",
+ " Example: '-DDEBUG=1 -D__cplusplus'.",
+ " -E Print preprocessor output on stdout and don't do any",
+ " further processing.",
+ " --enable= Enable additional checks. The available ids are:",
+ " * all",
+ " Enable all checks. It is recommended to only",
+ " use --enable=all when the whole program is",
+ " scanned, because this enables unusedFunction.",
+ " * warning",
+ " Enable warning messages",
+ " * style",
+ " Enable all coding style checks. All messages",
+ " with the severities 'style', 'warning',",
+ " 'performance' and 'portability' are enabled.",
+ " * performance",
+ " Enable performance messages",
+ " * portability",
+ " Enable portability messages",
+ " * information",
+ " Enable information messages",
+ " * unusedFunction",
+ " Check for unused functions. It is recommended",
+ " to only enable this when the whole program is",
+ " scanned.",
+ " * missingInclude",
+ " Warn if there are missing includes.",
+ " Several ids can be given if you separate them with",
+ " commas. See also --std",
+ " --error-exitcode= If errors are found, integer [n] is returned instead of",
+ " the default '0'. '1' is returned",
+ " if arguments are not valid or if no input files are",
+ " provided. Note that your operating system can modify",
+ " this value, e.g. '256' can become '0'.",
+ " --errorlist Print a list of all the error messages in XML format.",
+ " --exitcode-suppressions=",
+ " Used when certain messages should be displayed but",
+ " should not cause a non-zero exitcode.",
+ " --file-filter= Analyze only those files matching the given filter str",
+ " Can be used multiple times",
+ " Example: --file-filter=*bar.cpp analyzes only files",
+ " that end with bar.cpp.",
+ " --file-list= Specify the files to check in a text file. Add one",
+ " filename per line. When file is '-,' the file list will",
+ " be read from standard input.",
+ " -f, --force Force checking of all configurations in files. If used",
+ " together with '--max-configs=', the last option is the",
+ " one that is effective.",
+ " --fsigned-char Treat char type as signed.",
+ " --funsigned-char Treat char type as unsigned.",
+ " -h, --help Print this help.",
+ " -I Give path to search for include files. Give several -I",
+ " parameters to give several paths. First given path is",
+ " searched for contained header files first. If paths are",
+ " relative to source files, this is not needed.",
+ " --includes-file=",
+ " Specify directory paths to search for included header",
+ " files in a text file. Add one include path per line.",
+ " First given path is searched for contained header",
+ " files first. If paths are relative to source files,",
+ " this is not needed.",
+ " --include=",
+ " Force inclusion of a file before the checked file.",
+ " -i Give a source file or source file directory to exclude",
+ " from the check. This applies only to source files so",
+ " header files included by source files are not matched.",
+ " Directory name is matched to all parts of the path.",
+ " --inconclusive Allow that Cppcheck reports even though the analysis is",
+ " inconclusive.",
+ " There are false positives with this option. Each result",
+ " must be carefully investigated before you know if it is",
+ " good or bad.",
+ " --inline-suppr Enable inline suppressions. Use them by placing one or",
+ " more comments, like: '// cppcheck-suppress warningId'",
+ " on the lines before the warning to suppress.",
+ " -j Start threads to do the checking simultaneously.",
+ " -l Specifies that no new threads should be started if",
+ " there are other threads running and the load average is",
+ " at least .",
+ " --language=, -x ",
+ " Forces cppcheck to check all files as the given",
+ " language. Valid values are: c, c++",
+ " --library= Load file that contains information about types",
+ " and functions. With such information Cppcheck",
+ " understands your code better and therefore you",
+ " get better results. The std.cfg file that is",
+ " distributed with Cppcheck is loaded automatically.",
+ " For more information about library files, read the",
+ " manual.",
+ " --max-configs=",
+ " Maximum number of configurations to check in a file",
+ " before skipping it. Default is '12'. If used together",
+ " with '--force', the last option is the one that is",
+ " effective.",
+ " --max-ctu-depth=N Max depth in whole program analysis. The default value",
+ " is 2. A larger value will mean more errors can be found",
+ " but also means the analysis will be slower.",
+ " --output-file= Write results to file, rather than standard error.",
+ " --platform=, --platform=",
+ " Specifies platform specific types and sizes. The",
+ " available builtin platforms are:",
+ " * unix32",
+ " 32 bit unix variant",
+ " * unix64",
+ " 64 bit unix variant",
+ " * win32A",
+ " 32 bit Windows ASCII character encoding",
+ " * win32W",
+ " 32 bit Windows UNICODE character encoding",
+ " * win64",
+ " 64 bit Windows",
+ " * avr8",
+ " 8 bit AVR microcontrollers",
+ " * elbrus-e1cp",
+ " Elbrus e1c+ architecture",
+ " * pic8",
+ " 8 bit PIC microcontrollers",
+ " Baseline and mid-range architectures",
+ " * pic8-enhanced",
+ " 8 bit PIC microcontrollers",
+ " Enhanced mid-range and high end (PIC18) architectures",
+ " * pic16",
+ " 16 bit PIC microcontrollers",
+ " * mips32",
+ " 32 bit MIPS microcontrollers",
+ " * native",
+ " Type sizes of host system are assumed, but no",
+ " further assumptions.",
+ " * unspecified",
+ " Unknown type sizes",
+ " --plist-output=",
+ " Generate Clang-plist output files in folder.",
+ " --project= Run Cppcheck on project. The can be a Visual",
+ " Studio Solution (*.sln), Visual Studio Project",
+ " (*.vcxproj), compile database (compile_commands.json),",
+ " or Borland C++ Builder 6 (*.bpr). The files to analyse,",
+ " include paths, defines, platform and undefines in",
+ " the specified file will be used.",
+ " --project-configuration=",
+ " If used together with a Visual Studio Solution (*.sln)",
+ " or Visual Studio Project (*.vcxproj) you can limit",
+ " the configuration cppcheck should check.",
+ " For example: '--project-configuration=Release|Win32'",
+ " -q, --quiet Do not show progress reports.",
+ " Note that this option is not mutually exclusive with --verbose.",
+ " -rp=, --relative-paths=",
+ " Use relative paths in output. When given, are",
+ " used as base. You can separate multiple paths by ';'.",
+ " Otherwise path where source files are searched is used.",
+ " We use string comparison to create relative paths, so",
+ " using e.g. ~ for home folder does not work. It is",
+ " currently only possible to apply the base paths to",
+ " files that are on a lower level in the directory tree.",
+ " --report-progress Report progress messages while checking a file (single job only).",
+ " --rule= Match regular expression.",
+ " --rule-file= Use given rule file. For more information, see:",
+ " http://sourceforge.net/projects/cppcheck/files/Articles/",
+ " --showtime= Show timing information.",
+ " The available modes are:",
+ " * none",
+ " Show nothing (default)",
+ " * file",
+ " Show for each processed file",
+ " * file-total",
+ " Show total time only for each processed file",
+ " * summary",
+ " Show a summary at the end",
+ " * top5_file",
+ " Show the top 5 for each processed file",
+ " * top5_summary",
+ " Show the top 5 summary at the end",
+ " * top5",
+ " Alias for top5_file (deprecated)",
+ " --std= Set standard.",
+ " The available options are:",
+ " * c89",
+ " C code is C89 compatible",
+ " * c99",
+ " C code is C99 compatible",
+ " * c11",
+ " C code is C11 compatible (default)",
+ " * c++03",
+ " C++ code is C++03 compatible",
+ " * c++11",
+ " C++ code is C++11 compatible",
+ " * c++14",
+ " C++ code is C++14 compatible",
+ " * c++17",
+ " C++ code is C++17 compatible",
+ " * c++20",
+ " C++ code is C++20 compatible (default)",
+ " --suppress= Suppress warnings that match . The format of",
+ " is:",
+ " [error id]:[filename]:[line]",
+ " The [filename] and [line] are optional. If [error id]",
+ " is a wildcard '*', all error ids match.",
+ " --suppressions-list=",
+ " Suppress warnings listed in the file. Each suppression",
+ " is in the same format as above.",
+ " --suppress-xml=",
+ " Suppress warnings listed in a xml file. XML file should",
+ " follow the manual.pdf format specified in section.",
+ " `6.4 XML suppressions` .",
+ " --template='' Format the error messages. Available fields:",
+ " {file} file name",
+ " {line} line number",
+ " {column} column number",
+ " {callstack} show a callstack. Example:",
+ " [file.c:1] -> [file.c:100]",
+ " {inconclusive:text} if warning is inconclusive, text",
+ " is written",
+ " {severity} severity",
+ " {message} warning message",
+ " {id} warning id",
+ " {cwe} CWE id (Common Weakness Enumeration)",
+ " {code} show the real code",
+ " insert tab",
+ " \\n insert newline",
+ " insert carriage return",
+ " Example formats:",
+ " '{file}:{line},{severity},{id},{message}' or",
+ " '{file}({line}):({severity}) {message}' or",
+ " '{callstack} {message}'",
+ " Pre-defined templates: gcc (default), cppcheck1 (old default), vs, edit.",
+ " --template-location=''",
+ " Format error message location. If this is not provided",
+ " then no extra location info is shown.",
+ " Available fields:",
+ " {file} file name",
+ " {line} line number",
+ " {column} column number",
+ " {info} location info",
+ " {code} show the real code",
+ " insert tab",
+ " \\n insert newline",
+ " insert carriage return",
+ " Example format (gcc-like):",
+ " '{file}:{line}:{column}: note: {info}\\n{code}'",
+ " -U Undefine preprocessor symbol. Use -U to explicitly",
+ " hide certain #ifdef code paths from checking.",
+ " Example: '-UDEBUG'",
+ " -v, --verbose Output more detailed error information.",
+ " Note that this option is not mutually exclusive with --quiet.",
+ " --version Print out version number.",
+ " --xml Write results in xml format to error stream (stderr).",
+ "",
+ "Example usage:",
+ " # Recursively check the current folder. Print the progress on the screen and",
+ " # write errors to a file:",
+ " cppcheck . 2> err.txt",
+ "",
+ " # Recursively check ../myproject/ and don't print progress:",
+ " cppcheck --quiet ../myproject/",
+ "",
+ " # Check test.cpp, enable all checks:",
+ " cppcheck --enable=all --inconclusive --library=posix test.cpp",
+ "",
+ " # Check f.cpp and search include files from inc1/ and inc2/:",
+ " cppcheck -I inc1/ -I inc2/ f.cpp",
+ "",
+ "For more information:",
+ " https://files.cppchecksolutions.com/manual.pdf",
+ "",
+ "Many thanks to the 3rd party libraries we use:",
+ " * tinyxml2 -- loading project/library/ctu files.",
+ " * picojson -- loading compile database.",
+ " * pcre -- rules.",
+ " * qt -- used in GUI",
+ ""
+ ],
"cpplint": [
"",
"Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit|sed|gsed]",
@@ -1783,30 +2131,20 @@
" file is located) and all sub-directories."
],
"csharpier": [
- "dotnet-csharpier",
+ "Description:",
"",
"Usage:",
- " dotnet-csharpier [options] [...]",
- "",
- "Arguments:",
- " One or more paths to a directory containing C# files to format or a C# file to format. It may be ommited when piping data via stdin.",
+ " CSharpier [command] [options]",
"",
"Options:",
- " --check Check that files are formatted. Will not write any changes.",
- " --loglevel Specify the log level - Debug, Information (default), Warning, Error, None [default: Information]",
- " --no-cache Bypass the cache to determine if a file needs to be formatted.",
- " --no-msbuild-check Bypass the check to determine if a csproj files references a different version of CSharpier.MsBuild.",
- " --include-generated Include files generated by the SDK and files that begin with comments",
- " --fast Skip comparing syntax tree of formatted file to original file to validate changes.",
- " --skip-write Skip writing changes. Generally used for testing to ensure csharpier doesn't throw any errors or cause syntax tree validation failures.",
- " --write-stdout Write the results of formatting any files to stdout.",
- " --pipe-multiple-files Keep csharpier running so that multiples files can be piped to it via stdin.",
- " --server Run CSharpier as a server so that multiple files may be formatted.",
- " --server-port Specify the port that CSharpier should start on. Defaults to a random unused port.",
- " --config-path Path to the CSharpier configuration file",
- " --compilation-errors-as-warnings Treat compilation errors from files as warnings instead of errors.",
- " --version Show version information",
- " -?, -h, --help Show help and usage information",
+ " --version Show version information",
+ " -?, -h, --help Show help and usage information",
+ "",
+ "Commands:",
+ " format Format files.",
+ " check Check that files are formatted. Will not write any changes.",
+ " pipe-files Keep csharpier running so that multiples files can be piped to it via stdin.",
+ " server Run CSharpier as a server so that multiple files may be formatted.",
""
],
"cspell": [
@@ -1826,8 +2164,10 @@
" result. The full file is displayed in",
" color.",
" suggestions|sug [options] [words...] Spelling Suggestions for words.",
+ " init [options] Initialize a CSpell configuration file.",
" link Link dictionaries and other settings to",
" the cspell global config.",
+ " dictionaries [options] List dictionaries",
" help [command] display help for command"
],
"dartanalyzer": [
@@ -1932,7 +2272,7 @@
""
],
"devskim": [
- "devskim 1.0.52+74513a99d4",
+ "devskim 1.0.67+1c44622c1f",
"\u00a9 Microsoft Corporation. All rights reserved.",
"",
" analyze Analyze source code using DevSkim",
@@ -2035,30 +2375,23 @@
" dockerfilelint < Dockerfile Lint the contents of Dockerfile via stdin"
],
"dotenv-linter": [
- "dotenv-linter 3.3.0",
- "Mikhail Grachev ",
+ "dotenv-linter 4.0.0",
+ "Mikhail Grachev , dotenv-linter core team & contributors",
"Lightning-fast linter for .env files",
"",
- "USAGE:",
- " dotenv-linter [OPTIONS] [input]... [SUBCOMMAND]",
- "",
- "ARGS:",
- " ... files or paths [default: /]",
+ "Usage: dotenv-linter [OPTIONS] ",
"",
- "OPTIONS:",
- " -e, --exclude ... Excludes files from check",
- " -h, --help Print help information",
- " --no-color Turns off the colored output",
- " --not-check-updates Doesn't check for updates",
- " -q, --quiet Doesn't display additional information",
- " -r, --recursive Recursively searches and checks .env files",
- " -s, --skip ... Skips checks",
- " -v, --version Print version information",
+ "Commands:",
+ " check Check .env files for errors such as duplicate keys or invalid syntax",
+ " fix Automatically fix issues in .env files",
+ " diff Compare .env files to ensure matching key sets",
+ " help Print this message or the help of the given subcommand(s)",
"",
- "SUBCOMMANDS:",
- " compare Compares if files have the same keys [aliases: c]",
- " fix Automatically fixes warnings [aliases: f]",
- " list Shows list of available checks [aliases: l]"
+ "Options:",
+ " --plain Switch to plain text output without colors",
+ " -q, --quiet Display only critical results, suppressing extra details",
+ " -h, --help Print help",
+ " -V, --version Print version"
],
"dotnet-format": [
"Description:",
@@ -2113,6 +2446,8 @@
" enables printing color",
" -config string",
" config",
+ " -cpuprofile string",
+ " write cpu profile to file",
" -debug",
" print debugging information",
" -disable-end-of-line",
@@ -2364,7 +2699,7 @@
" --no-show-source Negate --show-source",
" --statistics Count errors.",
" --exit-zero Exit with status code \"0\" even if there are errors.",
- " -j JOBS, --jobs JOBS Number of subprocesses to use to run checks in",
+ " -j, --jobs JOBS Number of subprocesses to use to run checks in",
" parallel. This is ignored on Windows. The default,",
" \"auto\", will auto-detect the number of processors",
" available to use. (Default: auto)",
@@ -2381,7 +2716,7 @@
" --builtins BUILTINS define more built-ins, comma separated",
" --doctests also check syntax of the doctests",
"",
- "Installed plugins: mccabe: 0.7.0, pycodestyle: 2.12.1, pyflakes: 3.2.0"
+ "Installed plugins: mccabe: 0.7.0, pycodestyle: 2.14.0, pyflakes: 3.4.0"
],
"gherkin-lint": [
"Usage: gherkin-lint [options] ",
@@ -2407,36 +2742,37 @@
"These are common Git commands used in various situations:",
"",
"start a working area (see also: git help tutorial)",
- " clone Clone a repository into a new directory",
- " init Create an empty Git repository or reinitialize an existing one",
+ " clone Clone a repository into a new directory",
+ " init Create an empty Git repository or reinitialize an existing one",
"",
"work on the current change (see also: git help everyday)",
- " add Add file contents to the index",
- " mv Move or rename a file, a directory, or a symlink",
- " restore Restore working tree files",
- " rm Remove files from the working tree and from the index",
+ " add Add file contents to the index",
+ " mv Move or rename a file, a directory, or a symlink",
+ " restore Restore working tree files",
+ " rm Remove files from the working tree and from the index",
"",
"examine the history and state (see also: git help revisions)",
- " bisect Use binary search to find the commit that introduced a bug",
- " diff Show changes between commits, commit and working tree, etc",
- " grep Print lines matching a pattern",
- " log Show commit logs",
- " show Show various types of objects",
- " status Show the working tree status",
+ " bisect Use binary search to find the commit that introduced a bug",
+ " diff Show changes between commits, commit and working tree, etc",
+ " grep Print lines matching a pattern",
+ " log Show commit logs",
+ " show Show various types of objects",
+ " status Show the working tree status",
"",
"grow, mark and tweak your common history",
- " branch List, create, or delete branches",
- " commit Record changes to the repository",
- " merge Join two or more development histories together",
- " rebase Reapply commits on top of another base tip",
- " reset Reset current HEAD to the specified state",
- " switch Switch branches",
- " tag Create, list, delete or verify a tag object signed with GPG",
+ " backfill Download missing objects in a partial clone",
+ " branch List, create, or delete branches",
+ " commit Record changes to the repository",
+ " merge Join two or more development histories together",
+ " rebase Reapply commits on top of another base tip",
+ " reset Reset current HEAD to the specified state",
+ " switch Switch branches",
+ " tag Create, list, delete or verify a tag object signed with GPG",
"",
"collaborate (see also: git help workflows)",
- " fetch Download objects and refs from another repository",
- " pull Fetch from and integrate with another repository or a local branch",
- " push Update remote refs along with associated objects",
+ " fetch Download objects and refs from another repository",
+ " pull Fetch from and integrate with another repository or a local branch",
+ " push Update remote refs along with associated objects",
"",
"'git help -a' and 'git help -g' list available subcommands and some",
"concept guides. See 'git help ' or 'git help '",
@@ -2450,7 +2786,7 @@
" gitleaks [command]",
"",
"Available Commands:",
- " completion generate the autocompletion script for the specified shell",
+ " completion Generate the autocompletion script for the specified shell",
" dir scan directories or files for secrets",
" git scan git repositories for secrets",
" help Help about any command",
@@ -2463,23 +2799,28 @@
" order of precedence:",
" 1. --config/-c",
" 2. env var GITLEAKS_CONFIG",
- " 3. (target path)/.gitleaks.toml",
- " If none of the three options are used, then gitleaks will use the default config",
+ " 3. env var GITLEAKS_CONFIG_TOML with the file content",
+ " 4. (target path)/.gitleaks.toml",
+ " If none of the four options are used, then gitleaks will use the default config",
+ " --diagnostics string enable diagnostics (http OR comma-separated list: cpu,mem,trace). cpu=CPU prof, mem=memory prof, trace=exec tracing, http=serve via net/http/pprof",
+ " --diagnostics-dir string directory to store diagnostics output files when not using http mode (defaults to current directory)",
" --enable-rule strings only enable specific rules by id",
" --exit-code int exit code when leaks have been encountered (default 1)",
" -i, --gitleaks-ignore-path string path to .gitleaksignore file or folder containing one (default \".\")",
" -h, --help help for gitleaks",
" --ignore-gitleaks-allow ignore gitleaks:allow comments",
" -l, --log-level string log level (trace, debug, info, warn, error, fatal) (default \"info\")",
+ " --max-archive-depth int allow scanning into nested archives up to this depth (default \"0\", no archive traversal is done)",
" --max-decode-depth int allow recursive decoding up to this depth (default \"0\", no decoding is done)",
" --max-target-megabytes int files larger than this will be skipped",
" --no-banner suppress banner",
" --no-color turn off color for verbose output",
" --redact uint[=100] redact secrets from logs and stdout. To redact only parts of the secret just apply a percent value from 0..100. For example --redact=20 (default 100%)",
- " -f, --report-format string output format (json, jsonextra, csv, junit, sarif, template) (default \"json\")",
+ " -f, --report-format string output format (json, csv, junit, sarif, template)",
" -r, --report-path string report file",
" --report-template string template file used to generate the report (implies --report-format=template)",
" -v, --verbose show verbose output from scan",
+ " --version version for gitleaks",
"",
"Use \"gitleaks [command] --help\" for more information about a command."
],
@@ -2491,14 +2832,17 @@
" golangci-lint [command]",
"",
"Available Commands:",
- " cache Cache control and information",
+ " cache Cache control and information.",
" completion Generate the autocompletion script for the specified shell",
- " config Config file information",
- " custom Build a version of golangci-lint with custom linters",
- " help Help",
- " linters List current linters configuration",
- " run Run the linters",
- " version Version",
+ " config Configuration file information and verification.",
+ " custom Build a version of golangci-lint with custom linters.",
+ " fmt Format Go source files.",
+ " formatters List current formatters configuration.",
+ " help Display extra help",
+ " linters List current linters configuration.",
+ " migrate Migrate configuration file from v1 to v2.",
+ " run Lint the code.",
+ " version Display the golangci-lint version.",
"",
"Flags:",
" --color string Use color when printing; can be 'always', 'auto', or 'never' (default \"auto\")",
@@ -2557,6 +2901,7 @@
" grype registry:yourrepo/yourimage:tag pull image directly from a registry (no container runtime required)",
" grype purl:path/to/purl/file read a newline separated file of package URLs from a path on disk",
" grype PURL read a single package PURL directly (e.g. pkg:apk/openssl@3.2.1?distro=alpine-3.20.3)",
+ " grype CPE read a single CPE directly (e.g. cpe:2.3:a:openssl:openssl:3.0.14:*:*:*:*:*)",
"",
"You can also pipe in Syft JSON directly:",
" syft yourimage:tag -o json | grype",
@@ -2590,8 +2935,9 @@
" --platform string an optional platform specifier for container image sources (e.g. 'linux/arm64', 'linux/arm64/v8', 'arm64', 'linux')",
" --profile stringArray configuration profiles to use",
" -q, --quiet suppress all logging output",
- " -s, --scope string selection of layers to analyze, options=[squashed all-layers] (default \"squashed\")",
+ " -s, --scope string selection of layers to analyze, options=[squashed all-layers deep-squashed] (default \"squashed\")",
" --show-suppressed show suppressed/ignored vulnerabilities in the output (only supported with table output format)",
+ " --sort-by string sort the match results with the given strategy, options=[package severity epss risk kev vulnerability] (default \"risk\")",
" -t, --template string specify the path to a Go template file (requires 'template' output to be selected)",
" -v, --verbose count increase verbosity (-v = info, -vv = debug)",
" --version version for grype",
@@ -2763,16 +3109,17 @@
"Usage: htmlhint [options]",
"",
"Options:",
- " -V, --version output the version number",
- " -l, --list show all of the rules available",
- " -c, --config custom configuration file",
- " -r, --rules set all of the rules available",
- " -R, --rulesdir load custom rules from file or folder",
- " -f, --format output messages as custom format",
- " -i, --ignore add pattern to exclude matches",
- " --nocolor disable color",
- " --warn Warn only, exit with 0",
- " -h, --help display help for command",
+ " -V, --version output the version number",
+ " -l, --list show all of the rules available",
+ " --init create a new .htmlhintrc config file with default rules",
+ " -c, --config custom configuration file",
+ " -r, --rules set all of the rules available",
+ " -R, --rulesdir load custom rules from file or folder",
+ " -f, --format output messages as custom format",
+ " -i, --ignore add pattern to exclude matches",
+ " --nocolor disable color",
+ " --warn Warn only, exit with 0",
+ " -h, --help display help for command",
" Examples:",
"",
" htmlhint",
@@ -2780,9 +3127,10 @@
" htmlhint www/test.html",
" htmlhint www/**/*.xhtml",
" htmlhint www/**/*.{htm,html}",
- " htmlhint http://www.alibaba.com/",
+ " htmlhint https://www.example.com/",
" cat test.html | htmlhint stdin",
" htmlhint --list",
+ " htmlhint --init",
" htmlhint --rules tag-pair,id-class-value=underline test.html",
" htmlhint --config .htmlhintrc test.html",
" htmlhint --ignore **/build/**,**/test/**",
@@ -2823,7 +3171,7 @@
" [-f KNOWN_FUTURE_LIBRARY] [-o KNOWN_THIRD_PARTY]",
" [-p KNOWN_FIRST_PARTY] [--known-local-folder KNOWN_LOCAL_FOLDER]",
" [--virtual-env VIRTUAL_ENV] [--conda-env CONDA_ENV]",
- " [--py {all,2,27,3,310,311,312,313,36,37,38,39,auto}]",
+ " [--py {all,2,27,3,310,311,312,313,314,36,37,38,39,auto}]",
" [files ...]",
"",
"Sort Python import definitions alphabetically within logical sections. Run",
@@ -2866,10 +3214,10 @@
" --ws, --ignore-whitespace",
" Tells isort to ignore whitespace differences when",
" --check-only is being used.",
- " --sp SETTINGS_PATH, --settings-path SETTINGS_PATH, --settings-file SETTINGS_PATH, --settings SETTINGS_PATH",
+ " --sp, --settings-path, --settings-file, --settings SETTINGS_PATH",
" Explicitly set the settings path or file instead of",
" auto determining based on file location.",
- " --cr CONFIG_ROOT, --config-root CONFIG_ROOT",
+ " --cr, --config-root CONFIG_ROOT",
" Explicitly set the config root for resolving all",
" configs. When used with the --resolve-all-configs",
" flag, isort will look at all sub-folders in this",
@@ -2886,8 +3234,7 @@
" --old-finders, --magic-placement",
" Use the old deprecated finder logic that relies on",
" environment introspection magic.",
- " -j [JOBS], --jobs [JOBS]",
- " Number of files to process in parallel. Negative value",
+ " -j, --jobs [JOBS] Number of files to process in parallel. Negative value",
" means use number of CPUs.",
" --ac, --atomic Ensures the output doesn't save if the resulting file",
" contains syntax errors.",
@@ -2905,7 +3252,7 @@
" imports sorted.",
" --filter-files Tells isort to filter files even when they are",
" explicitly passed in as part of the CLI command.",
- " -s SKIP, --skip SKIP Files that isort should skip over. If you want to skip",
+ " -s, --skip SKIP Files that isort should skip over. If you want to skip",
" multiple files you should specify twice: --skip file1",
" --skip file2. Values can be file names, directory",
" names or file paths. To skip all files in a nested",
@@ -2917,7 +3264,7 @@
" Values can be file names, directory names or file",
" paths. To skip all files in a nested path use --skip-",
" glob.",
- " --sg SKIP_GLOB, --skip-glob SKIP_GLOB",
+ " --sg, --skip-glob SKIP_GLOB",
" Files that isort should skip over.",
" --extend-skip-glob EXTEND_SKIP_GLOB",
" Additional files that isort should skip over",
@@ -2926,7 +3273,7 @@
" Treat project as a git repository and ignore files",
" listed in .gitignore. NOTE: This requires git to be",
" installed and accessible from the same shell as isort.",
- " --ext SUPPORTED_EXTENSIONS, --extension SUPPORTED_EXTENSIONS, --supported-extension SUPPORTED_EXTENSIONS",
+ " --ext, --extension, --supported-extension SUPPORTED_EXTENSIONS",
" Specifies what extensions isort can be run against.",
" --blocked-extension BLOCKED_EXTENSIONS",
" Specifies what extensions isort can never be run",
@@ -2938,14 +3285,14 @@
" be run against the root dir.",
"",
"general output options:",
- " -a ADD_IMPORTS, --add-import ADD_IMPORTS",
+ " -a, --add-import ADD_IMPORTS",
" Adds the specified import line to all files,",
" automatically determining correct placement.",
" --append, --append-only",
" Only adds the imports specified in --add-import if the",
" file contains existing imports.",
" --af, --force-adds Forces import adds even if the original file is empty.",
- " --rm REMOVE_IMPORTS, --remove-import REMOVE_IMPORTS",
+ " --rm, --remove-import REMOVE_IMPORTS",
" Removes the specified import from all files.",
" --float-to-top Causes all non-indented imports to float to the top of",
" the file having its imports sorted (immediately below",
@@ -2964,18 +3311,17 @@
" length possible",
" --ff, --from-first Switches the typical ordering preference, showing from",
" imports first then straight ones.",
- " --fgw [FORCE_GRID_WRAP], --force-grid-wrap [FORCE_GRID_WRAP]",
+ " --fgw, --force-grid-wrap [FORCE_GRID_WRAP]",
" Force number of from imports (defaults to 2 when",
" passed as CLI flag without value) to be grid wrapped",
" regardless of line length. If 0 is passed in (the",
" global default) only line length is considered.",
- " -i INDENT, --indent INDENT",
- " String to place for indents defaults to \" \" (4",
+ " -i, --indent INDENT String to place for indents defaults to \" \" (4",
" spaces).",
- " --lbi LINES_BEFORE_IMPORTS, --lines-before-imports LINES_BEFORE_IMPORTS",
- " --lai LINES_AFTER_IMPORTS, --lines-after-imports LINES_AFTER_IMPORTS",
- " --lbt LINES_BETWEEN_TYPES, --lines-between-types LINES_BETWEEN_TYPES",
- " --le LINE_ENDING, --line-ending LINE_ENDING",
+ " --lbi, --lines-before-imports LINES_BEFORE_IMPORTS",
+ " --lai, --lines-after-imports LINES_AFTER_IMPORTS",
+ " --lbt, --lines-between-types LINES_BETWEEN_TYPES",
+ " --le, --line-ending LINE_ENDING",
" Forces line endings to the specified value. If not",
" set, values will be guessed per-file.",
" --ls, --length-sort Sort imports by their string length.",
@@ -2983,7 +3329,7 @@
" Sort straight imports by their string length. Similar",
" to `length_sort` but applies only to straight imports",
" and doesn't affect from imports.",
- " -m {GRID,VERTICAL,HANGING_INDENT,VERTICAL_HANGING_INDENT,VERTICAL_GRID,VERTICAL_GRID_GROUPED,VERTICAL_GRID_GROUPED_NO_COMMA,NOQA,VERTICAL_HANGING_INDENT_BRACKET,VERTICAL_PREFIX_FROM_MODULE_IMPORT,HANGING_INDENT_WITH_PARENTHESES,BACKSLASH_GRID,0,1,2,3,4,5,6,7,8,9,10,11}, --multi-line {GRID,VERTICAL,HANGING_INDENT,VERTICAL_HANGING_INDENT,VERTICAL_GRID,VERTICAL_GRID_GROUPED,VERTICAL_GRID_GROUPED_NO_COMMA,NOQA,VERTICAL_HANGING_INDENT_BRACKET,VERTICAL_PREFIX_FROM_MODULE_IMPORT,HANGING_INDENT_WITH_PARENTHESES,BACKSLASH_GRID,0,1,2,3,4,5,6,7,8,9,10,11}",
+ " -m, --multi-line {GRID,VERTICAL,HANGING_INDENT,VERTICAL_HANGING_INDENT,VERTICAL_GRID,VERTICAL_GRID_GROUPED,VERTICAL_GRID_GROUPED_NO_COMMA,NOQA,VERTICAL_HANGING_INDENT_BRACKET,VERTICAL_PREFIX_FROM_MODULE_IMPORT,HANGING_INDENT_WITH_PARENTHESES,BACKSLASH_GRID,0,1,2,3,4,5,6,7,8,9,10,11}",
" Multi line output (0-grid, 1-vertical, 2-hanging,",
" 3-vert-hanging, 4-vert-grid, 5-vert-grid-grouped,",
" 6-deprecated-alias-for-5, 7-noqa, 8-vertical-hanging-",
@@ -3028,7 +3374,7 @@
" installable plugin.",
" --sl, --force-single-line-imports",
" Forces all from imports to appear on their own line",
- " --nsl SINGLE_LINE_EXCLUSIONS, --single-line-exclusions SINGLE_LINE_EXCLUSIONS",
+ " --nsl, --single-line-exclusions SINGLE_LINE_EXCLUSIONS",
" One or more modules to exclude from the single line",
" rule.",
" --tc, --trailing-comma",
@@ -3040,10 +3386,10 @@
" wrap modes, and only affects how individual lines that",
" are too long get continued, not sections of multiple",
" imports.",
- " -l LINE_LENGTH, -w LINE_LENGTH, --line-length LINE_LENGTH, --line-width LINE_LENGTH",
+ " -l, -w, --line-length, --line-width LINE_LENGTH",
" The max length of an import line (used for wrapping",
" long imports).",
- " --wl WRAP_LENGTH, --wrap-length WRAP_LENGTH",
+ " --wl, --wrap-length WRAP_LENGTH",
" Specifies how long lines that are wrapped should be,",
" if not set line_length is used. NOTE: wrap_length must",
" be LOWER than or equal to line_length.",
@@ -3077,7 +3423,7 @@
" VERTICAL_HANGING_INDENT mode",
"",
"section output options:",
- " --sd DEFAULT_SECTION, --section-default DEFAULT_SECTION",
+ " --sd, --section-default DEFAULT_SECTION",
" Sets the default section for import options:",
" ('FUTURE', 'STDLIB', 'THIRDPARTY', 'FIRSTPARTY',",
" 'LOCALFOLDER')",
@@ -3106,28 +3452,28 @@
" --fass, --force-alphabetical-sort-within-sections",
" Force all imports to be sorted alphabetically within a",
" section",
- " -t FORCE_TO_TOP, --top FORCE_TO_TOP",
+ " -t, --top FORCE_TO_TOP",
" Force specific imports to the top of their appropriate",
" section.",
" --combine-straight-imports, --csi",
" Combines all the bare straight imports of the same",
" section in a single line. Won't work with sections",
" which have 'as' imports",
- " --nlb NO_LINES_BEFORE, --no-lines-before NO_LINES_BEFORE",
+ " --nlb, --no-lines-before NO_LINES_BEFORE",
" Sections which should not be split with previous by",
" empty lines",
- " --src SRC_PATHS, --src-path SRC_PATHS",
+ " --src, --src-path SRC_PATHS",
" Add an explicitly defined source path (modules within",
" src paths have their imports automatically categorized",
" as first_party). Glob expansion (`*` and `**`) is",
" supported for this option.",
- " -b KNOWN_STANDARD_LIBRARY, --builtin KNOWN_STANDARD_LIBRARY",
+ " -b, --builtin KNOWN_STANDARD_LIBRARY",
" Force isort to recognize a module as part of Python's",
" standard library.",
" --extra-builtin EXTRA_STANDARD_LIBRARY",
" Extra modules to be included in the list of ones in",
" Python's standard library.",
- " -f KNOWN_FUTURE_LIBRARY, --future KNOWN_FUTURE_LIBRARY",
+ " -f, --future KNOWN_FUTURE_LIBRARY",
" Force isort to recognize a module as part of Python's",
" internal future compatibility libraries. WARNING: this",
" overrides the behavior of __future__ handling and",
@@ -3138,10 +3484,10 @@
" https://github.com/PyCQA/isort#custom-sections-and-",
" ordering and the discussion here:",
" https://github.com/PyCQA/isort/issues/1463.",
- " -o KNOWN_THIRD_PARTY, --thirdparty KNOWN_THIRD_PARTY",
+ " -o, --thirdparty KNOWN_THIRD_PARTY",
" Force isort to recognize a module as being part of a",
" third party library.",
- " -p KNOWN_FIRST_PARTY, --project KNOWN_FIRST_PARTY",
+ " -p, --project KNOWN_FIRST_PARTY",
" Force isort to recognize a module as being part of the",
" current python project.",
" --known-local-folder KNOWN_LOCAL_FOLDER",
@@ -3154,13 +3500,13 @@
" --conda-env CONDA_ENV",
" Conda environment to use for determining whether a",
" package is third-party",
- " --py {all,2,27,3,310,311,312,313,36,37,38,39,auto}, --python-version {all,2,27,3,310,311,312,313,36,37,38,39,auto}",
+ " --py, --python-version {all,2,27,3,310,311,312,313,314,36,37,38,39,auto}",
" Tells isort to set the known standard library based on",
" the specified Python version. Default is to assume any",
" Python 3 version could be the target, and use a union",
" of all stdlib modules across versions. If auto is",
" specified, the version of the interpreter used to run",
- " isort (currently: 312) will be used."
+ " isort (currently: 313) will be used."
],
"jscpd": [
"Usage: jscpd [options] ",
@@ -3436,7 +3782,7 @@
""
],
"kubescape": [
- "Kubescape is a tool for testing Kubernetes security posture. Docs: https://hub.armosec.io/docs",
+ "Kubescape is a tool for testing Kubernetes security posture. Docs: https://kubescape.io/docs/",
"",
"Usage:",
" kubescape [command]",
@@ -3463,6 +3809,7 @@
" fix Propose a fix for the misconfiguration found when scanning Kubernetes manifest files",
" help Help about any command",
" list List frameworks/controls will list the supported frameworks and controls",
+ " mcpserver Start the Kubescape MCP server",
" operator The operator is used to communicate with the Kubescape Operator within the cluster components.",
" patch Patch container images with vulnerabilities",
" prerequisites Check prerequisites for installing Kubescape Operator",
@@ -3478,8 +3825,7 @@
" -l, --logger string Logger level. Supported: debug/info/success/warning/error/fatal [$KS_LOGGER] (default \"info\")",
" --server string Backend discovery server URL",
"",
- "Use \"kubescape [command] --help\" for more information about a command.",
- "{\"level\":\"info\",\"ts\":\"2025-03-06T04:46:47Z\",\"msg\":\"Received interrupt signal, exiting...\"}"
+ "Use \"kubescape [command] --help\" for more information about a command."
],
"kubeval": [
"Validate a Kubernetes YAML file against the relevant schema",
@@ -3510,6 +3856,7 @@
" --version version for kubeval"
],
"lightning-flow-scanner": [
+ " \u203a Warning: @salesforce/cli update available from 2.102.6 to 2.104.6.",
"Find and fix potential bugs in Salesforce flows.",
"",
"USAGE",
@@ -3539,11 +3886,14 @@
"you could try \u2018??lintr\u2019"
],
"ls-lint": [
- "Usage of /node-deps/node_modules/@ls-lint/ls-lint/bin/ls-lint-linux-amd64:",
+ "ls-lint [options] [file|dir]*",
+ "Options:",
" -config value",
" ls-lint config file path(s)",
" -debug",
" write debug informations to stdout",
+ " -error-output-format string",
+ " use a specific error output format (text, json) (default \"text\")",
" -version",
" prints version information for ls-lint",
" -warn",
@@ -4082,8 +4432,7 @@
" -h, --help Show this help message and exit",
" -v, --verbose More verbose messages",
" -V, --version Show program's version number and exit",
- " -O FORMAT, --output FORMAT",
- " Set a custom output format",
+ " -O, --output FORMAT Set a custom output format",
"",
"Config file:",
" Use a config file instead of command line arguments. This is useful if you",
@@ -4180,8 +4529,8 @@
"None and Optional handling:",
" Adjust how values of type 'None' are handled. For more context on how mypy",
" handles values of type 'None', see:",
- " https://mypy.readthedocs.io/en/stable/kinds_of_types.html#no-strict-",
- " optional",
+ " https://mypy.readthedocs.io/en/stable/kinds_of_types.html#optional-types-",
+ " and-the-none-type",
"",
" --implicit-optional Assume arguments with default values of None are",
" Optional (inverse: --no-implicit-optional)",
@@ -4206,17 +4555,27 @@
" Report importing or using deprecated features as",
" notes instead of errors (inverse: --no-report-",
" deprecated-as-note)",
+ " --deprecated-calls-exclude MODULE",
+ " Disable deprecated warnings for functions/methods",
+ " coming from specific package, module, or class",
"",
"Miscellaneous strictness flags:",
" --allow-untyped-globals Suppress toplevel errors caused by missing",
" annotations (inverse: --disallow-untyped-globals)",
- " --allow-redefinition Allow unconditional variable redefinition with a",
- " new type (inverse: --disallow-redefinition)",
+ " --allow-redefinition Allow restricted, unconditional variable",
+ " redefinition with a new type (inverse: --disallow-",
+ " redefinition)",
+ " --allow-redefinition-new Allow more flexible variable redefinition",
+ " semantics (experimental) (inverse: --disallow-",
+ " redefinition-new)",
" --no-implicit-reexport Treat imports as private unless aliased (inverse:",
" --implicit-reexport)",
" --strict-equality Prohibit equality, identity, and container checks",
- " for non-overlapping types (inverse: --no-strict-",
- " equality)",
+ " for non-overlapping types (except `None`)",
+ " (inverse: --no-strict-equality)",
+ " --strict-equality-for-none",
+ " Extend `--strict-equality` for `None` checks",
+ " (inverse: --no-strict-equality-for-none)",
" --strict-bytes Disable treating bytearray and memoryview as",
" subtypes of bytes (inverse: --no-strict-bytes)",
" --extra-checks Enable additional checks that are technically",
@@ -4232,8 +4591,8 @@
" incomplete-defs, --check-untyped-defs, --disallow-",
" untyped-decorators, --warn-redundant-casts,",
" --warn-unused-ignores, --warn-return-any, --no-",
- " implicit-reexport, --strict-equality, --extra-",
- " checks",
+ " implicit-reexport, --strict-equality, --strict-",
+ " bytes, --extra-checks",
" --disable-error-code NAME",
" Disable a specific error code",
" --enable-error-code NAME Enable a specific error code",
@@ -4275,6 +4634,8 @@
" --no-sqlite-cache)",
" --cache-fine-grained Include fine-grained dependency information in the",
" cache for the mypy daemon",
+ " --fixed-format-cache Use experimental fast and compact fixed format",
+ " cache",
" --skip-version-check Allow using cache written by older mypy version",
" --skip-cache-mtime-checks",
" Skip cache internal consistency checks based on",
@@ -4288,7 +4649,6 @@
" --raise-exceptions Raise exception on fatal error",
" --custom-typing-module MODULE",
" Use a custom typing module",
- " --old-type-inference Disable new experimental type inference algorithm",
" --custom-typeshed-dir DIR",
" Use the custom typeshed in DIR",
" --warn-incomplete-stub Warn if missing type annotation in typeshed, only",
@@ -4337,11 +4697,12 @@
" recursively discovering files to check, e.g.",
" --exclude '/setup\\.py$'. May be specified more",
" than once, eg. --exclude a --exclude b",
- " -m MODULE, --module MODULE",
- " Type-check module; can repeat for more modules",
- " -p PACKAGE, --package PACKAGE",
- " Type-check package recursively; can be repeated",
- " -c PROGRAM_TEXT, --command PROGRAM_TEXT",
+ " --exclude-gitignore Use .gitignore file(s) to exclude files from",
+ " checking (in addition to any explicit --exclude if",
+ " present) (inverse: --no-exclude-gitignore)",
+ " -m, --module MODULE Type-check module; can repeat for more modules",
+ " -p, --package PACKAGE Type-check package recursively; can be repeated",
+ " -c, --command PROGRAM_TEXT",
" Type-check program passed in as string",
" files Type-check given files or directories",
"",
@@ -4489,9 +4850,6 @@
""
],
"php-cs-fixer": [
- "PHP needs to be a minimum version of PHP 7.4.0 and maximum version of PHP 8.3.*.",
- "Current PHP version: 8.4.4.",
- "Ignoring environment requirements because `PHP_CS_FIXER_IGNORE_ENV` is set. Execution may be unstable.",
"Description:",
" List commands",
"",
@@ -4552,10 +4910,7 @@
" --ignore= Ignore files based on a comma-separated list of",
" patterns matching files and/or directories.",
" --extensions= Check files with the specified file extensions",
- " (comma-separated list). Defaults to",
- " php,inc/php,js,css.",
- " The type of the file can be specified using:",
- " ext/type; e.g. module/php,es/js.",
+ " (comma-separated list). Defaults to \"php,inc\".",
" -l Check local directory only, no recursion.",
"",
"Rule Selection Options:",
@@ -4602,11 +4957,12 @@
" changed at runtime are supported.",
"",
"Reporting Options:",
- " --report= Print either the \"full\", \"xml\", \"checkstyle\",",
+ " --report= A comma-separated list of reports to print.",
+ " Available reports: \"full\", \"xml\", \"checkstyle\",",
" \"csv\", \"json\", \"junit\", \"emacs\", \"source\",",
" \"summary\", \"diff\", \"svnblame\", \"gitblame\",",
- " \"hgblame\", \"notifysend\" or \"performance\" report",
- " or specify the path to a custom report class.",
+ " \"hgblame\", \"notifysend\" or \"performance\".",
+ " Or specify the path to a custom report class.",
" By default, the \"full\" report is displayed.",
" --report-file= Write the report to the specified file path.",
" --report-= Write the report specified in to the",
@@ -4649,7 +5005,8 @@
" This applies to the following options: \"default_standard\", \"report_format\",",
" \"tab_width\", \"encoding\", \"severity\", \"error_severity\", \"warning_severity\",",
" \"show_warnings\", \"report_width\", \"show_progress\", \"quiet\", \"colors\", \"cache\",",
- " \"parallel\".",
+ " \"parallel\", \"installed_paths\", \"php_version\", \"ignore_errors_on_exit\",",
+ " \"ignore_warnings_on_exit\", \"ignore_non_auto_fixable_on_exit\".",
" --config-show Show the configuration options which are",
" currently stored in the applicable",
" CodeSniffer.conf file.",
@@ -4685,7 +5042,9 @@
" -j, --jobs=JOBS Number of paralleled jobs to run",
" -c, --configuration=CONFIGURATION Read configuration from config file [default: \".phplint.yml\"]",
" --no-configuration Ignore default configuration file (.phplint.yml)",
- " --cache=CACHE Path to the cache directory",
+ " --cache=CACHE Path to the cache directory (Deprecated option, use \"cache-dir\" instead)",
+ " --cache-dir=CACHE-DIR Path to the cache directory",
+ " --cache-ttl=CACHE-TTL Limit cached data for a period of time (>0: time to live in seconds) [default: 3600]",
" --no-cache Ignore cached data",
" -p, --progress=PROGRESS Show the progress output",
" --no-progress Hide the progress output",
@@ -4725,7 +5084,9 @@
" --allow-empty-baseline Do not error out when the generated baseline is empty",
" --memory-limit=MEMORY-LIMIT Memory limit for analysis",
" --xdebug Allow running with Xdebug for debugging purposes",
- " --fix Launch PHPStan Pro",
+ " --tmp-file=TMP-FILE (Editor mode) Edited file used in place of --instead-of file",
+ " --instead-of=INSTEAD-OF (Editor mode) File being replaced by --tmp-file",
+ " --fix Fix auto-fixable errors (experimental)",
" --watch Launch PHPStan Pro",
" --pro Launch PHPStan Pro",
" --fail-without-result-cache Return non-zero exit code when result cache is not used",
@@ -5470,7 +5831,7 @@
" --object-wrap ",
" How to wrap object literals.",
" Defaults to preserve.",
- " --parser ",
+ " --parser ",
" Which parser to use.",
" --print-width The line length where Prettier will try wrap.",
" Defaults to 80.",
@@ -5532,6 +5893,8 @@
" --cache-location Path to the cache file.",
" --cache-strategy ",
" Strategy for the cache to use for detecting changed files.",
+ " --check-ignore-pragma Check whether the file's first docblock comment contains '@noprettier' or '@noformat' to determine if it should be formatted.",
+ " Defaults to false.",
" --no-color Do not colorize error messages.",
" --no-error-on-unmatched-pattern",
" Prevent errors when pattern is unmatched.",
@@ -5546,8 +5909,7 @@
" --log-level ",
" What level of logs to report.",
" Defaults to log.",
- " --require-pragma Require either '@prettier' or '@format' to be present in the file's first docblock comment",
- " in order for it to be formatted.",
+ " --require-pragma Require either '@prettier' or '@format' to be present in the file's first docblock comment in order for it to be formatted.",
" Defaults to false.",
" --stdin-filepath Path to the file to pretend that stdin comes from.",
" --support-info Print support information as JSON.",
@@ -5711,6 +6073,11 @@
" Whether the report should include non-errors in its output (defaults to true)",
"",
"Caching:",
+ " --consolidate-cache",
+ " Consolidates all cache files that Psalm uses for this specific project into a single file,",
+ " for quicker runs when doing whole project scans.",
+ " Make sure to consolidate the cache again after running Psalm before saving the cache via CI.",
+ "",
" --clear-cache",
" Clears all cache files that Psalm uses for this specific project",
"",
@@ -5724,6 +6091,9 @@
" Runs Psalm without using cached representations of unchanged classes and files.",
" Useful if you want the afterClassLikeVisit plugin hook to run every time you visit a file.",
"",
+ " --no-reference-cache",
+ " Runs Psalm without using cached representations of unchanged methods.",
+ "",
" --no-file-cache",
" Runs Psalm without using caching every single file for later diffing.",
" This reduces the space Psalm uses on disk and file I/O.",
@@ -5812,6 +6182,7 @@
" Checks:",
" --only-checks CHECKS A comma separated list of checks that should be run",
" --ignore-paths PATHS A comma separated list of patterns to ignore",
+ " --top-scope-variables VARS A comma separated list of allowed top scope variables",
" --no-arrow_on_right_operand_line-check",
" Skip the arrow_on_right_operand_line check.",
" --no-autoloader_layout-check Skip the autoloader_layout check.",
@@ -5874,6 +6245,8 @@
" --80chars-check Enable the 80chars check.",
" --no-arrow_alignment-check Skip the arrow_alignment check.",
" --no-hard_tabs-check Skip the hard_tabs check.",
+ " --no-space_before_arrow-check",
+ " Skip the space_before_arrow check.",
" --no-trailing_whitespace-check",
" Skip the trailing_whitespace check.",
" --no-legacy_facts-check Skip the legacy_facts check.",
@@ -5921,7 +6294,7 @@
" or FATAL are suppressed, and no reports are done by",
" default. Error mode is compatible with disabling",
" specific errors.",
- " --verbose , -v In verbose mode, extra non-checker-related info will",
+ " --verbose, -v In verbose mode, extra non-checker-related info will",
" be displayed.",
" --enable-all-extensions",
" Load and enable all available extensions. Use --list-",
@@ -5955,7 +6328,7 @@
" above --fail-under value. Syntax same as enable.",
" Messages specified are enabled, while categories only",
" check already-enabled messages. (default: )",
- " --jobs , -j ",
+ " --jobs, -j ",
" Use multiple processes to speed up Pylint. Specifying",
" 0 will auto-detect the number of processors available",
" to use, and will cap the count on Windows to avoid",
@@ -5977,10 +6350,6 @@
" arbitrary code. (This is an alternative name to",
" extension-pkg-allow-list for backward compatibility.)",
" (default: [])",
- " --suggestion-mode ",
- " When enabled, pylint would attempt to guess common",
- " misconfiguration and emit user-friendly hints instead",
- " of false-positive error messages. (default: True)",
" --exit-zero Always return a 0 (non-error) status code, even if",
" lint errors are found. This is primarily useful in",
" continuous integration scripts. (default: False)",
@@ -5998,7 +6367,7 @@
" --py-version ",
" Minimum Python version to use for version dependent",
" checks. Will default to the version used to run",
- " pylint. (default: (3, 12))",
+ " pylint. (default: (3, 13))",
" --ignored-modules ",
" List of module names for which member attributes",
" should not be checked and will not be imported (useful",
@@ -6025,13 +6394,14 @@
"Reports:",
" Options related to output formatting and reporting",
"",
- " --output-format , -f ",
- " Set the output format. Available formats are: text,",
- " parseable, colorized, json2 (improved json format),",
- " json (old json format) and msvs (visual studio). You",
- " can also give a reporter class, e.g.",
+ " --output-format, -f ",
+ " Set the output format. Available formats are: 'text',",
+ " 'parseable', 'colorized', 'json2' (improved json",
+ " format), 'json' (old json format), msvs (visual",
+ " studio) and 'github' (GitHub actions). You can also",
+ " give a reporter class, e.g.",
" mypackage.mymodule.MyReporterClass.",
- " --reports , -r ",
+ " --reports, -r ",
" Tells whether to display a full report or only the",
" messages. (default: False)",
" --evaluation ",
@@ -6045,8 +6415,7 @@
" (default: max(0, 0 if fatal else 10.0 - ((float(5 *",
" error + warning + refactor + convention) / statement)",
" * 10)))",
- " --score , -s ",
- " Activate the evaluation score. (default: True)",
+ " --score, -s Activate the evaluation score. (default: True)",
" --msg-template ",
" Template used to display messages. This is a python",
" new-style format string used to format the message",
@@ -6061,14 +6430,14 @@
" CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED.",
" (default: ['HIGH', 'CONTROL_FLOW', 'INFERENCE',",
" 'INFERENCE_FAILURE', 'UNDEFINED'])",
- " --enable , -e ",
+ " --enable, -e ",
" Enable the message, report, category or checker with",
" the given id(s). You can either give multiple",
" identifier separated by comma (,) or put this option",
" multiple time (only on the command line, not in the",
" configuration file where it should appear only once).",
" See also the \"--disable\" option for examples.",
- " --disable , -d ",
+ " --disable, -d ",
" Disable the message, report, category or checker with",
" the given id(s). You can either give multiple",
" identifiers separated by comma (,) or put this option",
@@ -6082,19 +6451,23 @@
" Warning level messages displayed, use \"--disable=all",
" --enable=classes --disable=W\".",
"",
- "String:",
- " Check string literals.",
+ "Similarities:",
+ " Checks for similarities and duplicated code.",
"",
- " --check-str-concat-over-line-jumps ",
- " This flag controls whether the implicit-str-concat",
- " should generate a warning on implicit string",
- " concatenation in sequences defined over several lines.",
- " (default: False)",
- " --check-quote-consistency ",
- " This flag controls whether inconsistent-quotes",
- " generates a warning when the character used as a quote",
- " delimiter is used inconsistently within a module.",
- " (default: False)",
+ " --min-similarity-lines ",
+ " Minimum lines number of a similarity. (default: 4)",
+ " --ignore-comments ",
+ " Comments are removed from the similarity computation",
+ " (default: True)",
+ " --ignore-docstrings ",
+ " Docstrings are removed from the similarity computation",
+ " (default: True)",
+ " --ignore-imports ",
+ " Imports are removed from the similarity computation",
+ " (default: True)",
+ " --ignore-signatures ",
+ " Signatures are removed from the similarity computation",
+ " (default: True)",
"",
"Method_args:",
" BaseChecker for method_args.",
@@ -6108,6 +6481,103 @@
" 'requests.api.patch', 'requests.api.post',",
" 'requests.api.put', 'requests.api.request'))",
"",
+ "Spelling:",
+ " Check spelling in comments and docstrings.",
+ "",
+ " --spelling-dict ",
+ " Spelling dictionary name. No available dictionaries :",
+ " You need to install both the python package and the",
+ " system dependency for enchant to work. (default: )",
+ " --spelling-ignore-words ",
+ " List of comma separated words that should not be",
+ " checked. (default: )",
+ " --spelling-private-dict-file ",
+ " A path to a file that contains the private dictionary;",
+ " one word per line. (default: )",
+ " --spelling-store-unknown-words ",
+ " Tells whether to store unknown words to the private",
+ " dictionary (see the --spelling-private-dict-file",
+ " option) instead of raising a message. (default: n)",
+ " --max-spelling-suggestions N",
+ " Limits count of emitted suggestions for spelling",
+ " mistakes. (default: 4)",
+ " --spelling-ignore-comment-directives ",
+ " List of comma separated words that should be",
+ " considered directives if they appear at the beginning",
+ " of a comment and should not be checked. (default: fmt:",
+ " on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:)",
+ "",
+ "Refactoring:",
+ " Looks for code which can be refactored.",
+ "",
+ " --max-nested-blocks ",
+ " Maximum number of nested blocks for function / method",
+ " body (default: 5)",
+ " --never-returning-functions ",
+ " Complete name of functions that never returns. When",
+ " checking for inconsistent-return-statements if a never",
+ " returning function is called then it will be",
+ " considered as an explicit return statement and no",
+ " message will be printed. (default: ('sys.exit',",
+ " 'argparse.parse_error'))",
+ " --suggest-join-with-non-empty-separator ",
+ " Let 'consider-using-join' be raised when the separator",
+ " to join on would be non-empty (resulting in expected",
+ " fixes of the type: ``\"- \" + \" - \".join(items)``)",
+ " (default: True)",
+ "",
+ "Classes:",
+ " Checker for class nodes.",
+ "",
+ " --defining-attr-methods ",
+ " List of method names used to declare (i.e. assign)",
+ " instance attributes. (default: ('__init__', '__new__',",
+ " 'setUp', 'asyncSetUp', '__post_init__'))",
+ " --valid-classmethod-first-arg ",
+ " List of valid names for the first argument in a class",
+ " method. (default: ('cls',))",
+ " --valid-metaclass-classmethod-first-arg ",
+ " List of valid names for the first argument in a",
+ " metaclass class method. (default: ('mcs',))",
+ " --exclude-protected ",
+ " List of member names, which should be excluded from",
+ " the protected access warning. (default: ('_asdict',",
+ " '_fields', '_replace', '_source', '_make',",
+ " 'os._exit'))",
+ " --check-protected-access-in-special-methods ",
+ " Warn about protected attribute access inside special",
+ " methods (default: False)",
+ "",
+ "Format:",
+ " Formatting checker.",
+ "",
+ " --max-line-length ",
+ " Maximum number of characters on a single line.",
+ " Pylint's default of 100 is based on PEP 8's guidance",
+ " that teams may choose line lengths up to 99",
+ " characters. (default: 100)",
+ " --ignore-long-lines ",
+ " Regexp for a line that is allowed to be longer than",
+ " the limit. (default: ^\\s*(# )??$)",
+ " --single-line-if-stmt ",
+ " Allow the body of an if to be on the same line as the",
+ " test if there is no else. (default: False)",
+ " --single-line-class-stmt ",
+ " Allow the body of a class to be on the same line as",
+ " the declaration if body contains single statement.",
+ " (default: False)",
+ " --max-module-lines ",
+ " Maximum number of lines in a module. (default: 1000)",
+ " --indent-string ",
+ " String used as indentation unit. This is usually \" \"",
+ " (4 spaces) or \" \" (1 tab). (default: )",
+ " --indent-after-paren ",
+ " Number of spaces of indent required inside a hanging",
+ " or continued line. (default: 4)",
+ " --expected-line-ending-format ",
+ " Expected format of line ending, e.g. empty (any line",
+ " ending), LF or CRLF. (default: )",
+ "",
"Typecheck:",
" Try to find bugs in the code using type inference.",
"",
@@ -6155,7 +6625,7 @@
" register other decorators that produce valid context",
" managers. (default: ['contextlib.contextmanager'])",
" --missing-member-hint-distance ",
- " The minimum edit distance a name should have in order",
+ " The maximum edit distance a name should have in order",
" to be considered a similar match for a missing member",
" name. (default: 1)",
" --missing-member-max-choices ",
@@ -6170,54 +6640,6 @@
" List of decorators that change the signature of a",
" decorated function. (default: [])",
"",
- "Format:",
- " Formatting checker.",
- "",
- " --max-line-length ",
- " Maximum number of characters on a single line.",
- " (default: 100)",
- " --ignore-long-lines ",
- " Regexp for a line that is allowed to be longer than",
- " the limit. (default: ^\\s*(# )??$)",
- " --single-line-if-stmt ",
- " Allow the body of an if to be on the same line as the",
- " test if there is no else. (default: False)",
- " --single-line-class-stmt ",
- " Allow the body of a class to be on the same line as",
- " the declaration if body contains single statement.",
- " (default: False)",
- " --max-module-lines ",
- " Maximum number of lines in a module. (default: 1000)",
- " --indent-string ",
- " String used as indentation unit. This is usually \" \"",
- " (4 spaces) or \" \" (1 tab). (default: )",
- " --indent-after-paren ",
- " Number of spaces of indent required inside a hanging",
- " or continued line. (default: 4)",
- " --expected-line-ending-format ",
- " Expected format of line ending, e.g. empty (any line",
- " ending), LF or CRLF. (default: )",
- "",
- "Exceptions:",
- " Exception related checks.",
- "",
- " --overgeneral-exceptions ",
- " Exceptions that will emit a warning when caught.",
- " (default: ('builtins.BaseException',",
- " 'builtins.Exception'))",
- "",
- "Logging:",
- " Checks use of the logging module.",
- "",
- " --logging-modules ",
- " Logging modules to check that the string format",
- " arguments are in logging function parameter format.",
- " (default: ('logging',))",
- " --logging-format-style ",
- " The type of string formatting that logging methods do.",
- " `old` means using % formatting, `new` is for `{}`",
- " formatting. (default: old)",
- "",
"Basic:",
" --good-names Good variable names which should always be accepted,",
" separated by a comma. (default: ('i', 'j', 'k', 'ex',",
@@ -6323,6 +6745,11 @@
" Overrides module-naming-style. If left empty, module",
" names will be checked with the set naming style.",
" (default: None)",
+ " --paramspec-rgx ",
+ " Regular expression matching correct parameter",
+ " specification variable names. If left empty, parameter",
+ " specification variable names will be checked with the",
+ " set naming style. (default: None)",
" --typealias-rgx ",
" Regular expression matching correct type alias names.",
" If left empty, type alias names will be checked with",
@@ -6331,6 +6758,11 @@
" Regular expression matching correct type variable",
" names. If left empty, type variable names will be",
" checked with the set naming style. (default: None)",
+ " --typevartuple-rgx ",
+ " Regular expression matching correct type variable",
+ " tuple names. If left empty, type variable tuple names",
+ " will be checked with the set naming style. (default:",
+ " None)",
" --variable-naming-style