diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index f0a9269..d1c10bb 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -38,10 +38,10 @@ jobs:
         uses: actions/checkout@v3
       - name: Pull LocalStack Docker image
         run: docker pull localstack/localstack &
-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v2
         with:
-          python-version: '3.11'
+          python-version: '3.12'
       - name: Install dependencies
         run: make install
       - name: Run code linter
diff --git a/README.md b/README.md
index d0c1592..4632425 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,7 @@ pip install terraform-local
 ## Configurations
 
 The following environment variables can be configured:
+* `DRY_RUN`: Generate the override file without invoking Terraform
 * `TF_CMD`: Terraform command to call (default: `terraform`)
 * `AWS_ENDPOINT_URL`: hostname and port of the target LocalStack instance
 * `LOCALSTACK_HOSTNAME`: __(Deprecated)__ host name of the target LocalStack instance
@@ -48,6 +49,7 @@ please refer to the man pages of `terraform --help`.
 
 ## Change Log
 
+* v0.18.0: Add `DRY_RUN` and patch S3 backend entrypoints
 * v0.17.1: Add `packaging` module to install requirements
 * v0.17.0: Add option to use new endpoints S3 backend options
 * v0.16.1: Update Setuptools to exclude tests during packaging
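Note for reviewers: the new `DRY_RUN` flag documented above uses the same truthy parsing as the existing `USE_EXEC` option, so only the values "1" and "true" (case-insensitive, after trimming whitespace) enable it. A minimal sketch of that behavior, with a hypothetical `env_flag` helper for illustration:

    # Sketch of the truthy parsing used by DRY_RUN (mirrors the bin/tflocal change below)
    import os

    def env_flag(name: str) -> bool:
        # str(None) lowercases to "none", so an unset variable is falsy
        return str(os.environ.get(name)).strip().lower() in ["1", "true"]

    assert not env_flag("DRY_RUN")    # assuming DRY_RUN is unset -> False
    os.environ["DRY_RUN"] = " True "
    assert env_flag("DRY_RUN")        # "1" or "true" in any case -> True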
diff --git a/bin/tflocal b/bin/tflocal
index 02999a7..c8034dd 100755
--- a/bin/tflocal
+++ b/bin/tflocal
@@ -27,6 +27,7 @@ if os.path.isdir(os.path.join(PARENT_FOLDER, ".venv")):
 from localstack_client import config  # noqa: E402
 import hcl2  # noqa: E402
 
+DRY_RUN = str(os.environ.get("DRY_RUN")).strip().lower() in ["1", "true"]
 DEFAULT_REGION = "us-east-1"
 DEFAULT_ACCESS_KEY = "test"
 AWS_ENDPOINT_URL = os.environ.get("AWS_ENDPOINT_URL")
@@ -35,6 +36,7 @@ LOCALHOST_HOSTNAME = "localhost.localstack.cloud"
 S3_HOSTNAME = os.environ.get("S3_HOSTNAME") or f"s3.{LOCALHOST_HOSTNAME}"
 USE_EXEC = str(os.environ.get("USE_EXEC")).strip().lower() in ["1", "true"]
 TF_CMD = os.environ.get("TF_CMD") or "terraform"
+TF_PROXIED_CMDS = ("init", "plan", "apply", "destroy")
 LS_PROVIDERS_FILE = os.environ.get("LS_PROVIDERS_FILE") or "localstack_providers_override.tf"
 LOCALSTACK_HOSTNAME = urlparse(AWS_ENDPOINT_URL).hostname or os.environ.get("LOCALSTACK_HOSTNAME") or "localhost"
 EDGE_PORT = int(urlparse(AWS_ENDPOINT_URL).port or os.environ.get("EDGE_PORT") or 4566)
@@ -153,12 +155,15 @@ def create_provider_config_file(provider_aliases=None):
 
     # write temporary config file
     providers_file = get_providers_file_path()
-    if os.path.exists(providers_file):
-        msg = f"Providers override file {providers_file} already exists - please delete it first"
-        raise Exception(msg)
+    write_provider_config_file(providers_file, tf_config)
+
+    return providers_file
+
+
+def write_provider_config_file(providers_file, tf_config):
+    """Write the provider config into the given file"""
     with open(providers_file, mode="w") as fp:
         fp.write(tf_config)
-    return providers_file
 
 
 def get_providers_file_path() -> str:
@@ -186,9 +191,12 @@ def determine_provider_aliases() -> list:
 
 def generate_s3_backend_config() -> str:
     """Generate an S3 `backend {..}` block with local endpoints, if configured"""
+    is_tf_legacy = TF_VERSION < version.Version("1.6")
     backend_config = None
     tf_files = parse_tf_files()
-    for obj in tf_files.values():
+    for filename, obj in tf_files.items():
+        if LS_PROVIDERS_FILE == filename:
+            continue
         tf_configs = ensure_list(obj.get("terraform", []))
         for tf_config in tf_configs:
             backend_config = ensure_list(tf_config.get("backend"))
@@ -199,6 +207,13 @@ def generate_s3_backend_config() -> str:
     if not backend_config:
         return ""
 
+    legacy_endpoint_mappings = {
+        "endpoint": "s3",
+        "iam_endpoint": "iam",
+        "sts_endpoint": "sts",
+        "dynamodb_endpoint": "dynamodb",
+    }
+
     configs = {
         # note: default values, updated by `backend_config` further below...
         "bucket": "tf-test-state",
@@ -213,15 +228,29 @@
             "dynamodb": get_service_endpoint("dynamodb"),
         },
     }
+    # Merge in legacy endpoint configs if not already present
+    if is_tf_legacy and backend_config.get("endpoints"):
+        print("Warning: unsupported backend option(s) detected (`endpoints`). Please use the options matching your Terraform version.")
+        exit(1)
+    for legacy_endpoint, endpoint in legacy_endpoint_mappings.items():
+        if legacy_endpoint in backend_config and (not backend_config.get("endpoints") or endpoint not in backend_config["endpoints"]):
+            if not backend_config.get("endpoints"):
+                backend_config["endpoints"] = {}
+            backend_config["endpoints"].update({endpoint: backend_config[legacy_endpoint]})
+    # Add any missing default endpoints
+    if backend_config.get("endpoints"):
+        backend_config["endpoints"] = {
+            k: backend_config["endpoints"].get(k) or v
+            for k, v in configs["endpoints"].items()}
     configs.update(backend_config)
-    get_or_create_bucket(configs["bucket"])
-    get_or_create_ddb_table(configs["dynamodb_table"], region=configs["region"])
+    if not DRY_RUN:
+        get_or_create_bucket(configs["bucket"])
+        get_or_create_ddb_table(configs["dynamodb_table"], region=configs["region"])
     result = TF_S3_BACKEND_CONFIG
     for key, value in configs.items():
         if isinstance(value, bool):
             value = str(value).lower()
         elif isinstance(value, dict):
-            is_tf_legacy = not (TF_VERSION.major > 1 or (TF_VERSION.major == 1 and TF_VERSION.minor > 5))
             if key == "endpoints" and is_tf_legacy:
                 value = textwrap.indent(
                     text=textwrap.dedent(f"""\
@@ -241,6 +270,21 @@ def generate_s3_backend_config() -> str:
     return result
 
 
+def check_override_file(providers_file: str) -> None:
+    """Check whether the providers override file already exists"""
+    if os.path.exists(providers_file):
+        msg = f"Providers override file {providers_file} already exists"
+        err_msg = msg + " - please delete it first, exiting..."
+        if DRY_RUN:
+            msg += ". File will be overwritten."
+            print(msg)
+            print("\tOnly 'yes' will be accepted to approve.")
+            if input("\tEnter a value: ") == "yes":
+                return
+        print(err_msg)
+        exit(1)
+
+
 # ---
 # AWS CLIENT UTILS
 # ---
@@ -357,6 +401,11 @@ def get_or_create_ddb_table(table_name: str, region: str = None):
 # ---
 # TF UTILS
 # ---
+def is_override_needed(args) -> bool:
+    if any(map(lambda x: x in args, TF_PROXIED_CMDS)):
+        return True
+    return False
+
 
 def parse_tf_files() -> dict:
     """Parse the local *.tf files and return a dict of <filename> -> <parsed config dict>"""
@@ -432,18 +481,26 @@ def main():
         print(f"Unable to determine version. See error message for details: {e}")
         exit(1)
 
-    # create TF provider config file
-    providers = determine_provider_aliases()
-    config_file = create_provider_config_file(providers)
+    if is_override_needed(sys.argv[1:]):
+        check_override_file(get_providers_file_path())
 
-    # call terraform command
-    try:
-        if USE_EXEC:
-            run_tf_exec(cmd, env)
-        else:
-            run_tf_subprocess(cmd, env)
-    finally:
-        os.remove(config_file)
+        # create TF provider config file
+        providers = determine_provider_aliases()
+        config_file = create_provider_config_file(providers)
+    else:
+        config_file = None
+
+    # call terraform command, unless this is a dry-run of a proxied command
+    if not DRY_RUN or not is_override_needed(sys.argv[1:]):
+        try:
+            if USE_EXEC:
+                run_tf_exec(cmd, env)
+            else:
+                run_tf_subprocess(cmd, env)
+        finally:
+            # no override file is created for non-proxied commands
+            if config_file:
+                os.remove(config_file)
 
 
 if __name__ == "__main__":
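For context, the endpoint merge added to `generate_s3_backend_config()` above upgrades Terraform pre-1.6 backend options (`endpoint`, `iam_endpoint`, `sts_endpoint`, `dynamodb_endpoint`) into the post-1.6 `endpoints` map without clobbering explicit entries. A condensed, standalone sketch of that merge (sample values are made up, and `setdefault` replaces the diff's explicit existence checks):

    legacy_endpoint_mappings = {
        "endpoint": "s3",
        "iam_endpoint": "iam",
        "sts_endpoint": "sts",
        "dynamodb_endpoint": "dynamodb",
    }
    # sample backend config mixing one legacy option with one new-style entry
    backend_config = {
        "endpoint": "http://localhost:4566",
        "endpoints": {"sts": "http://sts.example.test:4566"},
    }
    for legacy_key, service in legacy_endpoint_mappings.items():
        # only fill in services that are not already configured explicitly
        if legacy_key in backend_config and service not in backend_config.setdefault("endpoints", {}):
            backend_config["endpoints"][service] = backend_config[legacy_key]
    print(backend_config["endpoints"])
    # {'sts': 'http://sts.example.test:4566', 's3': 'http://localhost:4566'}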
diff --git a/setup.cfg b/setup.cfg
index b304958..1d6a990 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = terraform-local
-version = 0.17.1
+version = 0.18.0
 url = https://github.com/localstack/terraform-local
 author = LocalStack Team
 author_email = info@localstack.cloud
@@ -15,6 +15,7 @@ classifiers =
     Programming Language :: Python :: 3.9
    Programming Language :: Python :: 3.10
    Programming Language :: Python :: 3.11
+    Programming Language :: Python :: 3.12
    License :: OSI Approved :: Apache Software License
    Topic :: Software Development :: Testing
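The tests below decide between legacy and current backend options by asking the Terraform binary for its version. For reference, roughly how that detection works outside of pytest (assumes a `terraform` binary on PATH; `terraform version -json` reports a `terraform_version` key):

    import json
    import subprocess

    from packaging import version

    output = subprocess.check_output(["terraform", "version", "-json"])
    tf_version = version.parse(json.loads(output)["terraform_version"])
    print(tf_version < version.Version("1.6"))  # True on legacy (pre-1.6) versions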
"http://s3-localhost.localstack.cloud:4566" }', + ''' + endpoint = "http://localhost-s3.localstack.cloud:4566" + endpoints = { "s3": "http://s3-localhost.localstack.cloud:4566" } + ''']) +def test_s3_backend_endpoints_merge(monkeypatch, endpoints: str): + monkeypatch.setenv("DRY_RUN", "1") + state_bucket = "tf-state-merge" + state_table = "tf-state-merge" + bucket_name = "bucket.merge" + config = """ + terraform { + backend "s3" { + bucket = "%s" + key = "terraform.tfstate" + dynamodb_table = "%s" + region = "us-east-2" + skip_credentials_validation = true + %s + } + } + resource "aws_s3_bucket" "test-bucket" { + bucket = "%s" + } + """ % (state_bucket, state_table, endpoints, bucket_name) + is_legacy_tf = is_legacy_tf_version(get_version()) + if is_legacy_tf and endpoints not in ("", 'endpoint = "http://s3-localhost.localstack.cloud:4566"'): + with pytest.raises(subprocess.CalledProcessError): + deploy_tf_script(config, user_input="yes") + else: + temp_dir = deploy_tf_script(config, cleanup=False, user_input="yes") + override_file = os.path.join(temp_dir, "localstack_providers_override.tf") + assert check_override_file_exists(override_file) + assert check_override_file_content(override_file, is_legacy=is_legacy_tf) + rmtree(temp_dir) + + +def check_override_file_exists(override_file): + return os.path.isfile(override_file) + + +def check_override_file_content(override_file, is_legacy: bool = False): + legacy_options = ( + "endpoint", + "iam_endpoint", + "dynamodb_endpoint", + "sts_endpoint", + ) + new_options = ( + "iam", + "dynamodb", + "s3", + "sso", + "sts", + ) + try: + with open(override_file, "r") as fp: + result = hcl2.load(fp) + result = result["terraform"][0]["backend"][0]["s3"] + except Exception as e: + print(f'Unable to parse "{override_file}" as HCL file: {e}') + + new_options_check = "endpoints" in result and all(map(lambda x: x in result.get("endpoints"), new_options)) + + if is_legacy: + legacy_options_check = all(map(lambda x: x in result, legacy_options)) + return not new_options_check and legacy_options_check + + legacy_options_check = any(map(lambda x: x in result, legacy_options)) + return new_options_check and not legacy_options_check + + ### # UTIL FUNCTIONS ### -def deploy_tf_script(script: str, env_vars: Dict[str, str] = None): - with tempfile.TemporaryDirectory() as temp_dir: + +def is_legacy_tf_version(tf_version, legacy_version: str = "1.6") -> bool: + """Check if Terraform version is legacy""" + if tf_version < version.Version(legacy_version): + return True + return False + + +def get_version(): + """Get Terraform version""" + output = run([TFLOCAL_BIN, "version", "-json"]).decode("utf-8") + return version.parse(json.loads(output)["terraform_version"]) + + +def deploy_tf_script(script: str, cleanup: bool = True, env_vars: Dict[str, str] = None, user_input: str = None): + with tempfile.TemporaryDirectory(delete=cleanup) as temp_dir: with open(os.path.join(temp_dir, "test.tf"), "w") as f: f.write(script) kwargs = {"cwd": temp_dir} + if user_input: + kwargs.update({"input": bytes(user_input, "utf-8")}) kwargs["env"] = {**os.environ, **(env_vars or {})} run([TFLOCAL_BIN, "init"], **kwargs) - out = run([TFLOCAL_BIN, "apply", "-auto-approve"], **kwargs) - return out + run([TFLOCAL_BIN, "apply", "-auto-approve"], **kwargs) + return temp_dir def get_bucket_names(**kwargs: dict) -> list: