47 changes: 47 additions & 0 deletions acceptance/bin/add_repl.py
@@ -0,0 +1,47 @@
#!/usr/bin/env python3
"""
Add an entry to ACC_REPLS without clobbering existing ones.

If the entry already exists, a numeric suffix of the form _<number> is appended.
"""

import os
import argparse
from pathlib import Path

ACC_REPLS = Path(os.environ["TEST_TMP_DIR"]) / "ACC_REPLS"


def get_repls():
result = {}
if ACC_REPLS.exists():
for line in ACC_REPLS.open():
value, repl = line.strip().rsplit(":", 1)
result[repl] = value
return result


def add_repl(value, repl):
existing = get_repls()
for extra in range(1, 100):
if extra >= 2:
r = f"{repl}_{extra}"
else:
r = repl
if r in existing:
continue
with ACC_REPLS.open("a") as fobj:
fobj.write(f"{value}:{r}\n")
break


def main():
parser = argparse.ArgumentParser()
parser.add_argument("value")
parser.add_argument("replacement")
args = parser.parse_args()
add_repl(args.value, args.replacement)


if __name__ == "__main__":
main()
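A minimal sketch of the suffixing behaviour, assuming TEST_TMP_DIR points at a scratch directory and acceptance/bin is importable (the values and names below are made up for illustration):

import os, tempfile

os.environ["TEST_TMP_DIR"] = tempfile.mkdtemp()  # add_repl.py resolves ACC_REPLS from this at import time

from add_repl import add_repl, ACC_REPLS  # assumes acceptance/bin is on sys.path

add_repl("1111", "JOB_ID")  # first use of the name is written as-is
add_repl("2222", "JOB_ID")  # name already taken, so it is stored as JOB_ID_2

print(ACC_REPLS.read_text())
# 1111:JOB_ID
# 2222:JOB_ID_2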
51 changes: 32 additions & 19 deletions acceptance/bin/print_state.py
@@ -11,44 +11,57 @@
import argparse


def write(filename):
def print_file(filename):
data = open(filename).read()
print(data, end="")
if not data.endswith("\n"):
print()


def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--target")
parser.add_argument("--backup", action="store_true")
args = parser.parse_args()
def get_state_files(target, backup):
default_target_dir = ".databricks/bundle/default"

if args.target:
target_dir = f".databricks/bundle/{args.target}"
if target:
target_dir = f".databricks/bundle/{target}"
if not os.path.exists(target_dir):
raise SystemExit(f"Invalid target {args.target!r}: {target_dir} does not exist")
raise SystemExit(f"Invalid target {target!r}: {target_dir} does not exist")
elif os.path.exists(default_target_dir):
target_dir = default_target_dir
else:
targets = glob.glob(".databricks/bundle/*")
if not targets:
return
targets = [os.path.basename(x) for x in targets]
if len(targets) > 1:
raise SystemExit("Many targets found, specify one to use with -t: " + ", ".join(sorted(targets)))
args.target = targets[0]
target_dir = ".databricks/bundle/" + targets[0]

if args.backup:
filename = f".databricks/bundle/{args.target}/terraform/terraform.tfstate.backup"
if os.path.exists(filename):
write(filename)
result = []

if backup:
result.append(f"{target_dir}/terraform/terraform.tfstate.backup")
else:
filename = f".databricks/bundle/{args.target}/terraform/terraform.tfstate"
if os.path.exists(filename):
write(filename)
result.append(f"{target_dir}/terraform/terraform.tfstate")
result.append(f"{target_dir}/resources.json")

return result


def get_state_file(target, backup):
result = get_state_files(target, backup)
filtered = [x for x in result if os.path.exists(x)]
return filtered[0] if filtered else result[0]


def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--target")
parser.add_argument("--backup", action="store_true")
args = parser.parse_args()

filename = f".databricks/bundle/{args.target}/resources.json"
for filename in get_state_files(args.target, args.backup):
if os.path.exists(filename):
write(filename)
print_file(filename)


if __name__ == "__main__":
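A rough usage sketch of the refactored helpers, assuming the current directory is the repo root and that only the direct-engine resources.json exists on disk (the "dev" target below is invented for the example):

import sys
from pathlib import Path

sys.path.insert(0, "acceptance/bin")  # assumption: run from the repo root
from print_state import get_state_files, get_state_file

Path(".databricks/bundle/dev/terraform").mkdir(parents=True, exist_ok=True)
Path(".databricks/bundle/dev/resources.json").write_text("{}")

print(get_state_files("dev", backup=False))
# ['.databricks/bundle/dev/terraform/terraform.tfstate', '.databricks/bundle/dev/resources.json']

print(get_state_file("dev", backup=False))
# '.databricks/bundle/dev/resources.json' -- the first candidate that actually exists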
75 changes: 47 additions & 28 deletions acceptance/bin/read_id.py
@@ -1,48 +1,67 @@
#!/usr/bin/env python3
"""
Print selected attributes from terraform state.
Print the ID of a resource from the state and update ACC_REPLS for that ID.

Example: read_id.py foo
Outputs the ID, e.g. "5555", and updates ACC_REPLS with the record "5555:FOO_ID".

Usage: <group> <name> [attr...]
"""

import sys
import os
import json
import argparse
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent))
from print_state import get_state_file
from add_repl import add_repl


def print_resource_terraform(group, name):
resource_type = "databricks_" + group[:-1]
filename = ".databricks/bundle/default/terraform/terraform.tfstate"
def get_id_terraform(filename, name):
raw = open(filename).read()
data = json.loads(raw)
found = 0
available = []
for r in data["resources"]:
r_type = r["type"]
r_name = r["name"]
if r_type != resource_type:
continue
if r_name != name:
continue
for inst in r["instances"]:
attribute_values = inst.get("attributes") or {}
print(attribute_values.get("id"))
return


def print_resource_direct(group, name):
filename = ".databricks/bundle/default/resources.json"
available.append(r_name)
if r_name == name:
for inst in r["instances"]:
attribute_values = inst.get("attributes") or {}
return attribute_values.get("id")

print(f"Cannot find resource with {name=}. Available: {available}", file=sys.stderr)


def get_id_direct(filename, name):
raw = open(filename).read()
data = json.loads(raw)
state_map = data["state"]
result = state_map.get(f"resources.{group}.{name}")

if result is None:
print(f"Resource {group=} {name=} not found. Available: {raw}")
return
print(result.get("__id__"))
for key, value in state_map.items():
if key.split(".")[2] == name:
return value.get("__id__")

print(f"Cannot find resource with {name=}. Available: {list(state_map.keys())}", file=sys.stderr)


def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--target")
parser.add_argument("--backup", action="store_true")
parser.add_argument("name")
args = parser.parse_args()

filename = get_state_file(args.target, args.backup)
if filename.endswith(".tfstate"):
id = get_id_terraform(filename, args.name)
else:
id = get_id_direct(filename, args.name)

if id:
print(id)
add_repl(str(id), args.name.upper() + "_ID")


if os.environ.get("DATABRICKS_BUNDLE_ENGINE", "").startswith("direct"):
print_resource_direct(*sys.argv[1:])
else:
print_resource_terraform(*sys.argv[1:])
if __name__ == "__main__":
main()
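For the direct engine, the lookup walks the "state" map keyed as resources.<group>.<name>; the sketch below feeds get_id_direct a hand-written file whose shape is inferred from that code and only approximates a real resources.json:

import json, os, sys
from pathlib import Path

os.environ.setdefault("TEST_TMP_DIR", ".")  # needed because read_id.py imports add_repl at module level
sys.path.insert(0, "acceptance/bin")        # assumption: run from the repo root
from read_id import get_id_direct

sample = {"state": {"resources.jobs.some_other_job": {"__id__": "5555"}}}
Path("resources.json").write_text(json.dumps(sample))

print(get_id_direct("resources.json", "some_other_job"))  # -> 5555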
58 changes: 58 additions & 0 deletions acceptance/bin/replace_ids.py
@@ -0,0 +1,58 @@
#!/usr/bin/env python3
"""
Read state and add all resource IDs to ACC_REPLS.
"""

import sys
import json
import argparse
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent))
from print_state import get_state_file
from add_repl import add_repl


def iter_ids_terraform(filename):
raw = open(filename).read()
data = json.loads(raw)
available = []
for r in data["resources"]:
r_name = r["name"]
available.append(r_name)
for inst in r["instances"]:
attribute_values = inst.get("attributes") or {}
id = attribute_values.get("id")
yield r_name, id


def iter_ids_direct(filename):
raw = open(filename).read()
data = json.loads(raw)
state_map = data["state"]

for key, value in state_map.items():
name = key.split(".")[2]
id = value.get("__id__")
if id:
yield name, id


def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--target")
parser.add_argument("--backup", action="store_true")
args = parser.parse_args()

filename = get_state_file(args.target, args.backup)
if filename.endswith(".tfstate"):
it = iter_ids_terraform(filename)
else:
it = iter_ids_direct(filename)

for name, id in it:
add_repl(id, name.upper() + "_ID")


if __name__ == "__main__":
main()
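To illustrate the terraform side, here is a hedged sketch that runs iter_ids_terraform over a minimal hand-crafted tfstate document (resource name and ID are invented; the real file lives under .databricks/bundle/<target>/terraform/):

import json, os, sys
from pathlib import Path

os.environ.setdefault("TEST_TMP_DIR", ".")  # add_repl, imported by replace_ids, needs this at import time
sys.path.insert(0, "acceptance/bin")        # assumption: run from the repo root
from replace_ids import iter_ids_terraform

sample = {
    "resources": [
        {"type": "databricks_job", "name": "some_other_job",
         "instances": [{"attributes": {"id": "5555"}}]},
    ]
}
Path("terraform.tfstate").write_text(json.dumps(sample))

print(list(iter_ids_terraform("terraform.tfstate")))
# [('some_other_job', '5555')] -- main() would then register "5555:SOME_OTHER_JOB_ID"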
4 changes: 2 additions & 2 deletions acceptance/bundle/integration_whl/base/output.txt
@@ -39,7 +39,7 @@ Updating deployment state...
Deployment complete!

>>> [CLI] bundle run some_other_job
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[JOB_ID]/run/[NUMID]
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[SOME_OTHER_JOB_ID]/run/[NUMID]

[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
@@ -57,7 +57,7 @@ Updating deployment state...
Deployment complete!

>>> [CLI] bundle run some_other_job
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[JOB_ID]/run/[NUMID]
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[SOME_OTHER_JOB_ID]/run/[NUMID]

[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
5 changes: 2 additions & 3 deletions acceptance/bundle/integration_whl/base/script
@@ -4,9 +4,8 @@ trace cat databricks.yml
trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
trace $CLI bundle deploy

# Capture job ID and add to runtime replacements to avoid pattern matching ambiguity
job_id=$(read_id.py jobs some_other_job)
echo "$job_id:JOB_ID" >> ACC_REPLS
# Add all resource IDs to runtime replacements to avoid pattern matching ambiguity
replace_ids.py

trace $CLI bundle run some_other_job

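The script now calls replace_ids.py so that the harness can substitute each recorded ID with its bracketed name (e.g. [SOME_OTHER_JOB_ID] in the output above); the snippet below only approximates that substitution step and is not the harness's actual code:

# Illustrative only: how one ACC_REPLS entry maps raw output to the placeholder form.
value, repl = "5555:SOME_OTHER_JOB_ID".rsplit(":", 1)  # a line as written by replace_ids.py
raw = "Run URL: https://host/?o=123#job/5555/run/42"   # made-up raw output
print(raw.replace(value, f"[{repl}]"))
# Run URL: https://host/?o=123#job/[SOME_OTHER_JOB_ID]/run/42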
@@ -39,7 +39,7 @@ Updating deployment state...
Deployment complete!

>>> [CLI] bundle run some_other_job --python-params param1,param2
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[JOB_ID]/run/[NUMID]
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[SOME_OTHER_JOB_ID]/run/[NUMID]

[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
5 changes: 2 additions & 3 deletions acceptance/bundle/integration_whl/custom_params/script
@@ -5,8 +5,7 @@ trace cat databricks.yml
trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
trace $CLI bundle deploy

# Capture job ID and add to runtime replacements to avoid pattern matching ambiguity
job_id=$(read_id.py jobs some_other_job)
echo "$job_id:JOB_ID" >> ACC_REPLS
# Add all resource IDs to runtime replacements to avoid pattern matching ambiguity
replace_ids.py

trace $CLI bundle run some_other_job --python-params param1,param2
@@ -39,7 +39,7 @@ Updating deployment state...
Deployment complete!

>>> [CLI] bundle run some_other_job
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[JOB_ID]/run/[NUMID]
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[SOME_OTHER_JOB_ID]/run/[NUMID]

[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
@@ -57,7 +57,7 @@ Updating deployment state...
Deployment complete!

>>> [CLI] bundle run some_other_job
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[JOB_ID]/run/[NUMID]
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[SOME_OTHER_JOB_ID]/run/[NUMID]

[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
@@ -4,9 +4,8 @@ trace cat databricks.yml
trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
trace $CLI bundle deploy

# Capture job ID and add to runtime replacements to avoid pattern matching ambiguity
job_id=$(read_id.py jobs some_other_job)
echo "$job_id:JOB_ID" >> ACC_REPLS
# Add all resource IDs to runtime replacements to avoid pattern matching ambiguity
replace_ids.py

trace $CLI bundle run some_other_job

@@ -8,7 +8,7 @@ Updating deployment state...
Deployment complete!

>>> [CLI] bundle run some_other_job
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[JOB_ID]/run/[NUMID]
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[SOME_OTHER_JOB_ID]/run/[NUMID]

[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
@@ -26,7 +26,7 @@ Updating deployment state...
Deployment complete!

>>> [CLI] bundle run some_other_job
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[JOB_ID]/run/[NUMID]
Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[SOME_OTHER_JOB_ID]/run/[NUMID]

[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
@@ -3,9 +3,8 @@ cp -r $TESTDIR/../interactive_cluster/{setup.py,my_test_code} .
trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
trace $CLI bundle deploy

# Capture job ID and add to runtime replacements to avoid pattern matching ambiguity
job_id=$(read_id.py jobs some_other_job)
echo "$job_id:JOB_ID" >> ACC_REPLS
# Add all resource IDs to runtime replacements to avoid pattern matching ambiguity
replace_ids.py

trace $CLI bundle run some_other_job
