Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -217,6 +217,7 @@ Zip a transformation job in preparation to upload to Data Cloud.

Options:
- `--path TEXT`: Path to the code directory (default: ".")
- `--network TEXT`: Docker network to use for the build (default: "default")


#### `datacustomcode deploy`
Expand All @@ -228,6 +229,7 @@ Options:
- `--name TEXT`: Name of the transformation job [required]
- `--version TEXT`: Version of the transformation job (default: "0.0.1")
- `--description TEXT`: Description of the transformation job (default: "")
- `--network TEXT`: Docker network to use for the build (default: "default")
- `--cpu-size TEXT`: CPU size for the deployment (default: "CPU_XL"). Available options: CPU_L(Large), CPU_XL(Extra Large), CPU_2XL(2X Large), CPU_4XL(4X Large)


Expand Down
Comment thread
joroscoSF marked this conversation as resolved.
Outdated
Binary file not shown.
16 changes: 12 additions & 4 deletions src/datacustomcode/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,11 +71,12 @@ def configure(

@cli.command()
@click.argument("path", default="payload")
def zip(path: str):
@click.option("--network", default="default")
def zip(path: str, network: str):
from datacustomcode.deploy import zip

logger.debug("Zipping project")
zip(path)
zip(path, network)


@cli.command()
Expand All @@ -84,6 +85,7 @@ def zip(path: str):
@click.option("--version", default="0.0.1")
@click.option("--description", default="Custom Data Transform Code")
@click.option("--profile", default="default")
@click.option("--network", default="default")
@click.option(
"--cpu-size",
default="CPU_2XL",
Expand All @@ -98,7 +100,13 @@ def zip(path: str):
Choose based on your workload requirements.""",
)
def deploy(
path: str, name: str, version: str, description: str, cpu_size: str, profile: str
path: str,
name: str,
version: str,
description: str,
cpu_size: str,
profile: str,
network: str,
):
from datacustomcode.credentials import Credentials
from datacustomcode.deploy import TransformationJobMetadata, deploy_full
Expand Down Expand Up @@ -132,7 +140,7 @@ def deploy(
fg="red",
)
raise click.Abort() from None
deploy_full(path, metadata, credentials)
deploy_full(path, metadata, credentials, network)


@cli.command()
Expand Down
50 changes: 36 additions & 14 deletions src/datacustomcode/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,27 +163,22 @@ def create_deployment(
ZIP_FILE_NAME = "deployment.zip"


def prepare_dependency_archive(directory: str) -> None:
def prepare_dependency_archive(directory: str, docker_network: str) -> None:
cmd = f"docker images -q {DOCKER_IMAGE_NAME}"
image_exists = cmd_output(cmd)

if not image_exists:
logger.info("Building docker image...")
cmd = (
f"{PLATFORM_ENV_VAR} docker build -t {DOCKER_IMAGE_NAME} "
f"-f Dockerfile.dependencies ."
)
logger.info(f"Building docker image with docker network: {docker_network}...")
cmd = docker_build_cmd(docker_network)
cmd_output(cmd)

with tempfile.TemporaryDirectory() as temp_dir:
logger.info("Building dependencies archive")
logger.info(
f"Building dependencies archive with docker network: {docker_network}"
)
shutil.copy("requirements.txt", temp_dir)
shutil.copy("build_native_dependencies.sh", temp_dir)
cmd = (
f"{PLATFORM_ENV_VAR} docker run --rm "
f"-v {temp_dir}:/workspace "
f"{DOCKER_IMAGE_NAME}"
)
cmd = docker_run_cmd(docker_network, temp_dir)
cmd_output(cmd)
archives_temp_path = os.path.join(temp_dir, DEPENDENCIES_ARCHIVE_FULL_NAME)
os.makedirs(os.path.dirname(DEPENDENCIES_ARCHIVE_PATH), exist_ok=True)
Expand All @@ -192,6 +187,31 @@ def prepare_dependency_archive(directory: str) -> None:
logger.info(f"Dependencies archived to {DEPENDENCIES_ARCHIVE_PATH}")


def docker_build_cmd(network: str) -> str:
    """Build the ``docker build`` shell command for the dependency image.

    Args:
        network: Docker network name. Any value other than ``"default"``
            is passed to docker via ``--network``.

    Returns:
        The full ``docker build`` command string, tagged with
        ``DOCKER_IMAGE_NAME`` and using ``Dockerfile.dependencies``.
    """
    # Place --network before the positional build context ("."): the classic
    # docker CLI requires all flags to precede the context argument.
    network_flag = f"--network {network} " if network != "default" else ""
    cmd = (
        f"{PLATFORM_ENV_VAR} docker build -t {DOCKER_IMAGE_NAME} "
        f"{network_flag}"
        f"--file Dockerfile.dependencies ."
    )
    logger.debug(f"Docker build command: {cmd}")
    return cmd


def docker_run_cmd(network: str, temp_dir: str) -> str:
    """Build the ``docker run`` shell command for the dependency container.

    Args:
        network: Docker network name. Any value other than ``"default"``
            is passed to docker via ``--network``.
        temp_dir: Host directory bind-mounted at ``/workspace`` inside
            the container.

    Returns:
        The full ``docker run`` command string.
    """
    # BUG FIX: --network must come BEFORE the image name. The previous
    # version appended it after the image, so docker treated "--network X"
    # as the container's command arguments and the network was never set.
    network_flag = f"--network {network} " if network != "default" else ""
    cmd = (
        f"{PLATFORM_ENV_VAR} docker run --rm "
        f"{network_flag}"
        f"-v {temp_dir}:/workspace "
        f"{DOCKER_IMAGE_NAME}"
    )
    logger.debug(f"Docker run command: {cmd}")
    return cmd


class DeploymentsResponse(BaseModel):
    """Pydantic model for a deployment status API response."""

    # Raw status string as returned by the deployments endpoint.
    deploymentStatus: str

Expand Down Expand Up @@ -366,13 +386,14 @@ def upload_zip(file_upload_url: str) -> None:

def zip(
directory: str,
docker_network: str,
):
# Create a zip file excluding .DS_Store files
import zipfile

# prepare payload only if requirements.txt is non-empty
if has_nonempty_requirements_file(directory):
prepare_dependency_archive(directory)
prepare_dependency_archive(directory, docker_network)
else:
logger.info(
f"Skipping dependency archive: requirements.txt is missing or empty "
Expand All @@ -396,6 +417,7 @@ def deploy_full(
directory: str,
metadata: TransformationJobMetadata,
credentials: Credentials,
docker_network: str,
callback=None,
) -> AccessTokenResponse:
"""Deploy a data transform in the DataCloud."""
Expand All @@ -406,7 +428,7 @@ def deploy_full(

# create deployment and upload payload
deployment = create_deployment(access_token, metadata)
zip(directory)
zip(directory, docker_network)
upload_zip(deployment.fileUploadUrl)
wait_for_deployment(access_token, metadata, callback)

Expand Down
Loading