fix(back): #1234 compute on aws batch #1237

Merged 1 commit on Jan 5, 2024
16 changes: 16 additions & 0 deletions .github/workflows/dev.yml
@@ -628,6 +628,22 @@ jobs:
      - uses: cachix/install-nix-action@6ed004b9ccb68dbc28e7c85bee15fa93dbd214ac
      - name: /testTerraform/module
        run: nix-env -if . && m . /testTerraform/module

  linux_computeOnAwsBatch_module:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@f095bcc56b7c2baf48f3ac70d6d6782f4f553222
      - uses: docker://docker.io/nixos/nix@sha256:c3db4c484f6b1ee6c9bb8ca90307cfbeca8ef88156840911356a677eeaff4845
        name: /tests/computeOnAwsBatch
        with:
          args: sh -c "chown -R root:root /github/workspace && nix-env -if . && m . /tests/computeOnAwsBatch"
  macos_computeOnAwsBatch_module:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@f095bcc56b7c2baf48f3ac70d6d6782f4f553222
      - uses: cachix/install-nix-action@6ed004b9ccb68dbc28e7c85bee15fa93dbd214ac
      - name: /tests/computeOnAwsBatch
        run: nix-env -if . && m . /tests/computeOnAwsBatch
name: dev
on:
  pull_request:
16 changes: 16 additions & 0 deletions .github/workflows/prod.yml
@@ -821,6 +821,22 @@ jobs:
        run: nix-env -if . && m . /testTerraform/module
        env:
          CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}

  linux_computeOnAwsBatch_module:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@f095bcc56b7c2baf48f3ac70d6d6782f4f553222
      - uses: docker://docker.io/nixos/nix@sha256:c3db4c484f6b1ee6c9bb8ca90307cfbeca8ef88156840911356a677eeaff4845
        name: /tests/computeOnAwsBatch
        with:
          args: sh -c "chown -R root:root /github/workspace && nix-env -if . && m . /tests/computeOnAwsBatch"
  macos_computeOnAwsBatch_module:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@f095bcc56b7c2baf48f3ac70d6d6782f4f553222
      - uses: cachix/install-nix-action@6ed004b9ccb68dbc28e7c85bee15fa93dbd214ac
      - name: /tests/computeOnAwsBatch
        run: nix-env -if . && m . /tests/computeOnAwsBatch
name: prod
on:
  push:
13 changes: 12 additions & 1 deletion docs/src/api/builtins/deploy.md
@@ -8,11 +8,20 @@ When used as a Makes declaration (at makes.nix attrs):
    Job groups to submit.
    Defaults to `{ }`.

Types:
When used as a makes input:

- computeOnAwsBatch: `JobType -> SourceAble`
    A sourceable file that sends jobs to AWS Batch.

???+ warning

    When used as a makes input, all arguments are required
    and defaults are not available.
    However, nested jobs (see the `nextJob` argument)
    do have defaults enabled.

Types:

- `JobType` = `attrs`
    - allowDuplicates: `bool` (Optional Attr)
        Set to `false` in order to prevent submitting the job
@@ -32,6 +41,8 @@ Types:
        It overrides the one specified
        in the Batch job definition.
        Additional arguments can be propagated when running this module output.
    - dryRun: `bool` (Optional Attr) (Not supported on nextJob)
        Do not send any job. Only check the correctness of the pipeline definition.
    - definition: `str`
        Name of the Batch job definition
        that we will use as base for submitting the job.
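For context, a minimal sketch of declaring this builtin at makes.nix attrs with the new `dryRun` flag. It only mirrors the `JobType` attributes documented above and exercised by `makes/tests/computeOnAwsBatch/main.nix`; the job key `exampleJob`, every value, and the assumption that the attribute key names the job are hypothetical, not part of this pull request.

```nix
# Hypothetical makes.nix fragment; attribute values are placeholders.
{
  computeOnAwsBatch = {
    exampleJob = {
      # Only validate the pipeline definition; nothing is submitted to AWS Batch.
      dryRun = true;
      allowDuplicates = false;
      attempts = 1;
      attemptDurationSeconds = 60;
      command = ["m" "." "/exampleJob"];
      definition = "example-job-definition";
      environment = [];
      includePositionalArgsInName = true;
      memory = 1800;
      nextJob = {};
      parallel = 1;
      propagateTags = true;
      queue = "example-queue";
      setup = [];
      tags = {};
      vcpus = 1;
    };
  };
}
```

The test file added below instead calls `computeOnAwsBatch` directly as a makes input, where, per the warning above, every argument must be passed explicitly.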
20 changes: 20 additions & 0 deletions makes/tests/computeOnAwsBatch/main.nix
@@ -0,0 +1,20 @@
{computeOnAwsBatch, ...}:
computeOnAwsBatch {
  dryRun = true;
  allowDuplicates = true;
  attempts = 1;
  attemptDurationSeconds = 60;
  command = ["foo"];
  definition = "foo";
  environment = [];
  includePositionalArgsInName = true;
  name = "foo";
  nextJob = {};
  memory = 1;
  parallel = 1;
  propagateTags = true;
  queue = "foo";
  setup = [];
  tags = {};
  vcpus = 1;
}
38 changes: 19 additions & 19 deletions src/args/compute-on-aws-batch/batch-client/batch_client/_cli.py
@@ -8,6 +8,7 @@
)
from .core import (
    EnvVarPointer,
    JobPipelineDraft,
    QueueName,
)
import click
@@ -37,9 +38,9 @@ def _action() -> JsonObj:
    return Cmd.from_cmd(_action)


@click.command() # type: ignore[misc]
@click.option("--pipeline", type=click.Path(exists=True), required=True) # type: ignore[misc]
@click.argument("args", nargs=-1) # type: ignore[misc]
@click.command()
@click.option("--pipeline", type=click.Path(exists=True), required=True)
@click.argument("args", nargs=-1)
def submit_job(
    pipeline: str,
    args: FrozenList[str],
@@ -74,32 +75,31 @@ def _sep(item: str) -> bool:
        .map(lambda x: tuple(x))
        .to_list()
    )
    drafts = _queue_from_env.bind(
    pipeline_draft = _queue_from_env.bind(
        lambda queue: _root.map(
            lambda root: decode.decode_all_drafts(root, arg_groups, queue)
        )
    ).map(
        lambda t: (
            t[0].map(
                lambda r: r.alt(
                    lambda e: Exception(f"Invalid job draft i.e. {e}")
                ).unwrap()
            ),
            t[1],
        )
    )
    cmd: Cmd[None] = drafts.bind(
        lambda d: new_client().bind(
            lambda c: utils.extract_single(d[0]).map(
                lambda j: actions.send_single_job(c, j, d[1]),

    def _execute(draft: JobPipelineDraft) -> Cmd[None]:
        # Handle dry run logic
        action = new_client().bind(
            lambda c: utils.extract_single(draft.drafts).map(
                lambda j: actions.send_single_job(
                    c, j, draft.allow_duplicates
                ),
                lambda p: actions.send_pipeline(c, p),
            )
        )
    )
        if draft.dry_run:
            return Cmd.from_cmd(lambda: None)
        return action

    cmd: Cmd[None] = pipeline_draft.bind(_execute)
    cmd.compute()


@click.group() # type: ignore[misc]
@click.group()
def main() -> None:
    pass

@@ -2,7 +2,6 @@
    ApiClient,
)
from batch_client.core import (
    AllowDuplicates,
    DependentJobDraft,
    JobDependencies,
    JobDraft,
@@ -25,7 +24,7 @@
def send_single_job(
    client: ApiClient,
    draft: JobDraft,
    allow_duplicates: AllowDuplicates,
    allow_duplicates: bool,
) -> Cmd[None]:
    dup_msg = Cmd.from_cmd(lambda: LOG.info("Detecting duplicates..."))
    skipped_msg = Cmd.from_cmd(
@@ -20,6 +20,7 @@
    FrozenDict,
    FrozenList,
    Maybe,
    PureIter,
    Result,
    ResultE,
)
@@ -240,6 +241,7 @@ class RawJobDraft:
    allow_duplicates: bool
    args_in_name: bool
    propagate_tags: bool
    dry_run: bool
    next_job: Maybe[RawJobDraft]


@@ -266,3 +268,10 @@ class DependentJobDraft:
@dataclass(frozen=True)
class AllowDuplicates:
    value: bool


@dataclass(frozen=True)
class JobPipelineDraft:
    drafts: PureIter[JobDraft]
    allow_duplicates: bool
    dry_run: bool
13 changes: 10 additions & 3 deletions src/args/compute-on-aws-batch/batch-client/batch_client/decode.py
@@ -6,13 +6,13 @@
    utils,
)
from batch_client.core import (
    AllowDuplicates,
    Attempts,
    Command,
    EnvVarPointer,
    JobDefinition,
    JobDraft,
    JobName,
    JobPipelineDraft,
    JobSize,
    Manifest,
    QueueName,
@@ -131,6 +131,7 @@ def _decode_raw_draft(raw: JsonObj, unwrapper: ResultUnwrapper) -> RawJobDraft:
    _allow_duplicates = _require(raw, "allowDuplicates", _to_bool)
    _args_in_name = _require(raw, "includePositionalArgsInName", _to_bool)
    _propagate_tags = _require(raw, "propagateTags", _to_bool)
    _dry_run = _require(raw, "dryRun", _to_bool)
    _next = _require(raw, "nextJob", Unfolder.to_json).bind(decode_raw_draft)
    return RawJobDraft(
        unwrapper.unwrap(_name),
@@ -147,6 +148,7 @@ def _decode_raw_draft(raw: JsonObj, unwrapper: ResultUnwrapper) -> RawJobDraft:
        unwrapper.unwrap(_allow_duplicates),
        unwrapper.unwrap(_args_in_name),
        unwrapper.unwrap(_propagate_tags),
        unwrapper.unwrap(_dry_run),
        unwrapper.unwrap(_next),
    )

@@ -222,7 +224,7 @@ def decode_all_drafts(
    root: RawJobDraft,
    args: FrozenList[FrozenList[str]],
    queue_from_env: ResultE[QueueName],
) -> Tuple[PureIter[ResultE[JobDraft]], AllowDuplicates]:
) -> JobPipelineDraft:
    items = (
        _raw_jobs(root)
        .enumerate(0)
@@ -234,4 +236,9 @@
            )
        )
    )
    return (items, AllowDuplicates(root.allow_duplicates))
    drafts = items.map(
        lambda r: r.alt(
            lambda e: Exception(f"Invalid job draft i.e. {e}")
        ).unwrap()
    )
    return JobPipelineDraft(drafts, root.allow_duplicates, root.dry_run)
6 changes: 3 additions & 3 deletions src/args/compute-on-aws-batch/batch-client/build/default.nix
@@ -1,11 +1,11 @@
{
  makePythonPyprojectPackage,
  makes_inputs,
  nixpkgs,
  python_version,
  src,
}: let
  deps = import ./deps {
    inherit nixpkgs python_version;
    inherit makes_inputs nixpkgs python_version;
  };
  pkgDeps = {
    runtime_deps = with deps.python_pkgs; [
@@ -24,7 +24,7 @@
      pytest
    ];
  };
  packages = makePythonPyprojectPackage {
  packages = makes_inputs.makePythonPyprojectPackage {
    inherit (deps.lib) buildEnv buildPythonPackage;
    inherit pkgDeps src;
  };
@@ -1,13 +1,29 @@
{
  lib,
  makes_inputs,
  nixpkgs,
  python_pkgs,
  python_version,
}: let
  commit = "72a495bb933f052ad812292b468ca3e18fd9dde4";
  src = builtins.fetchTarball {
    sha256 = "sha256:0413zl4y92dbdfmck070x7dhp5cxx66xd2pxpxg3gbhaw0yqzhqd";
  commit = "fd64a300bda15c2389f5bfb314f48fb5b2a0e47a"; # 2.4.0+2
  raw_src = builtins.fetchTarball {
    sha256 = "sha256:0g1md5fiyzqi9xfh1qxf0mh32k8nb06w0yhc17rr5a0ijiskb8i4";
    url = "https://gitlab.com/dmurciaatfluid/arch_lint/-/archive/${commit}/arch_lint-${commit}.tar";
  };
in
  import "${src}/build" {
  src = import "${raw_src}/build/filter.nix" nixpkgs.nix-filter raw_src;
  bundle = import "${raw_src}/build" {
    makesLib = makes_inputs;
    inherit nixpkgs python_version src;
  }
  };
in
  bundle.build_bundle (
    default: required_deps: builder:
      builder lib (
        required_deps (
          python_pkgs
          // {
            inherit (default.python_pkgs) grimp;
          }
        )
      )
  )
37 changes: 19 additions & 18 deletions src/args/compute-on-aws-batch/batch-client/build/deps/default.nix
@@ -1,4 +1,5 @@
{
  makes_inputs,
  nixpkgs,
  python_version,
}: let
@@ -8,32 +9,32 @@
    inherit (nixpkgs.python3Packages) fetchPypi;
  };

  utils = import ./override_utils.nix;
  pkgs_overrides = override: python_pkgs: builtins.mapAttrs (_: override python_pkgs) python_pkgs;

  arch-lint = import ./arch_lint.nix {inherit nixpkgs python_version;};
  fa-purity = let
    core = import ./fa_purity.nix {inherit nixpkgs python_version;};
  in {
    "${python_version}" = core;
  };
  utils = makes_inputs.pythonOverrideUtils;

  layer_1 = python_pkgs:
    python_pkgs
    // {
      arch-lint = arch-lint.pkg;
      fa-purity = fa-purity."${python_version}".pkg;
      arch-lint = let
        result = import ./arch_lint.nix {
          inherit lib makes_inputs nixpkgs python_pkgs python_version;
        };
      in
        result.pkg;
      mypy-boto3-batch = import ./boto3/batch-stubs.nix {inherit lib python_pkgs;};
      types-boto3 = import ./boto3/stubs.nix {inherit lib python_pkgs;};
    };
  layer_2 = python_pkgs:
    python_pkgs
    // {
      fa-purity = let
        result = import ./fa_purity.nix {
          inherit lib makes_inputs nixpkgs python_pkgs python_version;
        };
      in
        result.pkg;
    };

  fa_purity_override = python_pkgs: utils.replace_pkg ["fa_purity"] python_pkgs.fa-purity;
  overrides = map pkgs_overrides [
    fa_purity_override
    (_: utils.no_check_override)
  ];

  python_pkgs = utils.compose ([layer_1] ++ overrides) nixpkgs."${python_version}Packages";
  python_pkgs = utils.compose [layer_2 layer_1] nixpkgs."${python_version}Packages";
in {
  inherit lib python_pkgs;
}