diff --git a/.pylintrc b/.pylintrc index 5428b86be0..223580f4d3 100644 --- a/.pylintrc +++ b/.pylintrc @@ -94,7 +94,24 @@ disable= useless-object-inheritance, # TODO: Enable this check and fix code once Python 2 is no longer supported. super-with-arguments, raise-missing-from, - E1136, + C0116, # Missing function or method docstring + C0209, # Use f-string instead of format + E0015, # Unrecognized option found in config + E0702, # Raising a string instead of an exception + E1101, # Module has no member (likely dynamic attr) + E1136, # Value is unsubscriptable (unsubscriptable-object) + R0022, # Useless option value in config + R1710, # Inconsistent return statements + R1714, # Consider using `in` with comparisons + R1729, # Use a generator + R1732, # Consider using 'with' for resource-allocating operations + R1735, # Consider using a dict or list literal + W0237, # Argument renamed in override + W0613, # Unused argument + W0621, # Redefining name from outer scope + W0719, # Broad exception raised + W1404, # Implicit string concatenation + W1514, # `open()` used without encoding [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs 
Defaults to # "Exception" -overgeneral-exceptions=Exception +overgeneral-exceptions=builtins.Exception diff --git a/CHANGELOG.md b/CHANGELOG.md index e59d964bd1..7db9aa6c8e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,5 @@ # Changelog -## v2.243.2 (2025-04-16) - -### Bug Fixes and Other Changes - - * tgi image uri unit tests - * Fix deepdiff dependencies - ## v2.243.1 (2025-04-11) ### Bug Fixes and Other Changes diff --git a/VERSION b/VERSION index 4e55ec1ee4..f68f7b9691 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.243.3.dev0 +2.243.2.dev0 diff --git a/doc/requirements.txt b/doc/requirements.txt index 71a95f7633..b4241e06a4 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,7 +1,7 @@ sphinx==5.1.1 sphinx-rtd-theme==0.5.0 docutils==0.15.2 -packaging==20.9 +packaging>=23.0,<25 jinja2==3.1.6 schema==0.7.5 accelerate>=0.24.1,<=0.27.0 diff --git a/pyproject.toml b/pyproject.toml index 0122a6bf3c..6e0bf54b0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ name = "sagemaker" dynamic = ["version", "optional-dependencies"] description = "Open source library for training and deploying models on Amazon SageMaker." 
readme = "README.rst" -requires-python = ">=3.8" +requires-python = ">=3.9" authors = [ { name = "Amazon Web Services" }, ] @@ -25,10 +25,10 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ "attrs>=23.1.0,<24", @@ -39,15 +39,15 @@ dependencies = [ "google-pasta", "importlib-metadata>=1.4.0,<7.0", "jsonschema", - "numpy>=1.9.0,<2.0", + "numpy>=2.0.0,<2.3.0", "omegaconf>=2.2,<=2.3", - "packaging>=20.0", - "pandas", + "packaging>=23.0,<25", + "pandas==2.2.3", "pathos", "platformdirs", "protobuf>=3.12,<6.0", "psutil", - "PyYAML~=6.0", + "PyYAML>=6.0.1", "requests", "sagemaker-core>=1.0.17,<2.0.0", "schema", diff --git a/requirements/extras/local_requirements.txt b/requirements/extras/local_requirements.txt index 68b9a1bcb3..ea57b82e9a 100644 --- a/requirements/extras/local_requirements.txt +++ b/requirements/extras/local_requirements.txt @@ -1,3 +1,3 @@ urllib3>=1.26.8,<3.0.0 docker>=5.0.2,<8.0.0 -PyYAML>=5.4.1,<7 +PyYAML>=6.0.1,<7 diff --git a/requirements/extras/scipy_requirements.txt b/requirements/extras/scipy_requirements.txt index 0e99587e6e..f89caf8c2b 100644 --- a/requirements/extras/scipy_requirements.txt +++ b/requirements/extras/scipy_requirements.txt @@ -1 +1 @@ -scipy==1.10.1 +scipy==1.13.0 diff --git a/requirements/extras/test_requirements.txt b/requirements/extras/test_requirements.txt index a0087a8e13..7d4385c14b 100644 --- a/requirements/extras/test_requirements.txt +++ b/requirements/extras/test_requirements.txt @@ -1,7 +1,7 @@ tox==3.24.5 -numpy>=1.24.0 +numpy>=2.0.0, <2.3.0 build[virtualenv]==1.2.1 -flake8==4.0.1 +flake8==7.1.2 pytest==6.2.5 pytest-cov==3.0.0 pytest-rerunfailures==10.2 @@ -14,25 +14,25 @@ awslogs==0.14.0 black==24.3.0 
stopit==1.1.2 # Update tox.ini to have correct version of airflow constraints file -apache-airflow==2.9.3 +apache-airflow==2.10.4 apache-airflow-providers-amazon==7.2.1 attrs>=23.1.0,<24 -fabric==2.6.0 +fabric==3.2.2 requests==2.32.2 sagemaker-experiments==0.1.35 Jinja2==3.1.6 pyvis==0.2.1 -pandas==1.4.4 -scikit-learn==1.3.0 +pandas==2.2.3 +scikit-learn==1.6.1 cloudpickle==2.2.1 jsonpickle<4.0.0 -PyYAML==6.0 +PyYAML>=6.0.1 # TODO find workaround xgboost>=1.6.2,<=1.7.6 pillow>=10.0.1,<=11 opentelemetry-proto==1.27.0 protobuf==4.25.5 -tensorboard>=2.9.0,<=2.15.2 +tensorboard>=2.16.2,<=2.18.0 transformers==4.48.0 sentencepiece==0.1.99 # https://github.com/triton-inference-server/server/issues/6246 @@ -42,8 +42,8 @@ onnx==1.17.0 nbformat>=5.9,<6 accelerate>=0.24.1,<=0.27.0 schema==0.7.5 -tensorflow>=2.9.0,<=2.15.1 -mlflow>=2.12.2,<2.13 +tensorflow==2.18.0 +mlflow>=2.16.1 huggingface_hub==0.26.2 uvicorn>=0.30.1 fastapi==0.115.4 @@ -51,3 +51,4 @@ nest-asyncio sagemaker-mlflow>=0.1.0 deepdiff>=8.0.0 orderly-set<5.4.0 +lexicon diff --git a/requirements/tox/doc8_requirements.txt b/requirements/tox/doc8_requirements.txt index e4a040dd4d..8707c06621 100644 --- a/requirements/tox/doc8_requirements.txt +++ b/requirements/tox/doc8_requirements.txt @@ -1,2 +1,2 @@ -doc8==0.10.1 -Pygments==2.15.0 +doc8==1.1.2 +Pygments==2.18.0 diff --git a/requirements/tox/flake8_requirements.txt b/requirements/tox/flake8_requirements.txt index b3ccfca84f..63a79da444 100644 --- a/requirements/tox/flake8_requirements.txt +++ b/requirements/tox/flake8_requirements.txt @@ -1,2 +1,2 @@ -flake8==4.0.1 -flake8-future-import==0.4.6 +flake8==7.1.2 +flake8-future-import==0.4.7 diff --git a/requirements/tox/pylint_requirements.txt b/requirements/tox/pylint_requirements.txt index b307f21762..0e5db209fe 100644 --- a/requirements/tox/pylint_requirements.txt +++ b/requirements/tox/pylint_requirements.txt @@ -1,2 +1,2 @@ -pylint==2.6.2 -astroid==2.4.2 +pylint==3.0.3 +astroid==3.0.2 diff --git 
a/requirements/tox/spelling_requirements.txt b/requirements/tox/spelling_requirements.txt index 769415eb2c..94d6bc314e 100644 --- a/requirements/tox/spelling_requirements.txt +++ b/requirements/tox/spelling_requirements.txt @@ -1,2 +1,2 @@ pyenchant==3.2.2 -pylint==2.6.2 +pylint==3.0.3 diff --git a/src/sagemaker/config/config_schema.py b/src/sagemaker/config/config_schema.py index 34a98c0b8e..61da17e7cf 100644 --- a/src/sagemaker/config/config_schema.py +++ b/src/sagemaker/config/config_schema.py @@ -540,7 +540,8 @@ def _simple_path(*args: str): "minItems": 0, "maxItems": 50, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateTrainingJob.html#sagemaker-CreateTrainingJob-request-Environment + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_CreateTrainingJob.html#sagemaker-CreateTrainingJob-request-Environment "environmentVariables": { TYPE: OBJECT, ADDITIONAL_PROPERTIES: False, @@ -553,13 +554,15 @@ def _simple_path(*args: str): }, "maxProperties": 48, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_S3DataSource.html#sagemaker-Type-S3DataSource-S3Uri + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_S3DataSource.html#sagemaker-Type-S3DataSource-S3Uri "s3Uri": { TYPE: "string", "pattern": "^(https|s3)://([^/]+)/?(.*)$", "maxLength": 1024, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_AlgorithmSpecification.html#sagemaker-Type-AlgorithmSpecification-ContainerEntrypoint + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_AlgorithmSpecification.html#sagemaker-Type-AlgorithmSpecification-ContainerEntrypoint "preExecutionCommand": {TYPE: "string", "pattern": r".*"}, # Regex based on https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_PipelineDefinitionS3Location.html # except with an additional ^ and $ for the 
beginning and the end to closer align to @@ -570,7 +573,8 @@ def _simple_path(*args: str): "minLength": 3, "maxLength": 63, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_MonitoringJobDefinition.html#sagemaker-Type-MonitoringJobDefinition-Environment + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_MonitoringJobDefinition.html#sagemaker-Type-MonitoringJobDefinition-Environment "environment-Length256-Properties50": { TYPE: OBJECT, ADDITIONAL_PROPERTIES: False, @@ -583,7 +587,8 @@ def _simple_path(*args: str): }, "maxProperties": 50, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateTransformJob.html#sagemaker-CreateTransformJob-request-Environment + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_CreateTransformJob.html#sagemaker-CreateTransformJob-request-Environment "environment-Length10240-Properties16": { TYPE: OBJECT, ADDITIONAL_PROPERTIES: False, @@ -596,7 +601,8 @@ def _simple_path(*args: str): }, "maxProperties": 16, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_ContainerDefinition.html#sagemaker-Type-ContainerDefinition-Environment + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_ContainerDefinition.html#sagemaker-Type-ContainerDefinition-Environment "environment-Length1024-Properties16": { TYPE: OBJECT, ADDITIONAL_PROPERTIES: False, @@ -609,7 +615,8 @@ def _simple_path(*args: str): }, "maxProperties": 16, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateProcessingJob.html#sagemaker-CreateProcessingJob-request-Environment + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_CreateProcessingJob.html#sagemaker-CreateProcessingJob-request-Environment "environment-Length256-Properties100": { TYPE: OBJECT, ADDITIONAL_PROPERTIES: 
False, @@ -622,7 +629,8 @@ def _simple_path(*args: str): }, "maxProperties": 100, }, - # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateTrainingJob.html#sagemaker-CreateTrainingJob-request-Environment + # Regex is taken from https://docs.aws.amazon.com/sagemaker/latest/APIReference/ + # API_CreateTrainingJob.html#sagemaker-CreateTrainingJob-request-Environment "environment-Length512-Properties48": { TYPE: OBJECT, ADDITIONAL_PROPERTIES: False, diff --git a/src/sagemaker/feature_store/dataset_builder.py b/src/sagemaker/feature_store/dataset_builder.py index 289fa1ee0c..fc9f9372b1 100644 --- a/src/sagemaker/feature_store/dataset_builder.py +++ b/src/sagemaker/feature_store/dataset_builder.py @@ -929,7 +929,7 @@ def _construct_query_string(self, base: FeatureGroupToBeMerged) -> str: selected_features += ", " selected_features += ", ".join( [ - f'fg_{i}."{feature_name}" as "{feature_name}.{(i+1)}"' + f'fg_{i}."{feature_name}" as "{feature_name}.{(i + 1)}"' for feature_name in feature_group.projected_feature_names ] ) diff --git a/src/sagemaker/image_uri_config/huggingface-llm-neuronx.json b/src/sagemaker/image_uri_config/huggingface-llm-neuronx.json index d79e7637ed..ed5c289377 100644 --- a/src/sagemaker/image_uri_config/huggingface-llm-neuronx.json +++ b/src/sagemaker/image_uri_config/huggingface-llm-neuronx.json @@ -14,7 +14,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -67,7 +66,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -120,7 +118,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", 
"ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -173,7 +170,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -226,7 +222,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -279,7 +274,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -332,7 +326,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -385,7 +378,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -438,7 +430,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -491,7 +482,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -544,7 +534,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", diff --git a/src/sagemaker/image_uri_config/huggingface-llm.json 
b/src/sagemaker/image_uri_config/huggingface-llm.json index ed85f0d2bf..27df32a073 100644 --- a/src/sagemaker/image_uri_config/huggingface-llm.json +++ b/src/sagemaker/image_uri_config/huggingface-llm.json @@ -14,9 +14,7 @@ "1.4": "1.4.5", "2.0": "2.4.0", "2.3": "2.3.1", - "3.0": "3.0.1", - "3.2": "3.2.3", - "3.1": "3.1.1" + "3.0": "3.0.1" }, "versions": { "0.6.0": { @@ -26,7 +24,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -79,7 +76,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -132,7 +128,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -185,7 +180,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -238,7 +232,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -291,7 +284,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -344,7 +336,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -397,7 +388,6 @@ "registries": { 
"af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -450,7 +440,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -503,7 +492,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -556,7 +544,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -609,7 +596,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -662,7 +648,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -715,7 +700,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -768,7 +752,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -821,7 +804,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": 
"364406365360", @@ -874,7 +856,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -927,7 +908,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -972,165 +952,6 @@ "container_version": { "gpu": "cu124-ubuntu22.04-v2.1" } - }, - "3.1.1": { - "py_versions": [ - "py311" - ], - "registries": { - "af-south-1": "626614931356", - "ap-east-1": "871362719292", - "ap-east-2": "975050140332", - "ap-northeast-1": "763104351884", - "ap-northeast-2": "763104351884", - "ap-northeast-3": "364406365360", - "ap-south-1": "763104351884", - "ap-south-2": "772153158452", - "ap-southeast-1": "763104351884", - "ap-southeast-2": "763104351884", - "ap-southeast-3": "907027046896", - "ap-southeast-4": "457447274322", - "ap-southeast-5": "550225433462", - "ap-southeast-7": "590183813437", - "ca-central-1": "763104351884", - "ca-west-1": "204538143572", - "cn-north-1": "727897471807", - "cn-northwest-1": "727897471807", - "eu-central-1": "763104351884", - "eu-central-2": "380420809688", - "eu-north-1": "763104351884", - "eu-south-1": "692866216735", - "eu-south-2": "503227376785", - "eu-west-1": "763104351884", - "eu-west-2": "763104351884", - "eu-west-3": "763104351884", - "il-central-1": "780543022126", - "me-central-1": "914824155844", - "me-south-1": "217643126080", - "mx-central-1": "637423239942", - "sa-east-1": "763104351884", - "us-east-1": "763104351884", - "us-east-2": "763104351884", - "us-gov-east-1": "446045086412", - "us-gov-west-1": "442386744353", - "us-iso-east-1": "886529160074", - "us-isob-east-1": "094389454867", - "us-isof-east-1": "303241398832", - "us-isof-south-1": "454834333376", - "us-west-1": "763104351884", - "us-west-2": "763104351884" - }, 
- "tag_prefix": "2.6.0-tgi3.1.1", - "repository": "huggingface-pytorch-tgi-inference", - "container_version": { - "gpu": "cu124-ubuntu22.04" - } - }, - "3.2.0": { - "py_versions": [ - "py311" - ], - "registries": { - "af-south-1": "626614931356", - "ap-east-1": "871362719292", - "ap-east-2": "975050140332", - "ap-northeast-1": "763104351884", - "ap-northeast-2": "763104351884", - "ap-northeast-3": "364406365360", - "ap-south-1": "763104351884", - "ap-south-2": "772153158452", - "ap-southeast-1": "763104351884", - "ap-southeast-2": "763104351884", - "ap-southeast-3": "907027046896", - "ap-southeast-4": "457447274322", - "ap-southeast-5": "550225433462", - "ap-southeast-7": "590183813437", - "ca-central-1": "763104351884", - "ca-west-1": "204538143572", - "cn-north-1": "727897471807", - "cn-northwest-1": "727897471807", - "eu-central-1": "763104351884", - "eu-central-2": "380420809688", - "eu-north-1": "763104351884", - "eu-south-1": "692866216735", - "eu-south-2": "503227376785", - "eu-west-1": "763104351884", - "eu-west-2": "763104351884", - "eu-west-3": "763104351884", - "il-central-1": "780543022126", - "me-central-1": "914824155844", - "me-south-1": "217643126080", - "mx-central-1": "637423239942", - "sa-east-1": "763104351884", - "us-east-1": "763104351884", - "us-east-2": "763104351884", - "us-gov-east-1": "446045086412", - "us-gov-west-1": "442386744353", - "us-iso-east-1": "886529160074", - "us-isob-east-1": "094389454867", - "us-isof-east-1": "303241398832", - "us-isof-south-1": "454834333376", - "us-west-1": "763104351884", - "us-west-2": "763104351884" - }, - "tag_prefix": "2.6.0-tgi3.2.0", - "repository": "huggingface-pytorch-tgi-inference", - "container_version": { - "gpu": "cu124-ubuntu22.04" - } - }, - "3.2.3": { - "py_versions": [ - "py311" - ], - "registries": { - "af-south-1": "626614931356", - "ap-east-1": "871362719292", - "ap-east-2": "975050140332", - "ap-northeast-1": "763104351884", - "ap-northeast-2": "763104351884", - "ap-northeast-3": 
"364406365360", - "ap-south-1": "763104351884", - "ap-south-2": "772153158452", - "ap-southeast-1": "763104351884", - "ap-southeast-2": "763104351884", - "ap-southeast-3": "907027046896", - "ap-southeast-4": "457447274322", - "ap-southeast-5": "550225433462", - "ap-southeast-7": "590183813437", - "ca-central-1": "763104351884", - "ca-west-1": "204538143572", - "cn-north-1": "727897471807", - "cn-northwest-1": "727897471807", - "eu-central-1": "763104351884", - "eu-central-2": "380420809688", - "eu-north-1": "763104351884", - "eu-south-1": "692866216735", - "eu-south-2": "503227376785", - "eu-west-1": "763104351884", - "eu-west-2": "763104351884", - "eu-west-3": "763104351884", - "il-central-1": "780543022126", - "me-central-1": "914824155844", - "me-south-1": "217643126080", - "mx-central-1": "637423239942", - "sa-east-1": "763104351884", - "us-east-1": "763104351884", - "us-east-2": "763104351884", - "us-gov-east-1": "446045086412", - "us-gov-west-1": "442386744353", - "us-iso-east-1": "886529160074", - "us-isob-east-1": "094389454867", - "us-isof-east-1": "303241398832", - "us-isof-south-1": "454834333376", - "us-west-1": "763104351884", - "us-west-2": "763104351884" - }, - "tag_prefix": "2.6.0-tgi3.2.3", - "repository": "huggingface-pytorch-tgi-inference", - "container_version": { - "gpu": "cu124-ubuntu22.04" - } } } } diff --git a/src/sagemaker/image_uri_config/pytorch.json b/src/sagemaker/image_uri_config/pytorch.json index dbff976442..01e0d65dc5 100644 --- a/src/sagemaker/image_uri_config/pytorch.json +++ b/src/sagemaker/image_uri_config/pytorch.json @@ -199,7 +199,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -247,7 +246,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": 
"763104351884", "ap-northeast-3": "364406365360", @@ -295,7 +293,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -343,7 +340,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -391,7 +387,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -439,7 +434,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -487,7 +481,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -535,7 +528,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -582,7 +574,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -629,7 +620,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -676,7 +666,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", 
"ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -723,7 +712,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -770,7 +758,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -817,7 +804,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -864,7 +850,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -911,7 +896,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -958,7 +942,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1005,7 +988,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1052,7 +1034,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1101,7 +1082,6 @@ "registries": { "af-south-1": "626614931356", 
"ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1150,7 +1130,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1195,7 +1174,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1240,7 +1218,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1285,7 +1262,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1348,7 +1324,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1398,7 +1373,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1446,7 +1420,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1494,7 +1467,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": 
"364406365360", @@ -1542,7 +1514,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1590,7 +1561,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1638,7 +1608,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1818,7 +1787,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1866,7 +1834,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1915,7 +1882,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1963,7 +1929,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2011,7 +1976,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2059,7 +2023,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", 
"ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2107,7 +2070,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2155,7 +2117,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2202,7 +2163,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2249,7 +2209,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2296,7 +2255,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2343,7 +2301,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2390,7 +2347,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2437,7 +2393,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2484,7 +2439,6 @@ "registries": { "af-south-1": 
"626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2531,7 +2485,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2578,7 +2531,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2625,7 +2577,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2672,7 +2623,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2721,7 +2671,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2770,7 +2719,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2819,7 +2767,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2864,7 +2811,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", 
"ap-northeast-3": "364406365360", @@ -2909,7 +2855,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", diff --git a/src/sagemaker/image_uri_config/tensorflow.json b/src/sagemaker/image_uri_config/tensorflow.json index 097baafa9b..37fa7ee46d 100644 --- a/src/sagemaker/image_uri_config/tensorflow.json +++ b/src/sagemaker/image_uri_config/tensorflow.json @@ -631,7 +631,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -675,7 +674,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -719,7 +717,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -763,7 +760,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -807,7 +803,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -851,7 +846,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -895,7 +889,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": 
"975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -939,7 +932,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -983,7 +975,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1027,7 +1018,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1071,7 +1061,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1115,7 +1104,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1159,7 +1147,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1203,7 +1190,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1247,7 +1233,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1291,7 +1276,6 @@ "registries": { 
"af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1335,7 +1319,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1379,7 +1362,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1423,7 +1405,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1467,7 +1448,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1511,7 +1491,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1555,7 +1534,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1599,7 +1577,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1643,7 +1620,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", 
"ap-northeast-3": "364406365360", @@ -1687,7 +1663,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1731,7 +1706,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1775,7 +1749,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1819,7 +1792,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1863,7 +1835,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1907,7 +1878,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1951,7 +1921,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -1995,7 +1964,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2039,7 +2007,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": 
"975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2083,7 +2050,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2127,7 +2093,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2171,7 +2136,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2215,7 +2179,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2259,7 +2222,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2305,7 +2267,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2351,7 +2312,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2393,7 +2353,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2455,7 +2414,6 @@ "registries": { 
"af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2505,7 +2463,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2555,7 +2512,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2605,7 +2561,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -2655,7 +2610,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3111,7 +3065,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3159,7 +3112,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3208,7 +3160,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3257,7 +3208,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", 
"ap-northeast-3": "364406365360", @@ -3306,7 +3256,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3355,7 +3304,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3403,7 +3351,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3451,7 +3398,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3499,7 +3445,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3547,7 +3492,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3595,7 +3539,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3643,7 +3586,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3691,7 +3633,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": 
"975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3739,7 +3680,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3787,7 +3727,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3834,7 +3773,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3881,7 +3819,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3928,7 +3865,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -3975,7 +3911,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4022,7 +3957,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4069,7 +4003,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4116,7 +4049,6 @@ "registries": { 
"af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4163,7 +4095,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4210,7 +4141,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4257,7 +4187,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4304,7 +4233,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4351,7 +4279,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4398,7 +4325,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4445,7 +4371,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4492,7 +4417,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", 
"ap-northeast-3": "364406365360", @@ -4539,7 +4463,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4586,7 +4509,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4633,7 +4555,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4680,7 +4601,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4725,7 +4645,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4774,7 +4693,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4823,7 +4741,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", @@ -4868,7 +4785,6 @@ "registries": { "af-south-1": "626614931356", "ap-east-1": "871362719292", - "ap-east-2": "975050140332", "ap-northeast-1": "763104351884", "ap-northeast-2": "763104351884", "ap-northeast-3": "364406365360", diff --git a/src/sagemaker/jumpstart/factory/model.py b/src/sagemaker/jumpstart/factory/model.py index 4245c5ac91..53ded3f275 
100644 --- a/src/sagemaker/jumpstart/factory/model.py +++ b/src/sagemaker/jumpstart/factory/model.py @@ -104,7 +104,7 @@ def get_default_predictor( """ # if there's a non-default predictor, do not mutate -- return as is - if type(predictor) != Predictor: # pylint: disable=C0123 + if not isinstance(predictor, Predictor): raise RuntimeError( "Can only get default predictor from base Predictor class. " f"Using Predictor class '{type(predictor).__name__}'." diff --git a/src/sagemaker/local/entities.py b/src/sagemaker/local/entities.py index a21a375f54..0cf6c6d55a 100644 --- a/src/sagemaker/local/entities.py +++ b/src/sagemaker/local/entities.py @@ -845,10 +845,10 @@ def _initialize_and_validate_parameters(self, overridden_parameters): ) raise ClientError(error_msg, "start_pipeline_execution") parameter_type = default_parameters[param_name].parameter_type - if type(param_value) != parameter_type.python_type: # pylint: disable=C0123 + if not isinstance(param_value, parameter_type.python_type): error_msg = self._construct_validation_exception_message( - "Unexpected type for parameter '{}'. Expected {} but found " - "{}.".format(param_name, parameter_type.python_type, type(param_value)) + f"Unexpected type for parameter '{param_name}'. Expected \ + {parameter_type.python_type} but found {type(param_value)}." ) raise ClientError(error_msg, "start_pipeline_execution") if param_value == "": diff --git a/src/sagemaker/model_monitor/clarify_model_monitoring.py b/src/sagemaker/model_monitor/clarify_model_monitoring.py index 3edfabc747..2d9a4a69e4 100644 --- a/src/sagemaker/model_monitor/clarify_model_monitoring.py +++ b/src/sagemaker/model_monitor/clarify_model_monitoring.py @@ -86,11 +86,9 @@ def __init__( object that configures network isolation, encryption of inter-container traffic, security group IDs, and subnets. 
""" - if type(self) == __class__: # pylint: disable=unidiomatic-typecheck + if self.__class__ is __class__: raise TypeError( - "{} is abstract, please instantiate its subclasses instead.".format( - __class__.__name__ - ) + f"{__class__.__name__} is abstract, please instantiate its subclasses instead." ) session = sagemaker_session or Session() diff --git a/src/sagemaker/serve/model_server/multi_model_server/prepare.py b/src/sagemaker/serve/model_server/multi_model_server/prepare.py index 48cf5c878a..e3abc70dd6 100644 --- a/src/sagemaker/serve/model_server/multi_model_server/prepare.py +++ b/src/sagemaker/serve/model_server/multi_model_server/prepare.py @@ -84,7 +84,8 @@ def prepare_for_mms( image_uri: str, inference_spec: InferenceSpec = None, ) -> str: - """Prepares for InferenceSpec using model_path, writes inference.py, and captures dependencies to generate secret_key. + """Prepares for InferenceSpec using model_path, writes inference.py, \ + and captures dependencies to generate secret_key. 
Args:to model_path (str) : Argument diff --git a/src/sagemaker/serve/utils/conda_in_process.yml b/src/sagemaker/serve/utils/conda_in_process.yml index 61badaa52f..8233f80902 100644 --- a/src/sagemaker/serve/utils/conda_in_process.yml +++ b/src/sagemaker/serve/utils/conda_in_process.yml @@ -12,15 +12,15 @@ dependencies: - boto3>=1.34.142,<2.0 - cloudpickle==2.2.1 - google-pasta - - numpy>=1.9.0,<2.0 + - numpy>=2.0.0,<2.3.0 - protobuf>=3.12,<5.0 - smdebug_rulesconfig==1.0.1 - importlib-metadata>=1.4.0,<7.0 - - packaging>=20.0 - - pandas + - packaging>=23.0,<25 + - pandas==2.2.3 - pathos - schema - - PyYAML~=6.0 + - PyYAML>=6.0.1 - jsonschema - platformdirs - tblib>=1.7.0,<4 @@ -43,7 +43,7 @@ dependencies: - colorama>=0.4.4 - contextlib2>=21.6.0 - decorator>=5.1.1 - - dill>=0.3.6 + - dill>=0.3.9 - docutils>=0.16 - entrypoints>=0.4 - filelock>=3.11.0 @@ -64,7 +64,7 @@ dependencies: - multiprocess>=0.70.14 - networkx>=3.1 - packaging>=23.1 - - pandas>=1.5.3 + - pandas==2.2.3 - pathos>=0.3.0 - pillow>=9.5.0 - platformdirs>=3.2.0 @@ -82,7 +82,7 @@ dependencies: - python-dateutil>=2.8.2 - pytz>=2023.3 - pytz-deprecation-shim>=0.1.0.post0 - - pyyaml>=5.4.1 + - pyyaml>=6.0.1 - regex>=2023.3.23 - requests>=2.28.2 - rich>=13.3.4 diff --git a/src/sagemaker/serve/utils/in_process_requirements.txt b/src/sagemaker/serve/utils/in_process_requirements.txt index e356e1720d..c7659adb1f 100644 --- a/src/sagemaker/serve/utils/in_process_requirements.txt +++ b/src/sagemaker/serve/utils/in_process_requirements.txt @@ -11,7 +11,7 @@ cloudpickle==2.2.1 colorama>=0.4.4 contextlib2>=21.6.0 decorator>=5.1.1 -dill>=0.3.6 +dill>=0.3.9 docutils>=0.16 entrypoints>=0.4 filelock>=3.11.0 @@ -32,7 +32,7 @@ mpmath>=1.3.0 multiprocess>=0.70.14 networkx>=3.1 packaging>=23.1 -pandas>=1.5.3 +pandas==2.2.3 pathos>=0.3.0 pillow>=9.5.0 platformdirs>=3.2.0 @@ -50,7 +50,7 @@ pyrsistent>=0.19.3 python-dateutil>=2.8.2 pytz>=2023.3 pytz-deprecation-shim>=0.1.0.post0 -pyyaml>=5.4.1 +pyyaml>=6.0.1 regex>=2023.3.23 
requests>=2.28.2 rich>=13.3.4 diff --git a/tests/data/pipeline/model_step/pytorch_mnist/requirements.txt b/tests/data/pipeline/model_step/pytorch_mnist/requirements.txt index 56d09228be..c25fca7e9f 100644 --- a/tests/data/pipeline/model_step/pytorch_mnist/requirements.txt +++ b/tests/data/pipeline/model_step/pytorch_mnist/requirements.txt @@ -1 +1 @@ -scipy>=1.8.1 +scipy>=1.11.3 diff --git a/tests/data/remote_function/requirements.txt b/tests/data/remote_function/requirements.txt index 0e99587e6e..f89caf8c2b 100644 --- a/tests/data/remote_function/requirements.txt +++ b/tests/data/remote_function/requirements.txt @@ -1 +1 @@ -scipy==1.10.1 +scipy==1.13.0 diff --git a/tests/data/serve_resources/mlflow/pytorch/conda.yaml b/tests/data/serve_resources/mlflow/pytorch/conda.yaml index beecdbab08..015cb47850 100644 --- a/tests/data/serve_resources/mlflow/pytorch/conda.yaml +++ b/tests/data/serve_resources/mlflow/pytorch/conda.yaml @@ -4,21 +4,21 @@ dependencies: - python=3.10.13 - pip<=23.3.1 - pip: - - mlflow==2.10.2 + - mlflow>=2.16.1 - astunparse==1.6.3 - cffi==1.16.0 - cloudpickle==2.2.1 - defusedxml==0.7.1 - - dill==0.3.8 + - dill==0.3.9 - gmpy2==2.1.2 - - numpy==1.26.4 + - numpy>=2.0.0,<2.3.0 - opt-einsum==3.3.0 - packaging==24.0 - - pandas==2.2.1 + - pandas==2.2.3 - pyyaml==6.0.1 - requests==2.31.0 - torch>=2.6.0 - torchvision>=0.17.0 - tqdm==4.66.2 - - scikit-learn==1.3.2 + - scikit-learn==1.6.1 name: mlflow-env diff --git a/tests/data/serve_resources/mlflow/pytorch/requirements.txt b/tests/data/serve_resources/mlflow/pytorch/requirements.txt index 450bcbfada..101aa2651b 100644 --- a/tests/data/serve_resources/mlflow/pytorch/requirements.txt +++ b/tests/data/serve_resources/mlflow/pytorch/requirements.txt @@ -1,14 +1,14 @@ -mlflow==2.13.2 +mlflow>=2.16.1 astunparse==1.6.3 cffi==1.16.0 cloudpickle==2.2.1 defusedxml==0.7.1 -dill==0.3.8 +dill==0.3.9 gmpy2==2.1.2 -numpy==1.24.4 +numpy>=2.0.0,<2.3.0 opt-einsum==3.3.0 -packaging==21.3 -pandas==2.2.1 +packaging>=23.0,<25 
+pandas==2.2.3 pyyaml==6.0.1 requests==2.32.2 torch>=2.6.0 diff --git a/tests/data/serve_resources/mlflow/tensorflow/conda.yaml b/tests/data/serve_resources/mlflow/tensorflow/conda.yaml index 90d8c300a0..bed6a78250 100644 --- a/tests/data/serve_resources/mlflow/tensorflow/conda.yaml +++ b/tests/data/serve_resources/mlflow/tensorflow/conda.yaml @@ -2,10 +2,10 @@ channels: - conda-forge dependencies: - python=3.10.13 -- pip<=23.3.1 +- pip<=24.3 - pip: - - mlflow==2.11.1 + - mlflow>=2.16.1 - cloudpickle==2.2.1 - - numpy==1.26.4 - - tensorflow==2.16.1 + - numpy>=1.26.4 + - tensorflow==2.18.0 name: mlflow-env diff --git a/tests/data/serve_resources/mlflow/tensorflow/requirements.txt b/tests/data/serve_resources/mlflow/tensorflow/requirements.txt index ff99d3b92e..1e056aa3f2 100644 --- a/tests/data/serve_resources/mlflow/tensorflow/requirements.txt +++ b/tests/data/serve_resources/mlflow/tensorflow/requirements.txt @@ -1,4 +1,4 @@ -mlflow==2.13.2 +mlflow>=2.16.1 cloudpickle==2.2.1 -numpy==1.26.4 -tensorflow==2.16.1 +numpy>=1.26.4 +tensorflow==2.18.0 diff --git a/tests/data/serve_resources/mlflow/xgboost/conda.yaml b/tests/data/serve_resources/mlflow/xgboost/conda.yaml index 44ca3c4c2e..02f96238b2 100644 --- a/tests/data/serve_resources/mlflow/xgboost/conda.yaml +++ b/tests/data/serve_resources/mlflow/xgboost/conda.yaml @@ -2,14 +2,14 @@ channels: - conda-forge dependencies: - python=3.10.13 -- pip<=23.3.1 +- pip<=24.3 - pip: - - mlflow==2.11.1 + - mlflow>=2.16.1 - lz4==4.3.2 - - numpy==1.26.4 - - pandas==2.2.1 + - numpy>=1.26.4 + - pandas==2.2.3 - psutil==5.9.8 - - scikit-learn==1.3.2 - - scipy==1.11.3 - - xgboost==1.7.1 + - scikit-learn==1.6.1 + - scipy==1.13.0 + - xgboost==2.1.4 name: mlflow-env diff --git a/tests/data/serve_resources/mlflow/xgboost/requirements.txt b/tests/data/serve_resources/mlflow/xgboost/requirements.txt index 1130dcaec5..49aeff45a1 100644 --- a/tests/data/serve_resources/mlflow/xgboost/requirements.txt +++ 
b/tests/data/serve_resources/mlflow/xgboost/requirements.txt @@ -1,8 +1,8 @@ -mlflow==2.13.2 +mlflow>=2.16.1 lz4==4.3.2 -numpy==1.24.4 -pandas==2.0.3 +numpy>=1.26.4 +pandas==2.2.3 psutil==5.9.8 -scikit-learn==1.3.2 -scipy==1.10.1 -xgboost==1.7.1 +scikit-learn==1.6.1 +scipy==1.13.0 +xgboost==2.1.4 diff --git a/tests/data/workflow/requirements.txt b/tests/data/workflow/requirements.txt index 0e99587e6e..f89caf8c2b 100644 --- a/tests/data/workflow/requirements.txt +++ b/tests/data/workflow/requirements.txt @@ -1 +1 @@ -scipy==1.10.1 +scipy==1.13.0 diff --git a/tests/integ/sagemaker/experiments/test_run.py b/tests/integ/sagemaker/experiments/test_run.py index 4f59d11c54..f00f53a5ad 100644 --- a/tests/integ/sagemaker/experiments/test_run.py +++ b/tests/integ/sagemaker/experiments/test_run.py @@ -720,8 +720,8 @@ def _generate_processor( ) return FrameworkProcessor( estimator_cls=PyTorch, - framework_version="1.10", - py_version="py38", + framework_version="1.13.1", + py_version="py39", instance_count=1, instance_type="ml.m5.xlarge", role=execution_role, diff --git a/tests/integ/sagemaker/jumpstart/private_hub/test_hub_content.py b/tests/integ/sagemaker/jumpstart/private_hub/test_hub_content.py index b25cff2d62..04b945a457 100644 --- a/tests/integ/sagemaker/jumpstart/private_hub/test_hub_content.py +++ b/tests/integ/sagemaker/jumpstart/private_hub/test_hub_content.py @@ -38,7 +38,7 @@ def test_hub_model_reference(setup): describe_model_response = hub_instance.describe_model(model_name=model_id) assert describe_model_response is not None - assert type(describe_model_response) == DescribeHubContentResponse + assert isinstance(describe_model_response, DescribeHubContentResponse) assert describe_model_response.hub_content_name == model_id assert describe_model_response.hub_content_type == "ModelReference" diff --git a/tests/integ/sagemaker/serve/test_serve_js_deep_unit_tests.py b/tests/integ/sagemaker/serve/test_serve_js_deep_unit_tests.py index e13e672bec..ea65f998c8 100644 
--- a/tests/integ/sagemaker/serve/test_serve_js_deep_unit_tests.py +++ b/tests/integ/sagemaker/serve/test_serve_js_deep_unit_tests.py @@ -24,11 +24,12 @@ def test_js_model_with_optimize_speculative_decoding_config_gated_requests_are_expected( sagemaker_session, ): - with patch.object( - Session, "create_model", return_value="mock_model" - ) as mock_create_model, patch.object( - Session, "endpoint_from_production_variants" - ) as mock_endpoint_from_production_variants: + with ( + patch.object(Session, "create_model", return_value="mock_model") as mock_create_model, + patch.object( + Session, "endpoint_from_production_variants" + ) as mock_endpoint_from_production_variants, + ): iam_client = sagemaker_session.boto_session.client("iam") role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"] @@ -100,17 +101,18 @@ def test_js_model_with_optimize_speculative_decoding_config_gated_requests_are_e def test_js_model_with_optimize_sharding_and_resource_requirements_requests_are_expected( sagemaker_session, ): - with patch.object( - Session, - "wait_for_optimization_job", - return_value={"OptimizationJobName": "mock_optimization_job"}, - ), patch.object( - Session, "create_model", return_value="mock_model" - ) as mock_create_model, patch.object( - Session, "endpoint_from_production_variants", return_value="mock_endpoint_name" - ) as mock_endpoint_from_production_variants, patch.object( - Session, "create_inference_component" - ) as mock_create_inference_component: + with ( + patch.object( + Session, + "wait_for_optimization_job", + return_value={"OptimizationJobName": "mock_optimization_job"}, + ), + patch.object(Session, "create_model", return_value="mock_model") as mock_create_model, + patch.object( + Session, "endpoint_from_production_variants", return_value="mock_endpoint_name" + ) as mock_endpoint_from_production_variants, + patch.object(Session, "create_inference_component") as mock_create_inference_component, + ): iam_client = 
sagemaker_session.boto_session.client("iam") role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"] @@ -185,15 +187,17 @@ def test_js_model_with_optimize_sharding_and_resource_requirements_requests_are_ def test_js_model_with_optimize_quantization_on_pre_optimized_model_requests_are_expected( sagemaker_session, ): - with patch.object( - Session, - "wait_for_optimization_job", - return_value={"OptimizationJobName": "mock_optimization_job"}, - ), patch.object( - Session, "create_model", return_value="mock_model" - ) as mock_create_model, patch.object( - Session, "endpoint_from_production_variants", return_value="mock_endpoint_name" - ) as mock_endpoint_from_production_variants: + with ( + patch.object( + Session, + "wait_for_optimization_job", + return_value={"OptimizationJobName": "mock_optimization_job"}, + ), + patch.object(Session, "create_model", return_value="mock_model") as mock_create_model, + patch.object( + Session, "endpoint_from_production_variants", return_value="mock_endpoint_name" + ) as mock_endpoint_from_production_variants, + ): iam_client = sagemaker_session.boto_session.client("iam") role_arn = iam_client.get_role(RoleName=ROLE_NAME)["Role"]["Arn"] diff --git a/tests/integ/sagemaker/workflow/helpers.py b/tests/integ/sagemaker/workflow/helpers.py index 20365ef169..9f0176c5c2 100644 --- a/tests/integ/sagemaker/workflow/helpers.py +++ b/tests/integ/sagemaker/workflow/helpers.py @@ -70,8 +70,8 @@ def create_and_execute_pipeline( assert execution_steps[0]["StepStatus"] == step_status if step_result_type: result = execution.result(execution_steps[0]["StepName"]) - assert ( - type(result) == step_result_type + assert isinstance( + result, step_result_type ), f"Expected {step_result_type}, instead found {type(result)}" if step_result_value: diff --git a/tests/integ/sagemaker/workflow/test_workflow.py b/tests/integ/sagemaker/workflow/test_workflow.py index 2643a3b88e..9ef0b14a04 100644 --- a/tests/integ/sagemaker/workflow/test_workflow.py +++ 
b/tests/integ/sagemaker/workflow/test_workflow.py @@ -1122,8 +1122,8 @@ def test_model_registration_with_tuning_model( entry_point=entry_point, source_dir=base_dir, role=role, - framework_version="1.10", - py_version="py38", + framework_version="1.13.1", + py_version="py39", instance_count=instance_count, instance_type=instance_type, sagemaker_session=pipeline_session, @@ -1159,8 +1159,8 @@ def test_model_registration_with_tuning_model( ), entry_point=entry_point, source_dir=base_dir, - framework_version="1.10", - py_version="py38", + framework_version="1.13.1", + py_version="py39", sagemaker_session=pipeline_session, ) step_model_regis_args = model.register( diff --git a/tests/integ/test_feature_store.py b/tests/integ/test_feature_store.py index 43db78527a..75f1807148 100644 --- a/tests/integ/test_feature_store.py +++ b/tests/integ/test_feature_store.py @@ -1645,9 +1645,11 @@ def test_create_dataset_with_feature_group_base( feature_store_session, feature_group, offline_store_s3_uri ) - with timeout(minutes=10) and cleanup_offline_store( - base, feature_store_session - ) and cleanup_offline_store(feature_group, feature_store_session): + with ( + timeout(minutes=10) + and cleanup_offline_store(base, feature_store_session) + and cleanup_offline_store(feature_group, feature_store_session) + ): feature_store = FeatureStore(sagemaker_session=feature_store_session) df, query_string = ( feature_store.create_dataset(base=base, output_path=offline_store_s3_uri) @@ -1832,9 +1834,11 @@ def test_create_dataset_with_feature_group_base_with_additional_params( feature_store_session, feature_group, offline_store_s3_uri ) - with timeout(minutes=10) and cleanup_offline_store( - base, feature_store_session - ) and cleanup_offline_store(feature_group, feature_store_session): + with ( + timeout(minutes=10) + and cleanup_offline_store(base, feature_store_session) + and cleanup_offline_store(feature_group, feature_store_session) + ): feature_store = 
FeatureStore(sagemaker_session=feature_store_session) df, query_string = ( feature_store.create_dataset(base=base, output_path=offline_store_s3_uri) diff --git a/tests/unit/sagemaker/feature_store/feature_processor/lineage/test_feature_processor_lineage.py b/tests/unit/sagemaker/feature_store/feature_processor/lineage/test_feature_processor_lineage.py index 118800dd0f..f149823b2f 100644 --- a/tests/unit/sagemaker/feature_store/feature_processor/lineage/test_feature_processor_lineage.py +++ b/tests/unit/sagemaker/feature_store/feature_processor/lineage/test_feature_processor_lineage.py @@ -113,69 +113,85 @@ def test_create_lineage_when_no_lineage_exists_with_fg_only(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_1, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - side_effect=RESOURCE_NOT_FOUND_EXCEPTION, - ) as load_pipeline_context_method, patch.object( - PipelineLineageEntityHandler, - "create_pipeline_context", - return_value=PIPELINE_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as 
load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - [], - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_1, + ) as 
create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + side_effect=RESOURCE_NOT_FOUND_EXCEPTION, + ) as load_pipeline_context_method, + patch.object( + PipelineLineageEntityHandler, + "create_pipeline_context", + return_value=PIPELINE_CONTEXT, + ), + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + [], + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + ): lineage_handler.create_lineage() 
retrieve_feature_group_context_arns_method.assert_has_calls( @@ -259,75 +275,92 @@ def test_create_lineage_when_no_lineage_exists_with_raw_data_only(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_1, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - side_effect=RESOURCE_NOT_FOUND_EXCEPTION, - ) as load_pipeline_context_method, patch.object( - PipelineLineageEntityHandler, - "create_pipeline_context", - return_value=PIPELINE_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - [], - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - 
PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_1, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + side_effect=RESOURCE_NOT_FOUND_EXCEPTION, + ) as load_pipeline_context_method, + patch.object( + PipelineLineageEntityHandler, + "create_pipeline_context", + return_value=PIPELINE_CONTEXT, + ), + patch.object( + 
PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + [], + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_called_once_with( @@ -408,75 +441,92 @@ def 
test_create_lineage_when_no_lineage_exists_with_fg_and_raw_data_with_tags(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_1, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - side_effect=RESOURCE_NOT_FOUND_EXCEPTION, - ) as load_pipeline_context_method, patch.object( - PipelineLineageEntityHandler, - "create_pipeline_context", - return_value=PIPELINE_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - [], - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, 
patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_1, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + side_effect=RESOURCE_NOT_FOUND_EXCEPTION, + ) as load_pipeline_context_method, + patch.object( + PipelineLineageEntityHandler, + "create_pipeline_context", + return_value=PIPELINE_CONTEXT, + ), + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + 
return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + [], + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -569,75 +619,92 @@ def test_create_lineage_when_no_lineage_exists_with_no_transformation_code(): output=FEATURE_GROUP_DATA_SOURCE[0].name, 
sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=None, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - side_effect=RESOURCE_NOT_FOUND_EXCEPTION, - ) as load_pipeline_context_method, patch.object( - PipelineLineageEntityHandler, - "create_pipeline_context", - return_value=PIPELINE_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - [], - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, 
patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=None, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + side_effect=RESOURCE_NOT_FOUND_EXCEPTION, + ) as load_pipeline_context_method, + patch.object( + PipelineLineageEntityHandler, + "create_pipeline_context", + return_value=PIPELINE_CONTEXT, + ), + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as 
load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + [], + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -728,78 +795,96 @@ def test_create_lineage_when_already_exist_with_no_version_change(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - 
FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_1, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=PIPELINE_CONTEXT, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - return_value=transformation_code_1, - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as create_pipeline_version_context_method, patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) 
as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_1, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=PIPELINE_CONTEXT, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + 
generate_pipeline_version_upstream_raw_data_list(), + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + return_value=transformation_code_1, + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as create_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -925,73 +1010,91 @@ def 
test_create_lineage_when_already_exist_with_changed_raw_data(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[RAW_DATA_INPUT_ARTIFACTS[0], RAW_DATA_INPUT_ARTIFACTS[1]], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_1, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=pipeline_context, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - return_value=transformation_code_1, - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - 
PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[RAW_DATA_INPUT_ARTIFACTS[0], RAW_DATA_INPUT_ARTIFACTS[1]], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_1, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + 
LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + return_value=transformation_code_1, + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) 
retrieve_feature_group_context_arns_method.assert_has_calls( @@ -1140,74 +1243,92 @@ def test_create_lineage_when_already_exist_with_changed_input_fg(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[FEATURE_GROUP_INPUT[0], FEATURE_GROUP_INPUT[0]], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_1, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=pipeline_context, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - return_value=transformation_code_1, - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, 
patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[FEATURE_GROUP_INPUT[0], FEATURE_GROUP_INPUT[0]], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_1, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + 
PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + return_value=transformation_code_1, + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + 
return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -1354,78 +1475,96 @@ def test_create_lineage_when_already_exist_with_changed_output_fg(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[1], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_1, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=pipeline_context, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - 
return_value=transformation_code_1, - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[1], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + 
return_value=TRANSFORMATION_CODE_ARTIFACT_1, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + return_value=transformation_code_1, + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as 
add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -1576,78 +1715,96 @@ def test_create_lineage_when_already_exist_with_changed_transformation_code(): transformation_code=TRANSFORMATION_CODE_INPUT_2, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_2, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=pipeline_context, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, 
patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - return_value=transformation_code_1, - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + 
RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_2, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + return_value=transformation_code_1, + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + 
patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -1778,78 +1935,96 @@ def test_create_lineage_when_already_exist_with_last_transformation_code_as_none transformation_code=TRANSFORMATION_CODE_INPUT_2, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_2, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=pipeline_context, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - 
side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - return_value=transformation_code_1, - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + 
FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_2, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + return_value=transformation_code_1, + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + LineageAssociationHandler, 
"add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -1968,77 +2143,95 @@ def test_create_lineage_when_already_exist_with_all_previous_transformation_code transformation_code=TRANSFORMATION_CODE_INPUT_2, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=TRANSFORMATION_CODE_ARTIFACT_2, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=pipeline_context, - ) as 
load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - iter([]), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), 
dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=TRANSFORMATION_CODE_ARTIFACT_2, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + iter([]), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + 
return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -2154,78 +2347,96 @@ def test_create_lineage_when_already_exist_with_removed_transformation_code(): output=FEATURE_GROUP_DATA_SOURCE[0].name, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - FeatureGroupLineageEntityHandler, - "retrieve_feature_group_context_arns", - side_effect=[ - FEATURE_GROUP_INPUT[0], - FEATURE_GROUP_INPUT[1], - FEATURE_GROUP_INPUT[0], - ], - ) as retrieve_feature_group_context_arns_method, patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - S3LineageEntityHandler, - "create_transformation_code_artifact", - return_value=None, - ) as create_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - 
return_value=pipeline_context, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - generate_pipeline_version_upstream_transformation_code(), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, - "load_artifact_from_arn", - return_value=transformation_code_1, - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, - "update_transformation_code_artifact", - ) as update_transformation_code_artifact_method, patch.object( - PipelineLineageEntityHandler, - "update_pipeline_context", - ) as update_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "create_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ), patch.object( - LineageAssociationHandler, "add_upstream_feature_group_data_associations" - ) as add_upstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_downstream_feature_group_data_associations" - ) as add_downstream_feature_group_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_raw_data_associations" - ) as add_upstream_raw_data_associations_method, patch.object( - LineageAssociationHandler, "add_upstream_transformation_code_associations" - ) as add_upstream_transformation_code_associations_method, patch.object( - LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" - ) as 
add_pipeline_and_pipeline_version_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + FeatureGroupLineageEntityHandler, + "retrieve_feature_group_context_arns", + side_effect=[ + FEATURE_GROUP_INPUT[0], + FEATURE_GROUP_INPUT[1], + FEATURE_GROUP_INPUT[0], + ], + ) as retrieve_feature_group_context_arns_method, + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + S3LineageEntityHandler, + "create_transformation_code_artifact", + return_value=None, + ) as create_transformation_code_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + generate_pipeline_version_upstream_transformation_code(), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, + "load_artifact_from_arn", + return_value=transformation_code_1, + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, + "update_transformation_code_artifact", + ) as update_transformation_code_artifact_method, 
+ patch.object( + PipelineLineageEntityHandler, + "update_pipeline_context", + ) as update_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "create_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ), + patch.object( + LineageAssociationHandler, "add_upstream_feature_group_data_associations" + ) as add_upstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_downstream_feature_group_data_associations" + ) as add_downstream_feature_group_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_raw_data_associations" + ) as add_upstream_raw_data_associations_method, + patch.object( + LineageAssociationHandler, "add_upstream_transformation_code_associations" + ) as add_upstream_transformation_code_associations_method, + patch.object( + LineageAssociationHandler, "add_pipeline_and_pipeline_version_association" + ) as add_pipeline_and_pipeline_version_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_lineage(TAGS) retrieve_feature_group_context_arns_method.assert_has_calls( @@ -2370,15 +2581,18 @@ def test_get_pipeline_lineage_names_when_lineage_exists(): transformation_code=TRANSFORMATION_CODE_INPUT_1, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=PIPELINE_CONTEXT, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method: + with ( + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=PIPELINE_CONTEXT, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + 
"load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + ): return_value = lineage_handler.get_pipeline_lineage_names() assert return_value == dict( @@ -2416,28 +2630,34 @@ def test_create_schedule_lineage(): pipeline=PIPELINE, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=PIPELINE_CONTEXT, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - S3LineageEntityHandler, - "retrieve_pipeline_schedule_artifact", - return_value=SCHEDULE_ARTIFACT_RESULT, - ) as retrieve_pipeline_schedule_artifact_method, patch.object( - LineageAssociationHandler, - "add_upstream_schedule_associations", - ) as add_upstream_schedule_associations_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=PIPELINE_CONTEXT, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + S3LineageEntityHandler, + "retrieve_pipeline_schedule_artifact", + return_value=SCHEDULE_ARTIFACT_RESULT, + ) as retrieve_pipeline_schedule_artifact_method, + patch.object( + LineageAssociationHandler, + "add_upstream_schedule_associations", + ) as add_upstream_schedule_associations_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_schedule_lineage( 
pipeline_name=PIPELINE_NAME, schedule_arn=SCHEDULE_ARN, @@ -2487,28 +2707,34 @@ def test_create_trigger_lineage(): pipeline=PIPELINE, sagemaker_session=SAGEMAKER_SESSION_MOCK, ) - with patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=PIPELINE_CONTEXT, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - S3LineageEntityHandler, - "retrieve_pipeline_trigger_artifact", - return_value=PIPELINE_TRIGGER_ARTIFACT, - ) as retrieve_pipeline_trigger_artifact_method, patch.object( - LineageAssociationHandler, - "_add_association", - ) as add_association_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags: + with ( + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=PIPELINE_CONTEXT, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + S3LineageEntityHandler, + "retrieve_pipeline_trigger_artifact", + return_value=PIPELINE_TRIGGER_ARTIFACT, + ) as retrieve_pipeline_trigger_artifact_method, + patch.object( + LineageAssociationHandler, + "_add_association", + ) as add_association_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + ): lineage_handler.create_trigger_lineage( pipeline_name=PIPELINE_NAME, trigger_arn=TRIGGER_ARN, @@ -2564,56 +2790,68 @@ def test_upsert_tags_for_lineage_resources(): ) lineage_handler.sagemaker_session.boto_session = Mock() lineage_handler.sagemaker_session.sagemaker_client = 
Mock() - with patch.object( - S3LineageEntityHandler, - "retrieve_raw_data_artifact", - side_effect=[ - RAW_DATA_INPUT_ARTIFACTS[0], - RAW_DATA_INPUT_ARTIFACTS[1], - RAW_DATA_INPUT_ARTIFACTS[2], - RAW_DATA_INPUT_ARTIFACTS[3], - ], - ) as retrieve_raw_data_artifact_method, patch.object( - PipelineLineageEntityHandler, - "load_pipeline_context", - return_value=pipeline_context, - ) as load_pipeline_context_method, patch.object( - PipelineVersionLineageEntityHandler, - "load_pipeline_version_context", - return_value=PIPELINE_VERSION_CONTEXT, - ) as load_pipeline_version_context_method, patch.object( - LineageAssociationHandler, - "list_upstream_associations", - side_effect=[ - generate_pipeline_version_upstream_feature_group_list(), - generate_pipeline_version_upstream_raw_data_list(), - iter([]), - ], - ) as list_upstream_associations_method, patch.object( - LineageAssociationHandler, - "list_downstream_associations", - return_value=generate_pipeline_version_downstream_feature_group(), - ) as list_downstream_associations_method, patch.object( - S3LineageEntityHandler, "load_artifact_from_arn", return_value=ARTIFACT_RESULT - ) as load_artifact_from_arn_method, patch.object( - S3LineageEntityHandler, "_load_artifact_from_s3_uri", return_value=ARTIFACT_SUMMARY - ) as load_artifact_from_s3_uri_method, patch.object( - Artifact, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as artifact_set_tags, patch.object( - Context, - "set_tags", - return_value={ - "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] - }, - ) as context_set_tags, patch.object( - EventBridgeSchedulerHelper, "describe_schedule", return_value=dict(Arn="schedule_arn") - ) as get_event_bridge_schedule, patch.object( - EventBridgeRuleHelper, "describe_rule", return_value=dict(Arn="rule_arn") - ) as get_event_bridge_rule: + with ( + patch.object( + S3LineageEntityHandler, + "retrieve_raw_data_artifact", + 
side_effect=[ + RAW_DATA_INPUT_ARTIFACTS[0], + RAW_DATA_INPUT_ARTIFACTS[1], + RAW_DATA_INPUT_ARTIFACTS[2], + RAW_DATA_INPUT_ARTIFACTS[3], + ], + ) as retrieve_raw_data_artifact_method, + patch.object( + PipelineLineageEntityHandler, + "load_pipeline_context", + return_value=pipeline_context, + ) as load_pipeline_context_method, + patch.object( + PipelineVersionLineageEntityHandler, + "load_pipeline_version_context", + return_value=PIPELINE_VERSION_CONTEXT, + ) as load_pipeline_version_context_method, + patch.object( + LineageAssociationHandler, + "list_upstream_associations", + side_effect=[ + generate_pipeline_version_upstream_feature_group_list(), + generate_pipeline_version_upstream_raw_data_list(), + iter([]), + ], + ) as list_upstream_associations_method, + patch.object( + LineageAssociationHandler, + "list_downstream_associations", + return_value=generate_pipeline_version_downstream_feature_group(), + ) as list_downstream_associations_method, + patch.object( + S3LineageEntityHandler, "load_artifact_from_arn", return_value=ARTIFACT_RESULT + ) as load_artifact_from_arn_method, + patch.object( + S3LineageEntityHandler, "_load_artifact_from_s3_uri", return_value=ARTIFACT_SUMMARY + ) as load_artifact_from_s3_uri_method, + patch.object( + Artifact, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as artifact_set_tags, + patch.object( + Context, + "set_tags", + return_value={ + "Tags": [dict(Key="key_1", Value="value_1"), dict(Key="key_2", Value="value_2")] + }, + ) as context_set_tags, + patch.object( + EventBridgeSchedulerHelper, "describe_schedule", return_value=dict(Arn="schedule_arn") + ) as get_event_bridge_schedule, + patch.object( + EventBridgeRuleHelper, "describe_rule", return_value=dict(Arn="rule_arn") + ) as get_event_bridge_rule, + ): lineage_handler.upsert_tags_for_lineage_resources(TAGS) retrieve_raw_data_artifact_method.assert_has_calls( diff --git 
a/tests/unit/sagemaker/huggingface/test_llm_utils.py b/tests/unit/sagemaker/huggingface/test_llm_utils.py index 675a6fd885..9bb1b451a1 100644 --- a/tests/unit/sagemaker/huggingface/test_llm_utils.py +++ b/tests/unit/sagemaker/huggingface/test_llm_utils.py @@ -65,7 +65,7 @@ def test_huggingface_model_metadata_unauthorized_exception(self, mock_urllib): "Trying to access a gated/private HuggingFace model without valid credentials. " "Please provide a HUGGING_FACE_HUB_TOKEN in env_vars" ) - self.assertEquals(expected_error_msg, str(context.exception)) + self.assertEqual(expected_error_msg, str(context.exception)) @patch("sagemaker.huggingface.llm_utils.urllib") def test_huggingface_model_metadata_general_exception(self, mock_urllib): @@ -76,7 +76,7 @@ def test_huggingface_model_metadata_general_exception(self, mock_urllib): expected_error_msg = ( f"Did not find model metadata for the following HuggingFace Model ID {MOCK_HF_ID}" ) - self.assertEquals(expected_error_msg, str(context.exception)) + self.assertEqual(expected_error_msg, str(context.exception)) @patch("huggingface_hub.snapshot_download") def test_download_huggingface_model_metadata(self, mock_snapshot_download): diff --git a/tests/unit/sagemaker/jumpstart/constants.py b/tests/unit/sagemaker/jumpstart/constants.py index 2eb7469e21..90d2e7717e 100644 --- a/tests/unit/sagemaker/jumpstart/constants.py +++ b/tests/unit/sagemaker/jumpstart/constants.py @@ -5361,7 +5361,7 @@ "safetensors==0.3.1", "sagemaker_jumpstart_huggingface_script_utilities==1.1.3", "sagemaker_jumpstart_script_utilities==1.1.9", - "scipy==1.11.1", + "scipy==1.13.0", "termcolor==2.3.0", "texttable==1.6.7", "tokenize-rt==5.1.0", @@ -7870,7 +7870,7 @@ "safetensors==0.3.1", "sagemaker_jumpstart_huggingface_script_utilities==1.1.3", "sagemaker_jumpstart_script_utilities==1.1.9", - "scipy==1.11.1", + "scipy==1.13.0", "termcolor==2.3.0", "texttable==1.6.7", "tokenize-rt==5.1.0", @@ -8346,7 +8346,7 @@ "safetensors==0.3.1", 
"sagemaker_jumpstart_huggingface_script_utilities==1.1.3", "sagemaker_jumpstart_script_utilities==1.1.9", - "scipy==1.11.1", + "scipy==1.13.0", "termcolor==2.3.0", "texttable==1.6.7", "tokenize-rt==5.1.0", @@ -12095,7 +12095,7 @@ "inference_vulnerabilities": [], "training_vulnerable": False, "training_dependencies": [ - "numpy==1.23.1", + "numpy>=2.0.0", "opencv_python==4.7.0.68", "sagemaker_jumpstart_prepack_script_utilities==1.0.0", ], @@ -14360,10 +14360,10 @@ "jmespath==1.0.1", "jsonschema==4.17.3", "multiprocess==0.70.14", - "numpy==1.24.3", + "numpy>=2.0.0", "oscrypto==1.3.0", "packaging==23.1", - "pandas==2.0.2", + "pandas==2.2.3", "pathos==0.3.0", "pkgutil-resolve-name==1.3.10", "platformdirs==3.8.0", @@ -14884,10 +14884,10 @@ "jmespath==1.0.1", "jsonschema==4.17.3", "multiprocess==0.70.14", - "numpy==1.24.3", + "numpy>=2.0.0", "oscrypto==1.3.0", "packaging==23.1", - "pandas==2.0.2", + "pandas==2.2.3", "pathos==0.3.0", "pkgutil-resolve-name==1.3.10", "platformdirs==3.8.0", @@ -17400,7 +17400,7 @@ "safetensors==0.3.1", "sagemaker_jumpstart_huggingface_script_utilities==1.1.4", "sagemaker_jumpstart_script_utilities==1.1.9", - "scipy==1.11.1", + "scipy==1.13.0", "termcolor==2.3.0", "texttable==1.6.7", "tokenize-rt==5.1.0", diff --git a/tests/unit/sagemaker/jumpstart/estimator/test_sagemaker_config.py b/tests/unit/sagemaker/jumpstart/estimator/test_sagemaker_config.py index 073921d5ba..39eca166ee 100644 --- a/tests/unit/sagemaker/jumpstart/estimator/test_sagemaker_config.py +++ b/tests/unit/sagemaker/jumpstart/estimator/test_sagemaker_config.py @@ -123,16 +123,16 @@ def test_without_arg_overwrites_without_kwarg_collisions_with_config( mock_retrieve_model_init_kwargs.return_value = {} - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), config_role) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), 
config_role) assert "enable_network_isolation" not in mock_estimator_init.call_args[1] assert "encrypt_inter_container_traffic" not in mock_estimator_init.call_args[1] estimator.deploy() - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 3) - self.assertEquals(mock_estimator_deploy.call_args[1].get("role"), config_inference_role) + self.assertEqual(mock_estimator_deploy.call_args[1].get("role"), config_inference_role) assert "enable_network_isolation" not in mock_estimator_deploy.call_args[1] @@ -181,13 +181,13 @@ def test_without_arg_overwrites_with_kwarg_collisions_with_config( model_id=model_id, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), config_role) - self.assertEquals( + self.assertEqual(mock_get_sagemaker_config_value.call_count, 3) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), config_role) + self.assertEqual( mock_estimator_init.call_args[1].get("enable_network_isolation"), config_enable_network_isolation, ) - self.assertEquals( + self.assertEqual( mock_estimator_init.call_args[1].get("encrypt_inter_container_traffic"), config_intercontainer_encryption, ) @@ -200,11 +200,11 @@ def test_without_arg_overwrites_with_kwarg_collisions_with_config( estimator.deploy() - self.assertEquals(mock_get_sagemaker_config_value.call_count, 6) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 6) - self.assertEquals(mock_estimator_deploy.call_args[1].get("role"), config_inference_role) + self.assertEqual(mock_estimator_deploy.call_args[1].get("role"), config_inference_role) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("enable_network_isolation"), config_inference_enable_network_isolation, ) @@ -257,13 +257,13 @@ def test_with_arg_overwrites_with_kwarg_collisions_with_config( 
encrypt_inter_container_traffic=override_encrypt_inter_container_traffic, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), override_role) - self.assertEquals( + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_estimator_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) - self.assertEquals( + self.assertEqual( mock_estimator_init.call_args[1].get("encrypt_inter_container_traffic"), override_encrypt_inter_container_traffic, ) @@ -280,13 +280,13 @@ def test_with_arg_overwrites_with_kwarg_collisions_with_config( enable_network_isolation=override_inference_enable_network_isolation, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 3) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("role"), mock_inference_override_role ) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("enable_network_isolation"), override_inference_enable_network_isolation, ) @@ -336,13 +336,13 @@ def test_with_arg_overwrites_without_kwarg_collisions_with_config( encrypt_inter_container_traffic=override_encrypt_inter_container_traffic, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), override_role) - self.assertEquals( + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_estimator_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) - self.assertEquals( + self.assertEqual( mock_estimator_init.call_args[1].get("encrypt_inter_container_traffic"), override_encrypt_inter_container_traffic, ) 
@@ -355,13 +355,13 @@ def test_with_arg_overwrites_without_kwarg_collisions_with_config( enable_network_isolation=override_inference_enable_network_isolation, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 3) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("role"), mock_inference_override_role ) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("enable_network_isolation"), override_inference_enable_network_isolation, ) @@ -412,8 +412,8 @@ def test_without_arg_overwrites_without_kwarg_collisions_without_config( model_id=model_id, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), execution_role) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), execution_role) assert "enable_network_isolation" not in mock_estimator_init.call_args[1] assert "encrypt_inter_container_traffic" not in mock_estimator_init.call_args[1] @@ -421,9 +421,9 @@ def test_without_arg_overwrites_without_kwarg_collisions_without_config( mock_retrieve_model_init_kwargs.return_value = {} - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 3) - self.assertEquals(mock_estimator_deploy.call_args[1].get("role"), execution_role) + self.assertEqual(mock_estimator_deploy.call_args[1].get("role"), execution_role) assert "enable_network_isolation" not in mock_estimator_deploy.call_args[1] @@ -475,13 +475,13 @@ def test_without_arg_overwrites_with_kwarg_collisions_without_config( model_id=model_id, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), execution_role) - self.assertEquals( + self.assertEqual(mock_get_sagemaker_config_value.call_count, 
3) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), execution_role) + self.assertEqual( mock_estimator_init.call_args[1].get("enable_network_isolation"), metadata_enable_network_isolation, ) - self.assertEquals( + self.assertEqual( mock_estimator_init.call_args[1].get("encrypt_inter_container_traffic"), metadata_intercontainer_encryption, ) @@ -492,11 +492,11 @@ def test_without_arg_overwrites_with_kwarg_collisions_without_config( estimator.deploy() - self.assertEquals(mock_get_sagemaker_config_value.call_count, 6) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 6) - self.assertEquals(mock_estimator_deploy.call_args[1].get("role"), execution_role) + self.assertEqual(mock_estimator_deploy.call_args[1].get("role"), execution_role) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("enable_network_isolation"), metadata_inference_enable_network_isolation, ) @@ -548,13 +548,13 @@ def test_with_arg_overwrites_with_kwarg_collisions_without_config( encrypt_inter_container_traffic=override_encrypt_inter_container_traffic, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), override_role) - self.assertEquals( + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_estimator_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) - self.assertEquals( + self.assertEqual( mock_estimator_init.call_args[1].get("encrypt_inter_container_traffic"), override_encrypt_inter_container_traffic, ) @@ -568,11 +568,11 @@ def test_with_arg_overwrites_with_kwarg_collisions_without_config( enable_network_isolation=override_inference_enable_network_isolation, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 3) - 
self.assertEquals(mock_estimator_deploy.call_args[1].get("role"), override_inference_role) + self.assertEqual(mock_estimator_deploy.call_args[1].get("role"), override_inference_role) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("enable_network_isolation"), override_inference_enable_network_isolation, ) @@ -618,13 +618,13 @@ def test_with_arg_overwrites_without_kwarg_collisions_without_config( enable_network_isolation=override_enable_network_isolation, encrypt_inter_container_traffic=override_encrypt_inter_container_traffic, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_estimator_init.call_args[1].get("role"), override_role) - self.assertEquals( + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_estimator_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_estimator_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) - self.assertEquals( + self.assertEqual( mock_estimator_init.call_args[1].get("encrypt_inter_container_traffic"), override_encrypt_inter_container_traffic, ) @@ -634,11 +634,11 @@ def test_with_arg_overwrites_without_kwarg_collisions_without_config( enable_network_isolation=override_enable_network_isolation, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 3) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 3) - self.assertEquals(mock_estimator_deploy.call_args[1].get("role"), override_inference_role) + self.assertEqual(mock_estimator_deploy.call_args[1].get("role"), override_inference_role) - self.assertEquals( + self.assertEqual( mock_estimator_deploy.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) diff --git a/tests/unit/sagemaker/jumpstart/model/test_sagemaker_config.py b/tests/unit/sagemaker/jumpstart/model/test_sagemaker_config.py index 2be4bde7e4..a0299ebb1a 100644 --- 
a/tests/unit/sagemaker/jumpstart/model/test_sagemaker_config.py +++ b/tests/unit/sagemaker/jumpstart/model/test_sagemaker_config.py @@ -99,9 +99,9 @@ def test_without_arg_overwrites_without_kwarg_collisions_with_config( model_id=model_id, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_model_init.call_args[1].get("role"), config_role) + self.assertEqual(mock_model_init.call_args[1].get("role"), config_role) assert "enable_network_isolation" not in mock_model_init.call_args[1] @@ -147,10 +147,10 @@ def test_all_arg_overwrites_without_kwarg_collisions_with_config( role=override_role, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_model_init.call_args[1].get("role"), override_role) - self.assertEquals( + self.assertEqual(mock_model_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_model_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) @@ -197,10 +197,10 @@ def test_without_arg_overwrites_all_kwarg_collisions_with_config( model_id=model_id, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 2) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 2) - self.assertEquals(mock_model_init.call_args[1].get("role"), config_role) - self.assertEquals( + self.assertEqual(mock_model_init.call_args[1].get("role"), config_role) + self.assertEqual( mock_model_init.call_args[1].get("enable_network_isolation"), config_enable_network_isolation, ) @@ -249,10 +249,10 @@ def test_with_arg_overwrites_all_kwarg_collisions_with_config( enable_network_isolation=override_enable_network_isolation, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_model_init.call_args[1].get("role"), 
override_role) - self.assertEquals( + self.assertEqual(mock_model_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_model_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) @@ -299,10 +299,10 @@ def test_without_arg_overwrites_all_kwarg_collisions_without_config( model_id=model_id, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 2) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 2) - self.assertEquals(mock_model_init.call_args[1].get("role"), execution_role) - self.assertEquals( + self.assertEqual(mock_model_init.call_args[1].get("role"), execution_role) + self.assertEqual( mock_model_init.call_args[1].get("enable_network_isolation"), metadata_enable_network_isolation, ) @@ -350,10 +350,10 @@ def test_with_arg_overwrites_all_kwarg_collisions_without_config( enable_network_isolation=override_enable_network_isolation, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_model_init.call_args[1].get("role"), override_role) - self.assertEquals( + self.assertEqual(mock_model_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_model_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) @@ -398,9 +398,9 @@ def test_without_arg_overwrites_without_kwarg_collisions_without_config( model_id=model_id, ) - self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_model_init.call_args[1].get("role"), execution_role) + self.assertEqual(mock_model_init.call_args[1].get("role"), execution_role) assert "enable_network_isolation" not in mock_model_init.call_args[1] @mock.patch( @@ -445,10 +445,10 @@ def test_with_arg_overwrites_without_kwarg_collisions_without_config( enable_network_isolation=override_enable_network_isolation, ) - 
self.assertEquals(mock_get_sagemaker_config_value.call_count, 1) + self.assertEqual(mock_get_sagemaker_config_value.call_count, 1) - self.assertEquals(mock_model_init.call_args[1].get("role"), override_role) - self.assertEquals( + self.assertEqual(mock_model_init.call_args[1].get("role"), override_role) + self.assertEqual( mock_model_init.call_args[1].get("enable_network_isolation"), override_enable_network_isolation, ) diff --git a/tests/unit/sagemaker/jumpstart/test_utils.py b/tests/unit/sagemaker/jumpstart/test_utils.py index e3e3110da8..de9be1d51d 100644 --- a/tests/unit/sagemaker/jumpstart/test_utils.py +++ b/tests/unit/sagemaker/jumpstart/test_utils.py @@ -1388,7 +1388,7 @@ def test_no_model_id_no_version_found(self): mock_sagemaker_session.list_tags = mock_list_tags mock_list_tags.return_value = [{"Key": "blah", "Value": "blah1"}] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, None, None, None), ) @@ -1403,7 +1403,7 @@ def test_model_id_no_version_found(self): {"Key": JumpStartTag.MODEL_ID, "Value": "model_id"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), ("model_id", None, None, None), ) @@ -1418,7 +1418,7 @@ def test_no_model_id_version_found(self): {"Key": JumpStartTag.MODEL_VERSION, "Value": "model_version"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, "model_version", None, None), ) @@ -1430,7 +1430,7 @@ def test_no_config_name_found(self): mock_sagemaker_session.list_tags = mock_list_tags mock_list_tags.return_value = [{"Key": "blah", "Value": "blah1"}] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, None, None, None), ) @@ -1445,7 +1445,7 @@ def test_inference_config_name_found(self): {"Key": 
JumpStartTag.INFERENCE_CONFIG_NAME, "Value": "config_name"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, None, "config_name", None), ) @@ -1460,7 +1460,7 @@ def test_training_config_name_found(self): {"Key": JumpStartTag.TRAINING_CONFIG_NAME, "Value": "config_name"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, None, None, "config_name"), ) @@ -1476,7 +1476,7 @@ def test_both_config_name_found(self): {"Key": JumpStartTag.TRAINING_CONFIG_NAME, "Value": "training_config_name"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, None, "inference_config_name", "training_config_name"), ) @@ -1492,7 +1492,7 @@ def test_model_id_version_found(self): {"Key": JumpStartTag.MODEL_VERSION, "Value": "model_version"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), ("model_id", "model_version", None, None), ) @@ -1510,7 +1510,7 @@ def test_multiple_model_id_versions_found(self): {"Key": JumpStartTag.MODEL_VERSION, "Value": "model_version_2"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, None, None, None), ) @@ -1528,7 +1528,7 @@ def test_multiple_model_id_versions_found_aliases_consistent(self): {"Key": random.choice(EXTRA_MODEL_VERSION_TAGS), "Value": "model_version_1"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), ("model_id_1", "model_version_1", None, None), ) @@ -1546,7 +1546,7 @@ def test_multiple_model_id_versions_found_aliases_inconsistent(self): {"Key": random.choice(EXTRA_MODEL_VERSION_TAGS), "Value": "model_version_2"}, ] - self.assertEquals( + 
self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), (None, None, None, None), ) @@ -1564,7 +1564,7 @@ def test_multiple_config_names_found_aliases_inconsistent(self): {"Key": JumpStartTag.INFERENCE_CONFIG_NAME, "Value": "config_name_2"}, ] - self.assertEquals( + self.assertEqual( utils.get_jumpstart_model_info_from_resource_arn("some-arn", mock_sagemaker_session), ("model_id_1", "model_version_1", None, None), ) diff --git a/tests/unit/sagemaker/local/test_local_entities.py b/tests/unit/sagemaker/local/test_local_entities.py index 6a026c316b..74a361cf73 100644 --- a/tests/unit/sagemaker/local/test_local_entities.py +++ b/tests/unit/sagemaker/local/test_local_entities.py @@ -12,6 +12,7 @@ # language governing permissions and limitations under the License. from __future__ import absolute_import +import re import os import pytest @@ -290,10 +291,10 @@ def test_start_local_pipeline_with_wrong_parameter_type(sagemaker_local_session) local_pipeline = sagemaker.local.entities._LocalPipeline(pipeline) with pytest.raises(ClientError) as error: local_pipeline.start(PipelineParameters={"MyStr": True}) - assert ( - f"Unexpected type for parameter '{parameter.name}'. Expected " - f"{parameter.parameter_type.python_type} but found {type(True)}." in str(error.value) + expected_error_pattern = ( + r"Unexpected type for parameter 'MyStr'\. Expected .* but found \." 
) + assert re.search(expected_error_pattern, str(error.value)) def test_start_local_pipeline_with_empty_parameter_string_value( diff --git a/tests/unit/sagemaker/mlflow/test_forward_sagemaker_metrics.py b/tests/unit/sagemaker/mlflow/test_forward_sagemaker_metrics.py index 4b53c93ad4..c1c6d67a8b 100644 --- a/tests/unit/sagemaker/mlflow/test_forward_sagemaker_metrics.py +++ b/tests/unit/sagemaker/mlflow/test_forward_sagemaker_metrics.py @@ -48,7 +48,7 @@ def mock_mlflow_client(): def test_encode(): existing_names = set() assert encode("test-name", existing_names) == "test-name" - assert encode("test:name", existing_names) == "test_3a_name" + assert encode("test:name", existing_names) == "test:name" assert encode("test-name", existing_names) == "test-name_1" @@ -188,6 +188,7 @@ def getenv_side_effect(arg, default=None): Mock(spec=requests.Response), Mock(spec=requests.Response), ], + "https://test.sagemaker.aws/api/2.0/mlflow/runs/update": Mock(spec=requests.Response), "https://test.sagemaker.aws/api/2.0/mlflow/runs/terminate": Mock(spec=requests.Response), } @@ -215,6 +216,11 @@ def getenv_side_effect(arg, default=None): mock_response.status_code = 200 mock_response.text = json.dumps({}) + mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/update"].status_code = 200 + mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/update"].text = json.dumps( + {"run_id": "test_run_id", "status": "FINISHED"} + ) + mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/terminate"].status_code = 200 mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/terminate"].text = json.dumps({}) @@ -222,6 +228,7 @@ def getenv_side_effect(arg, default=None): mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/experiments/get-by-name"], mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/create"], *mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/log-batch"], + 
mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/update"], mock_responses["https://test.sagemaker.aws/api/2.0/mlflow/runs/terminate"], ] @@ -231,7 +238,7 @@ def getenv_side_effect(arg, default=None): log_to_mlflow(metrics, params, tags) - assert mock_request.call_count == 6 # Total number of API calls + assert mock_request.call_count == 7 # Total number of API calls @patch("sagemaker.mlflow.forward_sagemaker_metrics.get_training_job_details") diff --git a/tests/unit/sagemaker/modules/train/test_model_trainer.py b/tests/unit/sagemaker/modules/train/test_model_trainer.py index 770420c354..13530a3983 100644 --- a/tests/unit/sagemaker/modules/train/test_model_trainer.py +++ b/tests/unit/sagemaker/modules/train/test_model_trainer.py @@ -1049,15 +1049,16 @@ def mock_upload_data(path, bucket, key_prefix): model_trainer.train() - assert mock_local_container.train.called_once_with( + mock_local_container.assert_called_once_with( training_job_name=unique_name, instance_type=compute.instance_type, instance_count=compute.instance_count, image=training_image, container_root=local_container_root, sagemaker_session=modules_session, - container_entry_point=DEFAULT_ENTRYPOINT, + container_entrypoint=DEFAULT_ENTRYPOINT, container_arguments=DEFAULT_ARGUMENTS, + input_data_config=ANY, hyper_parameters=hyperparameters, environment=environment, ) diff --git a/tests/unit/sagemaker/serve/detector/test_dependency_manager.py b/tests/unit/sagemaker/serve/detector/test_dependency_manager.py index 491968dd25..bf418b44c2 100644 --- a/tests/unit/sagemaker/serve/detector/test_dependency_manager.py +++ b/tests/unit/sagemaker/serve/detector/test_dependency_manager.py @@ -21,7 +21,7 @@ DEPENDENCY_LIST = [ "requests==2.26.0", - "numpy>=1.20.0", + "numpy>=2.0.0", "pandas<=1.3.3", "matplotlib<3.5.0", "scikit-learn>0.24.1", @@ -34,7 +34,7 @@ EXPECTED_DEPENDENCY_MAP = { "requests": "==2.26.0", - "numpy": ">=1.20.0", + "numpy": ">=2.0.0", "pandas": "<=1.3.3", "matplotlib": "<3.5.0", 
"scikit-learn": ">0.24.1", diff --git a/tests/unit/sagemaker/serve/detector/test_pickle_dependencies.py b/tests/unit/sagemaker/serve/detector/test_pickle_dependencies.py index 34cab8a526..ced9555fc5 100644 --- a/tests/unit/sagemaker/serve/detector/test_pickle_dependencies.py +++ b/tests/unit/sagemaker/serve/detector/test_pickle_dependencies.py @@ -93,13 +93,14 @@ def create_mock_modules(name, doc, file): # happy case def test_generate_requirements_exact_match(monkeypatch): - with patch("cloudpickle.load"), patch("tqdm.tqdm"), patch( - "sagemaker.serve.detector.pickle_dependencies.subprocess.run" - ) as subprocess_run, patch( - "sagemaker.serve.detector.pickle_dependencies.subprocess.Popen" - ) as subprocess_popen, patch( - "builtins.open" - ) as mocked_open, monkeypatch.context() as m: + with ( + patch("cloudpickle.load"), + patch("tqdm.tqdm"), + patch("sagemaker.serve.detector.pickle_dependencies.subprocess.run") as subprocess_run, + patch("sagemaker.serve.detector.pickle_dependencies.subprocess.Popen") as subprocess_popen, + patch("builtins.open") as mocked_open, + monkeypatch.context() as m, + ): mock_run_stdout = MagicMock() mock_run_stdout.stdout = json.dumps(INSTALLED_PKG_JSON).encode("utf-8") subprocess_run.return_value = mock_run_stdout @@ -147,13 +148,14 @@ def test_generate_requirements_exact_match(monkeypatch): def test_generate_requirements_txt_pruning_unused_packages(monkeypatch): - with patch("cloudpickle.load"), patch("tqdm.tqdm"), patch( - "sagemaker.serve.detector.pickle_dependencies.subprocess.run" - ) as subprocess_run, patch( - "sagemaker.serve.detector.pickle_dependencies.subprocess.Popen" - ) as subprocess_popen, patch( - "builtins.open" - ) as mocked_open, monkeypatch.context() as m: + with ( + patch("cloudpickle.load"), + patch("tqdm.tqdm"), + patch("sagemaker.serve.detector.pickle_dependencies.subprocess.run") as subprocess_run, + patch("sagemaker.serve.detector.pickle_dependencies.subprocess.Popen") as subprocess_popen, + 
patch("builtins.open") as mocked_open, + monkeypatch.context() as m, + ): mock_run_stdout = MagicMock() mock_run_stdout.stdout = json.dumps(INSTALLED_PKG_JSON_UNUSED).encode("utf-8") subprocess_run.return_value = mock_run_stdout @@ -201,13 +203,14 @@ def test_generate_requirements_txt_pruning_unused_packages(monkeypatch): def test_generate_requirements_txt_no_currently_used_packages(monkeypatch): - with patch("cloudpickle.load"), patch("tqdm.tqdm"), patch( - "sagemaker.serve.detector.pickle_dependencies.subprocess.run" - ) as subprocess_run, patch( - "sagemaker.serve.detector.pickle_dependencies.subprocess.Popen" - ) as subprocess_popen, patch( - "builtins.open" - ) as mocked_open, monkeypatch.context() as m: + with ( + patch("cloudpickle.load"), + patch("tqdm.tqdm"), + patch("sagemaker.serve.detector.pickle_dependencies.subprocess.run") as subprocess_run, + patch("sagemaker.serve.detector.pickle_dependencies.subprocess.Popen") as subprocess_popen, + patch("builtins.open") as mocked_open, + monkeypatch.context() as m, + ): mock_run_stdout = MagicMock() mock_run_stdout.stdout = json.dumps([]).encode("utf-8") subprocess_run.return_value = mock_run_stdout diff --git a/tests/unit/sagemaker/serve/model_server/djl_serving/test_djl_prepare.py b/tests/unit/sagemaker/serve/model_server/djl_serving/test_djl_prepare.py index 183d15d13e..aa99e1971c 100644 --- a/tests/unit/sagemaker/serve/model_server/djl_serving/test_djl_prepare.py +++ b/tests/unit/sagemaker/serve/model_server/djl_serving/test_djl_prepare.py @@ -52,8 +52,8 @@ def test_create_dir_structure_from_new(self, mock_path, mock_disk_usage, mock_di mock_disk_space.assert_called_once_with(mock_model_path) mock_disk_usage.assert_called_once() - self.assertEquals(ret_model_path, mock_model_path) - self.assertEquals(ret_code_dir, mock_code_dir) + self.assertEqual(ret_model_path, mock_model_path) + self.assertEqual(ret_code_dir, mock_code_dir) @patch("sagemaker.serve.model_server.djl_serving.prepare.Path") def 
test_create_dir_structure_invalid_path(self, mock_path): @@ -65,7 +65,7 @@ def test_create_dir_structure_invalid_path(self, mock_path): with self.assertRaises(ValueError) as context: _create_dir_structure(mock_model_path) - self.assertEquals("model_dir is not a valid directory", str(context.exception)) + self.assertEqual("model_dir is not a valid directory", str(context.exception)) @patch("sagemaker.serve.model_server.djl_serving.prepare.S3Downloader") @patch("builtins.open", new_callable=mock_open, read_data="data") diff --git a/tests/unit/sagemaker/serve/model_server/multi_model_server/test_multi_model_server_prepare.py b/tests/unit/sagemaker/serve/model_server/multi_model_server/test_multi_model_server_prepare.py index e877c1e7e9..567a72182a 100644 --- a/tests/unit/sagemaker/serve/model_server/multi_model_server/test_multi_model_server_prepare.py +++ b/tests/unit/sagemaker/serve/model_server/multi_model_server/test_multi_model_server_prepare.py @@ -91,8 +91,8 @@ def test_create_dir_structure_from_new(self, mock_path, mock_disk_usage, mock_di mock_disk_space.assert_called_once_with(mock_model_path) mock_disk_usage.assert_called_once() - self.assertEquals(ret_model_path, mock_model_path) - self.assertEquals(ret_code_dir, mock_code_dir) + self.assertEqual(ret_model_path, mock_model_path) + self.assertEqual(ret_code_dir, mock_code_dir) @patch("sagemaker.serve.model_server.multi_model_server.prepare.Path") def test_create_dir_structure_invalid_path(self, mock_path): @@ -104,4 +104,4 @@ def test_create_dir_structure_invalid_path(self, mock_path): with self.assertRaises(ValueError) as context: _create_dir_structure(mock_model_path) - self.assertEquals("model_dir is not a valid directory", str(context.exception)) + self.assertEqual("model_dir is not a valid directory", str(context.exception)) diff --git a/tests/unit/sagemaker/serve/model_server/tgi/test_tgi_prepare.py b/tests/unit/sagemaker/serve/model_server/tgi/test_tgi_prepare.py index 88d109831d..ed94f10ce9 100644 
--- a/tests/unit/sagemaker/serve/model_server/tgi/test_tgi_prepare.py +++ b/tests/unit/sagemaker/serve/model_server/tgi/test_tgi_prepare.py @@ -50,8 +50,8 @@ def test_create_dir_structure_from_new(self, mock_path, mock_disk_usage, mock_di mock_disk_space.assert_called_once_with(mock_model_path) mock_disk_usage.assert_called_once() - self.assertEquals(ret_model_path, mock_model_path) - self.assertEquals(ret_code_dir, mock_code_dir) + self.assertEqual(ret_model_path, mock_model_path) + self.assertEqual(ret_code_dir, mock_code_dir) @patch("sagemaker.serve.model_server.tgi.prepare.Path") def test_create_dir_structure_invalid_path(self, mock_path): @@ -63,7 +63,7 @@ def test_create_dir_structure_invalid_path(self, mock_path): with self.assertRaises(ValueError) as context: _create_dir_structure(mock_model_path) - self.assertEquals("model_dir is not a valid directory", str(context.exception)) + self.assertEqual("model_dir is not a valid directory", str(context.exception)) @patch("sagemaker.serve.model_server.tgi.prepare.S3Downloader") @patch("builtins.open", read_data="data") diff --git a/tests/unit/sagemaker/workflow/test_pipeline.py b/tests/unit/sagemaker/workflow/test_pipeline.py index 14c2d442eb..523b981736 100644 --- a/tests/unit/sagemaker/workflow/test_pipeline.py +++ b/tests/unit/sagemaker/workflow/test_pipeline.py @@ -99,7 +99,7 @@ def test_pipeline_create_and_update_with_config_injection(sagemaker_session_mock RoleArn=pipeline_role_arn, ) pipeline.upsert() - assert sagemaker_session_mock.sagemaker_client.update_pipeline.called_with( + sagemaker_session_mock.sagemaker_client.update_pipeline.assert_called_with( PipelineName="MyPipeline", PipelineDefinition=pipeline.definition(), RoleArn=pipeline_role_arn, @@ -130,7 +130,7 @@ def test_pipeline_create_with_parallelism_config(sagemaker_session_mock, role_ar role_arn=role_arn, parallelism_config=dict(MaxParallelExecutionSteps=10), ) - assert sagemaker_session_mock.sagemaker_client.create_pipeline.called_with( + 
sagemaker_session_mock.sagemaker_client.create_pipeline.assert_called_with( PipelineName="MyPipeline", PipelineDefinition=pipeline.definition(), RoleArn=role_arn, @@ -149,7 +149,7 @@ def test_pipeline_create_and_start_with_parallelism_config(sagemaker_session_moc role_arn=role_arn, parallelism_config=dict(MaxParallelExecutionSteps=10), ) - assert sagemaker_session_mock.sagemaker_client.create_pipeline.called_with( + sagemaker_session_mock.sagemaker_client.create_pipeline.assert_called_with( PipelineName="MyPipeline", PipelineDefinition=pipeline.definition(), RoleArn=role_arn, @@ -168,7 +168,7 @@ def test_pipeline_create_and_start_with_parallelism_config(sagemaker_session_moc # Specify ParallelismConfiguration to another value which will be honored in backend pipeline.start(parallelism_config=dict(MaxParallelExecutionSteps=20)) - assert sagemaker_session_mock.sagemaker_client.start_pipeline_execution.called_with( + sagemaker_session_mock.sagemaker_client.start_pipeline_execution.assert_called_with( PipelineName="MyPipeline", ParallelismConfiguration={"MaxParallelExecutionSteps": 20}, ) @@ -209,7 +209,7 @@ def test_pipeline_update(sagemaker_session_mock, role_arn): assert not pipeline.steps pipeline.update(role_arn=role_arn) assert len(json.loads(pipeline.definition())["Steps"]) == 0 - assert sagemaker_session_mock.sagemaker_client.update_pipeline.called_with( + sagemaker_session_mock.sagemaker_client.update_pipeline.assert_called_with( PipelineName="MyPipeline", PipelineDefinition=pipeline.definition(), RoleArn=role_arn ) @@ -253,7 +253,7 @@ def test_pipeline_update(sagemaker_session_mock, role_arn): pipeline.update(role_arn=role_arn) assert len(json.loads(pipeline.definition())["Steps"]) == 3 - assert sagemaker_session_mock.sagemaker_client.update_pipeline.called_with( + sagemaker_session_mock.sagemaker_client.update_pipeline.assert_called_with( PipelineName="MyPipeline", PipelineDefinition=pipeline.definition(), RoleArn=role_arn ) @@ -345,7 +345,11 @@ def 
test_pipeline_update_with_parallelism_config(sagemaker_session_mock, role_ar role_arn=role_arn, parallelism_config=dict(MaxParallelExecutionSteps=10), ) - assert sagemaker_session_mock.sagemaker_client.update_pipeline.called_with( + pipeline.update( + role_arn=role_arn, + parallelism_config={"MaxParallelExecutionSteps": 10}, + ) + sagemaker_session_mock.sagemaker_client.update_pipeline.assert_called_with( PipelineName="MyPipeline", PipelineDefinition=pipeline.definition(), RoleArn=role_arn, @@ -418,13 +422,11 @@ def _raise_does_already_exists_client_error(**kwargs): sagemaker_session_mock.sagemaker_client.update_pipeline.assert_called_once_with( PipelineName="MyPipeline", PipelineDefinition=pipeline.definition(), RoleArn=role_arn ) - assert sagemaker_session_mock.sagemaker_client.list_tags.called_with( - ResourceArn="mock_pipeline_arn" - ) + sagemaker_session_mock.sagemaker_client.list_tags.assert_called_with(ResourceArn="pipeline-arn") tags.append({"Key": "dummy", "Value": "dummy_tag"}) - assert sagemaker_session_mock.sagemaker_client.add_tags.called_with( - ResourceArn="mock_pipeline_arn", Tags=tags + sagemaker_session_mock.sagemaker_client.add_tags.assert_called_with( + ResourceArn="pipeline-arn", Tags=tags ) @@ -523,7 +525,7 @@ def test_pipeline_delete(sagemaker_session_mock): sagemaker_session=sagemaker_session_mock, ) pipeline.delete() - assert sagemaker_session_mock.sagemaker_client.delete_pipeline.called_with( + sagemaker_session_mock.sagemaker_client.delete_pipeline.assert_called_with( PipelineName="MyPipeline", ) @@ -536,7 +538,7 @@ def test_pipeline_describe(sagemaker_session_mock): sagemaker_session=sagemaker_session_mock, ) pipeline.describe() - assert sagemaker_session_mock.sagemaker_client.describe_pipeline.called_with( + sagemaker_session_mock.sagemaker_client.describe_pipeline.assert_called_with( PipelineName="MyPipeline", ) @@ -552,17 +554,17 @@ def test_pipeline_start(sagemaker_session_mock): sagemaker_session=sagemaker_session_mock, ) 
pipeline.start() - assert sagemaker_session_mock.start_pipeline_execution.called_with( + sagemaker_session_mock.sagemaker_client.start_pipeline_execution.assert_called_with( PipelineName="MyPipeline", ) pipeline.start(execution_display_name="pipeline-execution") - assert sagemaker_session_mock.start_pipeline_execution.called_with( + sagemaker_session_mock.sagemaker_client.start_pipeline_execution.assert_called_with( PipelineName="MyPipeline", PipelineExecutionDisplayName="pipeline-execution" ) pipeline.start(parameters=dict(alpha="epsilon")) - assert sagemaker_session_mock.start_pipeline_execution.called_with( + sagemaker_session_mock.sagemaker_client.start_pipeline_execution.assert_called_with( PipelineName="MyPipeline", PipelineParameters=[{"Name": "alpha", "Value": "epsilon"}] ) @@ -821,10 +823,8 @@ def test_pipeline_build_parameters_from_execution(sagemaker_session_mock): pipeline_execution_arn=reference_execution_arn, parameter_value_overrides=parameter_value_overrides, ) - assert ( - sagemaker_session_mock.sagemaker_client.list_pipeline_parameters_for_execution.called_with( - PipelineExecutionArn=reference_execution_arn - ) + sagemaker_session_mock.sagemaker_client.list_pipeline_parameters_for_execution.assert_called_with( + PipelineExecutionArn=reference_execution_arn ) assert len(parameters) == 1 assert parameters["TestParameterName"] == "NewParameterValue" @@ -850,10 +850,8 @@ def test_pipeline_build_parameters_from_execution_with_invalid_overrides(sagemak + f"are not present in the pipeline execution: {reference_execution_arn}" in str(error) ) - assert ( - sagemaker_session_mock.sagemaker_client.list_pipeline_parameters_for_execution.called_with( - PipelineExecutionArn=reference_execution_arn - ) + sagemaker_session_mock.sagemaker_client.list_pipeline_parameters_for_execution.assert_called_with( + PipelineExecutionArn=reference_execution_arn ) @@ -908,24 +906,23 @@ def test_pipeline_execution_basics(sagemaker_session_mock): ) execution = pipeline.start() 
execution.stop() - assert sagemaker_session_mock.sagemaker_client.stop_pipeline_execution.called_with( + sagemaker_session_mock.sagemaker_client.stop_pipeline_execution.assert_called_with( PipelineExecutionArn="my:arn" ) execution.describe() - assert sagemaker_session_mock.sagemaker_client.describe_pipeline_execution.called_with( + sagemaker_session_mock.sagemaker_client.describe_pipeline_execution.assert_called_with( PipelineExecutionArn="my:arn" ) steps = execution.list_steps() - assert sagemaker_session_mock.sagemaker_client.describe_pipeline_execution_steps.called_with( + sagemaker_session_mock.sagemaker_client.list_pipeline_execution_steps.assert_called_with( PipelineExecutionArn="my:arn" ) assert len(steps) == 1 list_parameters_response = execution.list_parameters() - assert ( - sagemaker_session_mock.sagemaker_client.list_pipeline_parameters_for_execution.called_with( - PipelineExecutionArn="my:arn" - ) + sagemaker_session_mock.sagemaker_client.list_pipeline_parameters_for_execution.assert_called_with( + PipelineExecutionArn="my:arn" ) + parameter_list = list_parameters_response["PipelineParameters"] assert len(parameter_list) == 1 assert parameter_list[0]["Name"] == "TestParameterName" diff --git a/tests/unit/test_exception_on_bad_status.py b/tests/unit/test_exception_on_bad_status.py index 2ef017efd3..dc53c97799 100644 --- a/tests/unit/test_exception_on_bad_status.py +++ b/tests/unit/test_exception_on_bad_status.py @@ -52,7 +52,7 @@ def test_raise_when_failed_created_package(): False ), "sagemaker.exceptions.UnexpectedStatusException should have been raised but was not" except Exception as e: - assert type(e) == sagemaker.exceptions.UnexpectedStatusException + assert isinstance(e, sagemaker.exceptions.UnexpectedStatusException) assert e.actual_status == "EnRoute" assert "Completed" in e.allowed_statuses @@ -73,7 +73,7 @@ def test_does_raise_when_incorrect_job_status(): False ), "sagemaker.exceptions.UnexpectedStatusException should have been raised but was 
not" except Exception as e: - assert type(e) == sagemaker.exceptions.UnexpectedStatusException + assert isinstance(e, sagemaker.exceptions.UnexpectedStatusException) assert e.actual_status == "Failed" assert "Completed" in e.allowed_statuses assert "Stopped" in e.allowed_statuses @@ -92,7 +92,7 @@ def test_does_raise_capacity_error_when_incorrect_job_status(): ) assert False, "sagemaker.exceptions.CapacityError should have been raised but was not" except Exception as e: - assert type(e) == sagemaker.exceptions.CapacityError + assert isinstance(e, sagemaker.exceptions.CapacityError) assert e.actual_status == "Failed" assert "Completed" in e.allowed_statuses assert "Stopped" in e.allowed_statuses @@ -114,6 +114,6 @@ def test_raise_when_failed_to_deploy_endpoint(): False ), "sagemaker.exceptions.UnexpectedStatusException should have been raised but was not" except Exception as e: - assert type(e) == sagemaker.exceptions.UnexpectedStatusException + assert isinstance(e, sagemaker.exceptions.UnexpectedStatusException) assert e.actual_status == "Failed" assert "InService" in e.allowed_statuses diff --git a/tests/unit/test_hyperparameter.py b/tests/unit/test_hyperparameter.py index ba7a363c40..edb2de97ee 100644 --- a/tests/unit/test_hyperparameter.py +++ b/tests/unit/test_hyperparameter.py @@ -62,7 +62,7 @@ def test_validated(): def test_data_type(): x = Test() x.validated = 66 - assert type(x.validated) == Test.__dict__["validated"].data_type + assert isinstance(x.validated, Test.__dict__["validated"].data_type) def test_from_string(): diff --git a/tests/unit/test_predictor_async.py b/tests/unit/test_predictor_async.py index fa2d6da6c7..c9f12ff023 100644 --- a/tests/unit/test_predictor_async.py +++ b/tests/unit/test_predictor_async.py @@ -233,7 +233,7 @@ def test_async_predict_call_verify_exceptions(): with pytest.raises( PollingTimeoutError, match=f"No result at {ASYNC_OUTPUT_LOCATION} after polling for " - 
f"{DEFAULT_WAITER_CONFIG.delay*DEFAULT_WAITER_CONFIG.max_attempts}" + f"{DEFAULT_WAITER_CONFIG.delay * DEFAULT_WAITER_CONFIG.max_attempts}" f" seconds. Inference could still be running", ): predictor_async.predict(input_path=input_location, waiter_config=DEFAULT_WAITER_CONFIG) @@ -253,7 +253,7 @@ def test_async_predict_call_verify_exceptions_with_null_failure_path(): with pytest.raises( PollingTimeoutError, match=f"No result at {ASYNC_OUTPUT_LOCATION} after polling for " - f"{DEFAULT_WAITER_CONFIG.delay*DEFAULT_WAITER_CONFIG.max_attempts}" + f"{DEFAULT_WAITER_CONFIG.delay * DEFAULT_WAITER_CONFIG.max_attempts}" f" seconds. Inference could still be running", ): predictor_async.predict(input_path=input_location, waiter_config=DEFAULT_WAITER_CONFIG) diff --git a/tests/unit/test_tuner.py b/tests/unit/test_tuner.py index f0325b79e9..b4d21008b5 100644 --- a/tests/unit/test_tuner.py +++ b/tests/unit/test_tuner.py @@ -46,7 +46,54 @@ from sagemaker.workflow.parameters import ParameterString, ParameterInteger from src.sagemaker.tuner import InstanceConfig -from .tuner_test_utils import * # noqa: F403 +from .tuner_test_utils import ( + BASE_JOB_NAME, + BUCKET_NAME, + CategoricalParameter, + ContinuousParameter, + DATA_DIR, + EARLY_STOPPING_TYPE, + Estimator, + ESTIMATOR, + ESTIMATOR_NAME, + ESTIMATOR_NAME_TWO, + ESTIMATOR_TWO, + FRAMEWORK_VERSION, + HYPERPARAMETER_RANGES, + HYPERPARAMETER_RANGES_TWO, + IMAGE_NAME, + INPUTS, + INSTANCE_COUNT, + INSTANCE_TYPE, + IntegerParameter, + JOB_NAME, + LIST_TAGS_RESULT, + MAX_JOBS, + MAX_PARALLEL_JOBS, + METRIC_DEFINITIONS, + MODEL_DATA, + MULTI_ALGO_TUNING_JOB_DETAILS, + NUM_COMPONENTS, + OBJECTIVE_METRIC_NAME, + OBJECTIVE_METRIC_NAME_TWO, + OBJECTIVE_TYPE, + PCA, + PY_VERSION, + REGION, + ROLE, + SAGEMAKER_SESSION, + SCRIPT_NAME, + STRATEGY, + TAGS, + TRAINING_JOB_DESCRIPTION, + TRAINING_JOB_NAME, + TUNING_JOB_DETAILS, + WarmStartConfig, + WarmStartTypes, + WARM_START_CONFIG, + ENDPOINT_DESC, + ENDPOINT_CONFIG_DESC, +) 
@pytest.fixture() diff --git a/tox.ini b/tox.ini index b16c0d2f0b..c47d206380 100644 --- a/tox.ini +++ b/tox.ini @@ -5,7 +5,7 @@ [tox] isolated_build = true -envlist = black-format,flake8,pylint,docstyle,sphinx,doc8,twine,py38,py39,py310,py311 +envlist = black-format,flake8,pylint,docstyle,sphinx,doc8,twine,py39,py310,py311,py312 skip_missing_interpreters = False @@ -21,13 +21,13 @@ exclude = tests/data/ venv/ env/ - tests/unit/test_tensorboard.py # excluding this file for time being + tests/unit/test_tensorboard.py max-complexity = 10 ignore = C901, - E203, # whitespace before ':': Black disagrees with and explicitly violates this. + E203, FI10, FI12, FI13, @@ -35,7 +35,7 @@ ignore = FI15, FI16, FI17, - FI18, # __future__ import "annotations" missing -> check only Python 3.7 compatible + FI18, FI50, FI51, FI52, @@ -67,7 +67,7 @@ markers = [testenv] setenv = PYTHONHASHSEED=42 -pip_version = pip==21.3 +pip_version = pip==24.3 passenv = AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY @@ -82,15 +82,18 @@ passenv = # Can be used to specify which tests to run, e.g.: tox -- -s commands = python -c "import os; os.system('install-custom-pkgs --install-boto-wheels')" - pip install 'apache-airflow==2.9.3' --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.9.3/constraints-3.8.txt" - pip install 'torch==2.0.1+cpu' -f 'https://download.pytorch.org/whl/torch_stable.html' - pip install 'torchvision==0.15.2+cpu' -f 'https://download.pytorch.org/whl/torch_stable.html' - pip install 'dill>=0.3.8' + pip install 'apache-airflow==2.10.4' --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.4/constraints-3.9.txt" + pip install 'torch==2.3.1+cpu' -f 'https://download.pytorch.org/whl/torch_stable.html' + pip install 'torchvision==0.18.1+cpu' -f 'https://download.pytorch.org/whl/torch_stable.html' + pip install 'dill>=0.3.9' pytest {posargs} deps = .[test] depends = - {py38,py39,py310,p311}: clean + {py39,py310,py311,py312}: clean + 
+[testenv:py312] +basepython = python3.12 [testenv:runcoverage] description = run unit tests with coverage @@ -105,6 +108,7 @@ deps = -r requirements/tox/flake8_requirements.txt commands = flake8 +basepython = python3.12 [testenv:pylint] skipdist = true @@ -112,7 +116,7 @@ skip_install = true deps = -r requirements/tox/pylint_requirements.txt commands = - python -m pylint --rcfile=.pylintrc -j 0 src/sagemaker + python -m pylint --rcfile=.pylintrc -j 0 src/sagemaker --fail-under=9.9 [testenv:spelling] skipdist = true @@ -132,14 +136,14 @@ commands = twine check dist/*.tar.gz [testenv:sphinx] -pip_version = pip==21.3 +pip_version = pip==24.3 changedir = doc # pip install requirements.txt is separate as RTD does it in separate steps # having the requirements.txt installed in deps above results in Double Requirement exception # https://github.com/pypa/pip/issues/988 commands = pip install --exists-action=w -r requirements.txt - sphinx-build -T -W -b html -d _build/doctrees-readthedocs -D language=en . _build/html + sphinx-build -T -b html -d _build/doctrees-readthedocs -D language=en . _build/html [testenv:doc8] deps =