Skip to content

Commit 34fe3e9

Browse files
committed
Bug fix for integrity check using hmac key in serve V2
1 parent: a140cfc · commit: 34fe3e9

22 files changed

Lines changed: 84 additions & 156 deletions

File tree

src/sagemaker/serve/builder/transformers_builder.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -267,12 +267,6 @@ def _transformers_model_builder_deploy_wrapper(self, *args, **kwargs) -> Type[Pr
267267
self.env_vars.update(env_vars)
268268
self.pysdk_model.env.update(self.env_vars)
269269

270-
if (
271-
"SAGEMAKER_SERVE_SECRET_KEY" in self.pysdk_model.env
272-
and not self.pysdk_model.env["SAGEMAKER_SERVE_SECRET_KEY"]
273-
):
274-
del self.pysdk_model.env["SAGEMAKER_SERVE_SECRET_KEY"]
275-
276270
if "endpoint_logging" not in kwargs:
277271
kwargs["endpoint_logging"] = True
278272

src/sagemaker/serve/model_server/multi_model_server/prepare.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,7 @@
2525
from sagemaker.session import Session
2626
from sagemaker.serve.spec.inference_spec import InferenceSpec
2727
from sagemaker.serve.detector.dependency_manager import capture_dependencies
28-
from sagemaker.serve.validations.check_integrity import (
29-
generate_secret_key,
30-
compute_hash,
31-
)
28+
from sagemaker.serve.validations.check_integrity import compute_hash
3229
from sagemaker.remote_function.core.serialization import _MetaData
3330

3431
logger = logging.getLogger(__name__)
@@ -120,11 +117,10 @@ def prepare_for_mms(
120117

121118
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
122119

123-
secret_key = generate_secret_key()
124120
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
125121
buffer = f.read()
126-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
122+
hash_value = compute_hash(buffer=buffer)
127123
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
128124
metadata.write(_MetaData(hash_value).to_json())
129125

130-
return secret_key
126+
return ""

src/sagemaker/serve/model_server/multi_model_server/server.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -29,14 +29,12 @@ def _start_serving(
2929
client: object,
3030
image: str,
3131
model_path: str,
32-
secret_key: str,
3332
env_vars: dict,
3433
):
3534
"""Initializes the start of the server"""
3635
env = {
3736
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
3837
"SAGEMAKER_PROGRAM": "inference.py",
39-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
4038
"LOCAL_PYTHON": platform.python_version(),
4139
}
4240
if env_vars:
@@ -141,15 +139,16 @@ def _upload_server_artifacts(
141139
else None
142140
)
143141

144-
if secret_key:
145-
env_vars = {
146-
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
147-
"SAGEMAKER_PROGRAM": "inference.py",
148-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
149-
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
150-
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
151-
"LOCAL_PYTHON": platform.python_version(),
152-
}
142+
if env_vars is None:
143+
env_vars = {}
144+
145+
env_vars.update({
146+
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
147+
"SAGEMAKER_PROGRAM": "inference.py",
148+
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
149+
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
150+
"LOCAL_PYTHON": platform.python_version(),
151+
})
153152

154153
return model_data, _update_env_vars(env_vars)
155154

src/sagemaker/serve/model_server/smd/prepare.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,7 @@
1111

1212
from sagemaker.serve.spec.inference_spec import InferenceSpec
1313
from sagemaker.serve.detector.dependency_manager import capture_dependencies
14-
from sagemaker.serve.validations.check_integrity import (
15-
generate_secret_key,
16-
compute_hash,
17-
)
14+
from sagemaker.serve.validations.check_integrity import compute_hash
1815
from sagemaker.remote_function.core.serialization import _MetaData
1916
from sagemaker.serve.spec.inference_base import CustomOrchestrator, AsyncCustomOrchestrator
2017

@@ -64,11 +61,10 @@ def prepare_for_smd(
6461

6562
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
6663

67-
secret_key = generate_secret_key()
6864
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
6965
buffer = f.read()
70-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
66+
hash_value = compute_hash(buffer=buffer)
7167
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
7268
metadata.write(_MetaData(hash_value).to_json())
7369

74-
return secret_key
70+
return ""

src/sagemaker/serve/model_server/smd/server.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@ def _upload_smd_artifacts(
2020
self,
2121
model_path: str,
2222
sagemaker_session: Session,
23-
secret_key: str,
2423
s3_model_data_url: str = None,
2524
image: str = None,
2625
should_upload_artifacts: bool = False,
@@ -53,7 +52,6 @@ def _upload_smd_artifacts(
5352
"SAGEMAKER_INFERENCE_CODE_DIRECTORY": "/opt/ml/model/code",
5453
"SAGEMAKER_INFERENCE_CODE": "inference.handler",
5554
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
56-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
5755
"LOCAL_PYTHON": platform.python_version(),
5856
}
5957
return s3_upload_path, env_vars

src/sagemaker/serve/model_server/tei/server.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -28,20 +28,16 @@ class LocalTeiServing:
2828
"""LocalTeiServing class"""
2929

3030
def _start_tei_serving(
31-
self, client: object, image: str, model_path: str, secret_key: str, env_vars: dict
31+
self, client: object, image: str, model_path: str, env_vars: dict
3232
):
3333
"""Starts a local tei serving container.
3434
3535
Args:
3636
client: Docker client
3737
image: Image to use
3838
model_path: Path to the model
39-
secret_key: Secret key to use for authentication
4039
env_vars: Environment variables to set
4140
"""
42-
if env_vars and secret_key:
43-
env_vars["SAGEMAKER_SERVE_SECRET_KEY"] = secret_key
44-
4541
self.container = client.containers.run(
4642
image,
4743
shm_size=_SHM_SIZE,

src/sagemaker/serve/model_server/tensorflow_serving/prepare.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,7 @@
1010
_move_contents,
1111
)
1212
from sagemaker.serve.detector.dependency_manager import capture_dependencies
13-
from sagemaker.serve.validations.check_integrity import (
14-
generate_secret_key,
15-
compute_hash,
16-
)
13+
from sagemaker.serve.validations.check_integrity import compute_hash
1714
from sagemaker.remote_function.core.serialization import _MetaData
1815

1916

@@ -57,11 +54,10 @@ def prepare_for_tf_serving(
5754
raise ValueError("SavedModel is not found for Tensorflow or Keras flavor.")
5855
_move_contents(src_dir=mlflow_saved_model_dir, dest_dir=saved_model_bundle_dir)
5956

60-
secret_key = generate_secret_key()
6157
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
6258
buffer = f.read()
63-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
59+
hash_value = compute_hash(buffer=buffer)
6460
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
6561
metadata.write(_MetaData(hash_value).to_json())
6662

67-
return secret_key
63+
return ""

src/sagemaker/serve/model_server/tensorflow_serving/server.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -22,15 +22,14 @@ class LocalTensorflowServing:
2222
"""LocalTensorflowServing class."""
2323

2424
def _start_tensorflow_serving(
25-
self, client: object, image: str, model_path: str, secret_key: str, env_vars: dict
25+
self, client: object, image: str, model_path: str, env_vars: dict
2626
):
2727
"""Starts a local tensorflow serving container.
2828
2929
Args:
3030
client: Docker client
3131
image: Image to use
3232
model_path: Path to the model
33-
secret_key: Secret key to use for authentication
3433
env_vars: Environment variables to set
3534
"""
3635
self.container = client.containers.run(
@@ -48,7 +47,6 @@ def _start_tensorflow_serving(
4847
environment={
4948
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
5049
"SAGEMAKER_PROGRAM": "inference.py",
51-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
5250
"LOCAL_PYTHON": platform.python_version(),
5351
**env_vars,
5452
},
@@ -99,7 +97,6 @@ def _upload_tensorflow_serving_artifacts(
9997
self,
10098
model_path: str,
10199
sagemaker_session: Session,
102-
secret_key: str,
103100
s3_model_data_url: str = None,
104101
image: str = None,
105102
should_upload_artifacts: bool = False,
@@ -109,7 +106,6 @@ def _upload_tensorflow_serving_artifacts(
109106
Args:
110107
model_path: Path to the model
111108
sagemaker_session: SageMaker session
112-
secret_key: Secret key to use for authentication
113109
s3_model_data_url: S3 model data URL
114110
image: Image to use
115111
model_data_s3_path: S3 model data URI
@@ -142,7 +138,6 @@ def _upload_tensorflow_serving_artifacts(
142138
"SAGEMAKER_PROGRAM": "inference.py",
143139
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
144140
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
145-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
146141
"LOCAL_PYTHON": platform.python_version(),
147142
}
148143
return s3_upload_path, env_vars

src/sagemaker/serve/model_server/torchserve/prepare.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,7 @@
1212
from sagemaker.session import Session
1313
from sagemaker.serve.spec.inference_spec import InferenceSpec
1414
from sagemaker.serve.detector.dependency_manager import capture_dependencies
15-
from sagemaker.serve.validations.check_integrity import (
16-
generate_secret_key,
17-
compute_hash,
18-
)
15+
from sagemaker.serve.validations.check_integrity import compute_hash
1916
from sagemaker.serve.validations.check_image_uri import is_1p_image_uri
2017
from sagemaker.remote_function.core.serialization import _MetaData
2118

@@ -69,11 +66,10 @@ def prepare_for_torchserve(
6966

7067
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
7168

72-
secret_key = generate_secret_key()
7369
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
7470
buffer = f.read()
75-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
71+
hash_value = compute_hash(buffer=buffer)
7672
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
7773
metadata.write(_MetaData(hash_value).to_json())
7874

79-
return secret_key
75+
return ""

src/sagemaker/serve/model_server/torchserve/server.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ class LocalTorchServe:
2222
"""Placeholder docstring"""
2323

2424
def _start_torch_serve(
25-
self, client: object, image: str, model_path: str, secret_key: str, env_vars: dict
25+
self, client: object, image: str, model_path: str, env_vars: dict
2626
):
2727
"""Placeholder docstring"""
2828
self.container = client.containers.run(
@@ -40,7 +40,6 @@ def _start_torch_serve(
4040
environment={
4141
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
4242
"SAGEMAKER_PROGRAM": "inference.py",
43-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
4443
"LOCAL_PYTHON": platform.python_version(),
4544
**env_vars,
4645
},
@@ -82,7 +81,6 @@ def _upload_torchserve_artifacts(
8281
self,
8382
model_path: str,
8483
sagemaker_session: Session,
85-
secret_key: str,
8684
s3_model_data_url: str = None,
8785
image: str = None,
8886
should_upload_artifacts: bool = False,
@@ -116,7 +114,6 @@ def _upload_torchserve_artifacts(
116114
"SAGEMAKER_PROGRAM": "inference.py",
117115
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
118116
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
119-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
120117
"LOCAL_PYTHON": platform.python_version(),
121118
}
122119
return s3_upload_path, env_vars

0 commit comments

Comments (0)