Credit Model with Sagemaker Linear Model expects 16 arguments, but got 15 #171

Open
AGrosserHH opened this issue Oct 4, 2024 · 0 comments


I am running the notebook "Credit Model with Sagemaker Linear Model" on IBM Cloud with Python 3.11, and I get an error when running the cell

from sagemaker.amazon.amazon_estimator import get_image_uri

sm_client = session.client('sagemaker')

The following error occurs:

sagemaker.config INFO - Not applying SDK defaults from location: /etc/xdg/sagemaker/config.yaml
sagemaker.config INFO - Not applying SDK defaults from location: /home/wsuser/.config/sagemaker/config.yaml


TypeError Traceback (most recent call last)
Cell In[22], line 1
----> 1 from sagemaker.amazon.amazon_estimator import get_image_uri
3 sm_client = session.client('sagemaker')

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/sagemaker/__init__.py:18
14 from __future__ import absolute_import
16 import importlib_metadata
---> 18 from sagemaker import estimator, parameter, tuner # noqa: F401
19 from sagemaker.amazon.kmeans import KMeans, KMeansModel, KMeansPredictor # noqa: F401
20 from sagemaker.amazon.pca import PCA, PCAModel, PCAPredictor # noqa: F401

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/sagemaker/estimator.py:110
107 from sagemaker.workflow.parameters import ParameterString
108 from sagemaker.workflow.pipeline_context import PipelineSession, runnable_by_pipeline
--> 110 from sagemaker.mlflow.forward_sagemaker_metrics import log_sagemaker_job_to_mlflow
112 logger = logging.getLogger(__name__)
115 class EstimatorBase(with_metaclass(ABCMeta, object)): # pylint: disable=too-many-public-methods

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/sagemaker/mlflow/forward_sagemaker_metrics.py:23
21 from typing import Set, Tuple, List, Dict, Generator
22 import boto3
---> 23 import mlflow
24 from mlflow import MlflowClient
25 from mlflow.entities import Metric, Param, RunTag

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/__init__.py:34
31 from mlflow.version import VERSION
33 __version__ = VERSION
---> 34 from mlflow import (
35 artifacts, # noqa: F401
36 client, # noqa: F401
37 config, # noqa: F401
38 data, # noqa: F401
39 exceptions, # noqa: F401
40 models, # noqa: F401
41 projects, # noqa: F401
42 tracking, # noqa: F401
43 )
44 from mlflow.environment_variables import MLFLOW_CONFIGURE_LOGGING
45 from mlflow.utils.lazy_load import LazyLoader

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/data/__init__.py:5
2 from contextlib import suppress
3 from typing import Union
----> 5 from mlflow.data import dataset_registry
6 from mlflow.data import sources as mlflow_data_sources
7 from mlflow.data.dataset import Dataset

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/data/dataset_registry.py:137
135 # use contextlib suppress to ignore import errors
136 with suppress(ImportError):
--> 137 from mlflow.data.pandas_dataset import from_pandas
139 _dataset_registry.register_constructor(from_pandas)
140 with suppress(ImportError):

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/data/pandas_dataset.py:12
10 from mlflow.data.digest_utils import compute_pandas_digest
11 from mlflow.data.evaluation_dataset import EvaluationDataset
---> 12 from mlflow.data.pyfunc_dataset_mixin import PyFuncConvertibleDatasetMixin, PyFuncInputsOutputs
13 from mlflow.exceptions import MlflowException
14 from mlflow.protos.databricks_pb2 import INVALID_PARAMETER_VALUE

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/data/pyfunc_dataset_mixin.py:6
3 from typing import List
5 from mlflow.data.evaluation_dataset import EvaluationDataset
----> 6 from mlflow.models.utils import PyFuncInput, PyFuncOutput
9 @dataclass
10 class PyFuncInputsOutputs:
11 inputs: List[PyFuncInput]

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/models/__init__.py:75
73 try:
74 from mlflow.models.python_api import predict
---> 75 from mlflow.models.signature import ModelSignature, infer_signature, set_signature
76 from mlflow.models.utils import (
77 ModelInputExample,
78 add_libraries_to_model,
(...)
81 validate_serving_input,
82 )
84 __all__ += [
85 "ModelSignature",
86 "ModelInputExample",
(...)
93 "validate_serving_input",
94 ]

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/models/signature.py:21
19 from mlflow.models import Model
20 from mlflow.models.model import MLMODEL_FILE_NAME
---> 21 from mlflow.models.utils import _contains_params, _Example
22 from mlflow.protos.databricks_pb2 import INVALID_PARAMETER_VALUE, RESOURCE_DOES_NOT_EXIST
23 from mlflow.store.artifact.models_artifact_repo import ModelsArtifactRepository

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/mlflow/models/utils.py:50
47 HAS_SCIPY = False
49 try:
---> 50 from pyspark.sql import DataFrame as SparkDataFrame
51 from pyspark.sql import Row
52 from pyspark.sql.types import (
53 ArrayType,
54 BinaryType,
(...)
60 TimestampType,
61 )

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/pyspark/__init__.py:46
43 import types
45 from pyspark.conf import SparkConf
---> 46 from pyspark.context import SparkContext
47 from pyspark.rdd import RDD
48 from pyspark.files import SparkFiles

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/pyspark/context.py:31
27 from tempfile import NamedTemporaryFile
29 from py4j.protocol import Py4JError
---> 31 from pyspark import accumulators
32 from pyspark.accumulators import Accumulator
33 from pyspark.broadcast import Broadcast, BroadcastPickleRegistry

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/pyspark/accumulators.py:97
95 import socketserver as SocketServer
96 import threading
---> 97 from pyspark.cloudpickle import CloudPickler
98 from pyspark.serializers import read_int, PickleSerializer
101 __all__ = ['Accumulator', 'AccumulatorParam']

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/pyspark/cloudpickle.py:146
126 else:
127 return types.CodeType(
128 co.co_argcount,
129 co.co_kwonlyargcount,
(...)
142 (),
143 )
--> 146 _cell_set_template_code = _make_cell_set_template_code()
149 def cell_set(cell, value):
150 """Set the value of a closure cell.
151 """

File /opt/conda/envs/Python-RT24.1-Premium/lib/python3.11/site-packages/pyspark/cloudpickle.py:127, in _make_cell_set_template_code()
110 return types.CodeType(
111 co.co_argcount,
112 co.co_nlocals,
(...)
124 (),
125 )
126 else:
--> 127 return types.CodeType(
128 co.co_argcount,
129 co.co_kwonlyargcount,
130 co.co_nlocals,
131 co.co_stacksize,
132 co.co_flags,
133 co.co_code,
134 co.co_consts,
135 co.co_names,
136 co.co_varnames,
137 co.co_filename,
138 co.co_name,
139 co.co_firstlineno,
140 co.co_lnotab,
141 co.co_cellvars, # this is the trickery
142 (),
143 )

TypeError: code expected at least 16 arguments, got 15
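As far as I can tell, the failure comes from the old cloudpickle.py bundled with the installed pyspark: it calls types.CodeType with 15 positional arguments, which was valid on Python 3.7, but on Python 3.11 the constructor also requires posonlyargcount, qualname, and exceptiontable. A minimal sketch, independent of the notebook, that reproduces the same TypeError on Python 3.11:

import sys
import types

print(sys.version_info[:2])      # e.g. (3, 11)
print(types.CodeType.__doc__)    # documents the full constructor signature for this interpreter

def f():
    pass

co = f.__code__
try:
    # Same 15-argument call as pyspark/cloudpickle.py above.
    types.CodeType(
        co.co_argcount,
        co.co_kwonlyargcount,
        co.co_nlocals,
        co.co_stacksize,
        co.co_flags,
        co.co_code,
        co.co_consts,
        co.co_names,
        co.co_varnames,
        co.co_filename,
        co.co_name,
        co.co_firstlineno,
        co.co_lnotab,
        co.co_cellvars,
        (),
    )
except TypeError as e:
    print(e)  # code expected at least 16 arguments, got 15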
