I have a mixed-type, multiple-output Keras model (one regression output and one classification output). I am trying to pass the same sample weights to both outputs as shown below.
import numpy as np
import tensorflow as tf
from tensorflow import keras
# Generate some sample data
np.random.seed(42)
X = np.random.rand(1000, 10) # 1000 samples, 10 features
y_regression = X.sum(axis=1) + np.random.normal(0, 0.1, 1000) # Regression target
y_classification = (X.sum(axis=1) > 5).astype(int) # Classification target (binary)
# Create sample weights
sample_weights = np.random.rand(1000)
# Define the model with mixed outputs
def create_model():
    input_layer = keras.layers.Input(shape=(10,))
    dense1 = keras.layers.Dense(64, activation='relu')(input_layer)
    dense2 = keras.layers.Dense(32, activation='relu')(dense1)
    # Regression output
    regression_output = keras.layers.Dense(1, name='regression_output')(dense2)
    # Classification output
    classification_output = keras.layers.Dense(1, activation='sigmoid', name='classification_output')(dense2)
    model = keras.Model(inputs=input_layer, outputs=[regression_output, classification_output])
    return model
model = create_model()
# Compile the model with appropriate losses and metrics for each output
model.compile(
    optimizer='adam',
    loss={'regression_output': 'mse', 'classification_output': 'binary_crossentropy'},
    metrics={'regression_output': 'mae', 'classification_output': 'accuracy'},
)
# Train the model with sample weights
history = model.fit(
    X,
    {'regression_output': y_regression, 'classification_output': y_classification},
    epochs=10,
    batch_size=32,
    sample_weight=sample_weights,
)
I have also tried specifying the weights for each output explicitly:
history = model.fit(
    X,
    {'regression_output': y_regression, 'classification_output': y_classification},
    epochs=10,
    batch_size=32,
    sample_weight={'regression_output': sample_weights, 'classification_output': sample_weights},
)
However, in both cases I get the error below, which normally indicates a mismatch between the shape of the sample_weight array and the shape of the input data, although that is not the case here. What is the correct way to pass sample_weight in a multiple-output model?
KeyError Traceback (most recent call last)
Cell In[18], line 58
49 model.compile(
50 optimizer='adam',
51 loss={'regression_output': 'mse', 'classification_output': 'binary_crossentropy'},
52 metrics={'regression_output': 'mae', 'classification_output': 'accuracy'},
53 )
57 # Train the model with sample weights
---> 58 history = model.fit(
59 X,
60 {'regression_output': y_regression, 'classification_output': y_classification},
61 epochs=20,
62 batch_size=32,
63 sample_weight=sample_weights,
64 )
File /opt/jupyter/notebooks-generic/.venv/lib/python3.9/site-packages/keras/src/utils/traceback_utils.py:122, in filter_traceback.<locals>.error_handler(*args, **kwargs)
119 filtered_tb = _process_traceback_frames(e.__traceback__)
120 # To get the full stack trace, call:
121 # `keras.config.disable_traceback_filtering()`
--> 122 raise e.with_traceback(filtered_tb) from None
123 finally:
124 del filtered_tb
File /opt/jupyter/notebooks-generic/.venv/lib/python3.9/site-packages/keras/src/trainers/compile_utils.py:785, in CompileLoss.call.<locals>.resolve_path(path, object)
783 def resolve_path(path, object):
784 for _path in path:
--> 785 object = object[_path]
786 return object
KeyError: 0
1 Answer
Changing y from a dictionary to a list within model.fit resolved the error:
history = model.fit(
    X,
    [y_regression, y_classification],
    epochs=10,
    batch_size=32,
    sample_weight=sample_weights,
)
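The list of targets must follow the same order as the model's outputs, i.e. [regression_output, classification_output] as defined in create_model, and a single 1-D sample_weight array of length n_samples applies the same per-sample weights to every output, which is what the question asked for. If you ever need a different weight array per output, the sketch below passes one array per output as a list in that same order; this is an untested assumption about how fit matches list-structured sample weights in this Keras version, not something confirmed by the answer above.

# Sketch (untested assumption): per-output sample weights passed as a list,
# in the same order as the model outputs [regression_output, classification_output].
history = model.fit(
    X,
    [y_regression, y_classification],
    epochs=10,
    batch_size=32,
    sample_weight=[sample_weights, sample_weights],
)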