[fix] model input size -> (n, w, h, c)

This commit is contained in:
NatLee 2025-01-07 05:26:07 +08:00
parent 431544ac52
commit 041773232f

View File

@ -73,7 +73,10 @@ class EmotionClient(Demography):
imgs = self._preprocess_batch_or_single_input(img)
# Preprocess each image and add channel dimension for grayscale images
- processed_imgs = np.expand_dims(np.array([self._preprocess_image(img) for img in imgs]), axis=0)
+ processed_imgs = np.expand_dims(np.array([self._preprocess_image(img) for img in imgs]), axis=-1)
+ # Reshape input for model (expected shape=(n, 48, 48, 1)), where n is the batch size
+ processed_imgs = processed_imgs.reshape(processed_imgs.shape[0], 48, 48, 1)
# Prediction
# Emotion model input shape is (48, 48, 1, n), where n is the batch size