Error in fitting CNN model: ValueError: `logits` and `labels` must have the same shape, received ((None, 3) vs (None, 2))

Asked by: Fjoralb  Asked: 11/15/2023  Last edited by: nightstand / Fjoralb  Updated: 11/19/2023  Views: 45

Q:

This is my first attempt at using a convolutional neural network, so I'm running into some difficulty. I must be passing in the wrong dimensions, or passing them in the wrong order. The images come from the leukemia dataset on Kaggle, so the problem is binary. The code is below:

train <- combine(train)
test <- combine(test)
str(train)

Formal class 'Image' [package "EBImage"] with 2 slots
  ..@ .Data    : num [1:250, 1:250, 1:3, 1:260] 0.541 0.545 0.545 0.541 0.537 ...
  ..@ colormode: int 2
  ..$ dim: int [1:4] 250 250 3 260
train <- aperm(train, c(4, 1, 2, 3))
test <- aperm(test, c(4, 1, 2, 3))
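# aperm() reorders the array from (250, 250, 3, 260) to (260, 250, 250, 3):
# samples first, then height, width, channels (channels-last), matching the
# input_shape = c(250, 250, 3) used in the model below.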


#response
trainy <- c(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1)
testy <- c(0,1)

trainlabels <- to_categorical(trainy)
testlabels <- to_categorical(testy)
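# Quick shape check (assuming the labels above): to_categorical() on a 0/1
# vector yields a two-column one-hot matrix.
dim(trainlabels)   # [1] 260   2
dim(testlabels)    # [1]   2   2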
model <- keras_model_sequential()

model %>%
  layer_conv_2d(filters = 32,
                kernel_size = c(3,3),
                activation = 'relu',
                input_shape = c(250, 250, 3)) %>%
  layer_conv_2d(filters = 32,
                kernel_size = c(3,3),
                activation = 'relu') %>%
  layer_max_pooling_2d(pool_size = c(2,2)) %>%
  layer_dropout(rate = 0.25) %>%
  layer_conv_2d(filters = 64,
                kernel_size = c(3,3),
                activation = 'relu') %>%
  layer_conv_2d(filters = 64,
                kernel_size = c(3,3),
                activation = 'relu') %>%
  layer_max_pooling_2d(pool_size = c(2,2)) %>%
  layer_dropout(rate = 0.25) %>%
  layer_flatten() %>%
  layer_dense(units = 256, activation = 'relu') %>%
  layer_dropout(rate = 0.25) %>%
  layer_dense(units = 3, activation = 'softmax') %>%
  
  compile(loss = 'binary_crossentropy',
        optimizer = optimizer_sgd(lr = 0.01,
                                  decay = 1e-6,
                                  momentum = 0.9,
                                  nesterov = TRUE),
        metrics = c('accuracy'))
  
summary(model)

Model: "sequential_9"
_____________________________________________________________________________________________________________________________
 Layer (type)                                           Output Shape                                      Param #            
=============================================================================================================================
 conv2d_37 (Conv2D)                                     (None, 248, 248, 32)                              896                
 conv2d_36 (Conv2D)                                     (None, 246, 246, 32)                              9248               
 max_pooling2d_21 (MaxPooling2D)                        (None, 123, 123, 32)                              0                  
 dropout_23 (Dropout)                                   (None, 123, 123, 32)                              0                  
 conv2d_35 (Conv2D)                                     (None, 121, 121, 64)                              18496              
 conv2d_34 (Conv2D)                                     (None, 119, 119, 64)                              36928              
 max_pooling2d_20 (MaxPooling2D)                        (None, 59, 59, 64)                                0                  
 dropout_22 (Dropout)                                   (None, 59, 59, 64)                                0                  
 flatten_9 (Flatten)                                    (None, 222784)                                    0                  
 dense_21 (Dense)                                       (None, 256)                                       57032960           
 dropout_21 (Dropout)                                   (None, 256)                                       0                  
 dense_20 (Dense)                                       (None, 3)                                         771                
=============================================================================================================================
Total params: 57,099,299
Trainable params: 57,099,299
Non-trainable params: 0

When I try to fit the model, this is where I get the error:

history <- model %>%
  fit(train,
      trainlabels,
      epochs = 50, 
      batch_size = 32,
      validation_split = 0.2)

Epoch 1/50
Error in py_call_impl(callable, call_args$unnamed, call_args$named) :
ValueError: in user code:

<...truncated...>obj(y_t, y_p, sample_weight=sw)
File "C:\Users\Fjori\DOCUME~1\VIRTUA~1\R-TENS~1\lib\site-packages\keras\losses.py", line 152, in __call__
losses = call_fn(y_true, y_pred)
File "C:\Users\Fjori\DOCUME~1\VIRTUA~1\R-TENS~1\lib\site-packages\keras\losses.py", line 272, in call **
return ag_fn(y_true, y_pred, **self._fn_kwargs)
File "C:\Users\Fjori\DOCUME~1\VIRTUA~1\R-TENS~1\lib\site-packages\keras\losses.py", line 2162, in binary_crossentropy
backend.binary_crossentropy(y_true, y_pred, from_logits=from_logits),
File "C:\Users\Fjori\DOCUME~1\VIRTUA~1\R-TENS~1\lib\site-packages\keras\backend.py", line 5677, in binary_crossentropy
return tf.nn.sigmoid_cross_entropy_with_logits(

ValueError: `logits` and `labels` must have the same shape, received ((None, 3) vs (None, 2)).

I tried resizing the images to 100 x 100 but got the same error; the trainlabels variable has dimensions [1:260, 1:2].
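
For reference, a minimal comparison of the shapes involved (assuming the objects above), which seems to line up with the (None, 3) vs (None, 2) pair in the message:

dim(trainlabels)   # [1] 260   2   <- the (None, 2) side: two one-hot columns from to_categorical()
# the summary above shows dense_20 (Dense) with output (None, 3)   <- the (None, 3) side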

r conv-neural-network

Comments


A: No answers yet