Hi guys, I'm new to machine learning. I took some pictures with my phone and used them to train a neural network to recognize my face. But when I run the Python prediction file, the error below appears. I've tried searching the Internet for a solution, to no avail. Here is the error: ValueError: Input 0 of layer sequential is incompatible with the layer: expected axis -1 of input shape to have value 3 but received input with shape [None, 224, 224, 1]
Below is the training code:
    from tensorflow.keras.preprocessing.image import ImageDataGenerator
    from tensorflow.keras.applications import MobileNetV2
    from tensorflow.keras.layers import AveragePooling2D
    from tensorflow.keras.layers import Dropout
    from tensorflow.keras.layers import Flatten
    from tensorflow.keras.layers import Dense
    from tensorflow.keras.layers import Input
    from tensorflow.keras.models import Model
    from tensorflow.keras.optimizers import Adam
    from tensorflow.keras.applications.mobilenet_v2 import preprocess_input
    from tensorflow.keras.preprocessing.image import img_to_array
    from tensorflow.keras.preprocessing.image import load_img
    from tensorflow.keras.utils import to_categorical
    from sklearn.preprocessing import LabelBinarizer
    from sklearn.model_selection import train_test_split
    from sklearn.metrics import classification_report
    from imutils import paths
    import matplotlib.pyplot as plt
    import numpy as np
    import os
    import tensorflow as tf
    
    #initialize the initial learning rate, number of epochs to train for,
    #and batch size
    
    INIT_LR = 1e-4
    EPOCHS = 5
    BS = 32
    
    DIRECTORY = r"C:\Users\CHIJINDU\Desktop\Tense2"
    CATEGORIES = ["KUDOS", "Uju nwa"]
    
    #grab the list of images in our dataset directory, then initialize
    #the list of data (i.e., images) and class labels
    
    print("[INFO] loading images...")
    
    data = []
    labels = []
    
    for category in CATEGORIES:
        path = os.path.join(DIRECTORY, category)
        for img in os.listdir(path):
            img_path = os.path.join(path, img)
            image = load_img(img_path, target_size=(224, 224))
            image = img_to_array(image)
            image = preprocess_input(image)
    
            data.append(image)
            labels.append(category)
    
    #perform one-hot encoding on the labels
    
    lb = LabelBinarizer()
    labels = lb.fit_transform(labels)
    labels = to_categorical(labels)
    
    data = np.array(data, dtype="float32")
    labels = np.array(labels)
    
    (trainX, testX, trainY, testY) = train_test_split(data, labels,
        test_size=0.20, stratify=labels, random_state=42)
    
    #construct the training image generator for data augmentation
    
    aug = ImageDataGenerator(
        rotation_range=20,
        zoom_range=0.15,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.15,
        horizontal_flip=True,
        fill_mode="nearest")
    
    model = tf.keras.models.Sequential([
            tf.keras.layers.Conv2D(32, (3,3), activation='relu', input_shape=(224, 224, 3)),
            tf.keras.layers.MaxPooling2D(2, 2),
    
            tf.keras.layers.Conv2D(64, (3,3), activation='relu'),
            tf.keras.layers.MaxPooling2D(2, 2),
    
            tf.keras.layers.Conv2D(128, (3,3), activation='relu'),
            tf.keras.layers.MaxPooling2D(2, 2),
    
            tf.keras.layers.Conv2D(128, (3,3), activation='relu'),
            tf.keras.layers.MaxPooling2D(2, 2),
    
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(512, activation='relu'),
            tf.keras.layers.Dense(2, activation='softmax')
            ])
    
    #compile our model
    
    print("[INFO] compiling model...")
    opt = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)
    model.compile(loss="binary_crossentropy", optimizer=opt,
        metrics=["accuracy"])
    
    #train the network

    print("[INFO] training network...")
    H = model.fit(
        aug.flow(trainX, trainY, batch_size=BS),
        steps_per_epoch=len(trainX) // BS,
        validation_data=(testX, testY),
        validation_steps=len(testX) // BS,
        epochs=EPOCHS)
    
    #make predictions on the testing set
    
    print("[INFO] evaluating network...")
    predIdxs = model.predict(testX, batch_size=BS)
    
    #for each image in the testing set we need to find the index of the
    #label with the largest predicted probability
    
    predIdxs = np.argmax(predIdxs, axis=1)
    
    #show a nicely formatted classification report
    
    print(classification_report(testY.argmax(axis=1), predIdxs,
        target_names=lb.classes_))
    
    #serialize the model to disk
    
    print("[INFO] saving mask detector model...")
    model.save("Kudos_Uju.model", save_format="h5")
Below is the prediction code:
    import os
    import cv2
    import numpy as np
    from keras.models import model_from_json
    from keras.preprocessing import image
    from keras.preprocessing.image import load_img, img_to_array
    from keras.models import load_model
    import tensorflow as tf
    
    #load model
    
    model = load_model('Kudos_Uju.model')
    
    face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")

    cap = cv2.VideoCapture(0)
    
    while True:
        ret, test_img = cap.read()  #captures a frame; returns a boolean flag and the captured image
        if not ret:
            continue
        gray_img = cv2.cvtColor(test_img, cv2.COLOR_BGR2GRAY)

        faces_detected = face_cascade.detectMultiScale(gray_img, 1.32, 5)
    
        for (x, y, w, h) in faces_detected:
            cv2.rectangle(test_img, (x, y), (x + w, y + h), (255, 0, 0), thickness=7)
            roi_gray = gray_img[y:y + h, x:x + w]  #cropping region of interest i.e. face area from the grayscale frame

            roi_gray = cv2.resize(roi_gray, (224, 224))
            img_pixels = image.img_to_array(roi_gray)
            img_pixels = np.expand_dims(img_pixels, axis=0)
            img_pixels /= 255
    
            predictions = model.predict(img_pixels)
    
            #find max indexed array
    
            max_index = np.argmax(predictions[0])
    
            faces = ('Kudos', 'Uju')
            predicted_face = faces[max_index]
    
            cv2.putText(test_img, predicted_face, (int(x), int(y)), cv2.FONT_HERSHEY_COMPLEX, 0.7, (0,255,0), 2)
    
        resized_img = cv2.resize(test_img, (1000, 700))
        cv2.imshow('Face Prediction', resized_img)

        if cv2.waitKey(10) == ord('q'):  #wait until 'q' key is pressed
            break

    cap.release()
    cv2.destroyAllWindows()
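The ROI handed to the model comes from the grayscale frame, so printing the shapes just before the prediction call (a small diagnostic sketch using the variables from the loop above) shows the mismatch the traceback complains about:

    # illustrative check, placed inside the for-loop right before model.predict
    print(model.input_shape)   # (None, 224, 224, 3) -- what the trained network expects
    print(img_pixels.shape)    # (1, 224, 224, 1)    -- single channel, because roi_gray comes from gray_img
    predictions = model.predict(img_pixels)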
And this is the error I got:
    2021-05-02 12:28:40.152642: W tensorflow/stream_executor/platform/default/dso_loader.cc:59] Could not load dynamic library 'cudart64_101.dll'; dlerror: cudart64_101.dll not found
    2021-05-02 12:28:40.152905: I tensorflow/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.
    2021-05-02 12:28:48.560094: W tensorflow/stream_executor/platform/default/dso_loader.cc:59] Could not load dynamic library 'nvcuda.dll'; dlerror: nvcuda.dll not found
    2021-05-02 12:28:48.560477: W tensorflow/stream_executor/cuda/cuda_driver.cc:312] failed call to cuInit: UNKNOWN ERROR (303)
    2021-05-02 12:28:48.563335: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:169] retrieving CUDA diagnostic information for host: DESKTOP-08KM270
    2021-05-02 12:28:48.563576: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:176] hostname: DESKTOP-08KM270
    2021-05-02 12:28:48.564366: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN)to use the following CPU instructions in performance-critical operations:  AVX2
    To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
    2021-05-02 12:28:48.574722: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0xeeb5525970 initialized for platform Host (this does not guarantee that XLA will be used). Devices:
    2021-05-02 12:28:48.575031: I tensorflow/compiler/xla/service/service.cc:176]   StreamExecutor device (0): Host, Default Version
    Traceback (most recent call last):
      File "C:/Users/CHIJINDU/AppData/Roaming/JetBrains/PyCharmEdu2020.1/scratches/KPred2.py", line 35, in <module>
        predictions = model.predict(img_pixels)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\training.py", line 130, in _method_wrapper
        return method(self, *args, **kwargs)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\training.py", line 1599, in predict
        tmp_batch_outputs = predict_function(iterator)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\eager\def_function.py", line 780, in __call__
        result = self._call(*args, **kwds)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\eager\def_function.py", line 823, in _call
        self._initialize(args, kwds, add_initializers_to=initializers)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\eager\def_function.py", line 696, in _initialize
        self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\eager\function.py", line 2855, in _get_concrete_function_internal_garbage_collected
        graph_function, _, _ = self._maybe_define_function(args, kwargs)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\eager\function.py", line 3213, in _maybe_define_function
        graph_function = self._create_graph_function(args, kwargs)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\eager\function.py", line 3065, in _create_graph_function
        func_graph_module.func_graph_from_py_func(
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\framework\func_graph.py", line 986, in func_graph_from_py_func
        func_outputs = python_func(*func_args, **func_kwargs)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\eager\def_function.py", line 600, in wrapped_fn
        return weak_wrapped_fn().__wrapped__(*args, **kwds)
      File "C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\framework\func_graph.py", line 973, in wrapper
        raise e.ag_error_metadata.to_exception(e)
    ValueError: in user code:
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\training.py:1462 predict_function  *
            return step_function(self, iterator)
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\training.py:1452 step_function  **
            outputs = model.distribute_strategy.run(run_step, args=(data,))
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1211 run
            return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2585 call_for_each_replica
            return self._call_for_each_replica(fn, args, kwargs)
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2945 _call_for_each_replica
            return fn(*args, **kwargs)
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\training.py:1445 run_step  **
            outputs = model.predict_step(data)
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\training.py:1418 predict_step
            return self(x, training=False)
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\base_layer.py:975 __call__
            input_spec.assert_input_compatibility(self.input_spec, inputs,
        C:\Users\CHIJINDU\AppData\Local\Programs\Python\Python38\lib\site-packages\tensorflow\python\keras\engine\input_spec.py:212 assert_input_compatibility
            raise ValueError(
        ValueError: Input 0 of layer sequential is incompatible with the layer: expected axis -1 of input shape to have value 3 but received input with shape [None, 224, 224, 1]
    Process finished with exit code 1