I wanted to train my CNN in Google Colab, but one error keeps appearing at the model.fit call (marked with a comment near the end of the code below). The error is:

InvalidArgumentError: Can not squeeze dim[1], expected a dimension of 1, got 2 [[{{node metrics_2/acc/Squeeze}}]]

My code is below. Could you tell me what I need to change to fix it and what is causing the problem? This is very important for my graduation. Please help.
!pip install tensorflow==1.14.0
!pip install keras==2.2.0
!pip install tensorflow_hub
from __future__ import absolute_import, division, print_function, unicode_literals
import matplotlib.pylab as plt
from tensorflow import keras
# Import TensorFlow and TensorFlow Hub
import tensorflow as tf
import tensorflow_hub as hub
import numpy as np
import pandas as pd
# Helper libraries
import math
import logging
logger = tf.get_logger()
logger.setLevel(logging.ERROR)
!git clone https://github.com/ultralytics/google-images-download
!pip install google_images_download
!wget https://chromedriver.storage.googleapis.com/2.42/chromedriver_linux64.zip && unzip chromedriver_linux64
!apt-get update # to update ubuntu to correctly run apt install
!apt install chromium-chromedriver
!cp /usr/lib/chromium-browser/chromedriver /usr/bin
import sys
sys.path.insert(0,'/usr/lib/chromium-browser/chromedriver')
from selenium import webdriver
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--headless')
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--disable-dev-shm-usage')
wd = webdriver.Chrome('chromedriver',chrome_options=chrome_options)
wd.get("https://www.webite-url.com")
import os
#Mount the drive from Google to save the dataset
from google.colab import drive # this will be our driver
drive.mount('/gdrive')
root = '/gdrive/My Drive/' # if you want to operate on your Google Drive
colab_path = '/gdrive/../content/'
chromedriver_path = '/gdrive/../content/chromedriver'
chromedriver = chromedriver_path
%cd /gdrive/../content/google-images-download
!python bing_scraper.py --search "raw salmon fillet" --limit 80 --download --chromedriver "chromedriver"
chromedriver = chromedriver_path
%cd /gdrive/../content/google-images-download
!python bing_scraper.py --search "raw shrimp" --limit 80 --download --chromedriver "chromedriver"
dataset_path = '/gdrive/../content/google-images-download/images/raw_salmon_fillet/'
dataset = [ dataset_path + img_name for img_name in os.listdir( dataset_path ) ]
dataset[:89] # Peek at the first file paths
%matplotlib inline
plt.imshow( plt.imread(dataset[49]) )
plt.grid(False) # remove grid
data_root='/gdrive/../content/google-images-download/images/'
IMAGE_SHAPE = (224, 224)
TRAINING_DATA_DIR = str(data_root)
print(TRAINING_DATA_DIR)
datagen_kwargs = dict(rescale=1./255, validation_split=.20)
valid_datagen = tf.keras.preprocessing.image.ImageDataGenerator(**datagen_kwargs)
valid_generator = valid_datagen.flow_from_directory(
    TRAINING_DATA_DIR,
    subset="validation",
    shuffle=True,
    target_size=IMAGE_SHAPE
)
train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(**datagen_kwargs)
train_generator = train_datagen.flow_from_directory(
    TRAINING_DATA_DIR,
    subset="training",
    shuffle=True,
    target_size=IMAGE_SHAPE)
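As far as I understand (this is my assumption), flow_from_directory uses class_mode='categorical' by default, so every label it yields is one-hot encoded with length 2 for my two folders. If integer labels were wanted instead, I think the generator could be created like this (just a sketch reusing the same variables as above):
# Sketch (assumption): class_mode='sparse' makes the generator yield integer
# labels (0 or 1) instead of the default one-hot vectors of length 2.
sparse_train_generator = train_datagen.flow_from_directory(
    TRAINING_DATA_DIR,
    subset="training",
    shuffle=True,
    target_size=IMAGE_SHAPE,
    class_mode="sparse")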
image_batch_train, label_batch_train = next(iter(train_generator))
print("Image batch shape: ", image_batch_train.shape)
print("Label batch shape: ", label_batch_train.shape)
dataset_labels = sorted(train_generator.class_indices.items(), key=lambda pair:pair[1])
dataset_labels = np.array([key.title() for key, value in dataset_labels])
print(dataset_labels)
import tensorflow_hub as hub
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(224, 224, 3)),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(512, activation='relu'),
    tf.keras.layers.Dense(2)
])
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
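Since the last layer is Dense(2) and the generators keep their default one-hot labels, I am not sure whether the sparse loss above is the right pairing. Here is a sketch of the alternative compile call I have been considering (not what I originally ran):
# Sketch (assumption): with one-hot labels of shape (batch, 2), the matching
# loss would be the non-sparse CategoricalCrossentropy.
model.compile(optimizer='adam',
              loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])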
# place where the error occurs
steps_per_epoch = np.ceil(train_generator.samples / train_generator.batch_size)
val_steps_per_epoch = np.ceil(valid_generator.samples / valid_generator.batch_size)
EPOCHS = 100
history = model.fit(
    train_generator,
    steps_per_epoch=int(steps_per_epoch),
    epochs=EPOCHS,
    validation_data=valid_generator,
    validation_steps=int(val_steps_per_epoch)
)
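To make the shapes involved easier to see, here is a small check of what the generator yields compared with the model output (a sketch; the exact batch size depends on the generator's default of 32):
# Sketch: compare the label shape coming out of the generator with the model's
# output shape. With the default class_mode the labels are one-hot, so their
# second dimension is 2, which seems to be what the Squeeze node complains about.
images, labels = next(iter(train_generator))
print("labels shape:", labels.shape)        # e.g. (32, 2) for one-hot labels
print("model output:", model.output_shape)  # (None, 2) from the final Dense(2)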