Issue
I am trying to create a CNN for an image dataset containing a total of 2280 images in 20 folders (each folder contains 114 images). I have processed the images, read them from the folders, and created a dictionary. However, when I pass the whole dataset on for splitting into train and test sets, the size of the labels array I get is not correct. The image array has the correct shape, (2280, 56, 56), but all_labels should be (2280, 20), whereas I am getting (2280, 56).
If someone could help me, I would be very grateful. Thank you in advance!
My code is as follows:
import os
import cv2
import numpy as np

def read_image(image_path):
    # Read the image using OpenCV
    image = cv2.imread(image_path)
    # Crop the image (currently disabled)
    #image_crop = image[:, left_crop:image.shape[1] - right_crop]
    # Resize the image (currently disabled)
    #image_resized = cv2.resize(image_crop, (width, height), interpolation=cv2.INTER_AREA)
    # Convert to grayscale
    image_resized = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # Normalize the pixel values
    image_resized = image_resized.astype('float32') / 255.0
    return image_resized
def process_folder(folder_path):
    images = []
    # Get a list of all files (images) in the folder
    image_files = sorted([f for f in os.listdir(folder_path) if os.path.isfile(os.path.join(folder_path, f))])
    for image_file in image_files:
        image_path = os.path.join(folder_path, image_file)
        # Read and preprocess the image
        image_processed = read_image(image_path)
        # Append to the list
        images.append(image_processed)
    # Convert the list of images to a numpy array
    images_array = np.array(images)
    return images_array
def create_folder_dictionary(root_folder_path):
    folder_dictionary = {}
    labels_data = {}
    # Get a list of all folders in the root folder
    folders = sorted([f for f in os.listdir(root_folder_path) if os.path.isdir(os.path.join(root_folder_path, f))])
    for folder in folders:
        folder_path = os.path.join(root_folder_path, folder)
        # Process each folder and store the resulting array in the dictionary
        folder_data = process_folder(folder_path)
        if folder_data is not None:
            folder_dictionary[folder] = folder_data
            labels_data[folder] = folder_data
    return folder_dictionary, labels_data
root_folder_path = r'C:\Users\sumit\Downloads\master thesis\ImageDataset'
result,labels_data = create_folder_dictionary(root_folder_path)
# Extract keys and values
folders = list(result.keys())
folder_data_list = list(result.values())
#print(folders)
#print(len(folders))
#print(len(folder_data_list))
# Print the total number of images
total_images = sum(len(images) for images in folder_data_list)
print(f"Total number of images: {total_images}")
folders_new = list(map(int, folders))
folders_arr = np.array(folders_new)
#print("labels", folders_arr.shape)
folders_data_list_arr = np.array(folder_data_list)
#print("images", folders_data_list_arr.shape[2])
all_images = np.concatenate(folder_data_list, axis=0)
print(all_images.shape)
# Create all_labels
all_labels = np.concatenate([np.full_like(data[:, 0], label) for label, data in zip(labels_data, folder_data_list)], axis=0)
print("Shape of all_labels:", all_labels.shape)
Solution
In this line:
all_labels = np.concatenate([np.full_like(data[:, 0], label) for label, data in zip(labels_data, folder_data_list)], axis=0)
replace np.full_like(data[:, 0], label) with np.full(data.shape[0], label). If you still get an error, also replace label with int(label), since iterating over the labels_data dictionary yields the folder names as strings. The line then becomes
all_labels = np.concatenate([np.full(data.shape[0], int(label)) for label, data in zip(labels_data, folder_data_list)], axis=0)
which should work.
In your version, because each folder's image data has 3 dimensions (number of images, height, width), data[:, 0] has shape (114, 56), so np.full_like produces a (114, 56) block per folder and the concatenated labels end up with shape (2280, 56). With data.shape[0] you instead get one label per image in each folder, which is what you want.
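For reference, here is a minimal sketch of the shape difference, using made-up data (3 folders of 114 images, 56x56, with folder names '0', '1', '2') rather than your actual dataset. It also shows how to turn the resulting integer labels into a one-hot array of shape (number of images, 20) with Keras' to_categorical, if that (2280, 20) shape is what your CNN actually expects:

import numpy as np

# Hypothetical stand-in for your data: 3 folders of 114 grayscale 56x56 images each
folder_data_list = [np.zeros((114, 56, 56), dtype='float32') for _ in range(3)]
labels_data = {'0': None, '1': None, '2': None}  # only the keys (folder names) are used below

# Original approach: data[:, 0] has shape (114, 56), so each folder contributes a (114, 56) block
wrong = np.concatenate([np.full_like(d[:, 0], int(lbl)) for lbl, d in zip(labels_data, folder_data_list)], axis=0)
print(wrong.shape)       # (342, 56)  -> (2280, 56) with your 20 folders

# Fixed approach: one integer label per image
all_labels = np.concatenate([np.full(d.shape[0], int(lbl)) for lbl, d in zip(labels_data, folder_data_list)], axis=0)
print(all_labels.shape)  # (342,)     -> (2280,) with your 20 folders

# Optional: one-hot encode, assuming the folder names map to the integers 0-19
from tensorflow.keras.utils import to_categorical
one_hot = to_categorical(all_labels, num_classes=20)
print(one_hot.shape)     # (342, 20)  -> (2280, 20) for the full dataset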
Answered By - Cem Koçak