I am trying to standardize and then normalize an image using NumPy and OpenCV as shown below; however, the image that matplotlib displays after the transformation looks identical to the original. Why is that?
Code
%matplotlib inline
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
IMG_SIZE = 256
def show_img(img):
    # OpenCV loads images in BGR order; convert to RGB so matplotlib shows the colours correctly
    img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    plt.imshow(img_rgb)
    plt.show()
img = cv2.imread('/content/drive/My Drive/ai/test_images/test_image3.tif')
img = cv2.resize(img, (IMG_SIZE, IMG_SIZE))
show_img(img)
img = img.astype('float32')
# Standardize: subtract the global mean and divide by the global standard deviation
img = (img - img.mean(axis=(0, 1, 2), keepdims=True)) / img.std(axis=(0, 1, 2), keepdims=True)
# Min-max normalize the result back into the [0, 1] range for display
img = cv2.normalize(img, None, 0, 1, cv2.NORM_MINMAX)
show_img(img)
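For reference, here is a minimal, self-contained sketch of the same standardize-then-normalize steps on a synthetic random array, so no image file is needed; the array shape and the printed statistics are only for illustration, in case it helps to see exactly what the two steps do to the pixel values.
import cv2
import numpy as np

# Synthetic stand-in for the resized image: random uint8 "pixels" as float32
img = np.random.randint(0, 256, size=(256, 256, 3), dtype=np.uint8).astype('float32')
print('original      min/max:', img.min(), img.max())

# Standardize: zero mean, unit variance over the whole array
std_img = (img - img.mean()) / img.std()
print('standardized  min/max:', std_img.min(), std_img.max())

# Min-max normalize back into [0, 1]
norm_img = cv2.normalize(std_img, None, 0, 1, cv2.NORM_MINMAX)
print('normalized    min/max:', norm_img.min(), norm_img.max())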