Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

Strange results while using xphoto::LearningBasedWB

Hello! I'm trying to use the xphoto::LearningBasedWB model from this tutorial (link in the original post), and it works well for images from the dataset. But if I try to use other images, I get results like the attached screenshots:

Code:

def stretch_to_8bit(arr, clip_percentile = 2.5):
    temp = np.percentile(arr, 100 - clip_percentile)
    print(temp)
    arr = np.clip(arr * (255.0 / temp), 0, 255)
    return arr.astype(np.uint8)

def build_model(model_path, input_bit_depth=8, bin_num=0):
    """Create and configure a LearningBasedWB white-balance instance.

    model_path: path to the trained model file.
    input_bit_depth: bit depth of the input images (range max becomes
        2**depth - 1).
    bin_num: histogram bin count; 0 selects a default based on the
        input range (64 for 8-bit, 256 for deeper inputs).
    """
    max_val = (1 << int(input_bit_depth)) - 1
    if bin_num == 0:
        # Pick a sensible default bin count for the value range.
        bin_num = 64 if max_val <= 255 else 256
    wb = cv.xphoto.createLearningBasedWB(model_path)
    wb.setRangeMaxVal(max_val)
    wb.setSaturationThreshold(0.98)
    wb.setHistBinNum(bin_num)
    return wb

def evaluate(image, model):
    """White-balance *image* with *model* and estimate the illuminant.

    Returns (balanced_8bit_image, estimated_illuminant).
    NOTE(review): estimate_illuminant is defined elsewhere in the
    project — presumably compares source and balanced images; confirm.
    """
    eight_bit = stretch_to_8bit(image)
    balanced = model.balanceWhite(eight_bit)
    illuminant = estimate_illuminant(image, balanced, 0.01)
    return stretch_to_8bit(balanced), illuminant

# Driver: build the white-balance model, process one image, display it.
model = build_model(model_path, 8, 64)
# IMREAD_UNCHANGED keeps the file's native bit depth (e.g. 16-bit input).
image = cv.imread(im_path, cv.IMREAD_UNCHANGED)
# Renamed from `eval`, which shadowed the Python builtin of that name.
result, illuminant = evaluate(image, model)
cv.imshow("src", image)
cv.imshow("dst", result)
# NOTE(review): a cv.waitKey() call is needed for the windows to render.

What am I doing wrong?

Strange results while using xphoto::LearningBasedWB

Hello! I'm trying to use the xphoto::LearningBasedWB model from this tutorial (link in the original post), and it works well for images from the dataset. But if I try to use other images, I get results like the attached screenshots:

Code:

def stretch_to_8bit(arr, clip_percentile = 2.5):
    temp = np.percentile(arr, 100 - clip_percentile)
    print(temp)
    arr = np.clip(arr * (255.0 / temp), 0, 255)
    return arr.astype(np.uint8)

def build_model(model_path, input_bit_depth=8, bin_num=0):
    """Create and configure a LearningBasedWB white-balance instance.

    model_path: path to the trained model file.
    input_bit_depth: bit depth of the input images (range max becomes
        2**depth - 1).
    bin_num: histogram bin count; 0 selects a default based on the
        input range (64 for 8-bit, 256 for deeper inputs).
    """
    max_val = (1 << int(input_bit_depth)) - 1
    if bin_num == 0:
        # Pick a sensible default bin count for the value range.
        bin_num = 64 if max_val <= 255 else 256
    wb = cv.xphoto.createLearningBasedWB(model_path)
    wb.setRangeMaxVal(max_val)
    wb.setSaturationThreshold(0.98)
    wb.setHistBinNum(bin_num)
    return wb

def evaluate(image, model):
    """White-balance *image* with *model* and estimate the illuminant.

    Returns (balanced_8bit_image, estimated_illuminant).
    NOTE(review): estimate_illuminant is defined elsewhere in the
    project — presumably compares source and balanced images; confirm.
    """
    eight_bit = stretch_to_8bit(image)
    balanced = model.balanceWhite(eight_bit)
    illuminant = estimate_illuminant(image, balanced, 0.01)
    return stretch_to_8bit(balanced), illuminant

# Driver: build the white-balance model, process one image, display it.
model = build_model(model_path, 8, 64)
# IMREAD_UNCHANGED keeps the file's native bit depth (e.g. 16-bit input).
image = cv.imread(im_path, cv.IMREAD_UNCHANGED)
# Renamed from `eval`, which shadowed the Python builtin of that name.
result, illuminant = evaluate(image, model)
cv.imshow("src", image)
cv.imshow("dst", result)
# NOTE(review): a cv.waitKey() call is needed for the windows to render.

What am I doing wrong?

Strange results while using xphoto::LearningBasedWB

Hello! I'm trying to use the xphoto::LearningBasedWB model from this tutorial (link in the original post), and it works well for images from the dataset. But if I try to use other images, I get results like the attached screenshots:

Code:

def stretch_to_8bit(arr, clip_percentile = 2.5):
    temp = np.percentile(arr, 100 - clip_percentile)
    print(temp)
    arr = np.clip(arr * (255.0 / temp), 0, 255)
    return arr.astype(np.uint8)

def build_model(model_path, input_bit_depth=8, bin_num=0):
    """Create and configure a LearningBasedWB white-balance instance.

    model_path: path to the trained model file.
    input_bit_depth: bit depth of the input images (range max becomes
        2**depth - 1).
    bin_num: histogram bin count; 0 selects a default based on the
        input range (64 for 8-bit, 256 for deeper inputs).
    """
    max_val = (1 << int(input_bit_depth)) - 1
    if bin_num == 0:
        # Pick a sensible default bin count for the value range.
        bin_num = 64 if max_val <= 255 else 256
    wb = cv.xphoto.createLearningBasedWB(model_path)
    wb.setRangeMaxVal(max_val)
    wb.setSaturationThreshold(0.98)
    wb.setHistBinNum(bin_num)
    return wb

def evaluate(image, model):
    """White-balance *image* with *model* and estimate the illuminant.

    Returns (balanced_8bit_image, estimated_illuminant).
    NOTE(review): estimate_illuminant is defined elsewhere in the
    project — presumably compares source and balanced images; confirm.
    """
    eight_bit = stretch_to_8bit(image)
    balanced = model.balanceWhite(eight_bit)
    illuminant = estimate_illuminant(image, balanced, 0.01)
    return stretch_to_8bit(balanced), illuminant

# Driver: build the white-balance model, process one image, display it.
model = build_model(model_path, 8, 64)
# IMREAD_UNCHANGED keeps the file's native bit depth (e.g. 16-bit input).
image = cv.imread(im_path, cv.IMREAD_UNCHANGED)
# Renamed from `eval`, which shadowed the Python builtin of that name.
result, illuminant = evaluate(image, model)
cv.imshow("src", image)
cv.imshow("dst", result)
# NOTE(review): a cv.waitKey() call is needed for the windows to render.

What am I doing wrong?