关于论文“guided random walks”权重函数实现的问题

Question regarding the implementation of the "guided random walks" paper's weighting functions

def _minmax_normalize(grad, eps=0.0):
    """Min-max rescale *grad* to [0, 1]; return zeros for a constant field.

    The original code divided by (max - min) unguarded, which yields
    NaN/inf warnings when every gradient value is identical.
    """
    lo = np.amin(grad)
    rng = np.amax(grad) - lo
    if rng == 0:
        return np.zeros_like(grad, dtype=float)
    return (grad - lo) / rng


def compute_weights_3d(data, raw, alpha, beta, eps=1.e-6):
    """Compute inter- and intra-image edge weights for guided random walks.

    Parameters
    ----------
    data : ndarray
        Prior-knowledge image (carries the driver/label information).
    raw : ndarray
        Test image, same shape as ``data``.
    alpha, beta : float
        Sensitivity factors for the intra- and inter-image terms.
    eps : float
        Small offset added to every weight to keep it strictly positive.

    Returns
    -------
    (ndarray, ndarray)
        Negated inter-image weights and negated intra-image weights.
        The negation is undone again downstream in the matrix assembly,
        so the net sign cancels.
    """
    # Inter-image gradients: squared finite differences of the prior image
    # along every axis with extent > 1 (axis list checked against *data*).
    # NOTE(review): data[..., 0] drops the last axis, so the diffs run over
    # a slice of the volume only — confirm the intended data layout; with a
    # genuinely 3-D input and data.shape[2] > 1 the axis-2 diff would fail.
    inter_gradients = np.concatenate(
        [np.diff(data[..., 0], axis=ax).ravel()
         for ax in [2, 1, 0] if data.shape[ax] > 1], axis=0) ** 2
    # Intra-image gradients: squared voxel-wise test-vs-prior difference.
    intra_gradients = (raw - data) ** 2

    # Normalize both gradient fields to [0, 1] (guarded against zero range).
    inter_gradients = _minmax_normalize(inter_gradients)
    intra_gradients = _minmax_normalize(intra_gradients)

    # Gaussian-style weighting; the prior image's std scales both terms.
    inter_scale_factor = -beta / (10 * data.std())
    inter_weights = np.exp(inter_scale_factor * inter_gradients)
    inter_weights += eps

    intra_scale_factor = -alpha / (10 * data.std())
    # BUG FIX: the original assigned this exp() to `inter_weights`,
    # clobbering the inter weights and leaving `intra_weights` undefined,
    # so the next line raised NameError.
    intra_weights = np.exp(intra_scale_factor * intra_gradients)
    intra_weights += eps

    return -inter_weights, -intra_weights

答案是:

  1. 按照论文实现的权重函数并不正确,下面给出正确的实现。
  2. 他们在加权函数中反转了符号,并在矩阵计算中再次反转,因此最终结果没有区别。
def compute_weights(image,mask,alpha, beta, eps=1.e-6):
    # Weight calculation is main difference in multispectral version
    # Original gradient**2 replaced with sum of gradients ** 2
    intra_gradients = np.concatenate([np.diff(image, axis=ax).ravel()
     for ax in [1, 0] ], axis=0) ** 2            # gradient ^2
    # print('intra_gradients shape',intra_gradients.shape)
    # 5-Connected
    inter_gradients = np.concatenate([np.diff(mask, axis=ax).ravel()
    for ax in [1, 0] ], axis=0)**2 
    # inter_gradients = np.concatenate((inter_gradients,(mask-image).ravel()),axis=0)**2  # gradient ^2
    # print('inter_gradients shape',inter_gradients.shape)
    #----------------------------------------
    # 1-Connected
    # inter_gradients = (image - mask)**2
    #----------------------------------------
    # Normalize gradients
    intra_gradients = (intra_gradients - np.amin(intra_gradients))/(np.amax(intra_gradients)- np.amin(intra_gradients))
    inter_gradients = (inter_gradients - np.amin(inter_gradients))/(np.amax(inter_gradients)- np.amin(inter_gradients))
    # print('Intra Gradiesnt_Shape: ',intra_gradients.shape)    
    # All dimensions considered together in this standard deviation
    #------------------------------------------------------
    intra_scale_factor  = -beta  / (10 * image.std())
    intra_weights = np.exp(intra_scale_factor * intra_gradients)
    intra_weights += eps
    #------------------------------------------------------
    inter_scale_factor  = -alpha / (10 * image.std())
    inter_weights = np.exp(inter_scale_factor * inter_gradients)
    inter_weights += eps
    #------------------------------------------------------
    return -intra_weights, inter_weights # [W_old , w_new]