
As input data I have a 24-bit RGB image and a palette with 2..20 fixed colors. These colors are in no way regularly distributed over the full color range.

Now I have to modify the colors of the input image so that only the colors of the given palette are used - using the palette color that comes closest to the original color (not closest mathematically, but closest for the human visual impression). So what I need is an algorithm that takes an input color and finds the color in the target palette that visually fits it best. Please note: I'm not looking for a dumb compare/difference algorithm, but for something that really incorporates how colors appear to humans!

Since this has surely been done before, and since I don't want to reinvent the wheel once again: is there some sample source code that does this job? In the best case it is an actual piece of code, not a link to a disastrously huge library ;-)

(I guess OpenCV doesn't provide such a function?)

Thanks


2 Answers


You should take a look at the Lab color space. It was designed so that the distance in the color space equals the perceptual distance. So once you have converted your image, you can compute the distances as you did before, but you should get better results from a perceptual point of view. In OpenCV you can use the cvtColor(source, destination, CV_BGR2Lab) function.
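
As a minimal sketch of that conversion (assuming an OpenCV 2.x setup, to match the CV_BGR2Lab constant used here): wrap each color in a 1x1 Mat, convert it, and compare Euclidean distances in Lab:

#include <opencv2/opencv.hpp>
#include <cmath>
#include <iostream>

int main()
{
    // Wrap two BGR colors in 1x1 Mats so cvtColor can convert them
    cv::Mat bgr1(1, 1, CV_8UC3, cv::Scalar(40, 60, 200));  // a reddish tone
    cv::Mat bgr2(1, 1, CV_8UC3, cv::Scalar(40, 80, 220));  // a similar red
    cv::Mat lab1, lab2;
    cv::cvtColor(bgr1, lab1, CV_BGR2Lab);
    cv::cvtColor(bgr2, lab2, CV_BGR2Lab);

    // Euclidean distance in Lab roughly matches perceived difference (CIE76)
    cv::Vec3b a = lab1.at<cv::Vec3b>(0, 0), b = lab2.at<cv::Vec3b>(0, 0);
    double d = std::sqrt(std::pow((double)a[0] - b[0], 2)
                       + std::pow((double)a[1] - b[1], 2)
                       + std::pow((double)a[2] - b[2], 2));
    std::cout << "Lab distance: " << d << std::endl;
    return 0;
}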

Another idea would be to use dithering. The idea is to mix in missing colors using neighboring pixels. A popular algorithm for this is Floyd-Steinberg dithering.
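
For reference, Floyd-Steinberg distributes the quantization error of the current pixel (marked *) to its not-yet-processed neighbors with these fixed weights (out of 16):

            *     7/16
    3/16  5/16    1/16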

Here is an example of mine, where I combined an optimized palette computed by k-means with the Lab color space and Floyd-Steinberg dithering:

#include <opencv2/opencv.hpp>
#include <iostream>

using namespace cv;
using namespace std;

cv::Mat floydSteinberg(cv::Mat img, cv::Mat palette);
cv::Vec3b findClosestPaletteColor(cv::Vec3b color, cv::Mat palette);

int main(int argc, char** argv)
{
    // Number of clusters (colors on result image)
    int nrColors = 18;

    cv::Mat imgBGR = imread(argv[1],1);

    cv::Mat img;
    cvtColor(imgBGR, img, CV_BGR2Lab);


    cv::Mat colVec = img.reshape(1, img.rows*img.cols); // change to a Nx3 column vector

    cv::Mat colVecD;
    colVec.convertTo(colVecD, CV_32FC3, 1.0); // convert to floating point


    cv::Mat labels, centers;
    cv::kmeans(colVecD, nrColors, labels,
            cv::TermCriteria(CV_TERMCRIT_ITER, 100, 0.1),
            3, cv::KMEANS_PP_CENTERS, centers); // compute k mean centers

    // replace pixels by their corresponding cluster centers
    cv::Mat imgPosterized = img.clone();
    for(int i = 0; i < img.rows; i++ )
        for(int j = 0; j < img.cols; j++ )
            for(int k = 0; k < 3; k++)
                imgPosterized.at<Vec3b>(i,j)[k] = centers.at<float>(labels.at<int>(j+img.cols*i),k);

    // convert palette back to uchar
    cv::Mat palette;
    centers.convertTo(palette,CV_8UC3,1.0);

    // call floyd steinberg dithering algorithm
    cv::Mat fs = floydSteinberg(img, palette);

    cv::Mat imgPosterizedBGR, fsBGR;
    cvtColor(imgPosterized, imgPosterizedBGR, CV_Lab2BGR);
    cvtColor(fs, fsBGR, CV_Lab2BGR);


    imshow("input",imgBGR); // original image
    imshow("result",imgPosterizedBGR); // posterized image
    imshow("fs",fsBGR); // floyd steinberg dithering
    waitKey();

  return 0;
}

cv::Mat floydSteinberg(cv::Mat imgOrig, cv::Mat palette)
{
    cv::Mat img = imgOrig.clone();
    cv::Mat resImg = img.clone();
    for(int i = 0; i < img.rows; i++ )
        for(int j = 0; j < img.cols; j++ )
        {
            cv::Vec3b newpixel = findClosestPaletteColor(img.at<Vec3b>(i,j), palette);
            resImg.at<Vec3b>(i,j) = newpixel;

            for(int k=0;k<3;k++)
            {
                int quant_error = (int)img.at<Vec3b>(i,j)[k] - newpixel[k];
                // Standard Floyd-Steinberg weights: push the error only onto
                // pixels that have not been processed yet (right and next row)
                if(j+1 < img.cols)
                    img.at<Vec3b>(i,j+1)[k] = min(255,max(0,(int)img.at<Vec3b>(i,j+1)[k] + (7 * quant_error) / 16));
                if(i+1 < img.rows && j-1 >= 0)
                    img.at<Vec3b>(i+1,j-1)[k] = min(255,max(0,(int)img.at<Vec3b>(i+1,j-1)[k] + (3 * quant_error) / 16));
                if(i+1 < img.rows)
                    img.at<Vec3b>(i+1,j)[k] = min(255,max(0,(int)img.at<Vec3b>(i+1,j)[k] + (5 * quant_error) / 16));
                if(i+1 < img.rows && j+1 < img.cols)
                    img.at<Vec3b>(i+1,j+1)[k] = min(255,max(0,(int)img.at<Vec3b>(i+1,j+1)[k] + (1 * quant_error) / 16));
            }
        }
    return resImg;
}

float vec3bDist(cv::Vec3b a, cv::Vec3b b) 
{
    return sqrt( pow((float)a[0]-b[0],2) + pow((float)a[1]-b[1],2) + pow((float)a[2]-b[2],2) );
}

cv::Vec3b findClosestPaletteColor(cv::Vec3b color, cv::Mat palette)
{
    int minI = 0;
    float minDistance = vec3bDist(color, palette.at<Vec3b>(0));
    for (int i = 1; i < palette.rows; i++)
    {
        float distance = vec3bDist(color, palette.at<Vec3b>(i));
        if (distance < minDistance)
        {
            minDistance = distance;
            minI = i;
        }
    }
    return palette.at<Vec3b>(minI);
}
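
Since the question starts from a fixed palette rather than a computed one, you could skip the k-means step entirely. A hedged sketch (the helper name makeLabPalette is mine, not part of the code above): convert your given BGR palette entries to Lab and feed them to the floydSteinberg function:

// Sketch: build a Lab palette from fixed BGR colors, then dither against it.
// One entry per row, compatible with findClosestPaletteColor() above.
cv::Mat makeLabPalette(const std::vector<cv::Vec3b>& bgrColors)
{
    cv::Mat paletteBGR((int)bgrColors.size(), 1, CV_8UC3);
    for (int i = 0; i < paletteBGR.rows; i++)
        paletteBGR.at<cv::Vec3b>(i, 0) = bgrColors[i];

    cv::Mat paletteLab;
    cv::cvtColor(paletteBGR, paletteLab, CV_BGR2Lab); // palette in Lab
    return paletteLab;
}

// Usage: convert the image to Lab as in main() above, then
//   cv::Mat fs = floydSteinberg(imgLab, makeLabPalette(myFixedColors));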
answered 2013-08-22T14:39:46.887

Try this algorithm (it reduces the number of colors, but it computes the palette by itself):

#include <opencv2/opencv.hpp>
#include "opencv2/legacy/legacy.hpp"
#include <vector>
#include <list>
#include <iostream>

using namespace cv;
using namespace std;

int main(void)
{
    // Number of clusters (colors on result image)
    int NrGMMComponents = 32;
    // Source file name
    string fname="D:\\ImagesForTest\\tools.jpg";

    cv::Mat SampleImg = imread(fname,1);

    //cv::GaussianBlur(SampleImg,SampleImg,Size(5,5),3);

    int SampleImgHeight = SampleImg.rows;
    int SampleImgWidth  = SampleImg.cols;

    // Pick datapoints
    vector<Vec3d> ListSamplePoints;

    for (int y=0; y<SampleImgHeight; y++)
    {
        for (int x=0; x<SampleImgWidth; x++)
        {
            // Get pixel color at that position
            Vec3b bgrPixel = SampleImg.at<Vec3b>(y, x);

            uchar b = bgrPixel.val[0];
            uchar g = bgrPixel.val[1];
            uchar r = bgrPixel.val[2];
            if(rand()%25==0) // sample only every 25th pixel on average
            {
                ListSamplePoints.push_back(Vec3d(b,g,r));
            }
        } // for (x)
    } // for (y)


    // Form training matrix
    Mat labels;

    int NrSamples = ListSamplePoints.size();    
    Mat samples( NrSamples, 3, CV_32FC1 );

    for (int s=0; s<NrSamples; s++)
    {
        Vec3d v = ListSamplePoints.at(s);
        samples.at<float>(s,0) = (float) v[0];
        samples.at<float>(s,1) = (float) v[1];
        samples.at<float>(s,2) = (float) v[2];
    }    

    cout << "Learning to represent the sample distributions with" << NrGMMComponents << "gaussians." << endl;

    // Algorithm parameters
    CvEMParams params;
    params.covs      = NULL;
    params.means     = NULL;
    params.weights   = NULL;
    params.probs     = NULL;
    params.nclusters = NrGMMComponents;
    params.cov_mat_type       = CvEM::COV_MAT_GENERIC; // DIAGONAL, GENERIC, SPHERICAL
    params.start_step         = CvEM::START_AUTO_STEP;
    params.term_crit.max_iter = 1500;
    params.term_crit.epsilon  = 0.001;
    params.term_crit.type     = CV_TERMCRIT_ITER|CV_TERMCRIT_EPS;
    //params.term_crit.type     = CV_TERMCRIT_ITER;

    // Train
    cout << "Started GMM training" << endl;
    CvEM em_model;
    em_model.train( samples, Mat(), params, &labels );
    cout << "Finished GMM training" << endl;

    // Result image
    Mat img  = Mat::zeros( Size( SampleImgWidth, SampleImgHeight ), CV_8UC3 );

    // Ask classifier for each pixel
    Mat sample( 1, 3, CV_32FC1 );
    Mat means;
    means=em_model.getMeans();
    for(int i = 0; i < img.rows; i++ )
    {
        for(int j = 0; j < img.cols; j++ )
        {
            Vec3b v=SampleImg.at<Vec3b>(i,j);
            sample.at<float>(0,0) = (float) v[0];
            sample.at<float>(0,1) = (float) v[1];
            sample.at<float>(0,2) = (float) v[2];

            int response = cvRound(em_model.predict( sample ));

            img.at<Vec3b>(i,j)[0]=means.at<double>(response,0);
            img.at<Vec3b>(i,j)[1]=means.at<double>(response,1);
            img.at<Vec3b>(i,j)[2]=means.at<double>(response,2);

        }
    }

    img.convertTo(img,CV_8UC3);
    imshow("result",img);
    waitKey();
    // Save the result
    cv::imwrite("result.png", img);

    return 0;
}

PS: for perceptual color distance measurement it is better to use the L*a*b* color space. There are converters for this purpose in OpenCV. For clustering you can use k-means with defined cluster centers (your palette entries). After clustering you will get points with indices of palette entries.
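
A minimal sketch of what this PS describes (the function name quantizeToPalette and the palette handling are illustrative assumptions, not part of the code above): convert both the image and the fixed palette to L*a*b*, then assign every pixel to its nearest palette entry:

#include <opencv2/opencv.hpp>
#include <vector>
#include <cfloat>

// Replace every pixel by the palette entry nearest to it in L*a*b*
cv::Mat quantizeToPalette(const cv::Mat& imgBGR, const std::vector<cv::Vec3b>& paletteBGR)
{
    cv::Mat imgLab;
    cv::cvtColor(imgBGR, imgLab, CV_BGR2Lab);

    // Convert the fixed palette to Lab as well (one entry per row)
    cv::Mat palBGR((int)paletteBGR.size(), 1, CV_8UC3);
    for (int i = 0; i < palBGR.rows; i++)
        palBGR.at<cv::Vec3b>(i, 0) = paletteBGR[i];
    cv::Mat palLab;
    cv::cvtColor(palBGR, palLab, CV_BGR2Lab);

    cv::Mat result = imgBGR.clone();
    for (int y = 0; y < imgLab.rows; y++)
        for (int x = 0; x < imgLab.cols; x++)
        {
            cv::Vec3b p = imgLab.at<cv::Vec3b>(y, x);
            int best = 0;
            double bestDist = DBL_MAX;
            for (int k = 0; k < palLab.rows; k++)
            {
                cv::Vec3b q = palLab.at<cv::Vec3b>(k, 0);
                double d = (double)(p[0] - q[0]) * (p[0] - q[0])
                         + (double)(p[1] - q[1]) * (p[1] - q[1])
                         + (double)(p[2] - q[2]) * (p[2] - q[2]);
                if (d < bestDist) { bestDist = d; best = k; }
            }
            result.at<cv::Vec3b>(y, x) = paletteBGR[best]; // write original BGR entry
        }
    return result;
}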

answered 2013-08-22T12:25:00.290