/**
 * Perceptron classifier, trained with stochastic gradient descent.
 * The perceptron is a linear binary classifier, y = sign(w^T·x + b); training
 * solves the optimization problem for the parameters w and b.
 *
 * Loss function (summed over the misclassified samples M = {i : y_i(w^T·x_i + b) <= 0}):
 *   L(w, b) = -Σ_{i ∈ M} y_i (w^T·x_i + b)
 * Taking the gradient on a single misclassified sample gives the SGD updates
 *   w ← w + α · y_i · x_i   and   b ← b + α · y_i.
 *
 * Input:  double[m][n] data  — each row is one sample, an n-dimensional feature vector; m samples in total.
 *         int[m] label       — class label (+1 or -1) of each of the m samples.
 *
 * Output: a trained Perceptron instance.
 *
 * @author huangyongye
 *
 */
public class Perceptron {

    /** Weight vector w, learned by train(). */
    private double[] w;
    /** Bias term b, learned by train(). */
    private double b;
    /** Upper bound on the number of corrective updates (checked once per pass over the data). */
    private int maxStep;

    public Perceptron(int max) {
        maxStep = max;
    }
    /**
     * Trains the model: solves for the weight vector w and the bias b by
     * stochastic gradient descent.
     * @param train_datas feature matrix, one sample per row
     * @param labels class labels (+1 or -1) of the training samples
     * @param alpha learning rate (step size)
     * @throws Exception if the training set is empty
     */
    public void train(double[][] train_datas, int[] labels, double alpha) throws Exception {
        int m = train_datas.length;
        if (m == 0)
            throw new Exception("wrong data");
        int n = train_datas[0].length;
        // Initialize w to the zero vector and b to 0.
        w = new double[n];
        for (int i = 0; i < n; i++)
            w[i] = 0;
        b = 0;
        int step = 0;
        while (step < maxStep) {
            boolean flag = false; // set to true if any sample is misclassified in this pass
            for (int i = 0; i < m; i++) {
                double dist = docMul(w, train_datas[i]) + b;
                // A non-positive margin y_i * (w·x_i + b) means sample i is misclassified.
                if (labels[i] * dist <= 0) {
                    step++;
                    update_w(w, train_datas[i], labels[i], alpha);
                    b = update_b(b, labels[i], alpha);
                    flag = true;
                    System.out.print("Update " + step + ":\n\tw: ");
                    for (double d : w)
                        System.out.print(d + " ");
                    System.out.println("\n\tb: " + b);
                }
            }
            if (!flag) {
                // No mistakes in a full pass: the training data are separated, stop.
                System.out.println("\nConverged after " + step + " updates");
                break;
            }
        }
    }
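
    /*
     * Not part of the original class: a minimal sketch of a helper that counts how
     * many samples still violate the perceptron criterion y_i * (w·x_i + b) <= 0.
     * It assumes train() has already been called so that w and b are initialized;
     * the name countMisclassified is an illustrative choice, not an existing API.
     */
    private int countMisclassified(double[][] datas, int[] labels) throws Exception {
        int errors = 0;
        for (int i = 0; i < datas.length; i++) {
            // Same test as the update condition inside train().
            if (labels[i] * (docMul(w, datas[i]) + b) <= 0)
                errors++;
        }
        return errors;
    }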
    /**
     * Sign function.
     * @param dist value of w·x + b for a sample
     * @return +1 if dist >= 0, otherwise -1
     */
    private int sign(double dist) {
        if (dist >= 0)
            return 1;
        return -1;
    }
    /**
     * Dot product of two vectors.
     * @param a first vector
     * @param b second vector
     * @return the inner product a·b
     * @throws Exception if the two vectors differ in length
     */
    private double docMul(double[] a, double[] b) throws Exception {
        if (a.length != b.length)
            throw new Exception("the two vectors do not have the same length!");
        double sum = 0;
        for (int i = 0; i < a.length; i++) {
            sum += a[i] * b[i];
        }
        return sum;
    }
    /** Gradient step for the bias: b ← b + α · y_i. */
    private double update_b(double b, int labeli, double alpha) {
        return b + alpha * labeli;
    }

    /** Gradient step for the weights, in place: w ← w + α · y_i · x_i. */
    private void update_w(double[] w, double[] ts, int labeli, double alpha) {
        for (int i = 0; i < w.length; i++) {
            w[i] = w[i] + alpha * ts[i] * labeli;
        }
    }
    /**
     * Predicts the label of a single sample.
     * @param test_data feature vector of the sample
     * @return the predicted label, +1 or -1
     * @throws Exception if the sample's dimension does not match w
     */
    public int test_one(double[] test_data) throws Exception {
        if (test_data.length != w.length)
            throw new Exception("the length of the input data is wrong!");
        double dist = docMul(w, test_data) + b;
        return sign(dist);
    }

    /**
     * Predicts the labels of a batch of samples.
     * @param test_datas feature matrix, one sample per row
     * @return the predicted labels, one per row of test_datas
     * @throws Exception if any sample's dimension does not match w
     */
    public int[] test_list(double[][] test_datas) throws Exception {
        int[] labels = new int[test_datas.length];
        for (int i = 0; i < test_datas.length; i++) {
            labels[i] = test_one(test_datas[i]);
        }
        return labels;
    }
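
    /*
     * Not part of the original class: a hedged sketch of an evaluation helper built on
     * test_list(), comparing predictions with known labels. The name "accuracy" and its
     * signature are assumptions made for illustration only.
     */
    public double accuracy(double[][] test_datas, int[] labels) throws Exception {
        int[] predicted = test_list(test_datas);
        int correct = 0;
        for (int i = 0; i < labels.length; i++) {
            if (predicted[i] == labels[i])
                correct++;
        }
        return (double) correct / labels.length;
    }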
    public static void main(String[] args) throws Exception {
        // Toy training set: two positive samples and one negative sample in 2-D.
        double[][] data = { { 3, 3 }, { 4, 3 }, { 1, 1 } };
        int[] labels = { 1, 1, -1 };
        Perceptron ptest = new Perceptron(1000);
        double alpha = 0.1;
        ptest.train(data, labels, alpha);
        double[] w = ptest.w;
        System.out.print("parameter w is : ");
        for (double d : w)
            System.out.print(d + " ");
        System.out.println("\nparameter b: " + ptest.b + "\n");
        // Predict the labels of two new points.
        double[] test_case = { -1, -1 };
        double[] test_case2 = { 5, 5 };
        int label1 = ptest.test_one(test_case);
        System.out.println("label1: " + label1);
        int label2 = ptest.test_one(test_case2);
        System.out.println("label2: " + label2);
    }
}