// Recommendation algorithm (推荐算法)

package com.pan.sparksql

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext

/**
  * Created by pan on 2016/11/11.
  */
/**
  * Builds implicit ratings from the `tt_asc_bo_sale_part` Hive table and
  * trains an ALS collaborative-filtering model on them.
  *
  * Each distinct `item_type` / `part_id` string is assigned a dense integer
  * id so the pairs can be fed to `Rating(user, product, rating)`.
  */
object Demo {

  // Mutable id tables: string key -> dense integer id, assigned in first-seen
  // order starting at 0. The original code used immutable Maps whose `+`
  // results were discarded (and which captured the counters before they were
  // initialized), so no mapping was ever recorded and every row bumped the
  // counters — fixed here with getOrElseUpdate.
  private val itemTypeIds = scala.collection.mutable.HashMap.empty[String, Int]
  private val partIds = scala.collection.mutable.HashMap.empty[String, Int]

  /**
    * Maps a raw (item_type, part_id) row to a Row of
    * (itemTypeId, partIdId, rating = 1), assigning a fresh integer id the
    * first time each string key is seen and reusing it afterwards.
    *
    * NOTE(review): `f` closes over driver-side mutable maps; on a real
    * cluster the updates happen independently per executor, so ids are only
    * globally consistent in local mode — confirm how this job is deployed.
    *
    * @param row a row with the string-convertible columns (item_type, part_id)
    * @return a Row(itemTypeId: Int, partId: Int, 1)
    */
  def f(row: Row): Row = {
    val itemTypeKey = row.get(0).toString
    val partKey = row.get(1).toString
    // getOrElseUpdate both records the mapping and returns the id: a new key
    // gets the next id (current map size), a seen key gets its original id.
    val itemTypeId = itemTypeIds.getOrElseUpdate(itemTypeKey, itemTypeIds.size)
    val partId = partIds.getOrElseUpdate(partKey, partIds.size)
    Row(itemTypeId, partId, 1)
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new HiveContext(sc)
      sqlContext.sql("use edw")
      val ratings = sqlContext.sql("select item_type,part_id from tt_asc_bo_sale_part ")
      // Translate string keys to integer ids, then into MLlib Ratings with a
      // constant implicit rating of 1 per observed (item_type, part_id) pair.
      val newRatings = ratings.map(f)
      val mapRatings = newRatings.map(row => Rating(row.getInt(0), row.getInt(1), row.getInt(2)))
      // rank = 50, iterations = 10, lambda = 0.01
      val model = ALS.train(mapRatings, 50, 10, 0.01)
      val p = model.recommendProducts(100, 10)
      print(p.mkString(";  "))

      // Visual marker in the driver log so the recommendations are easy to
      // spot (kept from the original output).
      (1 to 6).foreach(_ => println("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"))
    } finally {
      // Release cluster resources even if the job fails.
      sc.stop()
    }
  }
}
/*
 * NOTE(review): the text below is web-page residue (comment box / paid
 * "red packet" widget) from the blog this file was copied from; it is not
 * Scala code. Preserved here, commented out, so the file compiles.
 *
 * 评论
 * 添加红包
 *
 * 请填写红包祝福语或标题
 *
 * 红包个数最小为10个
 *
 * 红包金额最低5元
 *
 * 当前余额3.43前往充值 >
 * 需支付:10.00
 * 成就一亿技术人!
 * 领取后你会自动成为博主和红包主的粉丝 规则
 * hope_wisdom
 * 发出的红包
 * 实付
 * 使用余额支付
 * 点击重新获取
 * 扫码支付
 * 钱包余额 0
 *
 * 抵扣说明:
 *
 * 1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
 * 2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。
 *
 * 余额充值
 */