package com.pan.sparksql

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext

import scala.collection.mutable

/**
 * Created by pan on 2016/11/11.
 */
object Demo {
  // Lookup tables that assign a running integer id to each distinct string key.
  // Note: because f runs inside Spark tasks, these maps are only guaranteed to be
  // consistent in local mode; on a cluster, each executor gets its own copy, so
  // zipWithUniqueId or ML's StringIndexer would be a safer way to build the ids.
  val map1 = mutable.Map[String, Int]()
  val map2 = mutable.Map[String, Int]()
  var i = 0
  var j = 0

  // Turn a (item_type, part_id) row of strings into a Row of integer ids
  // with an implicit rating of 1.
  def f(row: Row): Row = {
    val r0 = row.get(0).toString
    val r1 = row.get(1).toString
    if (!map1.contains(r0)) {
      i += 1
      map1 += (r0 -> i)
    }
    if (!map2.contains(r1)) {
      j += 1
      map2 += (r1 -> j)
    }
    Row(map1(r0), map2(r1), 1)
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    val sc = new SparkContext(conf)
    val sqlContext = new HiveContext(sc)

    sqlContext.sql("use edw")
    val ratings = sqlContext.sql("select item_type, part_id from tt_asc_bo_sale_part")

    // Map the string keys to integer ids, then build Rating(user, product, rating) objects.
    val newRatings = ratings.map(f)
    val mapRatings = newRatings.map(row => Rating(row.getInt(0), row.getInt(1), row.getInt(2)))

    // Train an ALS model: rank = 50, 10 iterations, lambda = 0.01.
    val model = ALS.train(mapRatings, 50, 10, 0.01)

    // Recommend the top 10 products for user 100 and print them.
    // (The original post also printed a long "aaaa..." marker line six times here,
    // presumably to make the output easy to spot in the job logs.)
    val p = model.recommendProducts(100, 10)
    println(p.mkString("; "))
  }
}
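The code above stops at printing the raw recommendations. A common follow-up, not part of the original post, is to re-score the training ratings with the trained model and compute the mean squared error as a rough sanity check. The sketch below is illustrative only and assumes the model and mapRatings values defined in main above.

// Hypothetical follow-up, assuming `model` and `mapRatings` from main() above.
// Re-predict every (user, product) pair seen in training and measure the squared error.
val userProducts = mapRatings.map { case Rating(user, product, _) => (user, product) }
val predictions = model.predict(userProducts)
  .map { case Rating(user, product, rating) => ((user, product), rating) }
val ratesAndPreds = mapRatings
  .map { case Rating(user, product, rating) => ((user, product), rating) }
  .join(predictions)
val mse = ratesAndPreds
  .map { case (_, (actual, predicted)) => math.pow(actual - predicted, 2) }
  .mean()
println(s"Training MSE = $mse")

Because every training rating here is the implicit value 1, the MSE mainly shows whether the factorization reproduces the observed pairs; for genuinely implicit feedback, ALS.trainImplicit would be the more appropriate MLlib entry point.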
Recommendation algorithms