Spark GraphX:从 txt 文件中读取数据并构建图

程序功能:导入顶点以及边的数据,分别生成顶点 RDD 和边 RDD,再由二者构建图。

import org.apache.spark._
import org.apache.spark.graphx._
// To make some of the examples work we will also need RDD
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
// 字符串转码,解决乱码问题
import java.nio.charset.StandardCharsets

import scala.io.Source
import collection.mutable.ArrayBuffer

object SimpleApp {

  // Fallback input locations, used when no paths are passed on the command line.
  private val DefaultVertexPath =
    "/vagrant/dev_program/spark_test/graphx/python_process_guijing/zygj_vertice_attr.txt"
  private val DefaultEdgePath =
    "/vagrant/dev_program/spark_test/graphx/python_process_guijing/zygj_edge_attr.txt"

  /**
   * Reads every line of the UTF-8 file at `path` through `parse`, guaranteeing
   * the underlying source is closed even if parsing fails.
   *
   * @param path  path of the tab-separated input file
   * @param parse function turning one raw line into a parsed record
   * @return all parsed records, in file order
   */
  private def readLines[A](path: String)(parse: String => A): Seq[A] = {
    val source = Source.fromFile(path, "UTF-8") // explicit encoding avoids mojibake
    try source.getLines().map(parse).toVector   // toVector forces reading before close
    finally source.close()                      // fix: original never closed its sources
  }

  /**
   * Entry point: loads vertex and edge data from tab-separated text files,
   * builds the vertex RDD and edge RDD, constructs a GraphX graph, and prints
   * its vertices and triplets.
   *
   * Optional arguments (backward compatible — defaults match the original
   * hard-coded paths): args(0) = vertex file path, args(1) = edge file path.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Simple Application")
    val sc = new SparkContext(conf)
    try {
      val vertexPath = if (args.length > 0) args(0) else DefaultVertexPath
      val edgePath = if (args.length > 1) args(1) else DefaultEdgePath

      // Each vertex line is "<id>\t<attr>".
      val vertexArr = readLines(vertexPath) { line =>
        val f = line.split("\t")
        (f(0).toLong, f(1))
      }
      println(vertexArr.length)

      // Each edge line is "<srcId>\t<dstId>\t<attr>".
      val edgeArr = readLines(edgePath) { line =>
        val f = line.split("\t")
        Edge(f(0).toLong, f(1).toLong, f(2))
      }

      // Vertex RDD
      val users: RDD[(VertexId, String)] = sc.parallelize(vertexArr)

      // Edge RDD
      val relationships: RDD[Edge[String]] = sc.parallelize(edgeArr)

      // Fix: the original comment promised a default vertex attribute (so edges
      // referencing a missing vertex do not break the graph) but never supplied
      // one. Graph's third argument is that defaultVertexAttr.
      val defaultUser = "Missing"
      val graph = Graph(users, relationships, defaultUser)

      // Print the graph's vertices and triplets.
      graph.vertices.collect().foreach(println(_))
      graph.triplets
        .map(triplet => triplet.srcAttr + "----->" + triplet.dstAttr + "    attr:" + triplet.attr)
        .collect()
        .foreach(println(_))
    } finally {
      sc.stop() // fix: release the SparkContext deterministically
    }
  }
}

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章