我正在尝试在我的数据集上但在有向图上运行连接组件算法。我不希望连接的组件在边缘的两个方向上横向。
这是我的示例代码
import org.apache.log4j.{Level, LogManager}
import org.apache.spark.SparkConf
import org.apache.spark.graphx.Edge
import org.apache.spark.sql._
import org.graphframes._
object CCTest {
def main(args: Array[String]) {
val sparkConf = new SparkConf()
.setMaster("local[2]")
.setAppName("cc_test")
implicit val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
val sc = sparkSession.sparkContext
val vertex = sparkSession.createDataFrame(sc.parallelize(Array(
(1L, "b4fcde907cbd290b7e16", 28),
(2L, "cda389612d6b37674cb1", 27),
(3L, "1a6a6e3fd2daaeeb2a05", 65),
(4L, "9a007eee210a47e58047", 42),
(5L, "e91898d39bf5f8501827", 55),
(6L, "ceab58c59d23549d3f4b", 50),
(12L, "ceab58c59asd3549d3f4b", 50),
(14L, "ceab508c59d23549d3f4b", 55),
(15L, "ceab508c59d23541d3f4b", 51)
))).toDF("id", "similar_hash", "size")
val edges = sparkSession.createDataFrame(sc.parallelize(Array(
Edge(2L, 1L, 0.7f),
Edge(2L, 4L, 0.2f),
Edge(3L, 2L, 0.4f),
Edge(3L, 6L, 0.3f),
Edge(4L, 1L, 0.1f),
Edge(5L, 2L, 0.2f),
Edge(5L, 3L, 0.8f),
Edge(5L, 6L, 0.3f),
Edge(12L, 14L, 1.3f),
Edge(15L, 14L, 1.3f) //< - should not be connected except (14L, 15L)
))).toDF("src", "dst", "attr")
val graph = GraphFrame(vertex, edges)
val cc = graph.connectedComponents.run()
cc.show()
sparkSession.stop()
}
}
结果:
+---+--------------------+----+---------+
| id| similar_hash|size|component|
+---+--------------------+----+---------+
| 6|ceab58c59d23549d3f4b| 50| 1|
| 5|e91898d39bf5f8501827| 55| 1|
| 1|b4fcde907cbd290b7e16| 28| 1|
| 3|1a6a6e3fd2daaeeb2a05| 65| 1|
| 12|ceab58c59asd3549d...| 50| 12|
| 2|cda389612d6b37674cb1| 27| 1|
| 4|9a007eee210a47e58047| 42| 1|
| 14|ceab508c59d23549d...| 55| 12|
| 15|ceab508c59d23541d...| 51| 12| <- should be in separate cluster
+---+--------------------+----+---------+
请问我怎样才能做到这一点?