
I want to use the connected components algorithm with Spark 2.0, but packaging with sbt 0.13.15 fails with dependency errors. What is the reason behind this? The build.sbt file looks like this:

lazy val root = (project in file(".")).
  settings(
    name := "Networkana",
    version := "1.0",
    scalaVersion := "2.11.8",
    libraryDependencies ++= Seq(
      "org.apache.spark" %% "spark-core" % "2.1.2" % "provided",
      "org.apache.spark" %% "spark-sql" % "2.1.2" % "provided",
      "org.apache.spark" %% "spark-hive" % "2.1.2" % "provided",
      "org.apache.spark" %% "spark-hive-thriftserver" % "2.1.2" % "provided",
      "org.apache.spark" %% "spark-mllib" % "2.1.2" % "provided",
      "graphframes" % "graphframes" % "0.5.0-spark2.1-s_2.11",
      "org.apache.spark" %% "spark-graphx" % "2.1.0",
      "joda-time" % "joda-time" % "2.7",
      "org.joda" % "joda-convert" % "1.7"
    )
    //resolvers ++= Resolver.url("SparkPackages", url("https://dl.bintray.com/spark-packages/maven/"))
  )

Spark 2.0.0 fails:

[warn] Executing in batch mode.
[warn]   For better performance, hit [ENTER] to switch to interactive mode, or
[warn]   consider launching sbt without any commands, or explicitly passing 'shell'
[info] Loading project definition from /data/mop_strategy_group/machine_learning/AlgorithmsCodes/Networkana/project
[info] Updating {file:/data/mop_strategy_group/machine_learning/AlgorithmsCodes/Networkana/project/}networkana-build...
[info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] Done updating.
[info] Set current project to Networkana (in build file:/data/mop_strategy_group/machine_learning/AlgorithmsCodes/Networkana/)
[info] Updating {file:/data/mop_strategy_group/machine_learning/AlgorithmsCodes/Networkana/}root...
[info] Resolving org.apache.spark#spark-graphx_2.11;2.1.0 ...
[error] Server access Error: Connection refused (Connection refused) url=https://repo1.maven.org/maven2/org/apache/spark/spark-graphx_2.11/2.1.0/spark-graphx_2.11-2.1.0.pom
[warn]  module not found: org.apache.spark#spark-graphx_2.11;2.1.0
[warn] ==== local: tried
[warn]   /data/mop_strategy_group/.ivy2/local/org.apache.spark/spark-graphx_2.11/2.1.0/ivys/ivy.xml
[warn] ==== public: tried
[warn]   https://repo1.maven.org/maven2/org/apache/spark/spark-graphx_2.11/2.1.0/spark-graphx_2.11-2.1.0.pom
[warn] ==== local-preloaded-ivy: tried
[warn]   /data/mop_strategy_group/.sbt/preloaded/org.apache.spark/spark-graphx_2.11/2.1.0/ivys/ivy.xml
[warn] ==== local-preloaded: tried
[warn]   file:////data/mop_strategy_group/.sbt/preloaded/org/apache/spark/spark-graphx_2.11/2.1.0/spark-graphx_2.11-2.1.0.pom
[info] Resolving org.apache.spark#spark-core_2.11;2.1.2 ...
[error] Server access Error: Received fatal alert: protocol_version url=https://repo1.maven.org/maven2/org/apache/spark/spark-core_2.11/2.1.2/spark-core_2.11-2.1.2.pom
[warn]  module not found: org.apache.spark#spark-core_2.11;2.1.2
[warn] ==== local: tried
[warn]   /data/mop_strategy_group/.ivy2/local/org.apache.spark/spark-core_2.11/2.1.2/ivys/ivy.xml
[warn] ==== public: tried
[warn]   https://repo1.maven.org/maven2/org/apache/spark/spark-core_2.11/2.1.2/spark-core_2.11-2.1.2.pom
[warn] ==== local-preloaded-ivy: tried
[warn]   /data/mop_strategy_group/.sbt/preloaded/org.apache.spark/spark-core_2.11/2.1.2/ivys/ivy.xml
[warn] ==== local-preloaded: tried
[warn]   file:////data/mop_strategy_group/.sbt/preloaded/org/apache/spark/spark-core_2.11/2.1.2/spark-core_2.11-2.1.2.pom
[info] Resolving org.apache.spark#spark-sql_2.11;2.1.2 ...
[error] Server access Error: Received fatal alert: protocol_version url=https://repo1.maven.org/maven2/org/apache/spark/spark-sql_2.11/2.1.2/spark-sql_2.11-2.1.2.pom
[warn]  module not found: org.apache.spark#spark-sql_2.11;2.1.2
[warn] ==== local: tried
[warn]   /data/mop_strategy_group/.ivy2/local/org.apache.spark/spark-sql_2.11/2.1.2/ivys/ivy.xml
[warn] ==== public: tried
[warn]   https://repo1.maven.org/maven2/org/apache/spark/spark-sql_2.11/2.1.2/spark-sql_2.11-2.1.2.pom
[warn] ==== local-preloaded-ivy: tried
[warn]   /data/mop_strategy_group/.sbt/preloaded/org.apache.spark/spark-sql_2.11/2.1.2/ivys/ivy.xml
[warn] ==== local-preloaded: tried
[warn]   file:////data/mop_strategy_group/.sbt/preloaded/org/apache/spark/spark-sql_2.11/2.1.2/spark-sql_2.11-2.1.2.pom
[info] Resolving org.apache.spark#spark-hive_2.11;2.1.2 ...
[error] Server access Error: Received fatal alert: protocol_version url=https://repo1.maven.org/maven2/org/apache/spark/spark-hive_2.11/2.1.2/spark-hive_2.11-2.1.2.pom
[warn]  module not found: org.apache.spark#spark-hive_2.11;2.1.2
[warn] ==== local: tried
[warn]   /data/mop_strategy_group/.ivy2/local/org.apache.spark/spark-hive_2.11/2.1.2/ivys/ivy.xml
[warn] ==== public: tried
[warn]   https://repo1.maven.org/maven2/org/apache/spark/spark-hive_2.11/2.1.2/spark-hive_2.11-2.1.2.pom
[warn] ==== local-preloaded-ivy: tried
[warn]   /data/mop_strategy_group/.sbt/preloaded/org.apache.spark/spark-hive_2.11/2.1.2/ivys/ivy.xml
[warn] ==== local-preloaded: tried
[warn]   file:////data/mop_strategy_group/.sbt/preloaded/org/apache/spark/spark-hive_2.11/2.1.2/spark-hive_2.11-2.1.2.pom
[info] Resolving org.apache.spark#spark-hive-thriftserver_2.11;2.1.2 ...
[error] Server access Error: Received fatal alert: protocol_version url=https://repo1.maven.org/maven2/org/apache/spark/spark-hive-thriftserver_2.11/2.1.2/spark-hive-thriftserver_2.11-2.1.2.pom
[warn]  module not found: org.apache.spark#spark-hive-thriftserver_2.11;2.1.2
[warn] ==== local: tried
[warn]   /data/mop_strategy_group/.ivy2/local/org.apache.spark/spark-hive-thriftserver_2.11/2.1.2/ivys/ivy.xml
[warn] ==== public: tried
[warn]   https://repo1.maven.org/maven2/org/apache/spark/spark-hive-thriftserver_2.11/2.1.2/spark-hive-thriftserver_2.11-2.1.2.pom
[warn] ==== local-preloaded-ivy: tried
[warn]   /data/mop_strategy_group/.sbt/preloaded/org.apache.spark/spark-hive-thriftserver_2.11/2.1.2/ivys/ivy.xml
[warn] ==== local-preloaded: tried
[warn]   file:////data/mop_strategy_group/.sbt/preloaded/org/apache/spark/spark-hive-thriftserver_2.11/2.1.2/spark-hive-thriftserver_2.11-2.1.2.pom
[info] Resolving org.apache.spark#spark-mllib_2.11;2.1.2 ...
[error] Server access Error: Received fatal alert: protocol_version url=https://repo1.maven.org/maven2/org/apache/spark/spark-mllib_2.11/2.1.2/spark-mllib_2.11-2.1.2.pom
[warn]  module not found: org.apache.spark#spark-mllib_2.11;2.1.2
[warn] ==== local: tried
[warn]   /data/mop_strategy_group/.ivy2/local/org.apache.spark/spark-mllib_2.11/2.1.2/ivys/ivy.xml
[warn] ==== public: tried
[warn]   https://repo1.maven.org/maven2/org/apache/spark/spark-mllib_2.11/2.1.2/spark-mllib_2.11-2.1.2.pom
[warn] ==== local-preloaded-ivy: tried
[warn]   /data/mop_strategy_group/.sbt/preloaded/org.apache.spark/spark-mllib_2.11/2.1.2/ivys/ivy.xml
[warn] ==== local-preloaded: tried
[warn]   file:////data/mop_strategy_group/.sbt/preloaded/org/apache/spark/spark-mllib_2.11/2.1.2/spark-mllib_2.11-2.1.2.pom
[info] Resolving jline#jline;2.12.1 ...
[warn]  ::::::::::::::::::::::::::::::::::::::::::::::
[warn]  ::          UNRESOLVED DEPENDENCIES         ::
[warn]  ::::::::::::::::::::::::::::::::::::::::::::::
[warn]  :: org.apache.spark#spark-graphx_2.11;2.1.0: not found
[warn]  :: org.apache.spark#spark-core_2.11;2.1.2: not found
[warn]  :: org.apache.spark#spark-sql_2.11;2.1.2: not found
[warn]  :: org.apache.spark#spark-hive_2.11;2.1.2: not found
[warn]  :: org.apache.spark#spark-hive-thriftserver_2.11;2.1.2: not found
[warn]  :: org.apache.spark#spark-mllib_2.11;2.1.2: not found
[warn]  ::::::::::::::::::::::::::::::::::::::::::::::
[warn] 
[warn]  Note: Unresolved dependencies path:
[warn]      org.apache.spark:spark-mllib_2.11:2.1.2 (/data/mop_strategy_group/wuzj_spark/Networkana/build.sbt#L6)
[warn]        +- networkana:networkana_2.11:1.0 
[warn]      org.apache.spark:spark-graphx_2.11:2.1.0 (/data/mop_strategy_group/wuzj_spark/Networkana/build.sbt#L6)
[warn]        +- networkana:networkana_2.11:1.0
[warn]      org.apache.spark:spark-hive_2.11:2.1.2 (/data/mop_strategy_group/wuzj_spark/Networkana/build.sbt#L6)
[warn]        +- networkana:networkana_2.11:1.0
[warn]      org.apache.spark:spark-sql_2.11:2.1.2 (/data/mop_strategy_group/wuzj_spark/Networkana/build.sbt#L6)
[warn]        +- networkana:networkana_2.11:1.0
[warn]      org.apache.spark:spark-core_2.11:2.1.2 (/data/mop_strategy_group/wuzj_spark/Networkana/build.sbt#L6)
[warn]        +- networkana:networkana_2.11:1.0
[warn]      org.apache.spark:spark-hive-thriftserver_2.11:2.1.2 (/data/mop_strategy_group/wuzj_spark/Networkana/build.sbt#L6)
[warn]        +- networkana:networkana_2.11:1.0
sbt.ResolveException: unresolved dependency: org.apache.spark#spark-graphx_2.11;2.1.0: not found
unresolved dependency: org.apache.spark#spark-core_2.11;2.1.2: not found
unresolved dependency: org.apache.spark#spark-sql_2.11;2.1.2: not found
unresolved dependency: org.apache.spark#spark-hive_2.11;2.1.2: not found
unresolved dependency: org.apache.spark#spark-hive-thriftserver_2.11;2.1.2: not found
unresolved dependency: org.apache.spark#spark-mllib_2.11;2.1.2: not found
at sbt.IvyActions$.sbt$IvyActions$$resolve(IvyActions.scala:313)
at sbt.IvyActions$$anonfun$updateEither$1.apply(IvyActions.scala:191)
at sbt.IvyActions$$anonfun$updateEither$1.apply(IvyActions.scala:168)
at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:156)
at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:156)
at sbt.IvySbt$$anonfun$withIvy$1.apply(Ivy.scala:133)
at sbt.IvySbt.sbt$IvySbt$$action$1(Ivy.scala:57)
at sbt.IvySbt$$anon$4.call(Ivy.scala:65)
at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:93)
at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:78)
at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:97)
at xsbt.boot.Using$.withResource(Using.scala:10)
at xsbt.boot.Using$.apply(Using.scala:9)
at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:58)
at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:48)
at xsbt.boot.Locks$.apply0(Locks.scala:31)
at xsbt.boot.Locks$.apply(Locks.scala:28)
at sbt.IvySbt.withDefaultLogger(Ivy.scala:65)
at sbt.IvySbt.withIvy(Ivy.scala:128)
at sbt.IvySbt.withIvy(Ivy.scala:125)
at sbt.IvySbt$Module.withModule(Ivy.scala:156)
at sbt.IvyActions$.updateEither(IvyActions.scala:168)
at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1488)
at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1484)
at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$121.apply(Defaults.scala:1519)
at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$121.apply(Defaults.scala:1517)
at sbt.Tracked$$anonfun$lastOutput$1.apply(Tracked.scala:37)
at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1522)
at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1516)
at sbt.Tracked$$anonfun$inputChanged$1.apply(Tracked.scala:60)
at sbt.Classpaths$.cachedUpdate(Defaults.scala:1539)
at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1466)
at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1418)
at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:40)
at sbt.std.Transform$$anon$4.work(System.scala:63)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
at sbt.Execute.work(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
[error] (*:update) sbt.ResolveException: unresolved dependency: org.apache.spark#spark-graphx_2.11;2.1.0: not found
[error] unresolved dependency: org.apache.spark#spark-core_2.11;2.1.2: not found
[error] unresolved dependency: org.apache.spark#spark-sql_2.11;2.1.2: not found
[error] unresolved dependency: org.apache.spark#spark-hive_2.11;2.1.2: not found
[error] unresolved dependency: org.apache.spark#spark-hive-thriftserver_2.11;2.1.2: not found
[error] unresolved dependency: org.apache.spark#spark-mllib_2.11;2.1.2: not found

1 Answer


I see this line:

[error] Server access Error: Received fatal alert: protocol_version url=https://repo1.maven.org/maven2/org/apache/spark/spark-hive-thriftserver_2.11/2.1.2/spark-hive-thriftserver_2.11-2.1.2.pom

It looks like you are trying to connect with a Java version that does not support TLSv1.2 (or does not have it enabled).
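
To confirm which TLS versions the JVM running sbt actually enables, here is a minimal sketch (assumptions: you compile and run it with the same Java installation that launches sbt, and the object name TlsCheck is just illustrative):

object TlsCheck extends App {
  // Print the TLS protocol versions enabled by the default SSL context.
  // On Java 7 this typically shows only TLSv1, which Maven Central no longer accepts.
  val enabled = javax.net.ssl.SSLContext.getDefault.getDefaultSSLParameters.getProtocols
  println("Enabled TLS protocols: " + enabled.mkString(", "))
}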

Please read "Why am I getting 'Received fatal alert: protocol_version' or 'peer not authenticated' from Maven Central?" for the full answer and how to fix it.
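
One common workaround (a sketch, not verified in your environment; it assumes the standard sbt launcher script, which forwards SBT_OPTS and -D options to sbt's JVM) is to force TLSv1.2 when starting the build, or simply to run sbt on Java 8+, where TLSv1.2 is enabled by default:

export SBT_OPTS="$SBT_OPTS -Dhttps.protocols=TLSv1.2"
sbt package

The https.protocols property only affects how sbt/Ivy talk to the repository over HTTPS; it does not change your Spark or Scala versions.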

Answered 2018-06-22T19:27:57.650