[ https://issues.apache.org/jira/browse/FLINK-2408?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14737082#comment-14737082 ]
ASF GitHub Bot commented on FLINK-2408:
---------------------------------------

Github user chiwanpark commented on the pull request:

    https://github.com/apache/flink/pull/941#issuecomment-138956199

    Does this PR need modification? It seems good to merge.

> Some maven properties are defined in build profiles only
> --------------------------------------------------------
>
>                 Key: FLINK-2408
>                 URL: https://issues.apache.org/jira/browse/FLINK-2408
>             Project: Flink
>          Issue Type: Bug
>          Components: Build System
>    Affects Versions: 0.9, 0.10
>            Reporter: Robert Metzger
>            Assignee: Robert Metzger
>
> SBT does not support resolving Maven properties from parent POMs that are only defined in build profiles.
> At least {{scala.version}} and {{scala.binary.version}} are defined only in profiles.
> This leads to unresolvable dependencies in SBT:
> {code}
> [warn] https://repo1.maven.org/maven2/org/apache/kafka/kafka_${scala.binary.version}/0.8.2.0/kafka_${scala.binary.version}-0.8.2.0.pom
> [info] Resolving org.fusesource.jansi#jansi;1.4 ...
> [warn] ::::::::::::::::::::::::::::::::::::::::::::::
> [warn] ::          UNRESOLVED DEPENDENCIES         ::
> [warn] ::::::::::::::::::::::::::::::::::::::::::::::
> [warn] :: org.apache.kafka#kafka_${scala.binary.version};0.8.2.0: not found
> [warn] ::::::::::::::::::::::::::::::::::::::::::::::
> [warn]
> [warn] Note: Unresolved dependencies path:
> [warn] org.apache.kafka:kafka_${scala.binary.version}:0.8.2.0
> [warn] +- org.apache.flink:flink-connector-kafka:0.9.0 (/home/robert/Downloads/flink-sbt-master/build.sbt#L8-9)
> [warn] +- com:flink-sbt-with-assembly:1.0 (sbtVersion=0.13, scalaVersion=2.10)
> sbt.ResolveException: unresolved dependency: org.apache.kafka#kafka_${scala.binary.version};0.8.2.0: not found
> at sbt.IvyActions$.sbt$IvyActions$$resolve(IvyActions.scala:291)
> at sbt.IvyActions$$anonfun$updateEither$1.apply(IvyActions.scala:188)
> at sbt.IvyActions$$anonfun$updateEither$1.apply(IvyActions.scala:165)
> at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:155)
> at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:155)
> at sbt.IvySbt$$anonfun$withIvy$1.apply(Ivy.scala:132)
> at sbt.IvySbt.sbt$IvySbt$$action$1(Ivy.scala:57)
> at sbt.IvySbt$$anon$4.call(Ivy.scala:65)
> at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:93)
> at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:78)
> at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:97)
> at xsbt.boot.Using$.withResource(Using.scala:10)
> at xsbt.boot.Using$.apply(Using.scala:9)
> at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:58)
> at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:48)
> at xsbt.boot.Locks$.apply0(Locks.scala:31)
> at xsbt.boot.Locks$.apply(Locks.scala:28)
> at sbt.IvySbt.withDefaultLogger(Ivy.scala:65)
> at sbt.IvySbt.withIvy(Ivy.scala:127)
> at sbt.IvySbt.withIvy(Ivy.scala:124)
> at sbt.IvySbt$Module.withModule(Ivy.scala:155)
> at sbt.IvyActions$.updateEither(IvyActions.scala:165)
> at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1369)
> at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1365)
> at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$87.apply(Defaults.scala:1399)
> at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$87.apply(Defaults.scala:1397)
> at sbt.Tracked$$anonfun$lastOutput$1.apply(Tracked.scala:37)
> at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1402)
> at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1396)
> at sbt.Tracked$$anonfun$inputChanged$1.apply(Tracked.scala:60)
> at sbt.Classpaths$.cachedUpdate(Defaults.scala:1419)
> at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1348)
> at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1310)
> at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
> at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:40)
> at sbt.std.Transform$$anon$4.work(System.scala:63)
> at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
> at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
> at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
> at sbt.Execute.work(Execute.scala:235)
> at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
> at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
> at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
> at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
> at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
> at java.util.concurrent.FutureTask.run(FutureTask.java:262)
> at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> at java.lang.Thread.run(Thread.java:745)
> [error] (*:update) sbt.ResolveException: unresolved dependency: org.apache.kafka#kafka_${scala.binary.version};0.8.2.0: not found
> [error] Total time: 25 s, completed Jul 27, 2015 10:33:23 AM
> {code}
> For users facing the issue right now, there is the following workaround:
> {code}
> val flinkVersion = "0.9.0"
> libraryDependencies ++= Seq("org.apache.flink" % "flink-scala" % flinkVersion, "org.apache.flink" % "flink-clients" % flinkVersion)
> libraryDependencies += "org.apache.flink" % "flink-connector-kafka" % flinkVersion exclude("org.apache.kafka", "kafka_${scala.binary.version}")
> libraryDependencies += "com.101tec" % "zkclient" % "0.5"
> libraryDependencies += "org.apache.kafka" %% "kafka" % "0.8.2.0"
> {code}
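As an aside for readers applying that workaround: the sketch below assembles the same dependency lines into a self-contained build.sbt, assuming an sbt 0.13 project on Scala 2.10 (matching the log above). The project name and the scalaVersion setting are illustrative additions, not taken from the issue; only the dependency lines come from the workaround itself.

{code}
// Minimal build.sbt sketch of the workaround quoted above.
// Assumptions: sbt 0.13, Scala 2.10; "flink-kafka-job" is a placeholder name.
name := "flink-kafka-job"

scalaVersion := "2.10.4"

val flinkVersion = "0.9.0"

libraryDependencies ++= Seq(
  "org.apache.flink" % "flink-scala"   % flinkVersion,
  "org.apache.flink" % "flink-clients" % flinkVersion,
  // Exclude the transitive Kafka artifact whose id still contains the literal,
  // unresolved ${scala.binary.version} placeholder (the string is intentionally
  // not interpolated; it must match the broken artifact id verbatim).
  "org.apache.flink" % "flink-connector-kafka" % flinkVersion exclude("org.apache.kafka", "kafka_${scala.binary.version}"),
  // Re-add Kafka explicitly; %% appends the project's Scala binary version,
  // so this resolves kafka_2.10 here. zkclient is Kafka's ZooKeeper client.
  "com.101tec" % "zkclient" % "0.5",
  "org.apache.kafka" %% "kafka" % "0.8.2.0"
)
{code}

Once FLINK-2408 is fixed and the properties are defined outside of build profiles, the exclude/re-add pair should no longer be necessary.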
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)