Writing application logic for Spark jobs that can be unit-tested without a SparkContext
kontextfrei-core-spark-2.2.0 0.7.0
Group ID: com.danielwestheide
Artifact ID: kontextfrei-core-spark-2.2.0_2.11
Version: 0.7.0
Release Date: Dec 21, 2017
Licenses:
sbt:
libraryDependencies += "com.danielwestheide" %% "kontextfrei-core-spark-2.2.0" % "0.7.0"
resolvers += Resolver.bintrayRepo("dwestheide", "maven")
ivy"com.danielwestheide::kontextfrei-core-spark-2.2.0:0.7.0" MavenRepository("https://dl.bintray.com/dwestheide/maven")
Scala CLI:
//> using dep "com.danielwestheide::kontextfrei-core-spark-2.2.0:0.7.0"
Ammonite:
import $ivy.`com.danielwestheide::kontextfrei-core-spark-2.2.0:0.7.0`
import ammonite._, Resolvers._
val res = Resolver.Http(
  "Bintray dwestheide maven",
  "https://dl.bintray.com/dwestheide/maven",
  IvyPattern,
  false)
interp.resolvers() = interp.resolvers() :+ res
Maven:
<dependency>
  <groupId>com.danielwestheide</groupId>
  <artifactId>kontextfrei-core-spark-2.2.0_2.11</artifactId>
  <version>0.7.0</version>
</dependency>
Gradle:
compile group: 'com.danielwestheide', name: 'kontextfrei-core-spark-2.2.0_2.11', version: '0.7.0'
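To illustrate what "application logic that can be unit-tested without a SparkContext" means in practice, here is a minimal, self-contained sketch of the pattern: job logic is written against an abstract collection type via a type class, so tests can run it on plain Scala collections while production code runs it on RDDs. The names below (CollectionOps, WordStats, and so on) are illustrative only and are not kontextfrei's API; kontextfrei itself provides a similar abstraction (a DCollectionOps type class with instances for RDDs and for local collections) so you do not have to hand-roll one as done here.

// Illustrative sketch only -- a hand-rolled simplification of the pattern,
// not the kontextfrei API.

// Minimal abstraction over a "distributed collection" constructor DColl[_].
trait CollectionOps[DColl[_]] {
  def map[A, B](xs: DColl[A])(f: A => B): DColl[B]
  def flatMap[A, B](xs: DColl[A])(f: A => Iterable[B]): DColl[B]
  def count[A](xs: DColl[A]): Long
}

// Job logic written only against the abstraction -- no SparkContext needed.
class WordStats[DColl[_]](implicit ops: CollectionOps[DColl]) {
  def words(lines: DColl[String]): DColl[String] =
    ops.flatMap(lines)(_.split("\\s+").filter(_.nonEmpty).toList)

  def totalWords(lines: DColl[String]): Long =
    ops.count(words(lines))
}

// Instance for plain Lists, used in fast, SparkContext-free unit tests.
object ListInstances {
  implicit val listOps: CollectionOps[List] = new CollectionOps[List] {
    def map[A, B](xs: List[A])(f: A => B): List[B] = xs.map(f)
    def flatMap[A, B](xs: List[A])(f: A => Iterable[B]): List[B] = xs.flatMap(f)
    def count[A](xs: List[A]): Long = xs.size.toLong
  }
}

// A tiny check that runs without Spark.
object WordStatsSpec extends App {
  import ListInstances._
  val stats = new WordStats[List]
  assert(stats.totalWords(List("hello spark", "hello tests")) == 4L)
  println("logic verified without a SparkContext")
}

In production, the same WordStats class would be instantiated with an RDD-backed instance of the type class; supplying those instances (plus local ones for tests) is exactly the part the library takes off your hands.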