Writing application logic for Spark jobs that can be unit-tested without a SparkContext
kontextfrei-core-spark-2.1.0 0.5.0
Group ID:     com.danielwestheide
Artifact ID:  kontextfrei-core-spark-2.1.0_2.10
Version:      0.5.0
Release Date: Mar 15, 2017
Licenses:
sbt:
libraryDependencies += "com.danielwestheide" %% "kontextfrei-core-spark-2.1.0" % "0.5.0"
resolvers += Resolver.bintrayRepo("dwestheide", "maven")
ivy"com.danielwestheide::kontextfrei-core-spark-2.1.0:0.5.0" MavenRepository("https://dl.bintray.com/dwestheide/maven")
Scala CLI:
//> using dep "com.danielwestheide::kontextfrei-core-spark-2.1.0:0.5.0"
Ammonite:
// Register the Bintray resolver first (in its own REPL command), then run the import.
import ammonite._, Resolvers._
val res = Resolver.Http(
  "Bintray dwestheide maven",
  "https://dl.bintray.com/dwestheide/maven",
  IvyPattern,
  false)
interp.resolvers() = interp.resolvers() :+ res
import $ivy.`com.danielwestheide::kontextfrei-core-spark-2.1.0:0.5.0`
Maven:
<dependency>
  <groupId>com.danielwestheide</groupId>
  <artifactId>kontextfrei-core-spark-2.1.0_2.10</artifactId>
  <version>0.5.0</version>
</dependency>
Gradle:
compile group: 'com.danielwestheide', name: 'kontextfrei-core-spark-2.1.0_2.10', version: '0.5.0'
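
For readers new to the library, here is a minimal, self-contained sketch of the technique described in the one-line summary above: job logic is written against an abstract collection type DColl[_] plus a typeclass of the operations it needs, so the same code can be exercised in unit tests on plain Scala collections and wired to Spark RDDs in production. The names CollectionOps, listOps, WordStats and Demo are invented for this illustration and are not kontextfrei's actual API; the library ships its own abstraction together with Spark-backed and local instances.

// Hypothetical typeclass over a distributed-collection-like type constructor.
// (Illustrative only -- not the library's real interface.)
trait CollectionOps[DColl[_]] {
  def map[A, B](xs: DColl[A])(f: A => B): DColl[B]
  def flatMap[A, B](xs: DColl[A])(f: A => Iterable[B]): DColl[B]
  def count[A](xs: DColl[A]): Long
}

object CollectionOps {
  // Instance backed by an ordinary List: runs in a plain unit test, no
  // SparkContext needed. A production instance would delegate the same
  // operations to org.apache.spark.rdd.RDD instead.
  implicit val listOps: CollectionOps[List] = new CollectionOps[List] {
    def map[A, B](xs: List[A])(f: A => B): List[B]               = xs.map(f)
    def flatMap[A, B](xs: List[A])(f: A => Iterable[B]): List[B] = xs.flatMap(f)
    def count[A](xs: List[A]): Long                              = xs.size.toLong
  }
}

// Application logic written once, against the abstraction only.
class WordStats[DColl[_]](implicit ops: CollectionOps[DColl]) {
  def words(lines: DColl[String]): DColl[String] =
    ops.flatMap(lines)(_.split("\\s+").toList)

  def wordCount(lines: DColl[String]): Long =
    ops.count(words(lines))
}

// Unit-test-style check on local data, without touching Spark.
object Demo extends App {
  val stats = new WordStats[List]
  assert(stats.wordCount(List("hello spark", "hello tests")) == 4L)
  println("word count verified without a SparkContext")
}

In kontextfrei terms, tests run the logic against the local instance while production code plugs in the Spark-backed one provided by this artifact, which is what allows the same application logic to run with or without a SparkContext.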