-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path: build.sbt
More file actions
71 lines (60 loc) · 2.42 KB
/
build.sbt
File metadata and controls
71 lines (60 loc) · 2.42 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
// Settings shared by every project in this build (sub-projects and root).
val commonSettings = Seq(
organization := "com.datalogs",
// NOTE(review): Scala 2.11.8 / JVM 1.8 — presumably pinned to match Spark 2.1.0
// below; confirm Spark compatibility before bumping either.
scalaVersion := "2.11.8",
scalacOptions ++= Seq("-unchecked", "-deprecation", "-encoding", "utf8", "-target:jvm-1.8"),
javacOptions ++= Seq("-source", "1.8", "-target", "1.8"),
// Added for ScalaTest
libraryDependencies += "org.scalatest" %% "scalatest" % "3.0.5" % "test",
// Skip running tests when building the fat jar with sbt-assembly.
test in assembly := {},
// Run test suites one at a time — typically required when suites share a
// process-wide resource (e.g. a local SparkContext); TODO confirm the reason here.
parallelExecution in Test := false
)
// commonSettings plus the dependency set shared by the Spark processor modules.
lazy val sparkprocessorsettings = commonSettings ++ Seq(
libraryDependencies ++= Seq(
// Command-line argument parsing.
"org.rogach" %% "scallop" % "1.0.0",
"joda-time" % "joda-time" % "2.9.3",
"org.joda" % "joda-convert" % "1.8",
"com.fasterxml.jackson.dataformat" % "jackson-dataformat-xml" % "2.4.4",
// servlet-api excluded — presumably to avoid a classpath clash with the
// servlet API Spark already ships; TODO confirm.
"org.apache.avro" % "avro-mapred" % "1.8.0" exclude("org.mortbay.jetty", "servlet-api"),
"org.apache.parquet" % "parquet-avro" % "1.8.0",
// Spark itself is "provided": supplied by the cluster at runtime and
// therefore kept out of the assembly (fat jar).
"org.apache.spark" %% "spark-core" % "2.1.0" % "provided",
"org.apache.spark" %% "spark-hive" % "2.1.0" % "provided",
"com.databricks" %% "spark-xml" % "0.3.3"
)
)
// Core Spark processor module. Its assembly (fat jar) is published under the
// "assembly" classifier; the regular (thin) artifacts are not published.
lazy val projsparkprocessorcore =
  Project(id = "spark-processor-core", base = file("spark-processor-core"))
    .settings(sparkprocessorsettings)
    .settings(
      name := "spark-processor-core",
      // Tag the sbt-assembly artifact with the "assembly" classifier.
      artifact in (Compile, assembly) :=
        (artifact in (Compile, assembly)).value.copy(classifier = Some("assembly")),
      // Register that classified assembly jar for publishing.
      addArtifact(artifact in (Compile, assembly), assembly),
      // Do not publish the default (non-assembly) artifacts.
      publishArtifact := false
    )
// Dataset module: builds on the core module and publishes its assembly jar
// (classified "assembly") alongside the regular artifacts.
lazy val projsparkprocessordataset =
  Project(id = "spark-processor-dataset", base = file("spark-processor-dataset"))
    .dependsOn(projsparkprocessorcore)
    .settings(sparkprocessorsettings)
    .settings(
      name := "spark-processor-dataset",
      // Avro code generation is currently disabled; previous sbt-avro config
      // kept for reference:
      // sbtavro.SbtAvro.projectSettings ++ Seq(
      //   version in AvroConfig := "1.8.2",
      //   javaSource in AvroConfig := sourceDirectory.value / "generated" / "avro",
      //   stringType in AvroConfig := "String"
      // ),
      // Tag the sbt-assembly artifact with the "assembly" classifier.
      artifact in (Compile, assembly) :=
        (artifact in (Compile, assembly)).value.copy(classifier = Some("assembly")),
      // Register that classified assembly jar for publishing.
      addArtifact(artifact in (Compile, assembly), assembly),
      // Regular artifacts are published as well (sbt's default, stated explicitly).
      publishArtifact := true
    )
////////////////////////////// Root project //////////////////////////////////////////
// The root only aggregates the sub-projects; it builds and publishes nothing
// of its own, so its test/publish tasks are no-ops.
lazy val processor = project
  .in(file("."))
  .aggregate(projsparkprocessorcore, projsparkprocessordataset)
  .settings(commonSettings)
  .settings(
    publish := {},
    publishLocal := {},
    test := {}
  )