+ // xsbt clean unidoc previewSite
+ // xsbt clean unidoc ghpagesPushSite
+ // xsbt -Dsbt.global.base=/home/eje/.sbt/sonatype +publish
+ // make sure sparkVersion and pythonVersion are set as you want them prior to +publish
+
+ import scala.sys.process._
+
name := "isarn-sketches-spark"

organization := "org.isarnproject"

- bintrayOrganization := Some("isarn")
+ val packageVersion = "0.3.1"

- val packageVersion = "0.4.0-SNAPSHOT"
-
- val sparkVersion = "2.2.0"
+ val sparkVersion = "2.2.2"

val pythonVersion = "2.7"
@@ -18,11 +23,9 @@ val pythonCMD = s"""python${pythonVersion.split('.').head}"""
version := s"${packageVersion}-${sparkSuffix}-${pythonSuffix}"

- scalaVersion := "2.11.8"
-
- crossScalaVersions := Seq("2.10.6", "2.11.8")
+ scalaVersion := "2.11.12"

- useGpg := true
+ crossScalaVersions := Seq("2.11.12") // scala 2.12 when spark supports it

pomIncludeRepository := { _ => false }
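As a worked illustration of the string plumbing visible in this hunk: the hunk header shows pythonCMD being derived from pythonVersion, and the version setting interpolates the release coordinates. The sparkSuffix/pythonSuffix vals are defined outside the hunks shown here, so their exact form below is an assumption, not part of this diff:

    // Illustrative sketch; sparkSuffix/pythonSuffix definitions are elided from this diff.
    val pythonVersion = "2.7"
    val pythonCMD = s"""python${pythonVersion.split('.').head}"""  // yields "python2"
    // Assuming sparkSuffix == "sp2.2" and pythonSuffix == "py2.7", the version
    // setting interpolates to "0.3.1-sp2.2-py2.7".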
@@ -56,36 +59,34 @@ developers := List(
  )
)

- def commonSettings = Seq(
-   libraryDependencies ++= Seq(
-     "org.isarnproject" %% "isarn-sketches" % "0.1.1",
-     "org.apache.spark" %% "spark-core" % sparkVersion % Provided,
-     "org.apache.spark" %% "spark-sql" % sparkVersion % Provided,
-     "org.apache.spark" %% "spark-mllib" % sparkVersion % Provided,
-     "org.isarnproject" %% "isarn-scalatest" % "0.0.2" % Test,
-     "org.scalatest" %% "scalatest" % "2.2.4" % Test,
-     "org.apache.commons" % "commons-math3" % "3.6.1" % Test),
-   initialCommands in console := """
-     |import org.apache.spark.SparkConf
-     |import org.apache.spark.SparkContext
-     |import org.apache.spark.sql.SparkSession
-     |import org.apache.spark.SparkContext._
-     |import org.apache.spark.rdd.RDD
-     |import org.apache.spark.ml.linalg.Vectors
-     |import org.isarnproject.sketches.TDigest
-     |import org.isarnproject.sketches.udaf._
-     |import org.apache.spark.isarnproject.sketches.udt._
-     |val initialConf = new SparkConf().setAppName("repl").set("spark.serializer", "org.apache.spark.serializer.KryoSerializer").set("spark.kryoserializer.buffer", "16mb")
-     |val spark = SparkSession.builder.config(initialConf).master("local[2]").getOrCreate()
-     |import spark._, spark.implicits._
-     |val sc = spark.sparkContext
-     |import org.apache.log4j.{Logger, ConsoleAppender, Level}
-     |Logger.getRootLogger().getAppender("console").asInstanceOf[ConsoleAppender].setThreshold(Level.WARN)
-     """.stripMargin,
-   cleanupCommands in console := "spark.stop"
- )
-
- seq(commonSettings:_*)
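The removal above reflects an sbt 1.x idiom: sbt 0.13 builds often collected settings in a Seq and spliced them in with seq(...), while an sbt 1.x build.sbt accepts bare settings directly, so the wrapper can simply be dropped, as the added lines below do. Roughly:

    // sbt 0.13 (removed above):
    //   def commonSettings = Seq(libraryDependencies ++= ..., ...)
    //   seq(commonSettings:_*)
    // sbt 1.x (added below): every bare `key := value` expression in
    // build.sbt is itself a setting, so no Seq wrapper or splice is needed.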
+ libraryDependencies ++= Seq(
+   "org.isarnproject" %% "isarn-sketches" % "0.1.2",
+   "org.apache.spark" %% "spark-core" % sparkVersion % Provided,
+   "org.apache.spark" %% "spark-sql" % sparkVersion % Provided,
+   "org.apache.spark" %% "spark-mllib" % sparkVersion % Provided,
+   "org.isarnproject" %% "isarn-scalatest" % "0.0.3" % Test,
+   "org.scalatest" %% "scalatest" % "3.0.5" % Test,
+   "org.apache.commons" % "commons-math3" % "3.6.1" % Test)
+
+ initialCommands in console := """
+   |import org.apache.spark.SparkConf
+   |import org.apache.spark.SparkContext
+   |import org.apache.spark.sql.SparkSession
+   |import org.apache.spark.SparkContext._
+   |import org.apache.spark.rdd.RDD
+   |import org.apache.spark.ml.linalg.Vectors
+   |import org.isarnproject.sketches.TDigest
+   |import org.isarnproject.sketches.udaf._
+   |import org.apache.spark.isarnproject.sketches.udt._
+   |val initialConf = new SparkConf().setAppName("repl").set("spark.serializer", "org.apache.spark.serializer.KryoSerializer").set("spark.kryoserializer.buffer", "16mb")
+   |val spark = SparkSession.builder.config(initialConf).master("local[2]").getOrCreate()
+   |import spark._, spark.implicits._
+   |val sc = spark.sparkContext
+   |import org.apache.log4j.{Logger, ConsoleAppender, Level}
+   |Logger.getRootLogger().getAppender("console").asInstanceOf[ConsoleAppender].setThreshold(Level.WARN)
+   """.stripMargin
+
+ cleanupCommands in console := "spark.stop"
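The console imports above set up a REPL for this project's t-digest UDAF. A minimal sketch of the session they enable follows; tdigestUDAF and TDigestSQL are this project's API as best recalled for this version, so treat the exact names as assumptions:

    // Hypothetical REPL session sketch, run inside the configured console.
    val data = sc.parallelize(Seq.fill(1000)(scala.util.Random.nextGaussian)).toDF("x")
    val udaf = tdigestUDAF[Double]          // aggregator sketching a Double column
    val td = data.agg(udaf($"x")).first.getAs[TDigestSQL](0).tdigest
    td.cdf(0.0)                             // ~0.5 for a standard normal sample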
licenses += ("Apache-2.0", url("http://opensource.org/licenses/Apache-2.0"))
@@ -118,9 +119,9 @@ compilePython := {
  }
}

- compilePython <<= compilePython.dependsOn(deletePYC)
+ compilePython := (compilePython.dependsOn(deletePYC)).value

- (packageBin in Compile) <<= (packageBin in Compile).dependsOn(compilePython)
+ (packageBin in Compile) := ((packageBin in Compile).dependsOn(compilePython)).value
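This hunk ports the sbt 0.13 `<<=` rewiring operator to the sbt 1.x macro style, where a task gains a dependency by being re-assigned to itself with `.dependsOn(...).value`. The general pattern, with taskA/taskB as placeholders:

    // sbt 0.13:  taskA <<= taskA.dependsOn(taskB)
    // sbt 1.x:   taskA := taskA.dependsOn(taskB).value
    // `.dependsOn` returns a new task initialization; `.value` extracts it
    // inside the := macro, so taskB now runs before taskA.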
mappings in (Compile, packageBin) ++= Seq(
  (baseDirectory.value / "python" / "isarnproject" / "__init__.pyc") -> "isarnproject/__init__.pyc",
@@ -142,13 +143,10 @@ assemblyShadeRules in assembly := Seq(
scalacOptions in (Compile, doc) ++= Seq("-doc-root-content", baseDirectory.value + "/root-doc.txt")

- site.settings
-
- site.includeScaladoc()
+ enablePlugins(ScalaUnidocPlugin, GhpagesPlugin)

- // Re-enable if/when we want to support gh-pages w/ jekyll
- // site.jekyllSupport()
+ siteSubdirName in ScalaUnidoc := "latest/api"

- ghpages.settings
+ addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), siteSubdirName in ScalaUnidoc)

git.remoteRepo := "git@github.com:isarn/isarn-sketches-spark.git"
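The enablePlugins, siteSubdirName, and addMappingsToSiteDir calls above come from sbt-unidoc, sbt-site, and sbt-ghpages, which must be declared in project/plugins.sbt. That file is not part of this diff; entries along these lines would be needed, with the versions being illustrative assumptions:

    // project/plugins.sbt (illustrative; exact versions are an assumption)
    addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.1")
    addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.3.2")
    addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.2")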