
Commit 52b8ccf

merging from master

2 parents 61598ea + ab2208f

File tree: 109 files changed, +3875 -657 lines


build.gradle

Lines changed: 82 additions & 16 deletions
@@ -71,7 +71,7 @@ allprojects {
   apply plugin: 'nebula.ospackage-base'

   group = 'io.snappydata'
-  version = '1.0.2-SNAPSHOT'
+  version = '1.0.2'

   // apply compiler options
   tasks.withType(JavaCompile) {
@@ -106,7 +106,7 @@ allprojects {
     scalaBinaryVersion = '2.11'
     scalaVersion = scalaBinaryVersion + '.8'
     sparkVersion = '2.1.1'
-    snappySparkVersion = '2.1.1.1'
+    snappySparkVersion = '2.1.1.3'
     sparkDistName = "spark-${sparkVersion}-bin-hadoop2.7"
     log4jVersion = '1.2.17'
     slf4jVersion = '1.7.25'
@@ -121,9 +121,10 @@ allprojects {
     janinoVersion = '3.0.8'
     derbyVersion = '10.12.1.1'
     pegdownVersion = '1.6.0'
-    snappyStoreVersion = '1.6.2-SNAPSHOT'
+    snappyStoreVersion = '1.6.2'
     snappydataVersion = version
     pulseVersion = '1.5.1'
+    zeppelinInterpreterVersion = '0.7.3.1'
     buildFlags = ''
     createdBy = System.getProperty('user.name')
     osArch = System.getProperty('os.arch')
@@ -132,7 +133,7 @@ allprojects {
     buildDate = new Date().format('yyyy-MM-dd HH:mm:ss Z')
     buildNumber = new Date().format('MMddyy')
     jdkVersion = System.getProperty('java.version')
-    sparkJobServerVersion = '0.6.2.6'
+    sparkJobServerVersion = '0.6.2.7'
     kolobokeVersion = '1.0.0'

     gitCmd = "git --git-dir=${rootDir}/.git --work-tree=${rootDir}"
@@ -737,7 +738,6 @@ task product(type: Zip) {
   if (hasGemFireConnectorProject){
     dependsOn ":gemfire-connector:product"
   }
-
 }

@@ -858,17 +858,45 @@ task product(type: Zip) {
       exclude '.git*'
     }
   }
-  if (isEnterpriseProduct && hasAqpProject) {
-    // copy enterprise shared libraries for optimized JNI calls
-    copy {
-      from aqpProject.projectDir.path + '/lib'
-      into "${snappyProductDir}/jars"
+  if (isEnterpriseProduct) {
+    if (hasAqpProject) {
+      // copy enterprise shared libraries for optimized JNI calls
+      copy {
+        from aqpProject.projectDir.path + '/lib'
+        into "${snappyProductDir}/jars"
+      }
+      copy {
+        from aqpProject.projectDir
+        into snappyProductDir
+        include 'NOTICE'
+        include '*EULA*'
+      }
     }
-    copy {
-      from aqpProject.projectDir
-      into snappyProductDir
-      include 'NOTICE'
-      include '*EULA*'
+
+    def jdbcConnectorProject = project(":snappy-jdbc-connector_${scalaBinaryVersion}")
+    def gemfireConnectorProject = project(":gemfire-connector")
+    def gfeConnectorProject = project(":gemfire-connector:connector_${scalaBinaryVersion}")
+    def gfeFunctionProject = project(":gemfire-connector:gfeFunctions")
+    if (hasJdbcConnectorProject) {
+      copy {
+        from jdbcConnectorProject.jar.destinationDir
+        into "${snappyProductDir}/connectors"
+      }
+    }
+    if (hasGemFireConnectorProject) {
+      copy {
+        from gfeConnectorProject.jar.destinationDir
+        into "${snappyProductDir}/connectors"
+      }
+      copy {
+        from gfeFunctionProject.jar.destinationDir
+        into "${snappyProductDir}/connectors"
+      }
+      copy {
+        from "${gemfireConnectorProject.projectDir}/examples/quickstart/data"
+        into "${snappyProductDir}/connectors"
+        include "persons.jar"
+      }
     }
   }
   copy {
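Note: the hunk above splits the old combined `isEnterpriseProduct && hasAqpProject` guard, so enterprise builds without the AQP project still stage the connector artifacts (the JDBC connector jar, the GemFire connector and functions jars, and the sample persons.jar) under ${snappyProductDir}/connectors; the AQP shared libraries plus NOTICE and EULA copies now happen only inside the inner hasAqpProject check.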
@@ -897,11 +925,16 @@ task product(type: Zip) {
       from("${examplesProject.projectDir}/src")
       into "${snappyProductDir}/quickstart/src"
     }
-
     copy {
       from("${clusterProject.projectDir}/benchmark")
       into "${snappyProductDir}/benchmark"
     }
+    def clientProject = project(':snappy-store:snappydata-store-client')
+    copy {
+      from clientProject.shadowJar.destinationDir
+      into "${snappyProductDir}/connectors"
+      include clientProject.shadowJar.archiveName
+    }
   }
 }


@@ -962,6 +995,7 @@ buildRpm {
   requires('perl')
   requires('curl')
   dependsOn ':packageVSD'
+  dependsOn ':packageZeppelinInterpreter'
   if (rootProject.hasProperty('hadoop-provided')) {
     classifier 'without_hadoop'
   }
@@ -976,6 +1010,7 @@ buildDeb {
   requires('curl')
   recommends('java8-sdk')
   dependsOn ':packageVSD'
+  dependsOn ':packageZeppelinInterpreter'
   if (rootProject.hasProperty('hadoop-provided')) {
     classifier 'without-hadoop'
   }
@@ -987,6 +1022,7 @@ distTar {
   dependsOn product
   // also package VSD
   dependsOn ':packageVSD'
+  dependsOn ':packageZeppelinInterpreter'
   classifier 'bin'
   if (rootProject.hasProperty('hadoop-provided')) {
     classifier 'without-hadoop-bin'
@@ -997,6 +1033,7 @@ distZip {
   dependsOn product
   // also package VSD
   dependsOn ':packageVSD'
+  dependsOn ':packageZeppelinInterpreter'
   classifier 'bin'
   if (rootProject.hasProperty('hadoop-provided')) {
     classifier 'without-hadoop-bin'
@@ -1138,6 +1175,34 @@ int getLast(includeTestFiles, pattern) {
   }
 }

+task packageZeppelinInterpreter { doLast {
+  String zeppelinInterpreterJarName = "snappydata-zeppelin-${zeppelinInterpreterVersion}.jar"
+  String zeppelinInterpreterDir = System.env.ZEPPELIN_INTERPRETER_DIR
+
+  if (zeppelinInterpreterDir == null || zeppelinInterpreterDir.length() == 0) {
+    zeppelinInterpreterDir = "${projectDir}/../zeppelin-interpreter"
+  }
+
+  String zeppelinInterpreterLibDir = "${zeppelinInterpreterDir}/build-artifacts/libs"
+  if (file(zeppelinInterpreterDir).canWrite()) {
+    exec {
+      executable "${zeppelinInterpreterDir}/gradlew"
+      workingDir = zeppelinInterpreterDir
+      args 'clean', 'product', 'distTar'
+    }
+    println ''
+    println "Copying Zeppelin Interpreter jar from ${zeppelinInterpreterLibDir} to ${snappyProductDir}/jars"
+    println ''
+    copy {
+      from "${zeppelinInterpreterLibDir}"
+      into "${snappyProductDir}/jars"
+      include "${zeppelinInterpreterJarName}"
+    }
+  } else {
+    println "Skipping including Zeppelin Interpreter jar due to unwritable ${zeppelinInterpreterDir}"
+  }
+} }
+
 task packagePulse { doLast {
   String pulseWarName = "pulse-${pulseVersion}.war"
   String pulseDir = System.env.PULSEDIR
@@ -1199,6 +1264,7 @@ task sparkPackage {

 packagePulse.mustRunAfter product
 packageVSD.mustRunAfter product
+packageZeppelinInterpreter.mustRunAfter product

 distTar.mustRunAfter clean, cleanAll, product
 distZip.mustRunAfter clean, cleanAll, product
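Note on the new packaging step: packageZeppelinInterpreter resolves the interpreter checkout from the ZEPPELIN_INTERPRETER_DIR environment variable, falling back to a zeppelin-interpreter directory alongside the project root, runs that project's own gradlew with clean product distTar, and copies snappydata-zeppelin-${zeppelinInterpreterVersion}.jar into ${snappyProductDir}/jars. If the directory is absent or unwritable, the step logs a message and is skipped rather than failing the distribution build, which buildRpm, buildDeb, distTar, and distZip now all depend on.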

cluster/sbin/collect-debug-artifacts.sh

Lines changed: 5 additions & 5 deletions
@@ -54,27 +54,27 @@ while [ "$1" != "" ]; do
       CONF_FILE="$2"
       shift ;;
     --conf=*|--config=*)
-      CONF_FILE="`echo "$2" | sed 's/^[^=]*=//'`" ;;
+      CONF_FILE="`echo "$1" | sed 's/^[^=]*=//'`" ;;
     -x)
       TAR_FILE="$2"
       shift ;;
     --extract=*|--xtract=*)
-      TAR_FILE="`echo "$2" | sed 's/^[^=]*=//'`" ;;
+      TAR_FILE="`echo "$1" | sed 's/^[^=]*=//'`" ;;
     -o)
       OUTPUT_DIR="$2"
       shift ;;
     --out=*|--outdir=*)
-      OUTPUT_DIR="`echo "$2" | sed 's/^[^=]*=//'`" ;;
+      OUTPUT_DIR="`echo "$1" | sed 's/^[^=]*=//'`" ;;
     -s)
       START_TIME="$2"
       shift ;;
     --start=*)
-      START_TIME="`echo "$2" | sed 's/^[^=]*=//'`" ;;
+      START_TIME="`echo "$1" | sed 's/^[^=]*=//'`" ;;
     -e)
       END_TIME="$2"
       shift ;;
     --end=*)
-      END_TIME="`echo "$2" | sed 's/^[^=]*=//'`" ;;
+      END_TIME="`echo "$1" | sed 's/^[^=]*=//'`" ;;
     -h|--help)
       usage
       exit 0
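Why `$1` rather than `$2` in these five replacements: in the `--opt=value` branches the value is embedded in the current token, so the prefix-stripping sed must be fed `$1`, the whole `--conf=file` argument. The two-token forms such as `-c file` correctly read `$2` and consume it with an extra `shift`; the `=` forms take no extra argument, so reading `$2` would pick up the next option instead of the value.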

cluster/src/dunit/scala/io/snappydata/cluster/QueryRoutingDUnitTest.scala

Lines changed: 30 additions & 0 deletions
@@ -1023,4 +1023,34 @@ class QueryRoutingDUnitTest(val s: String)
       conn.close()
     }
   }
+
+  def testAlterTableRowTable(): Unit = {
+    val serverHostPort = AvailablePortHelper.getRandomAvailableTCPPort
+    vm2.invoke(classOf[ClusterManagerTestBase], "startNetServer", serverHostPort)
+    val conn = DriverManager.getConnection("jdbc:snappydata://localhost:" + serverHostPort)
+    println(s"Connected to $serverHostPort")
+
+    val stmt = conn.createStatement();
+    try {
+      val createParentTable: String =
+        "create table parentT (cid int not null, sid int not null, qty int not null, " +
+            " constraint parent_pk primary key (cid, sid)) " +
+            "USING ROW OPTIONS ( PERSISTENT 'SYNCHRONOUS');"
+      val createChildTable: String =
+        "create table childT (oid int not null constraint child_pk primary key, cid int, " +
+            "sid int, qty int, constraint parent_fk foreign key (cid, sid)" +
+            "references parentT (cid, sid) on delete restrict) " +
+            "USING ROW OPTIONS ( PERSISTENT 'SYNCHRONOUS');"
+      val alterTableStmt: String = "alter table childT drop FOREIGN KEY parent_fk"
+      stmt.execute(createParentTable)
+      stmt.execute(createChildTable)
+      stmt.execute(alterTableStmt)
+    } finally {
+      stmt.execute("drop table childT")
+      stmt.execute("drop table parentT")
+      stmt.close()
+      conn.close()
+    }
+  }
+
 }
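The new test drops a composite foreign key over the JDBC route. For readers outside the dunit harness, a minimal standalone sketch of the same scenario follows (an illustration, not part of the commit; it assumes a SnappyData network server already listening on localhost:1527):

import java.sql.DriverManager

object DropForeignKeyExample {
  def main(args: Array[String]): Unit = {
    // assumed endpoint of a running cluster; adjust host and port as needed
    val conn = DriverManager.getConnection("jdbc:snappydata://localhost:1527/")
    val stmt = conn.createStatement()
    try {
      stmt.execute("create table parentT (cid int not null, sid int not null, " +
          "qty int not null, constraint parent_pk primary key (cid, sid)) " +
          "USING ROW OPTIONS (PERSISTENT 'SYNCHRONOUS')")
      stmt.execute("create table childT (oid int not null constraint child_pk " +
          "primary key, cid int, sid int, qty int, constraint parent_fk " +
          "foreign key (cid, sid) references parentT (cid, sid) on delete restrict) " +
          "USING ROW OPTIONS (PERSISTENT 'SYNCHRONOUS')")
      // the statement whose routing the new test verifies
      stmt.execute("alter table childT drop FOREIGN KEY parent_fk")
    } finally {
      stmt.execute("drop table childT")
      stmt.execute("drop table parentT")
      stmt.close()
      conn.close()
    }
  }
}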

cluster/src/dunit/scala/io/snappydata/externalstore/CatalogConsistencyDUnitTest.scala

Lines changed: 51 additions & 13 deletions
@@ -77,16 +77,6 @@ class CatalogConsistencyDUnitTest(s: String) extends ClusterManagerTestBase(s) {
   }

   val routeQueryDisabledConn = getClientConnection(netPort1, false)
-  // should throw an exception since the catalog is repaired and table entry
-  // should have been removed
-  try {
-    // table should not exist in the store DD
-    routeQueryDisabledConn.createStatement().executeQuery("select * from column_table1")
-  } catch {
-    case se: SQLException if (se.getSQLState.equals("42X05")) =>
-    case unknown: Throwable => throw unknown
-  }
-
   try {
     // make sure that the column buffer does not exist
     routeQueryDisabledConn.createStatement().executeQuery(
@@ -132,7 +122,7 @@ class CatalogConsistencyDUnitTest(s: String) extends ClusterManagerTestBase(s) {

   val connection = getClientConnection(netPort1)
   // repair the catalog
-  connection.createStatement().execute("CALL SYS.REPAIR_CATALOG()")
+  connection.createStatement().execute("CALL SYS.REPAIR_CATALOG('true', 'true')")
   // column_table1 should not be found in either catalog after repair
   assertTableDoesNotExist(netPort1, snc)
   // other tables should exist
@@ -165,14 +155,15 @@ class CatalogConsistencyDUnitTest(s: String) extends ClusterManagerTestBase(s) {

   val connection = getClientConnection(netPort1)
   // repair the catalog
-  connection.createStatement().execute("CALL SYS.REPAIR_CATALOG()")
+  connection.createStatement().execute("CALL SYS.REPAIR_CATALOG('true', 'true')")
   // column_table1 should not be found in either catalog after repair
   assertTableDoesNotExist(netPort1, snc)
   // other tables should exist
   verifyTables(snc)
 }

-  def testCatalogRepairedWhenLeadRestarted(): Unit = {
+  // Hive entry missing but DD entry exists
+  def testCatalogRepairedWhenLeadStopped1(): Unit = {
     val netPort1 = AvailablePortHelper.getRandomAvailableTCPPort
     vm2.invoke(classOf[ClusterManagerTestBase], "startNetServer", netPort1)

@@ -188,6 +179,53 @@ class CatalogConsistencyDUnitTest(s: String) extends ClusterManagerTestBase(s) {
     if(sparkContext != null) sparkContext.stop()
     ClusterManagerTestBase.stopAny()

+    val connection = getClientConnection(netPort1)
+    // repair the catalog
+    // does not actually repair, just adds warning to log file
+    connection.createStatement().execute("CALL SYS.REPAIR_CATALOG('false', 'false')")
+    // actually repair the catalog
+    connection.createStatement().execute("CALL SYS.REPAIR_CATALOG('true', 'true')")
+
+    ClusterManagerTestBase.startSnappyLead(ClusterManagerTestBase.locatorPort, bootProps)
+    snc = SnappyContext(sc)
+    // column_table1 should not be found in either catalog after repair
+    assertTableDoesNotExist(netPort1, snc)
+
+    // other tables should exist
+    verifyTables(snc)
+  }
+
+  // Hive entry exists but DD entry missing
+  def testCatalogRepairedWhenLeadStopped2(): Unit = {
+    val netPort1 = AvailablePortHelper.getRandomAvailableTCPPort
+    vm2.invoke(classOf[ClusterManagerTestBase], "startNetServer", netPort1)
+
+    var snc = SnappyContext(sc)
+
+    createTables(snc)
+
+    // drop column_table1 from store DD
+    val routeQueryDisabledConn = getClientConnection(netPort1, false)
+    routeQueryDisabledConn.createStatement().execute("drop table " +
+        ColumnFormatRelation.columnBatchTableName("app.column_table1"))
+    routeQueryDisabledConn.createStatement().execute("drop table column_table1")
+
+    // make sure that the table exists in Hive metastore
+    assert(JdbcExtendedUtils.tableExistsInMetaData("APP.COLUMN_TABLE1",
+        routeQueryDisabledConn, GemFireXDClientDialect))
+
+    // stop spark
+    val sparkContext = SnappyContext.globalSparkContext
+    if(sparkContext != null) sparkContext.stop()
+    ClusterManagerTestBase.stopAny()
+
+    val connection = getClientConnection(netPort1)
+    // repair the catalog
+    // does not actually repair, just adds warning to log file
+    connection.createStatement().execute("CALL SYS.REPAIR_CATALOG('false', 'false')")
+    // actually repair the catalog
+    connection.createStatement().execute("CALL SYS.REPAIR_CATALOG('true', 'true')")
+
     ClusterManagerTestBase.startSnappyLead(ClusterManagerTestBase.locatorPort, bootProps)
     snc = SnappyContext(sc)
     // column_table1 should not be found in either catalog after repair
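Taken together, these tests exercise the new two-argument form of SYS.REPAIR_CATALOG: per the in-diff comments, ('false', 'false') only logs warnings about inconsistent entries, while ('true', 'true') actually removes them. A minimal sketch of calling it from a plain JDBC client follows (host and port are assumptions for a locally running cluster; the meaning of each flag individually is not shown by this diff):

import java.sql.DriverManager

object RepairCatalogExample {
  def main(args: Array[String]): Unit = {
    // assumed endpoint; substitute the host:port of a running network server
    val conn = DriverManager.getConnection("jdbc:snappydata://localhost:1527/")
    try {
      val stmt = conn.createStatement()
      // dry run: only report catalog inconsistencies in the server log
      stmt.execute("CALL SYS.REPAIR_CATALOG('false', 'false')")
      // full repair: remove entries inconsistent between the catalogs
      stmt.execute("CALL SYS.REPAIR_CATALOG('true', 'true')")
      stmt.close()
    } finally {
      conn.close()
    }
  }
}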
