// Buildscript setup: resolve the Shadow (fat-jar) plugin from jcenter.
5 repositories { jcenter() }
7 classpath 'com.github.jengelman.gradle.plugins:shadow:1.2.4'
// NOTE(review): jcenter has been sunset; consider mavenCentral() — confirm shadow 1.2.4 is mirrored there.
11 apply plugin: 'com.github.johnrengelman.shadow'
/**
 * Resolves a build property (from gradle.properties or -P command-line flags)
 * and fails the build with a clear message when it is missing.
 *
 * @param propertyName name of the property to resolve
 * @return the property's value
 */
def String getProjectProperty(String propertyName) {
    if (hasProperty(propertyName)) {
        return this.properties[propertyName]
    }
    // The original threw `new GradleScriptException(message)`, but GradleScriptException
    // has no single-argument constructor (it requires a Throwable cause), so the error
    // path itself failed with "could not find matching constructor". GradleException(String)
    // is the supported way to abort the build with a message.
    throw new GradleException("PropertyName " + propertyName + " is not defined in properties file")
}
// ---- Build configuration values; most come from gradle.properties via getProjectProperty ----
23 def projectName = project.name
25 def sourceDir = getProjectProperty('src.dir')
26 def distDir = getProjectProperty('dist.dir')
27 def classesDir = getProjectProperty('classes.dir')
28 def javaDir = getProjectProperty('java.dir')
29 def resourcesDir = getProjectProperty('resources.dir')
30 def javaDocDir = getProjectProperty('javadoc.dir')
32 def voldTestClassesDir = getProjectProperty('testclasses.dir')
// Separate source roots for common/unit/integration/long-running tests.
34 def commonTestSrcDir = getProjectProperty('commontestsrc.dir')
35 def unitTestSrcDir = getProjectProperty('unittestsrc.dir')
36 def intTestSrcDir = getProjectProperty('inttestsrc.dir')
37 def longTestSrcDir = getProjectProperty('longtestsrc.dir')
39 def voldVersion = getProjectProperty('curr.release')
40 def javacVersion = getProjectProperty('javac.version')
42 //This is the javaCompile variable version. Directly defining 'def version' will override this and cause nightmare
// Archive layout: <dist.dir>/<project>-<version>
45 def archiveDirectoryName = projectName + '-' + version
46 def archiveDirectoryPath = distDir + "/" + archiveDirectoryName
// Javadoc generation can be toggled off entirely via the javadoc.enabled property.
48 def javadocEnabled = getProjectProperty('javadoc.enabled').toBoolean()
// Recreates `directory` as an empty directory: delete everything, then mkdir again.
def deleteDirectoryContents(directory) {
    def dir = project.file(directory)
    dir.deleteDir()
    dir.mkdirs()
}
// Excludes shared by every com.linkedin.gobblin dependency below: these transitive
// artifacts either conflict with the versions Voldemort ships or are unused here.
def gobblinExcludes = {
    exclude group: 'org.apache.hive'
    exclude group: 'com.google.protobuf'
    exclude group: 'org.apache.avro'
    exclude group: 'com.linkedin.gobblin', module: 'gobblin-hive-registration'
}
// Compile for the JDK version declared in gradle.properties, with debug symbols on.
63 sourceCompatibility = javacVersion
64 targetCompatibility = javacVersion
65 compileJava.options.debug = true
70 // For Hadoop dependencies
71 url "https://repository.cloudera.com/artifactory/cloudera-repos/"
74 // For BDB-Je dependencies
// NOTE(review): plain-HTTP repository URL — modern Gradle rejects insecure repos; confirm an HTTPS mirror.
75 url "http://download.oracle.com/maven/"
// JDK8's doclint fails the build on legacy javadoc comments, so disable it.
79 // http://blog.joda.org/2014/02/turning-off-doclint-in-jdk-8-javadoc.html
80 if (JavaVersion.current().java8Compatible) {
81 tasks.withType(Javadoc) {
82 options.addStringOption('Xdoclint:none', '-quiet')
// Honor the javadoc.enabled toggle for all Javadoc tasks.
86 tasks.withType(Javadoc) {
87 enabled = javadocEnabled
// Non-standard source layout: java sources and output dirs come from gradle.properties.
93 java { srcDirs = [javaDir] }
98 output.classesDir = classesDir
99 output.resourcesDir = resourcesDir
// Test classes go to their own directory (presumably the test sourceSet — fragment, confirm).
110 output.classesDir = voldTestClassesDir
// Copy non-source files living alongside the java sources into the build output.
116 from (javaDir) { exclude '**/*.java','**/*.html','**/log4j.properties' }
121 // Theoretically this block can be replaced by including the log4j.properties in main resources.
122 // But that causes the log4j.properties to be present in the voldJar . Not sure what is the
123 // implication of this change, so avoiding it for now.
124 from (javaDir) { include 'log4j.properties' }
// Javadoc output location (javadoc.dir property).
130 destinationDir = file(javaDocDir)
// After compiling tests, copy non-source support files from the test source trees
// into the test classes dir so they are on the test runtime classpath.
133 compileTestJava.doLast {
135 from (commonTestSrcDir) { exclude '**/*.java','**/*.html' }
136 from (unitTestSrcDir) { exclude '**/*.java','**/*.html' }
137 into voldTestClassesDir
// Jar containing the compiled test classes (published for downstream test reuse).
141 task testJar(type: Jar) {
142 baseName = projectName + "-test"
143 from sourceSets.test.output
144 destinationDir = project.file(distDir)
// Manifest attributes — from a fragment of another Jar task (likely the main `jar`); confirm.
149 attributes 'Voldemort-Implementation-Version' : version,
150 'Implementation-Title': 'Voldemort',
151 'Implementation-Version': version,
152 'Implementation-Vendor' :'LinkedIn'
154 destinationDir = project.file(distDir)
// Jar bundling the compiled output of every contrib subproject.
task contribJar(type: Jar) {
    baseName = projectName + "-contrib"
    // closure defers resolution so subproject outputs are evaluated lazily
    from { subprojects*.sourceSets.main.output }
    destinationDir = project.file(distDir)
}
// '-sources' jar built from the main java source directories.
task srcJar(type: Jar, dependsOn: classes) {
    classifier = 'sources'
    from sourceSets.main.java.srcDirs
    destinationDir = project.file(distDir)
}
// '-javadoc' jar; honors the javadoc.enabled toggle.
170 task javadocJar(type: Jar) {
171 enabled = javadocEnabled
172 classifier = 'javadoc'
174 destinationDir = file(distDir)
// Alias task: the BnP (Build-and-Push) fat jar is produced by shadowJar.
177 task bnpJar(dependsOn: shadowJar) {
178 // Just a nicer more self-explanatory name than "shadowJar"
// Cleanup hook (fragment — likely part of a clean task): wipe generated javadoc.
192 doLast { deleteDirectoryContents(javaDocDir) }
195 // Dependencies used by both BnP and Voldemort
196 // TODO: Decide if we want to do that for all dependencies, even if they're used just in Voldemort...
// Single source of truth for coordinates referenced both in `dependencies {}`
// and in the shadow-jar include/relocate rules below.
198 def depAvro = 'org.apache.avro:avro:1.4.0'
199 def depProtoBuf = 'com.google.protobuf:protobuf-java:2.3.0'
200 def depJdom = 'org.jdom:jdom:1.1'
201 def depAzkaban = 'com.linkedin.azkaban:azkaban:2.5.0'
202 def depGuava = 'com.google.guava:guava:14.0.1'
203 def depLog4j = 'log4j:log4j:1.2.15'
204 def depJacksonMapper = 'org.codehaus.jackson:jackson-mapper-asl:1.9.13'
205 def depJoda = 'joda-time:joda-time:1.6'
206 def depTehuti = 'io.tehuti:tehuti:0.7.0'
// shadowJar (BnP fat jar): bundles main+test output of the root project and all
// subprojects, plus the gobblin shaded jar, with selective dependency inclusion.
210 zip64 true // Required if fatjar has more than 64K files
212 from sourceSets.main.output, sourceSets.test.output, sourceSets.main.resources
213 from {subprojects*.sourceSets.main.output}
214 from {subprojects*.sourceSets.test.output}
215 from project('gobblin').projectDir
216 dependsOn 'gobblin:shadowJar'
218 // Hadoop dependencies are expected to be provided by Azkaban.
219 // If Azkaban is not included in your deployment, you may need to remove the following excludes
220 exclude("**/org/apache/hadoop/**")
221 exclude("**/org.apache.hadoop**")
// Only these whitelisted dependencies are merged into the fat jar.
223 // Required when working in an Hadoop 2.x environment
225 include(dependency(depAvro))
226 include(dependency(depProtoBuf))
227 include(dependency(depJdom))
228 include(dependency(depAzkaban))
229 include(dependency(depGuava))
230 include(dependency(depLog4j))
231 include(dependency(depJacksonMapper))
232 include(dependency(depJoda))
233 include(dependency(depTehuti))
// Shade protobuf and avro so they cannot clash with the versions on the host classpath.
235 relocate 'com.google.protobuf', 'voldemort.shadow.2.3.0.com.google.protobuf'
236 relocate 'org.apache.avro', 'voldemort.shadow.1.4.0.org.apache.avro'
237 // TODO: find a way to exclude private lib's BDB-JE which gets pulled into the fat jar...
240 import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
// Secondary shaded jar: main classes plus ONLY protobuf (relocated), so consumers
// get Voldemort with its shaded protobuf but no other bundled dependencies.
241 task protobufJar(type: ShadowJar) {
242 baseName = projectName + "-protobuf"
245 attributes 'Voldemort-Implementation-Version' : version,
246 'Implementation-Title': 'Voldemort',
247 'Implementation-Version': version,
248 'Implementation-Vendor' :'LinkedIn'
// Include protobuf, exclude every other runtime dependency.
252 include(dependency(depProtoBuf))
253 configurations.runtime.each { exclude(dependency(it)) }
256 configurations = [ project.configurations.runtime ]
258 from sourceSets.main.output
259 destinationDir = project.file(distDir)
261 relocate 'com.google.protobuf', 'voldemort.shadow.2.3.0.com.google.protobuf'
262 // Since BDB is included via compile files option, shadow does not support
263 // excluding them. https://github.com/johnrengelman/shadow/issues/142
264 // Once shadow supports excluding them, it can be excluded. There are
265 // Complex ways to exclude it, but not worth it.
// Shade BDB-JE references instead, since the jar itself cannot be excluded (see above).
266 relocate 'com.sleepycat' , 'voldemort.shadow.5.0.88.com.sleepycat'
// Stage scripts, built jars and remaining sources into the release directory.
// NOTE: the order of the from() clauses matters for Copy duplicate handling — do not reorder.
269 task copySources (type: Copy) {
270 from ('.') { include 'bin/*.sh', 'bin/*.bat' , 'bin/*.py' }
271 from ('.') { include distDir + '/*.jar'}
272 from ('.') { exclude distDir + '/**' ,'bin/**' , 'build/**', '.git/**' , '.gradle/**', 'config/**/data/**' }
273 into archiveDirectoryPath
// Copy the root project's compile dependencies into the release lib directory
// (destination line is missing from this fragment — presumably `into .../lib`).
276 task copyDeps(type: Copy) {
277 // note this only copies the dependencies of the root
278 // project into /lib if we start adding compile deps to
279 // subprojects will have to rethink
280 from { configurations.compile }
// Release zip: bin/ entries are packaged separately (presumably to preserve the
// executable file mode — fragment, confirm), then everything else.
284 task zip (type: Zip) {
285 dependsOn copySources, copyDeps, contribJar, protobufJar
286 baseName = projectName
289 include archiveDirectoryName + '/bin/**'
293 include archiveDirectoryName + '/**'
294 exclude archiveDirectoryName + '/bin/**'
297 destinationDir = project.file(distDir)
// Release tar.gz: mirrors the zip task's layout.
300 task tar (type: Tar) {
301 dependsOn copySources, copyDeps, contribJar, protobufJar
302 compression = Compression.GZIP
303 baseName = projectName
307 include archiveDirectoryName + '/bin/**'
311 include archiveDirectoryName + '/**'
312 exclude archiveDirectoryName + '/bin/**'
315 destinationDir = project.file(distDir)
// War task fragment: packages main output with the project's web.xml.
320 from sourceSets.main.output
321 webXml = project.file('web.xml')
322 destinationDir = project.file(distDir)
// Task graph wiring: deps are staged on assemble; jars must exist before copySources runs.
325 assemble.dependsOn copyDeps
327 copySources.dependsOn jar
// Common configuration for every Test task: keep going on failures so merged
// reports can still be generated, log verbosely, and never skip as UP-TO-DATE.
330 tasks.withType(Test) {
332 // If ignoreFailures is not set, then merged reports will not be generated
333 // Gradle aborts further tasks on test failure. so if you run junitAll
334 // which runs 3 tests, reports task will never be run on failure cases.
335 ignoreFailures = true
340 events "started", "passed", "skipped", "failed"
341 exceptionFormat = 'full'
342 // showStandardStreams = true
// Announce how many test classes each Test task is about to execute.
345 def classesSize = candidateClassFiles.files.size()
346 logger.lifecycle("{} starts executing {} test classes {}",
347 path, classesSize, classesSize > 0? "(" + candidateClassFiles.files*.name[0] + ", ...)" : "")
350 //Set reasonable defaults for reports location
351 reports.html.destination = file("$buildDir/reports/$name")
352 reports.junitXml.destination = file("$buildDir/$name-results")
354 // Makes sure tests aren't marked "UP-TO-DATE" after running
355 outputs.upToDateWhen { false }
// Additional Test-task configuration (forking/parallelism); most options are
// intentionally left commented out, with the reasons preserved below.
359 tasks.withType(Test) {
360 // note only the root project's tests fork for very test class
363 // Do not set the max parallelism as there are tests that uses the same port and will
364 // run into bind exceptions.
366 //ignoreFailures = gradle.startParameter.continueOnFailure
368 //all standard error messages from tests will get routed to 'DEBUG' level messages.
369 //logging.captureStandardError(LogLevel.DEBUG)
370 //all standard output messages from tests will get routed to 'DEBUG' level messages.
371 //logging.captureStandardOutput(LogLevel.DEBUG)
373 //Set reasonable defaults classpath and classes dir. They can be reconfigured in an individual task.
374 // it.testClassesDir = sourceSets.test.output.classesDir
375 // classpath = sourceSets.test.runtimeClasspath
// Recursive cleanup of test residue (.temp/.version/data dirs) under config/.
380 def DirsToDelete = [".temp", ".version", "data"]
// Declared before assignment so the closure can reference itself by name for recursion.
381 def deleteRecursively
383 deleteRecursively = { file ->
384 file.eachFile() {f ->
386 if( DirsToDelete.contains(f.getName()) )
388 println "deleting ${f.getAbsolutePath()}"
399 deleteRecursively (new File("config"))
// Alias: `gradle junit` runs the standard test task.
403 task junit(dependsOn: test)
/**
 * Translates the *Test.java sources found under `dir` into the relative
 * *.class names that a Test task's include() pattern expects.
 *
 * @param dir     source directory to scan
 * @param include ant-style pattern for test sources (default '**&#47;*Test.*')
 * @return relative .class paths, forward-slash separated
 */
Collection<String> testClassesFrom(String dir, String include = '**/*Test.*') {
    // Hoisted out of the loop: the directory prefix is the same for every file.
    // Forward-slash normalization keeps the comparison working on Windows.
    String base = file(dir).absolutePath.replace('\\', '/') + '/'
    fileTree(dir: dir, includes: [include]).collect { f ->
        String path = f.absolutePath.replace('\\', '/')
        // startsWith-based stripping avoids feeding the directory path to replaceAll
        // as a regex (the original broke on paths containing regex metacharacters).
        String relative = path.startsWith(base) ? path.substring(base.length()) : path
        // '\.java$' — the original '.java$' left the dot unescaped, matching any character.
        relative.replaceAll(/\.java$/, '.class')
    }
}
// Per-category Test tasks; each selects classes via testClassesFrom on its source root.
411 description = "Runs acceptance tests"
412 include testClassesFrom(unitTestSrcDir)
415 task junitLong(type: Test) {
416 description = "Runs long junit tests"
417 include testClassesFrom(longTestSrcDir)
420 task junitInt(type: Test) {
421 description = "Runs integration tests"
422 include testClassesFrom(intTestSrcDir)
// Rebalance-only subsets of the unit and long suites.
425 task junitRebalance(type: Test) {
426 include testClassesFrom(unitTestSrcDir, '**/*Rebalance*Test.java')
429 task junitRebalanceLong(type: Test) {
430 include testClassesFrom(longTestSrcDir, '**/*Rebalance*Test.java')
// Merged HTML report over all contrib subproject test tasks.
433 task contribJunit(type:TestReport) {
434 // this populated below by depending on all the test tasks found
435 // in the subprojects.
436 destinationDir = file("$buildDir/reports/$name")
// Register every Test task (presumably within subprojects — fragment) on both reports.
440 tasks.withType(Test) {
441 // hook up the report to the contrib task and junitAll
442 rootProject.contribJunit.reportOn it
443 rootProject.junitAll.reportOn it
// Combined report over the root test + junitLong runs.
447 task junitAll(type: TestReport) {
448 reportOn test, junitLong
449 destinationDir = file("$project.buildDir/reports/$name")
// Always-generated aggregate: every Test task is finalized by this report.
453 task aggregatedJunit(type: TestReport) {
454 destinationDir = file("$project.buildDir/reports/$name")
458 tasks.withType(Test) {
459 finalizedBy rootProject.aggregatedJunit
460 doLast { rootProject.aggregatedJunit.reportOn it }
// Pin the Gradle wrapper version.
464 task wrapper(type: Wrapper) { gradleVersion = '2.9' }
// ---- Compile-scope dependencies (fragment of the dependencies {} block) ----
467 // Avro serialization format
470 // INTERNAL_LIBS azkaban version not found
471 // azkaban-common-0.05.jar
473 // INTERNAL_LIBS Used for tomcat deployment, not sure if anyone uses it
474 // catalina-ant.jar , version not found in maven central
476 // coders decoders containing the Base64,binary encoding
477 compile 'commons-codec:commons-codec:1.4'
479 // TRANSITIVE_DEPENDENCY Contrib jar depends on commons-configuration-1.6.jar
480 // commons-configuration instead depends on commons-collection
481 //compile 'commons-collections:commons-collections:3.2.1'
483 // Used by MySql storage engine classes
484 // The jar supports database connection pooling
485 compile 'commons-dbcp:commons-dbcp:1.2.2'
487 // commons io is used at many places
488 // IOUtils, FileUtils and ByteArrayOutputStream
489 compile 'commons-io:commons-io:2.1'
491 // LZF compression strategy for store and tests.
492 compile 'com.ning:compress-lzf:0.9.1'
494 // Used all over the place for collections
497 // used for readonly store hdfs fetcher.
498 compile 'org.apache.hadoop:hadoop-auth:2.3.0-cdh5.1.5'
500 // used at lots of places. Seems like there is some overlap between httpclient and core, but not clear
501 compile 'org.apache.httpcomponents:httpclient:4.1.2'
503 // contains both http server and client functionalities. Used for HttpResponse but could be used at more places.
504 compile 'org.apache.httpcomponents:httpcore:4.1.2'
506 // JSON mapping library from Java Objects to JSON
507 compile depJacksonMapper
509 // JSON processing library
510 compile 'org.codehaus.jackson:jackson-core-asl:1.9.13'
512 // Used for reading XML files and Document.
515 // Jetty is used for HttpService and tests. Jetty Util is used for QueuedThreadPool class.
516 compile 'org.mortbay.jetty:jetty-util:6.1.18'
517 compile 'org.mortbay.jetty:jetty:6.1.18'
519 // A line processing library for command line. No compile time dependency
520 // Used by Voldemort shell
521 compile 'jline:jline:0.9.94'
523 // jna is library for invoking native functions
524 // used in the readonly store
525 compile 'net.java.dev.jna:jna:3.2.7'
527 // joda time is replacement for Java Date and Time
528 // used in readonly store code.
531 // Used for argument command line parsing
532 compile 'net.sf.jopt-simple:jopt-simple:4.6'
534 // log4j - logger used in almost all files
537 // used in readonly store and Co-ordinator
538 compile 'javax.mail:mail:1.4.1'
540 // Used in co-ordinator and rest services
541 compile 'io.netty:netty:3.5.8.Final'
543 // TRANSITIVE_DEPENDENCY Paranamer is a library that allows the parameter names of non-private methods and constructors to be accessed at runtime
544 // Avro has a dependency on paranamer
545 // compile 'com.thoughtworks.paranamer:paranamer:2.1'
547 // protobuf is a supported protocol format between voldemort client and server
551 compile 'javax.servlet:servlet-api:2.5'
553 // slf4j is another logging abstraction framework.
554 // It is used by the apache.avro, apache.hadoop and r2 clients
555 compile 'org.slf4j:slf4j-api:1.5.6'
556 compile 'org.slf4j:slf4j-log4j12:1.5.6'
558 // snappy is one of the supported compression strategies in voldemort
559 compile 'org.iq80.snappy:snappy:0.2'
561 // Velocity is a simple yet powerful Java-based template engine that renders data
562 // from plain Java objects to text, xml, email, SQL, Post Script, HTML etc
563 // Velocity is used for Http Server GUI
564 compile 'org.apache.velocity:velocity:1.6.2'
566 // TRANSITIVE_DEPENDENCY Apache XML Parser
568 // compile 'xerces:xercesImpl:2.9.1'
570 // BDB-JE from Oracle
571 compile 'com.sleepycat:je:5.0.104'
// ---- Test-scope and contrib dependencies (continuation of dependencies {}) ----
573 // cern library containing high performance Maps for int and double
574 // Currently only used in the tests
575 testCompile 'colt:colt:1.2.0'
577 // Used in resource pool perf testing class
578 testCompile 'commons-pool:commons-pool:1.5.2'
580 testRuntime 'mysql:mysql-connector-java:5.1.31'
582 // Used for unit tests and other automated testing
583 testCompile 'junit:junit:4.6'
585 // Mockito is written by our beloved friend Szczepan Faber :)
586 // Mocking framework used in some tests
587 testCompile 'org.mockito:mockito-all:1.8.5'
589 // contribCompile sourceSets.main.output
590 // contribCompile sourceSets.test.output
592 // declaring contribCompile dependencies as compile dependencies
593 // otherwise while copying dependencies to lib directory
594 // conflict resolution is not done properly across sourceSets
595 // and we end up with 2 versions of few jars like ( log4j, servlet etc. )
596 compile 'commons-configuration:commons-configuration:1.6'
// Hadoop artifacts exclude protobuf/avro so Voldemort's pinned versions win.
597 compile('org.apache.hadoop:hadoop-core:2.3.0-mr1-cdh5.1.5') {
598 exclude group: 'com.google.protobuf'
599 exclude group: 'org.apache.avro'
601 compile('org.apache.hadoop:hadoop-common:2.3.0-cdh5.1.5') {
602 exclude group: 'com.google.protobuf'
603 exclude group: 'org.apache.avro'
605 compile('org.apache.hadoop:hadoop-hdfs:2.3.0-cdh5.1.5') {
606 exclude group: 'com.google.protobuf'
607 exclude group: 'org.apache.avro'
// LinkedIn pegasus (rest.li) client stack.
610 compile 'com.linkedin.pegasus:r2:1.8.3'
611 compile 'com.linkedin.pegasus:data:1.8.3'
612 compile 'com.linkedin.pegasus:pegasus-common:1.8.3'
615 compile 'com.google.code.typica:typica:1.7.2'
616 compile 'com.sna-projects.krati:krati:0.4.9'
620 testCompile 'io.tehuti:tehuti:0.7.0:test'
623 compile 'org.apache.tomcat:catalina-ant:6.0.43'
624 compile 'org.apache.hadoop:libthrift:0.5.0.0'
626 // rocksdb from maven
627 compile 'org.rocksdb:rocksdbjni:3.13.1'
629 // Bouncy Castle Library
630 compile 'org.bouncycastle:bcprov-jdk15on:1.48'
// Gobblin stack, with the shared excludes defined near the top of this script.
633 compile 'com.linkedin.gobblin:gobblin-runtime:0.11.0', gobblinExcludes
634 compile 'com.linkedin.gobblin:gobblin-data-management:0.11.0', gobblinExcludes
635 compile 'com.linkedin.gobblin:gobblin-throttling-service-client:0.11.0', gobblinExcludes
639 // this configures all the contrib subprojects
640 // note at the moment there dependencies are still
641 // declared in the dependency block above.
// Contrib subprojects compile against the same JDK level as the root project.
643 sourceCompatibility = javacVersion
644 targetCompatibility = javacVersion
647 // note that the contrib projects don't currently have resource
648 // directories, so the below exists just to keep IDEA & Gradle from
649 // thinking that the test resources dir is src/test/resources
// Contrib source layout: src/java + src/resources, tests in test/ + testResources/.
651 java { srcDirs = ['src/java'] }
652 resources { srcDirs = ['src/resources'] }
655 java { srcDirs = ['test'] }
656 resources { srcDirs = ['testResources'] }
661 // Used for unit tests and other automated testing
// Contrib tests reuse the root project's full test runtime classpath.
662 testCompile rootProject.sourceSets.test.runtimeClasspath
665 tasks.withType(Test) {
666 // this is required as the test utils expect the config directory
667 // to be at the root of the process working directory.
668 workingDir = rootProject.projectDir
// ---- IDE (Eclipse / IDEA) configuration ----
674 apply plugin: 'eclipse'
678 sourceCompatibility = targetCompatibility = 1.7
681 defaultOutputDir = project.file('classes') // overrides the default of /bin which is where our scripts are
682 downloadSources = true
// Post-process the generated Eclipse .classpath to remove duplicate library entries.
684 whenMerged { classpath ->
685 // so for some reason the generated .classpath for the contrib projects includes
686 // two copies of *most* (maybe all) of the libraries from the parent project
687 // the following de-dupes these
688 def duplicateLibs = classpath.entries
689 .findAll { it.kind == 'lib' } // only library entries
690 .groupBy { it.library } // index by the library
691 .findAll { it.value.size() > 1 } // only where there is more that a single entry
693 duplicateLibs.each { k, v ->
694 // pick one from the list of dupes ..preferring the one without a sourcelib path
695 // else just take the first
696 def toRemove = v.find { !it.sourcePath } ?: v.first()
697 classpath.entries.remove toRemove
699 // also, for no apparent reason, contrib projects get a source path configured
700 // with the following path.. since this path doesn't exist eclipse complains
701 classpath.entries.removeAll {
702 it.kind == 'lib' && it.library.path.endsWith('build/resources/test')
// IDEA settings: fetch javadoc and sources for dependencies.
711 downloadJavadoc = true
712 downloadSources = true