description = 'Spring for Apache Hadoop'
defaultTasks 'build'
buildscript {
repositories {
maven { url "http://repo.springsource.org/plugins-release" }
}
dependencies {
classpath("org.springframework.build.gradle:propdeps-plugin:0.0.3")
classpath("org.springframework.build.gradle:docbook-reference-plugin:0.2.6")
}
}
allprojects {
group = 'org.springframework.data'
repositories {
mavenCentral()
maven { url 'http://repo.springsource.org/libs-milestone' }
}
}
def javaProjects() {
subprojects.findAll { project -> project.name != 'docs' }
}
def hadoopProjects() {
subprojects.findAll { project -> project.name.contains('-hadoop-') || project.name.contains('-cascading') }
}
def yarnProjects() {
subprojects.findAll { project -> project.name.contains('-yarn-') }
}
//
// Select the Hadoop distribution used for building the binaries
//
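// The distribution is chosen through the 'distro' project property, e.g. (assuming
// the generated wrapper script): ./gradlew build -Pdistro=cdh4
// Supported values, per the switch below: cdh3, cdh4, phd1, hdp13, hadoop20, hadoop12, hadoop11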
def List hadoop = []
def hadoopDefault = "hadoop10"
def hadoopDistro = project.hasProperty("distro") ? project.getProperty("distro") : hadoopDefault
def hadoopVersion = hadoop10Version
// make it possible to use Pig jars compiled for Hadoop 2.0
def pigQualifier = ''
// handle older Hive version
def hiveGroup = "org.apache.hive"
// default is Hadoop 1.0.x
switch (hadoopDistro) {
// Cloudera CDH3
case "cdh3":
hadoopVersion = cdh3Version
println "Using Cloudera CDH3 [$hadoopVersion]"
hbaseVersion = cdh3HbaseVersion
// Hive in CDH3 is too old and does not allow Hive Server to be compiled
// Note that the POMs and repo are incomplete (hive-builtin is missing)
// hiveVersion = cdh3HiveVersion
// hiveGroup = "org.apache.hadoop.hive"
pigVersion = cdh3PigVersion
break;
// Cloudera CDH4
case "cdh4":
hadoopVersion = cdh4MR1Version
println "Using Cloudera CDH4 [$hadoopVersion]"
hbaseVersion = cdh4HbaseVersion
hiveVersion = cdh4HiveVersion
pigVersion = cdh4PigVersion
thriftVersion = cdh4ThriftVersion
break;
// Pivotal HD 1.0
case "phd1":
hadoopVersion = phd1Version
println "Using Pivotal HD 1.0 - [$hadoopVersion]"
hbaseVersion = phd1HbaseVersion
hiveVersion = phd1HiveVersion
pigVersion = phd1PigVersion
thriftVersion = phd1ThriftVersion
hadoop = ["org.apache.hadoop:hadoop-common:$hadoopVersion",
"org.apache.hadoop:hadoop-hdfs:$hadoopVersion",
"org.apache.hadoop:hadoop-mapreduce-client-core:$hadoopVersion",
"org.apache.hadoop:hadoop-streaming:$hadoopVersion",
"org.apache.hadoop:hadoop-distcp:$hadoopVersion"]
break;
// Hortonworks Data Platform 1.3
case "hdp13":
hadoopVersion = hdp13Version
println "Using Hortonworks Data Platform 1.3 [$hadoopVersion]"
hadoop = ["org.apache.hadoop:hadoop-streaming:$hadoopVersion",
"org.apache.hadoop:hadoop-tools:$hadoopVersion"]
hbaseVersion = hdp13HbaseVersion
hiveVersion = hdp13HiveVersion
pigVersion = hdp13PigVersion
thriftVersion = hdp13ThriftVersion
break;
// Hadoop 2.0 Alpha
case "hadoop20":
hadoopVersion = hd20Version
println "Using Apache Hadoop 2.0 - [$hadoopVersion]"
hadoop = ["org.apache.hadoop:hadoop-common:$hadoopVersion",
"org.apache.hadoop:hadoop-hdfs:$hadoopVersion",
"org.apache.hadoop:hadoop-mapreduce-client-core:$hadoopVersion",
"org.apache.hadoop:hadoop-streaming:$hadoopVersion",
"org.apache.hadoop:hadoop-distcp:$hadoopVersion"]
hbaseVersion = hd20HbaseVersion
hiveVersion = hd20HiveVersion
pigVersion = hd20PigVersion
pigQualifier = ':h2'
thriftVersion = hd20ThriftVersion
break;
// Hadoop 1.2.x Beta
case "hadoop12":
hadoopVersion = hd12Version
println "Using Apache Hadoop 1.2.x [$hadoopVersion]"
hadoop = ["org.apache.hadoop:hadoop-streaming:$hadoopVersion",
"org.apache.hadoop:hadoop-tools:$hadoopVersion"]
hbaseVersion = hd12HbaseVersion
hiveVersion = hd12HiveVersion
pigVersion = hd12PigVersion
thriftVersion = hd12ThriftVersion
break;
// Hadoop 1.1.x
case "hadoop11":
hadoopVersion = hadoop11Version
println "Using Apache Hadoop 1.1.x [$hadoopVersion]"
hadoop = ["org.apache.hadoop:hadoop-streaming:$hadoopVersion",
"org.apache.hadoop:hadoop-tools:$hadoopVersion"]
break;
default:
if (!project.hasProperty("distro")) {
println "Using default distro: Apache Hadoop [$hadoopVersion]"
} else {
if (hadoopDistro == hadoopDefault) {
println "Using Apache Hadoop 1.0.x [$hadoopVersion]"
} else {
println "$hadoopDistro is not a supported distro; using default distro: Apache Hadoop [$hadoopVersion]"
}
}
hadoopVersion = hadoop10Version
hadoop = ["org.apache.hadoop:hadoop-streaming:$hadoopVersion",
"org.apache.hadoop:hadoop-tools:$hadoopVersion"]
}
configure(javaProjects()) {
apply plugin: 'java'
apply from: "${rootProject.projectDir}/maven.gradle"
apply plugin: 'eclipse'
apply plugin: 'idea'
apply plugin: 'propdeps'
apply plugin: 'propdeps-idea'
apply plugin: 'propdeps-eclipse'
sourceCompatibility=1.6
targetCompatibility=1.6
// assume we are skipping these tests (must be enabled explicitly)
ext.skipPig = true
ext.skipHive = true
ext.skipHBase = true
ext.skipWebHdfs = true
ext.skipCascading = true
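// These flags are flipped back to false by the enable*Tests tasks declared in the
// 'spring-hadoop-test' project further down (enablePigTests, enableHiveTests, ...)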
// exclude poms from the classpath (pulled in by Cloudera)
eclipse.classpath.file {
whenMerged { classpath ->
classpath.entries.removeAll { entry -> entry.toString().contains(".pom") }
}
}
eclipse {
project {
natures += 'org.springframework.ide.eclipse.core.springnature'
}
}
// dependencies that are common across all java projects
dependencies {
compile "org.springframework:spring-aop:$springVersion"
compile "org.springframework:spring-context:$springVersion"
compile "org.springframework:spring-context-support:$springVersion"
compile "org.springframework:spring-jdbc:$springVersion"
compile "org.springframework:spring-tx:$springVersion"
}
task sourcesJar(type: Jar) {
classifier = 'sources'
from sourceSets.main.allJava
}
task javadocJar(type: Jar) {
classifier = 'javadoc'
from javadoc
}
artifacts {
archives sourcesJar
archives javadocJar
}
assemble.dependsOn = ['jar', 'sourcesJar']
javadoc {
ext.srcDir = file("${projectDir}/docs/src/api")
configure(options) {
stylesheetFile = file("${rootProject.projectDir}/docs/src/api/spring-javadoc.css")
overview = "${rootProject.projectDir}/docs/src/api/overview.html"
docFilesSubDirs = true
outputLevel = org.gradle.external.javadoc.JavadocOutputLevel.QUIET
breakIterator = true
author = true
showFromProtected()
// groups = [
// 'Spring Data Hadoop' : ['org.springframework.data.hadoop*'],
// ]
links = [
"http://static.springframework.org/spring/docs/3.0.x/javadoc-api",
"http://download.oracle.com/javase/6/docs/api",
"http://commons.apache.org/proper/commons-logging/commons-logging-1.1.1/apidocs/",
"http://logging.apache.org/log4j/1.2/apidocs/",
"http://hadoop.apache.org/common/docs/current/api/",
"http://hbase.apache.org/apidocs/",
"http://pig.apache.org/docs/r0.10.0/api/",
"http://hive.apache.org/docs/r0.7.1/api/",
"http://static.springsource.org/spring-batch/apidocs/",
"http://static.springsource.org/spring-integration/api/",
"https://builds.apache.org/job/Thrift/javadoc/",
"http://docs.cascading.org/cascading/2.1/javadoc/"
]
exclude "org/springframework/data/hadoop/config/**"
}
title = "${rootProject.description} ${version} API"
}
jar {
manifest.attributes["Created-By"] = "${System.getProperty("java.version")} (${System.getProperty("java.specification.vendor")})"
manifest.attributes['Implementation-Title'] = 'spring-data-hadoop'
manifest.attributes['Implementation-Version'] = project.version
manifest.attributes['Implementation-URL'] = "http://www.springsource.org/spring-data/hadoop"
manifest.attributes['Implementation-Vendor'] = "SpringSource"
manifest.attributes['Implementation-Vendor-Id'] = "org.springframework"
def build = System.env['SHDP.BUILD']
if (build != null)
manifest.attributes['Build'] = build
String rev = "unknown"
// parse the git files to find out the revision
File gitHead = file('.git/HEAD')
if (gitHead.exists()) {
gitHead = file('.git/' + gitHead.text.trim().replace('ref: ',''))
if (gitHead.exists()) { rev = gitHead.text }
}
from("$rootDir/docs/src/info") {
include "license.txt"
include "notice.txt"
into "META-INF"
expand(copyright: new Date().format('yyyy'), version: project.version)
}
manifest.attributes['Repository-Revision'] = rev
}
}
configure(hadoopProjects()) {
// default is Hadoop 1.0.x
switch (hadoopDistro) {
// Cloudera CDH3
case "cdh3":
dependencies {
optional("org.apache.hadoop:hadoop-streaming:$hadoopVersion")
optional("org.apache.hadoop:hadoop-tools:$hadoopVersion")
testCompile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
}
break;
// Cloudera CDH4
case "cdh4":
dependencies {
optional("org.apache.hadoop:hadoop-streaming:$cdh4MR1Version")
optional("org.apache.hadoop:hadoop-tools:$cdh4MR1Version")
optional("org.apache.hadoop:hadoop-common:$cdh4Version")
optional("org.apache.hadoop:hadoop-hdfs:$cdh4Version")
testCompile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
}
break;
// Pivotal HD 1.0
case "phd1":
dependencies {
compile "org.apache.hive:hive-common:$hiveVersion"
compile "org.apache.hive:hive-metastore:$hiveVersion"
compile "org.apache.hive:hive-exec:$hiveVersion"
testCompile "org.apache.hadoop:hadoop-mapreduce-examples:$hadoopVersion"
testRuntime "org.apache.hadoop:hadoop-mapreduce-client-jobclient:$hadoopVersion"
}
break;
// Hortonworks Data Platform 1.3
case "hdp13":
dependencies {
testCompile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
testRuntime "dk.brics.automaton:automaton:1.11-8"
}
break;
// Hadoop 2.0 Alpha
case "hadoop20":
dependencies {
testCompile "org.apache.hadoop:hadoop-mapreduce-examples:$hadoopVersion"
testRuntime "org.apache.hadoop:hadoop-mapreduce-client-jobclient:$hadoopVersion"
testRuntime "dk.brics.automaton:automaton:1.11-8"
}
break;
// Hadoop 1.2.x Beta
case "hadoop12":
dependencies {
testCompile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
}
break;
// Hadoop 1.1.x
case "hadoop11":
dependencies {
testCompile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
}
break;
default:
dependencies {
testCompile "org.apache.hadoop:hadoop-examples:$hadoopVersion"
}
}
dependencies {
compile hadoop
// Logging - using commons-logging from spring-core
testRuntime("log4j:log4j:$log4jVersion")
// Spring Framework
// context-support -> spring-aop/beans/core -> commons-logging
compile "org.springframework:spring-context-support:$springVersion"
// used for DAO exceptions by Pig/HBase/Hive packages
optional("org.springframework:spring-tx:$springVersion")
// used by Hive package
optional("org.springframework:spring-jdbc:$springVersion")
// Missing dependency in Hadoop 1.0.3
testRuntime "commons-io:commons-io:$commonsioVersion"
testRuntime "org.codehaus.jackson:jackson-mapper-asl:$jacksonVersion"
testRuntime "cglib:cglib:$cglibVersion"
// Hive
optional("$hiveGroup:hive-service:$hiveVersion")
// needed by JDBC test
testRuntime "$hiveGroup:hive-jdbc:$hiveVersion"
// needed by the Hive Server tests
// testRuntime "$hiveGroup:hive-builtins:$hiveVersion"
// testRuntime("$hiveGroup:hive-metastore:$hiveVersion")
//testRuntime "$hiveGroup:hive-common:$hiveVersion"
//testRuntime "$hiveGroup:hive-shims:$hiveVersion"
//testRuntime "$hiveGroup:hive-serde:$hiveVersion"
//testRuntime "org.apache.thrift:libthrift:$thriftVersion"
//testRuntime "org.apache.thrift:libfb303:$thriftVersion"
// Pig
optional("org.apache.pig:pig:$pigVersion$pigQualifier") { dep ->
exclude module: "junit"
}
// HBase
optional("org.apache.hbase:hbase:$hbaseVersion") { dep ->
exclude module: "thrift"
}
// Libs dependencies (specified to cope with incompatibilities between them)
// testRuntime "org.antlr:antlr:$antlrVersion"
// testRuntime "org.antlr:antlr-runtime:$antlrVersion"
// Testing
testCompile "junit:junit:$junitVersion"
}
}
configure(rootProject) {
apply plugin: 'eclipse'
apply plugin: 'idea'
apply plugin: "docbook-reference"
reference {
sourceDir = file("docs/src/reference/docbook")
pdfFilename = "spring-data-hadoop-reference.pdf"
}
// don't publish the default jar for the root project
configurations.archives.artifacts.clear()
task api(type: Javadoc) {
group = "Documentation"
description = "Generates aggregated Javadoc API documentation."
title = "${rootProject.description} ${version} API"
dependsOn {
subprojects.collect {
it.tasks.getByName("jar")
}
}
options.memberLevel = org.gradle.external.javadoc.JavadocMemberLevel.PROTECTED
options.author = true
options.header = rootProject.description
options.overview = "docs/src/api/overview.html"
options.stylesheetFile = file("docs/src/api/spring-javadoc.css")
options.splitIndex = true
//options.links(project.ext.javadocLinks)
source subprojects.collect { project ->
project.sourceSets.main.allJava
}
maxMemory = "1024m"
destinationDir = new File(buildDir, "api")
doFirst {
classpath = files(subprojects.collect { it.sourceSets.main.compileClasspath })
}
}
task docsZip(type: Zip) {
group = "Distribution"
baseName = "spring-data-hadoop"
classifier = "docs"
description = "Builds -${classifier} archive containing api and reference " +
"for deployment at http://static.springframework.org/spring-hadoop/docs."
from("docs/src/info") {
include "changelog.txt"
}
from (api) {
into "javadoc-api"
}
from (reference) {
into "spring-data-hadoop-reference"
}
}
task schemaZip(type: Zip) {
group = "Distribution"
baseName = "spring-framework"
classifier = "schema"
description = "Builds -${classifier} archive containing all " +
"XSDs for deployment at http://springframework.org/schema."
subprojects.each { subproject ->
def Properties schemas = new Properties();
subproject.sourceSets.main.resources.find {
it.path.endsWith("META-INF/spring.schemas")
}?.withInputStream { schemas.load(it) }
for (def key : schemas.keySet()) {
def shortName = key.replaceAll(/http.*schema.(.*).spring-.*/, '$1')
assert shortName != key
File xsdFile = subproject.sourceSets.main.resources.find {
it.path.endsWith(schemas.get(key))
}
assert xsdFile != null
into (shortName) {
from xsdFile.path
}
}
}
}
task distZip(type: Zip, dependsOn: [docsZip, schemaZip]) {
group = "Distribution"
baseName = "spring-framework"
classifier = "dist"
description = "Builds -${classifier} archive, containing all jars and docs, " +
"suitable for community download page."
ext.baseDir = "${baseName}-${project.version}";
from("docs/src/info") {
include "readme.txt"
include "license.txt"
include "notice.txt"
into "${baseDir}"
expand(copyright: new Date().format("yyyy"), version: project.version)
}
from(zipTree(docsZip.archivePath)) {
into "${baseDir}/docs"
}
from(zipTree(schemaZip.archivePath)) {
into "${baseDir}/schema"
}
subprojects.each { subproject ->
into ("${baseDir}/libs") {
from subproject.jar
if (subproject.tasks.findByPath("sourcesJar")) {
from subproject.sourcesJar
}
if (subproject.tasks.findByPath("javadocJar")) {
from subproject.javadocJar
}
}
}
}
artifacts {
archives docsZip
archives schemaZip
archives distZip
}
}
project('spring-data-hadoop-core') {
description = 'Spring for Apache Hadoop Core'
}
project('spring-data-hadoop-batch') {
description = 'Spring for Apache Hadoop Batch Features'
dependencies {
compile project(":spring-data-hadoop")
compile "org.springframework.batch:spring-batch-core:$springBatchVersion"
testRuntime "org.springframework.integration:spring-integration-file:$springIntVersion"
}
}
project('spring-data-hadoop') {
description = 'Spring for Apache Hadoop Configuration'
dependencies {
compile project(":spring-data-hadoop-core")
}
}
project('spring-hadoop-test') {
description = 'Spring for Apache Hadoop Integration Tests'
dependencies {
compile project(":spring-data-hadoop-core")
compile project(":spring-data-hadoop-batch")
compile project(":spring-data-hadoop")
compile project(":spring-cascading")
// Testing
testCompile "junit:junit:$junitVersion"
testCompile("org.mockito:mockito-core:$mockitoVersion") { dep ->
exclude group: "org.hamcrest"
}
testCompile "org.springframework:spring-test:$springVersion"
testCompile("javax.annotation:jsr250-api:1.0")
testCompile "org.springframework.integration:spring-integration-stream:$springIntVersion"
testCompile "org.springframework.integration:spring-integration-file:$springIntVersion"
testRuntime "org.springframework.integration:spring-integration-event:$springIntVersion"
testRuntime "cglib:cglib:$cglibVersion"
testRuntime "commons-io:commons-io:$commonsioVersion"
testCompile "cascading:cascading-local:$cascadingVersion"
// Testing
testRuntime "org.codehaus.groovy:groovy:$groovyVersion"
testRuntime "org.jruby:jruby:$jrubyVersion"
testRuntime "org.python:jython-standalone:$jythonVersion"
// specify a version of antlr that works with both hive and pig (works only during compilation)
testRuntime "org.antlr:antlr-runtime:$antlrVersion"
}
task downloadGutenbergBooks {
ant.get(src: 'http://www.gutenberg.lib.md.us/1/0/100/100.txt',
dest: 'src/test/resources/input/gutenberg',skipexisting:true)
ant.get(src: 'http://www.gutenberg.lib.md.us/1/3/135/135.txt',
dest: 'src/test/resources/input/gutenberg',skipexisting:true)
ant.get(src: 'http://www.gutenberg.lib.md.us/1/3/9/1399/1399.txt',
dest: 'src/test/resources/input/gutenberg',skipexisting:true)
ant.get(src: 'http://www.gutenberg.lib.md.us/2/6/0/2600/2600.txt',
dest: 'src/test/resources/input/gutenberg',skipexisting:true)
}
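// The downloadGutenbergBooks task above fetches sample input for the test suite;
// run it explicitly, e.g.: ./gradlew :spring-hadoop-test:downloadGutenbergBooks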
task enablePigTests {
description = "Enabling Pig tests"
group = "Verification"
doLast() {
project.ext.skipPig = false
}
}
task enableHiveTests {
description = "Enabling Hive tests"
group = "Verification"
doLast() {
project.ext.skipHive = false
}
}
task enableHBaseTests {
description = "Enabling HBase tests"
group = "Verification"
doLast() {
project.ext.skipHBase = false
}
}
task enableWebHdfsTests {
description = "Enabling WebHdfs tests"
group = "Verification"
doLast() {
project.ext.skipWebHdfs = false
}
}
task enableCascadingTests {
description = "Enabling Cascading tests"
group = "Verification"
doLast() {
project.ext.skipCascading = false
}
}
task enableAllTests() {
description = "Enabling all (incl. Pig, Hive, HBase, WebHdfs, Cascading) tests"
group = "Verification"
doLast() {
project.ext.skipPig = false
project.ext.skipHive = false
project.ext.skipHBase = false
project.ext.skipWebHdfs = false
project.ext.skipCascading = false
}
}
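// e.g. ./gradlew enableAllTests test runs the full suite, including the Pig, Hive,
// HBase, WebHdfs and Cascading tests that are skipped by default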
test {
//forkEvery = 1
systemProperties['input.path'] = 'build/classes/test/input'
systemProperties['output.path'] = 'build/classes/test/output'
includes = ["**/*.class"]
testLogging {
events "started"
minGranularity 2
maxGranularity 2
}
doFirst() {
ext.msg = " "
if (project.ext.skipPig) {
ext.msg += "Pig "
excludes.add("**/pig/**")
}
if (project.ext.skipHBase) {
ext.msg += "HBase "
excludes.add("**/hbase/**")
}
if (project.ext.skipHive) {
ext.msg += "Hive "
excludes.add("**/hive/**")
}
if (project.ext.skipWebHdfs) {
ext.msg += "WebHdfs "
excludes.add("**/WebHdfs*")
}
if (project.ext.skipCascading) {
ext.msg += "Cascading "
excludes.add("**/cascading/**")
}
if (!msg.trim().isEmpty())
println "Skipping [$msg] Tests";
// check prefix for hd.fs
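// e.g. -Phd.fs=localhost:8020 is expanded to hdfs://localhost:8020 below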
// first copy the properties since we can't change them
ext.projProps = project.properties
if (projProps.containsKey("hd.fs")) {
String hdfs = projProps["hd.fs"].toString()
if (!hdfs.contains("://")) {
projProps.put("hd.fs", "hdfs://" + hdfs)
}
}
// due to GRADLE-2475, set the system properties manually
projProps.each { k,v ->
if (k.toString().startsWith("hd.")) {
systemProperties[k] = projProps[k]
}
}
}
}
}
project('spring-cascading') {
description = 'Spring Cascading Support'
dependencies {
compile project(":spring-data-hadoop-batch")
compile "org.springframework.integration:spring-integration-core:$springIntVersion"
// cascading
compile("cascading:cascading-hadoop:$cascadingVersion") { dep ->
exclude module: "hadoop-core"
}
}
}
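// gradle.ext.mr2 is assumed to be set elsewhere (e.g. in settings.gradle) when the
// selected distro is MR2/YARN-based; only then are the YARN modules configured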
if (gradle.ext.mr2) {
configure(yarnProjects()) {
task integrationTest(type: Test) {
include '**/*IntegrationTests.*'
}
test {
exclude '**/*IntegrationTests.*'
}
dependencies {
testCompile "org.springframework:spring-test:$springVersion"
testCompile "org.hamcrest:hamcrest-core:$hamcrestVersion"
testCompile "org.hamcrest:hamcrest-library:$hamcrestVersion"
testCompile "junit:junit:$junitVersion"
}
}
project('spring-yarn') {
description = 'Spring for Apache Hadoop YARN'
dependencies {
compile project("spring-yarn-batch")
}
}
project('spring-yarn:spring-yarn-core') {
description = 'Spring Yarn Core'
dependencies {
compile "org.apache.hadoop:hadoop-yarn-client:$hadoopVersion"
compile("org.apache.hadoop:hadoop-common:$hadoopVersion") { dep ->
exclude module: "junit"
}
}
}
project('spring-yarn:spring-yarn-integration') {
description = 'Spring Yarn Integration'
dependencies {
compile project(":spring-yarn:spring-yarn-core")
compile "org.springframework.integration:spring-integration-ip:$springIntVersion"
compile "com.fasterxml.jackson.core:jackson-core:$jackson2Version"
compile "com.fasterxml.jackson.core:jackson-databind:$jackson2Version"
testCompile "org.springframework.integration:spring-integration-test:$springIntVersion"
}
}
project('spring-yarn:spring-yarn-batch') {
description = 'Spring Yarn Batch'
dependencies {
compile project(":spring-yarn:spring-yarn-integration")
compile "org.springframework.batch:spring-batch-core:$springBatchVersion"
compile "org.springframework.batch:spring-batch-infrastructure:$springBatchVersion"
testCompile project(":spring-data-hadoop-core")
}
}
project('spring-yarn:spring-yarn-test') {
description = 'Spring Yarn Test'
dependencies {
compile project(":spring-yarn:spring-yarn-core")
compile "org.apache.hadoop:hadoop-yarn-client:$hadoopVersion"
compile("org.apache.hadoop:hadoop-common:$hadoopVersion") { dep ->
exclude module: "junit"
}
compile "org.apache.hadoop:hadoop-yarn-server-tests:$hadoopVersion"
}
}
}
task wrapper(type: Wrapper) {
description = "Generates gradlew[.bat] scripts"
gradleVersion = "1.6"
}
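// Run 'gradle wrapper' to (re)generate the gradlew and gradlew.bat scripts for Gradle 1.6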