E533 sqoop/build.xml at trunk · apache/sqoop · GitHub
[go: up one dir, main page]

Skip to content
This repository was archived by the owner on Jul 9, 2021. It is now read-only.

Latest commit

 

History

History
1463 lines (1257 loc) · 58.8 KB

File metadata and controls

1463 lines (1257 loc) · 58.8 KB
<?xml version="1.0"?>
<!--
Copyright 2011 The Apache Software Foundation
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project name="sqoop" default="jar-all"
xmlns:artifact="urn:maven-artifact-ant"
xmlns:ivy="antlib:org.apache.ivy.ant">
<!-- load ant-contrib tasks to get the "if" task. -->
<taskdef resource="net/sf/antcontrib/antcontrib.properties">
<classpath>
<pathelement location="${basedir}/lib/ant-contrib-1.0b3.jar"/>
</classpath>
</taskdef>
<!-- Load system-wide and project-wide default properties set by
the user, to avoid needing to override with -D. -->
<property file="${user.home}/build.properties" />
<property file="${basedir}/build.properties" />
<!-- some basic properties -->
<property environment="env"/>
<property name="name" value="sqoop" />
<property name="Name" value="Sqoop" />
<property name="version" value="1.5.0-SNAPSHOT" />
<!-- Ivy library properties -->
<property name="ivy.dir" location="${basedir}/ivy" />
<loadproperties srcfile="${ivy.dir}/libraries.properties"/>
<!-- The last version released. -->
<property name="oldversion" value="1.4.7" />
<!-- The point when we branched for the previous release. -->
<property name="prev.git.hash"
value="b0f391e75154be86f95378ab141f6dd1b3b59475" />
<property name="artifact.name" value="${name}-${version}" />
<property name="dest.jar" value="${artifact.name}.jar" />
<property name="test.jar" value="${name}-test-${version}.jar" />
<property name="git.hash" value="" />
<!-- programs used -->
<property name="python" value="python" />
<!-- locations in the source tree -->
<property name="base.src.dir" location="${basedir}/src" />
<property name="src.dir" location="${base.src.dir}/java" />
<property name="test.dir" location="${base.src.dir}/test" />
<property name="perftest.src.dir" location="${base.src.dir}/perftest" />
<property name="lib.dir" location="${basedir}/lib" />
<property name="docs.src.dir" location="${base.src.dir}/docs" />
<property name="script.src.dir" location="${base.src.dir}/scripts" />
<!-- base directory for all build/test process output -->
<property name="build.dir" location="${basedir}/build" />
<!-- generated bin scripts -->
<property name="build.bin.dir" location="${build.dir}/bin" />
<!-- generated source code -->
<property name="build.src.dir" location="${build.dir}/src" />
<!-- staging area for *-sources.jar files -->
<property name="build.srcjar.dir" location="${build.dir}/srcjars" />
<!-- compiled classes for the main sqoop artifact. -->
<property name="build.classes" location="${build.dir}/classes"/>
<!-- root directory for output/intermediate data for testing -->
<property name="build.test" location="${build.dir}/test"/>
<property name="test.log.dir" location="${build.dir}/test/logs"/>
<property name="test.build.extraconf" value="${build.test}/extraconf" />
<!-- compiled test classes -->
<property name="build.test.classes" location="${build.test}/classes" />
<!-- compiled "perftest" programs -->
<property name="build.perftest" location="${build.dir}/perftest"/>
<property name="build.perftest.classes" location="${build.perftest}/classes"/>
<!-- generated documentation output directory -->
<property name="build.javadoc" location="${build.dir}/docs/api" />
<!-- Target dir for release notes file. -->
<property name="build.relnotes.dir" location="${build.dir}/docs" />
<property name="relnotes.filename"
location="${build.relnotes.dir}/sqoop-${version}.releasenotes.html" />
<property name="bin.artifact.name" value="${artifact.name}.bin__hadoop-${hadoop.version}" />
<property name="dist.dir" location="${build.dir}/${bin.artifact.name}" />
<property name="tar.file" location="${build.dir}/${bin.artifact.name}.tar.gz" />
<property name="build.docs.timestamp"
location="${build.dir}/docs.timestamp" />
<property name="src.artifact.name" value="${artifact.name}" />
<property name="src.dist.dir" location="${build.dir}/${src.artifact.name}" />
<property name="src.tar.file" location="${build.dir}/${src.artifact.name}.tar.gz" />
<!-- compilation -->
<property name="targetJavaVersion" value="1.8" />
<property name="sourceJavaVersion" value="1.8" />
<property name="javac.deprecation" value="off"/>
<property name="javac.debug" value="on"/>
<property name="build.encoding" value="ISO-8859-1"/>
<!-- testing with JUnit -->
<property name="test.junit.output.format" value="plain"/>
<property name="test.output" value="no"/>
<property name="test.timeout" value="1200000"/>
<!-- static analysis -->
<property name="findbugs.out.dir" value="${build.dir}/findbugs" />
<property name="findbugs.output.xml.file"
value="${findbugs.out.dir}/report.xml" />
<property name="findbugs.output.html.file"
value="${findbugs.out.dir}/report.html" />
<!-- maven -->
<property name="mvn.build.dir" value="${build.dir}/m2" />
<property name="mvn.repo" value="snapshots" />
<property name="mvn.repo.id" value="cloudera.${mvn.repo}.repo" />
<property name="mvn.deploy.url"
value="https://repository.cloudera.com/content/repositories/${mvn.repo}"/>
<property name="sqoop.pom" value="${mvn.build.dir}/sqoop.pom" />
<property name="sqooptest.pom" value="${mvn.build.dir}/sqoop-test.pom" />
<!-- code coverage -->
<property name="cobertura.dir" value="${build.dir}/cobertura" />
<property name="cobertura.home" value="${cobertura.dir}" />
<property name="cobertura.report.dir" value="${cobertura.dir}/report" />
<property name="cobertura.format" value="html" /> <!-- may be 'xml' -->
<property name="cobertura.class.dir" value="${cobertura.dir}/classes" />
<!-- aspectJ fault injection -->
<import file="${test.dir}/aop/build/aop.xml"/>
<!-- Checking code style -->
<property name="checkstyle.xml" value="${test.dir}/checkstyle.xml" />
<property name="checkstyle.format.xsl"
value="${test.dir}/checkstyle-noframes.xsl" />
<property name="checkstyle.report.dir" value="${build.dir}" />
<!-- Release audit -->
<property name="rat.reporting.classname" value="rat.Report"/>
<!-- When testing with non-free JDBC drivers, override this parameter
to contain the path to the driver library dir.
-->
<property name="sqoop.thirdparty.lib.dir" value="" />
<!-- Ivy-based dependency resolution -->
<property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml"/>
<property name="ivy.jar" location="${lib.dir}/ivy-${ivy.version}.jar"/>
<property name="ivy_repo_url"
value="https://repo1.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" />
<property name="mvn_repo_url"
value="https://repo1.maven.org/maven2/org/apache/maven/maven-ant-tasks/${mvn.version}/maven-ant-tasks-${mvn.version}.jar"/>
<property name="mvn.jar"
location="${build.dir}/maven-ant-tasks-${mvn.version}.jar" />
<property name="build.ivy.dir" location="${build.dir}/ivy" />
<property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib" />
<property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
<!-- Directory where Ivy places dependencies that are redistributed with
     the binary package (the "redist" Ivy configuration); consumed by the
     "package" target when populating ${dist.dir}/lib.
     NOTE: fixed a scrape/extraction artifact here — the property reference
     was garbled as "${name 8B92 }"; it must be "${name}". -->
<property name="redist.ivy.lib.dir"
    location="${build.ivy.lib.dir}/${name}/redist"/>
<!--this is the naming policy for artifacts we want pulled down-->
<property name="ivy.artifact.retrieve.pattern"
    value="${name}/[conf]/[artifact]-[revision](-[classifier]).[ext]"/>
<!--test related properties -->
<property name="sqoop.test.oracle.connectstring" value="jdbc:oracle:thin:@//localhost:1521/xe"/>
<property name="sqoop.test.oracle.username" value="SYSTEM"/>
<property name="sqoop.test.oracle.password" value="oracle"/>
<property name="sqoop.test.oracle-ee.connectstring" value="jdbc:oracle:thin:@//localhost:1522/sqoop"/>
<property name="sqoop.test.oracle-ee.username" value="SYSTEM"/>
<property name="sqoop.test.oracle-ee.password" value="Sqoop12345"/>
<property name="sqoop.test.mysql.connectstring.host_url"
value="jdbc:mysql://127.0.0.1:13306/"/>
<property name="sqoop.test.mysql.username"
value="sqoop"/>
<property name="sqoop.test.mysql.password" value="Sqoop12345" />
<property name="sqoop.test.mysql.databasename"
value="sqoop"/>
<property name="sqoop.test.cubrid.connectstring.host_url"
value="jdbc:cubrid:localhost:33000"/>
<property name="sqoop.test.cubrid.connectstring.database" value="sqoop" />
<property name="sqoop.test.cubrid.connectstring.username" value="sqoop" />
<property name="sqoop.test.cubrid.connectstring.password" value="Sqoop12345" />
<property name="sqoop.test.postgresql.connectstring.host_url"
value="jdbc:postgresql://localhost:15432/"/>
<property name="sqoop.test.postgresql.database"
value="sqoop" />
<property name="sqoop.test.postgresql.tablespace"
value="sqoop" />
<property name="sqoop.test.postgresql.username"
value="sqoop" />
<property name="sqoop.test.postgresql.password"
value="Sqoop12345" />
<property name="sqoop.test.postgresql.pg_bulkload"
value="pg_bulkload" />
<property name="sqoop.test.sqlserver.connectstring.host_url"
value="jdbc:sqlserver://localhost:1433"/>
<property name="sqoop.test.sqlserver.database"
value="master"/>
<property name="ms.sqlserver.username"
value="sa"/>
<property name="ms.sqlserver.password"
value="Sqoop12345"/>
<property name="sqoop.test.db2.connectstring.host_url" value="jdbc:db2://localhost:50000" />
<property name="sqoop.test.db2.connectstring.database" value="SQOOP" />
<property name="sqoop.test.db2.connectstring.username" value="DB2INST1" />
<property name="sqoop.test.db2.connectstring.password" value="Sqoop12345" />
<property name="sqoop.test.netezza.host" value="nz-host" />
<property name="sqoop.test.netezza.port" value="5480" />
<property name="sqoop.test.netezza.username" value="ADMIN" />
<property name="sqoop.test.netezza.password" value="password" />
<property name="sqoop.test.netezza.db.name" value="SQOOP" />
<property name="sqoop.test.netezza.table.name" value="EMPNZ" />
<property name="sqoop.test.mainframe.ftp.host" value="localhost" />
<property name="sqoop.test.mainframe.ftp.port" value="2121" />
<property name="sqoop.test.mainframe.ftp.username" value="test" />
<property name="sqoop.test.mainframe.ftp.password" value="test" />
<property name="sqoop.test.mainframe.ftp.dataset.gdg" value="TSODIQ1.GDGTEXT" />
<property name="sqoop.test.mainframe.ftp.dataset.gdg.filename" value="G0001V43" />
<property name="sqoop.test.mainframe.ftp.dataset.gdg.md5" value="f0d0d171fdb8a03dbc1266ed179d7093" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.gdg" value="TSODIQ1.FOLDER" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.gdg.filename" value="G0002V45" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.gdg.md5" value="43eefbe34e466dd3f65a3e867a60809a" />
<property name="sqoop.test.mainframe.ftp.dataset.seq" value="TSODIQ1.GDGTEXT.G0001V43" />
<property name="sqoop.test.mainframe.ftp.dataset.seq.filename" value="G0001V43" />
<property name="sqoop.test.mainframe.ftp.dataset.seq.md5" value="f0d0d171fdb8a03dbc1266ed179d7093" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.seq" value="TSODIQ1.FOLDER.FOLDERTXT" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.seq.filename" value="FOLDERTXT" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.seq.md5" value="1591c0fcc718fda7e9c1f3561d232b2b" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.mixed" value="TSODIQ1.MIXED" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.mixed.filename" value="G0039V00" />
<property name="sqoop.test.mainframe.ftp.binary.dataset.mixed.md5" value="5e7f4ec7cbeae8e0e0b4d88346eb9349" />
<property name="s3.bucket.url" value="" />
<property name="s3.generator.command" value="" />
<condition property="windows">
<os family="windows" />
</condition>
<condition property="skip-real-docs">
<or>
<isset property="docs.uptodate" />
<os family="windows" />
</or>
</condition>
<if>
<isset property="sqoop.test.msserver.connector.factory"/>
<then>
<echo message="Use ${sqoop.test.msserver.connector.factory}"/>
</then>
<else>
<echo message="Use built-in SQL server connector by default"/>
<property name="sqoop.test.msserver.connector.factory"
value="org.apache.sqoop.manager.DefaultManagerFactory"/>
</else>
</if>
<!-- The classpath for compiling and running Sqoop -->
<if>
<isset property="hadoop.home" />
<then>
<path id="compile.classpath">
<pathelement location="${build.classes}"/>
<path refid="lib.path"/>
<fileset dir="${hadoop.home}">
<include name="hadoop-core-*.jar" />
<include name="hadoop-*-core.jar" />
<include name="hadoop-common-*.jar" />
<include name="hadoop-mapred-*.jar" />
<include name="hadoop-hdfs-*.jar" />
</fileset>
<fileset dir="${hadoop.home}/lib">
<include name="*.jar" />
</fileset>
<path refid="${name}.hadoop.classpath"/>
</path>
</then>
<else>
<path id="compile.classpath">
<pathelement location="${build.classes}"/>
<path refid="lib.path"/>
<path refid="${name}.hadoop.classpath"/>
</path>
</else>
</if>
<path id="cobertura.classpath">
<fileset dir="${cobertura.home}">
<include name="**/*.jar" />
</fileset>
</path>
<!-- "init" target used for setup purposes. -->
<!-- "init" defines the shared path references used by the compile and
     test targets (compile.classpath refers to lib.path; compile-test
     compiles against test.classpath). -->
<target name="init">
<!-- Path containing third-party libraries deployed directly with Sqoop.
This does not include anything that Ivy can retrieve for us.
-->
<path id="lib.path">
<fileset dir="${lib.dir}">
<include name="*.jar" />
</fileset>
</path>
<!-- Classpath for unit tests (superset of compile.classpath) -->
<path id="test.classpath">
<pathelement location="${build.test.classes}" />
<pathelement location="${test.build.extraconf}"/>
<path refid="${name}.hadooptest.classpath" />
<path refid="compile.classpath" />
</path>
</target>
<!-- generate the version information class. -->
<!-- Runs the platform-appropriate write-version-info script, passing the
     build dir, the release version, and the git hash as arguments.
     The "windows" property is set by the <os family="windows"/> condition
     declared earlier in this file. -->
<target name="gen-version" depends="init">
<if>
<equals arg1="${windows}" arg2="true" />
<then>
<!-- Windows: use the .cmd variant of the script. -->
<exec executable="${script.src.dir}/write-version-info.cmd"
dir="${basedir}" failonerror="true">
<arg value="${build.dir}" />
<arg value="${version}" />
<arg value="${git.hash}" />
</exec>
</then>
<else>
<!-- Non-Windows: use the shell-script variant. -->
<exec executable="${script.src.dir}/write-version-info.sh"
dir="${basedir}" failonerror="true">
<arg value="${build.dir}" />
<arg value="${version}" />
<arg value="${git.hash}" />
</exec>
</else>
</if>
</target>
<!-- Compile core classes for the project -->
<target name="compile"
depends="init, gen-version, ivy-retrieve-hadoop"
description="Compile core classes for the project">
<!-- don't use an out-of-date instrumented build. -->
<delete dir="${cobertura.class.dir}" />
<!-- ensure normal build target dir exists -->
<mkdir dir="${build.classes}" />
<!-- Compile generated code first. -->
<javac
encoding="${build.encoding}"
srcdir="${build.src.dir}"
includes="**/*.java"
destdir="${build.classes}"
debug="${javac.debug}"
source="${sourceJavaVersion}"
target="${targetJavaVersion}"
deprecation="${javac.deprecation}">
<classpath refid="compile.classpath"/>
</javac>
<!-- Compile the main code. -->
<javac
encoding="${build.encoding}"
srcdir="${src.dir}"
includes="**/*.java"
destdir="${build.classes}"
debug="${javac.debug}"
source="${sourceJavaVersion}"
target="${targetJavaVersion}"
deprecation="${javac.deprecation}">
<classpath refid="compile.classpath"/>
</javac>
</target>
<target name="compile-test"
depends="compile, ivy-retrieve-hadoop-test"
description="Compile test classes">
<mkdir dir="${build.test.classes}" />
<mkdir dir="${test.build.extraconf}"/>
<javac
encoding="${build.encoding}"
srcdir="${test.dir}"
includes="**/*.java"
destdir="${build.test.classes}"
source="${sourceJavaVersion}"
target="${targetJavaVersion}"
debug="${javac.debug}">
<classpath>
<path refid="test.classpath"/>
</classpath>
</javac>
</target>
<target name="compile-perf-test"
depends="compile, ivy-retrieve-hadoop-test"
description="Compile manual performance tests">
<mkdir dir="${build.perftest.classes}" />
<javac
encoding="${build.encoding}"
srcdir="${perftest.src.dir}"
includes="**/*.java"
destdir="${build.perftest.classes}"
source="${sourceJavaVersion}"
target="${targetJavaVersion}"
debug="${javac.debug}">
<classpath>
<path refid="test.classpath"/>
</classpath>
</javac>
</target>
<!-- Package everything under ${build.classes} into the main artifact jar
     (${dest.jar} = ${name}-${version}.jar). -->
<target name="jar" depends="compile" description="Create main jar">
<jar jarfile="${build.dir}/${dest.jar}" basedir="${build.classes}" />
</target>
<!-- Package the compiled test classes into ${test.jar}
     (${name}-test-${version}.jar). -->
<target name="jar-test" depends="compile-test" description="Create test jar">
<jar jarfile="${build.dir}/${test.jar}" basedir="${build.test.classes}" />
</target>
<!-- Ensure that all source code can be built -->
<!-- Aggregate target: ensure main, test, and perftest sources all build. -->
<target name="compile-all"
depends="compile,compile-test,compile-perf-test"
description="Compile all sources"/>
<!-- Aggregate target: create the main and test jars.
     Note this does not include the perftests. -->
<target name="jar-all" depends="jar,jar-test"
description="Create all jar artifacts" />
<target name="scripts" depends="jar"
description="Create tool-specific wrapper scripts">
<!-- Take the list of available tools from 'sqoop help' and generate
the wrapper scripts to invoke each of these.
-->
<mkdir dir="${build.bin.dir}" />
<if>
<equals arg1="${windows}" arg2="true" />
<then>
<java classname="org.apache.sqoop.Sqoop"
fork="true"
failonerror="true"
output="${build.dir}/tools-list"
error="NUL">
<jvmarg value="-Dhadoop.security.log.file=./build/security-audit.log" />
<arg value="help" />
<classpath refid="compile.classpath"/>
</java>
<exec executable="${script.src.dir}/create-tool-scripts.cmd"
dir="${basedir}" failonerror="true">
<arg value="${build.bin.dir}" />
<arg value="${script.src.dir}/tool-script.cmd.template" />
<arg value="${build.dir}/tools-list" />
</exec>
</then>
<else>
<java classname="org.apache.sqoop.Sqoop"
fork="true"
failonerror="true"
output="${build.dir}/tools-list"
error="/dev/null">
<jvmarg value="-Dhadoop.security.log.file=./build/security-audit.log" />
<arg value="help" />
<classpath refid="compile.classpath"/>
</java>
<exec executable="${script.src.dir}/create-tool-scripts.sh"
dir="${basedir}" failonerror="true">
<arg value="${build.bin.dir}" />
<arg value="${script.src.dir}/tool-script.sh.template" />
<arg value="${build.dir}/tools-list" />
</exec>
</else>
</if>
</target>
<target name="package"
depends="jar-all,compile-all,docs,ivy-retrieve-redist,scripts"
description="Create a redistributable package">
<mkdir dir="${dist.dir}"/>
<!-- copy in the build artifact -->
<copy todir="${dist.dir}" includeEmptyDirs="false" flatten="true">
<fileset dir="${build.dir}">
<include name="${dest.jar}" />
</fileset>
</copy>
<!-- copy the test artifact -->
<copy todir="${dist.dir}" includeEmptyDirs="false" flatten="true">
<fileset dir="${build.dir}">
<include name="${test.jar}" />
</fileset>
</copy>
<!-- copy in various components of the initial source layout
so that the redistributable can bootstrap itself. -->
<copy todir="${dist.dir}" includeEmptyDirs="false" flatten="false">
<fileset dir="${basedir}" defaultexcludes="no">
<include name="**/*" />
<exclude name="build/**" />
<exclude name="lib/**" />
<exclude name=".git/**" />
<exclude name="tags" />
<exclude name=".project" />
<exclude name=".classpath" />
<exclude name="conf/managers.d/**" />
<exclude name="conf/tools.d/**" />
</fileset>
</copy>
<!-- copy the dependency libraries from ivy into the output lib dir -->
<mkdir dir="${dist.dir}/lib"/>
<copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
<fileset dir="${redist.ivy.lib.dir}">
<include name="**/*.jar" />
</fileset>
</copy>
<copy todir="${dist.dir}/lib" includeEmptyDirs="false">
<fileset dir="${lib.dir}">
<include name="**/*" />
<exclude name="ivy*" />
</fileset>
</copy>
<!-- copy in documentation build artifacts -->
<copy todir="${dist.dir}/docs" includeEmptyDirs="false" flatten="false">
<fileset dir="${build.dir}/docs">
<include name="**/*.html" />
<include name="**/*.css" />
<include name="images/**" />
</fileset>
</copy>
<copy todir="${dist.dir}/docs/man" includeEmptyDirs="false" flatten="false">
<fileset dir="${build.dir}/docs">
<include name="**/*.gz" />
</fileset>
</copy>
<!-- copy in auto-generated bin scripts -->
<copy todir="${dist.dir}/bin" includeEmptyDirs="false" flatten="true">
<fileset dir="${build.bin.dir}">
<include name="*" />
</fileset>
</copy>
<!-- make sure the bin scripts are executable. -->
<chmod perm="ugo+x" type="file" parallel="false">
<fileset dir="${dist.dir}/bin" />
<fileset dir="${dist.dir}/testdata/hive/bin" />
</chmod>
<!-- make sure any scripts named *.sh are executable. -->
<chmod perm="ugo+x" type="file" parallel="false">
<fileset dir="${dist.dir}">
<include name="**/*.sh" />
</fileset>
</chmod>
<!-- In the configuration directory, take the sqoop-site-template
and copy it to sqoop-site.xml, overwriting any user-specified
sqoop-site.xml in there.
-->
<copy file="${dist.dir}/conf/sqoop-site-template.xml"
tofile="${dist.dir}/conf/sqoop-site.xml"
overwrite="true" />
</target>
<!-- Build the release tarball from the "package" output. Two tarfilesets
     partition the tree by file mode: ordinary content goes in with mode
     664, while bin scripts, testdata hive/hcatalog scripts, and anything
     named *.sh are archived with mode 755 so they are executable after
     extraction. The excludes in the first set mirror the includes of the
     second so no entry is archived twice. -->
<target name="tar" depends="package" description="Create release tarball">
<tar compression="gzip" longfile="gnu" destfile="${tar.file}">
<tarfileset dir="${build.dir}" mode="664">
<exclude name="${bin.artifact.name}/bin/*" />
<exclude name="${bin.artifact.name}/testdata/hive/bin/*" />
<exclude name="${bin.artifact.name}/**/*.sh" />
<include name="${bin.artifact.name}/**" />
</tarfileset>
<tarfileset dir="${build.dir}" mode="755">
<include name="${bin.artifact.name}/bin/*" />
<include name="${bin.artifact.name}/testdata/hive/bin/*" />
<include name="${bin.artifact.name}/testdata/hcatalog/conf/*" />
<include name="${bin.artifact.name}/**/*.sh" />
</tarfileset>
</tar>
</target>
<target name="srcpackage" description="Create a redistributable source package">
<mkdir dir="${src.dist.dir}"/>
<!-- copy in various components of the initial source layout
so that the redistributable can bootstrap itself. -->
<copy todir="${src.dist.dir}" includeEmptyDirs="false" flatten="false">
<fileset dir="${basedir}" defaultexcludes="no">
<include name="**/*" />
<exclude name="build/**" />
<exclude name=".git/**" />
<exclude name="tags" />
<exclude name=".project" />
<exclude name=".classpath" />
<exclude name="conf/managers.d/**" />
<exclude name="conf/tools.d/**" />
</fileset>
</copy>
<!-- make sure the bin scripts are executable. -->
<chmod perm="ugo+x" type="file" parallel="false">
<fileset dir="${src.dist.dir}/bin" />
<fileset dir="${src.dist.dir}/testdata/hive/bin" />
</chmod>
<!-- make sure any scripts named *.sh are executable. -->
<chmod perm="ugo+x" type="file" parallel="false">
<fileset dir="${src.dist.dir}">
<include name="**/*.sh" />
</fileset>
</chmod>
</target>
<target name="srctar" depends="srcpackage" description="Create release source tarball">
<tar compression="gzip" longfile="gnu" destfile="${src.tar.file}">
<tarfileset dir="${build.dir}" mode="664">
<exclude name="${src.artifact.name}/bin/*" />
<exclude name="${src.artifact.name}/testdata/hive/bin/*" />
<exclude name="${src.artifact.name}/testdata/hcatalog/conf/*" />
<exclude name="${src.artifact.name}/**/*.sh" />
<include name="${src.artifact.name}/**" />
</tarfileset>
<tarfileset dir="${build.dir}" mode="755">
<include name="${src.artifact.name}/bin/*" />
<include name="${src.artifact.name}/testdata/hive/bin/*" />
<include name="${src.artifact.name}/testdata/hcatalog/conf/*" />
<include name="${src.artifact.name}/**/*.sh" />
</tarfileset>
</tar>
</target>
<!-- set variables that configure the actual test -->
<target name="test-prep" depends="test-prep-normal,test-prep-thirdparty,
test-prep-manual"/>
<path id="hcatalog.conf.dir">
<pathelement location="${basedir}/testdata/hcatalog/conf"/>
</path>
<!-- Sets thirdparty_or_manual when either the "thirdparty" or the
     "manual" flag was supplied (e.g. via -Dthirdparty=true); the
     test-prep-normal target is skipped when this property is set.
     NOTE: removed a stray "8B92" scrape artifact that had been injected
     as text content inside the <or> element. -->
<target name="test-eval-condition">
  <condition property="thirdparty_or_manual">
    <or>
      <isset property="thirdparty"/>
      <isset property="manual"/>
    </or>
  </condition>
</target>
<target name="test-prep-normal" unless="thirdparty_or_manual"
depends="test-eval-condition">
<!-- Set this to run all the "standard" tests -->
<property name="test.pattern" value="Test*" />
<property name="cobertura.testset" value="base" />
</target>
<target name="test-prep-thirdparty" if="thirdparty">
<!-- Run tests that *end* with the name Test, instead of starting with it;
this runs non-standard tests e.g. third-party database tests. -->
<property name="test.pattern" value="*Test" />
<property name="test.exclude" value="*ManualTest" />
<property name="cobertura.testset" value="thirdparty" />
</target>
<target name="test-prep-manual" if="manual">
<!-- Run tests that are marked for manualtest execution -->
<property name="test.pattern" value="*ManualTest" />
<property name="cobertura.testset" value="manual" />
</target>
<!-- ================================================================== -->
<!-- Run unit tests -->
<!-- By default, we'll run the "normal" tests: Test*.java -->
<!-- To run third-party tests, run with -Dthirdparty=true -->
<!-- ================================================================== -->
<target name="test"
depends="compile-test,compile,test-prep,run-tests"
description="Run unit tests" />
<!-- actually run the selected unit tests -->
<target name="run-tests"
depends="compile-test,compile,test-prep">
<antcall target="checkfailure" inheritRefs="true" />
</target>
<property name="remoteDebug" value="false"/>
<target name="test-core">
<!-- inner target only intended to be used via antcall.
Does not define its dependencies. Should be invoked through the
'test' target. Does not fail the build if tests fail.
-->
<!-- use ant test -DremoteDebug=true to enable remote debugging -->
<condition property="remoteDebugJvmArgs"
value="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=2601"
else="-ea">
<istrue value="${remoteDebug}"/>
</condition>
<delete dir="${test.log.dir}"/>
<mkdir dir="${test.log.dir}"/>
<delete dir="${build.test}/data"/>
<mkdir dir="${build.test}/data/sqoop" />
<mkdir dir="${cobertura.class.dir}" />
<copy file="${test.dir}/fi-site.xml"
todir="${test.build.extraconf}" />
<copy file="${basedir}/conf/oraoop-site-template.xml"
todir="${test.build.extraconf}" />
<copy todir="${test.build.extraconf}/oraoop">
<fileset dir="${test.dir}/oraoop"/>
</copy>
<copy file="${basedir}/conf/password-file.txt"
todir="${test.build.extraconf}" />
<copy file="${basedir}/conf/wrong-password-file.txt"
todir="${test.build.extraconf}" />
<junit
printsummary="yes" showoutput="${test.output}"
haltonfailure="no" fork="yes" maxmemory="5120m"
errorProperty="tests.failed" failureProperty="tests.failed"
timeout="${test.timeout}"
dir="${build.test}/data">
<!-- enable asserts in tests -->
<jvmarg value="-ea" />
<!-- We need to disable asserts in HadoopThriftAuthBridge to be able to run HiveMiniCluster tests. -->
<jvmarg value="-da:org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge" />
<jvmarg value="${remoteDebugJvmArgs}"/>
<sysproperty key="test.build.data" value="${build.test}/data"/>
<sysproperty key="build.test" value="${build.test}"/>
<!-- microsoft sqlserver thirdparty test related properties-->
<sysproperty key="test.data.dir" value="${basedir}/testdata"/>
<sysproperty key="ms.datatype.test.data.file.export" value="DatatypeTestData-export-lite.txt"/>
<sysproperty key="ms.datatype.test.data.file.import" value="DatatypeTestData-import-lite.txt"/>
<sysproperty key="ms.datatype.test.data.file.delim" value=","/>
<sysproperty key="ms.datatype.test.hdfsprefix" value="file:///"/>
<sysproperty key="ms.sqlserver.username" value="${ms.sqlserver.username}"/>
<sysproperty key="ms.sqlserver.password" value="${ms.sqlserver.password}"/>
<sysproperty key="net.sourceforge.cobertura.datafile"
value="${cobertura.dir}/cobertura-${cobertura.testset}.ser" />
<!-- define this property to force Sqoop to throw better exceptions on
errors during testing, instead of printing a short message and
exiting with status 1.
-->
<sysproperty key="sqoop.throwOnError" value="" />
<!-- we want more log4j output when running unit tests -->
<sysproperty key="hadoop.root.logger"
value="DEBUG,console" />
<!-- requires fork=yes for:
relative File paths to use the specified user.dir
classpath to use build/*.jar
-->
<sysproperty key="user.dir" value="${build.test}/data"/>
<!-- Setting the user.dir property is actually meaningless as it
is read-only in the Linux Sun JDK. Provide an alternate sysprop
to specify where generated code should go.
-->
<sysproperty key="sqoop.src.dir" value="${build.test}/data"/>
<!-- Override standalone Hadoop's working dirs to allow parallel
execution of multiple Hudson builders
-->
<sysproperty key="hadoop.tmp.dir" value="${build.test}/hadoop"/>
<!--
Set to an empty string below due to MAPREDUCE-3736
<sysproperty key="fs.default.name" value="${fs.default.name}"/>
-->
<sysproperty key="fs.default.name" value=""/>
<sysproperty key="hadoop.test.localoutputfile"
value="${hadoop.test.localoutputfile}"/>
<sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
<!-- we have a mock "hive" shell instance in our testdata directory
for testing hive integration. Set this property here to ensure
that the unit tests pick it up.
-->
<sysproperty key="hive.home" value="${basedir}/testdata/hive" />
<!-- By default the Oracle tests assume an Oracle XE installation
with a hardcoded connection string. If you want to overwrite
that specify the value at command line or via
build.properties file.
-->
<sysproperty key="sqoop.test.oracle.connectstring"
value="${sqoop.test.oracle.connectstring}"/>
<sysproperty key="sqoop.test.oracle.username"
value="${sqoop.test.oracle.username}"/>
<sysproperty key="sqoop.test.oracle.password"
value="${sqoop.test.oracle.password}"/>
<sysproperty key="sqoop.test.oracle-ee.connectstring"
value="${sqoop.test.oracle-ee.connectstring}"/>
<sysproperty key="sqoop.test.oracle-ee.username"
value="${sqoop.test.oracle-ee.username}"/>
<sysproperty key="sqoop.test.oracle-ee.password"
value="${sqoop.test.oracle-ee.password}"/>
<sysproperty key="sqoop.test.mysql.connectstring.host_url"
value="${sqoop.test.mysql.connectstring.host_url}"/>
<sysproperty key="sqoop.test.mysql.username"
value="${sqoop.test.mysql.username}"/>
<sysproperty key="sqoop.test.mysql.password"
value="${sqoop.test.mysql.password}"/>
<sysproperty key="sqoop.test.mysql.databasename"
value="${sqoop.test.mysql.databasename}"/>
<sysproperty key="sqoop.test.cubrid.connectstring.host_url"
value="${sqoop.test.cubrid.connectstring.host_url}"/>
<sysproperty key="sqoop.test.cubrid.connectstring.database"
value="${sqoop.test.cubrid.connectstring.database}"/>
<sysproperty key="sqoop.test.cubrid.connectstring.username"
value="${sqoop.test.cubrid.connectstring.username}"/>
<sysproperty key="sqoop.test.cubrid.connectstring.password"
value="${sqoop.test.cubrid.connectstring.password}"/>
<sysproperty key="sqoop.test.postgresql.connectstring.host_url"
value="${sqoop.test.postgresql.connectstring.host_url}"/>
<sysproperty key="sqoop.test.postgresql.database"
value="${sqoop.test.postgresql.database}" />
<sysproperty key="sqoop.test.postgresql.tablespace"
value="${sqoop.test.postgresql.tablespace}" />
<sysproperty key="sqoop.test.postgresql.username"
value="${sqoop.test.postgresql.username}" />
<sysproperty key="sqoop.test.postgresql.password"
value="${sqoop.test.postgresql.password}" />
<sysproperty key="sqoop.test.postgresql.pg_bulkload"
value="${sqoop.test.postgresql.pg_bulkload}" />
<sysproperty key="sqoop.test.sqlserver.connectstring.host_url"
value="${sqoop.test.sqlserver.connectstring.host_url}"/>
<sysproperty key="sqoop.test.sqlserver.database"
value="${sqoop.test.sqlserver.database}"/>
<sysproperty key="sqoop.test.msserver.connector.factory"
value="${sqoop.test.msserver.connector.factory}"/>
<sysproperty key="sqoop.test.db2.connectstring.host_url" value="${sqoop.test.db2.connectstring.host_url}" />
<sysproperty key="sqoop.test.db2.connectstring.database" value="${sqoop.test.db2.connectstring.database}" />
<sysproperty key="sqoop.test.db2.connectstring.username" value="${sqoop.test.db2.connectstring.username}" />
<sysproperty key="sqoop.test.db2.connectstring.password" value="${sqoop.test.db2.connectstring.password}" />
<sysproperty key="sqoop.test.netezza.host" value="${sqoop.test.netezza.host}" />
<sysproperty key="sqoop.test.netezza.port" value="${sqoop.test.netezza.port}" />
<sysproperty key="sqoop.test.netezza.username" value="${sqoop.test.netezza.username}" />
<sysproperty key="sqoop.test.netezza.password" value="${sqoop.test.netezza.password}" />
<sysproperty key="sqoop.test.netezza.db.name" value="${sqoop.test.netezza.db.name}" />
<sysproperty key="sqoop.test.netezza.table.name" value="${sqoop.test.netezza.table.name}" />
<sysproperty key="sqoop.test.mainframe.ftp.host" value="${sqoop.test.mainframe.ftp.host}" />
<sysproperty key="sqoop.test.mainframe.ftp.port" value="${sqoop.test.mainframe.ftp.port}" />
<sysproperty key="sqoop.test.mainframe.ftp.username" value="${sqoop.test.mainframe.ftp.username}" />
<sysproperty key="sqoop.test.mainframe.ftp.password" value="${sqoop.test.mainframe.ftp.password}" />
<sysproperty key="sqoop.test.mainframe.ftp.dataset.gdg" value="${sqoop.test.mainframe.ftp.dataset.gdg}" />
<sysproperty key="sqoop.test.mainframe.ftp.dataset.gdg.filename" value="${sqoop.test.mainframe.ftp.dataset.gdg.filename}" />
<sysproperty key="sqoop.test.mainframe.ftp.dataset.gdg.md5" value="${sqoop.test.mainframe.ftp.dataset.gdg.md5}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.gdg" value="${sqoop.test.mainframe.ftp.binary.dataset.gdg}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.gdg.filename" value="${sqoop.test.mainframe.ftp.binary.dataset.gdg.filename}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.gdg.md5" value="${sqoop.test.mainframe.ftp.binary.dataset.gdg.md5}" />
<sysproperty key="s3.bucket.url" value="${s3.bucket.url}" />
<sysproperty key="s3.generator.command" value="${s3.generator.command}" />
<!-- Key repaired: an HTML-extraction artifact (" 7F4A ") had been injected into the
     property name; restored to match the .filename/.md5 siblings below. -->
<sysproperty key="sqoop.test.mainframe.ftp.dataset.seq" value="${sqoop.test.mainframe.ftp.dataset.seq}" />
<sysproperty key="sqoop.test.mainframe.ftp.dataset.seq.filename" value="${sqoop.test.mainframe.ftp.dataset.seq.filename}" />
<sysproperty key="sqoop.test.mainframe.ftp.dataset.seq.md5" value="${sqoop.test.mainframe.ftp.dataset.seq.md5}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.seq" value="${sqoop.test.mainframe.ftp.binary.dataset.seq}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.seq.filename" value="${sqoop.test.mainframe.ftp.binary.dataset.seq.filename}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.seq.md5" value="${sqoop.test.mainframe.ftp.binary.dataset.seq.md5}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.mixed" value="${sqoop.test.mainframe.ftp.binary.dataset.mixed}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.mixed.filename" value="${sqoop.test.mainframe.ftp.binary.dataset.mixed.filename}" />
<sysproperty key="sqoop.test.mainframe.ftp.binary.dataset.mixed.md5" value="${sqoop.test.mainframe.ftp.binary.dataset.mixed.md5}" />
<!-- Location of Hive logs -->
<!--<sysproperty key="hive.log.dir"
value="${test.build.data}/sqoop/logs"/> -->
<classpath>
<!-- instrumented classes go ahead of normal classes -->
<pathelement location="${cobertura.class.dir}" />
<!-- Location of hive-site xml and other hadoop config files -->
<path refid="hcatalog.conf.dir" />
<!-- main classpath here. -->
<path refid="test.classpath" />
<!-- need thirdparty JDBC drivers for thirdparty tests -->
<fileset dir="${sqoop.thirdparty.lib.dir}"
includes="*.jar" />
<!-- include cobertura itself on the classpath -->
<path refid="cobertura.classpath" />
</classpath>
<formatter type="${test.junit.output.format}" />
<batchtest todir="${build.test}" unless="testcase">
<fileset dir="${test.dir}"
includes="**/${test.pattern}.java"
excludes="**/${test.exclude}.java,**/KerberizedTest.java,**/IntegrationTest.java,**/SqoopTest.java,
**/UnitTest.java,**/CubridTest.java,**/Db2Test.java,**/MainFrameTest.java,**/MysqlTest.java,
**/NetezzaTest.java,**/OracleTest.java,**/PostgresqlTest.java,**/S3Test.java,**/SqlServerTest.java,
**/ThirdPartyTest.java,**/OracleEeTest.java"/>
</batchtest>
<batchtest todir="${build.test}" if="testcase">
<fileset dir="${test.dir}" includes="**/${testcase}.java"/>
</batchtest>
</junit>
</target>
<target name="releaseaudit" depends="package,ivy-retrieve-releaseaudit"
description="Audit license headers for release">
<!-- Guard: rat.present is expected to be set by the ivy-retrieve-releaseaudit
     dependency once the Rat classes are on the classpath; abort early otherwise. -->
<fail unless="rat.present"
message="Failed to load class [${rat.reporting.classname}]." />
<!-- Run Apache Rat over the packaged distribution in a forked JVM,
     capturing its report to build/rat.log for later inspection. -->
<java classname="${rat.reporting.classname}" fork="true"
output="${build.dir}/rat.log">
<classpath refid="${name}.releaseaudit.classpath" />
<arg value="${dist.dir}" />
</java>
<!-- Post-process the Rat log with the project script; failOnError makes the
     build fail if the script finds unapproved license headers. -->
<exec executable="${script.src.dir}/rat-violations.sh" failOnError="true">
<arg value="${build.dir}/rat.log" />
<arg value="${dist.dir}" />
</exec>
<echo message="Release audit appears okay. Full results are in " />
<echo message="${build.dir}/rat.log" />
</target>
<target name="docs-uptodate" depends="init">
  <!-- Set docs.uptodate when the generated-docs timestamp file is newer than
       every .txt source under src/docs, so doc generation can be skipped. -->
  <uptodate property="docs.uptodate">
    <srcfiles dir="${basedir}/src/docs/" includes="**/*.txt" />
    <mapper to="${build.docs.timestamp}" type="merge" />
  </uptodate>
</target>
<target name="checkversion">
<if>
0