651cebd69ee7e75d06c75945e3607c4fab7eb11c
[sqoop.git] / src / java / org / apache / sqoop / SqoopOptions.java
1 /**
2 * Licensed to the Apache Software Foundation (ASF) under one
3 * or more contributor license agreements. See the NOTICE file
4 * distributed with this work for additional information
5 * regarding copyright ownership. The ASF licenses this file
6 * to you under the Apache License, Version 2.0 (the
7 * "License"); you may not use this file except in compliance
8 * with the License. You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19 package org.apache.sqoop;
20
21 import java.io.File;
22 import java.io.IOException;
23 import java.io.UnsupportedEncodingException;
24 import java.lang.reflect.Field;
25 import java.net.URLDecoder;
26 import java.sql.Connection;
27 import java.util.ArrayList;
28 import java.util.Arrays;
29 import java.util.HashMap;
30 import java.util.Iterator;
31 import java.util.Map;
32 import java.util.Properties;
33
34 import org.apache.commons.logging.Log;
35 import org.apache.commons.logging.LogFactory;
36 import org.apache.hadoop.conf.Configuration;
37 import org.apache.sqoop.accumulo.AccumuloConstants;
38 import org.apache.sqoop.mapreduce.mainframe.MainframeConfiguration;
39 import org.apache.sqoop.tool.BaseSqoopTool;
40 import org.apache.sqoop.util.CredentialsUtil;
41 import org.apache.sqoop.util.LoggingUtils;
42 import org.apache.sqoop.util.SqoopJsonUtil;
43 import org.apache.sqoop.util.password.CredentialProviderHelper;
44 import org.apache.sqoop.validation.AbortOnFailureHandler;
45 import org.apache.sqoop.validation.AbsoluteValidationThreshold;
46 import org.apache.sqoop.validation.RowCountValidator;
47
48 import org.apache.sqoop.lib.DelimiterSet;
49 import org.apache.sqoop.lib.LargeObjectLoader;
50 import org.apache.sqoop.tool.SqoopTool;
51 import org.apache.sqoop.util.RandomHash;
52 import org.apache.sqoop.util.StoredAsProperty;
53
54 import static org.apache.sqoop.Sqoop.SQOOP_RETHROW_PROPERTY;
55 import static org.apache.sqoop.orm.ClassWriter.toJavaIdentifier;
56
57 /**
58 * Configurable state used by Sqoop tools.
59 */
60 public class SqoopOptions implements Cloneable {
61
62 public static final String ORACLE_ESCAPING_DISABLED = "sqoop.oracle.escaping.disabled";
63
64 private static final String OLD_SQOOP_TEST_IMPORT_ROOT_DIR = "sqoop.test.import.rootDir";
65
66 public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName());
67
68 /**
69 * Set to true in configuration if you want to put db passwords
70 * in the metastore.
71 */
72 public static final String METASTORE_PASSWORD_KEY =
73 "sqoop.metastore.client.record.password";
74
75 // Default hive and hcat locations.
76 public static final String DEF_HIVE_HOME = "/usr/lib/hive";
77 public static final String DEF_HCAT_HOME = "/usr/lib/hive-hcatalog";
78 public static final String DEF_HCAT_HOME_OLD = "/usr/lib/hcatalog";
79
80 public static final boolean METASTORE_PASSWORD_DEFAULT = false;
81 public static final String DB_PASSWORD_KEY = "db.password";
82
  /** Selects in-HDFS destination file format. */
  public enum FileLayout {
    TextFile,     // delimited plain-text records
    SequenceFile, // Hadoop SequenceFile container
    AvroDataFile, // Avro data file container
    ParquetFile   // Parquet columnar file
  }
90
  /**
   * Incremental imports support two modes:
   * <ul>
   * <li>new rows being appended to the end of a table with an
   * incrementing id</li>
   * <li>new data results in a date-last-modified column being
   * updated to NOW(); Sqoop will pull all dirty rows in the next
   * incremental import.</li>
   * </ul>
   */
  public enum IncrementalMode {
    None,             // plain (non-incremental) import
    AppendRows,       // append mode: rows with a check column beyond the last value
    DateLastModified, // lastmodified mode: rows whose timestamp column changed
  }
106
  /**
   * How to handle null values when doing incremental import into HBase table:
   * <ul>
   * <li>Ignore: ignore update, retain previous value</li>
   * <li>Delete: delete all previous values of column</li>
   * </ul>
   */
  public enum HBaseNullIncrementalMode {
    Ignore, // skip the update; previously stored cell value is retained
    Delete, // remove all prior versions of the column
  }
118
  /**
   * Update mode option specifies how updates are performed when
   * new rows are found with non-matching keys in database.
   * It supports two modes:
   * <ul>
   * <li>UpdateOnly: This is the default. New rows are silently ignored.</li>
   * <li>AllowInsert: New rows are inserted into the database.</li>
   * </ul>
   */
  public enum UpdateMode {
    UpdateOnly,  // default: rows without a matching key are dropped
    AllowInsert  // upsert: rows without a matching key are inserted
  }
132
133 /**
134 * Thrown when invalid cmdline options are given.
135 */
136 @SuppressWarnings("serial")
137 public static class InvalidOptionsException extends Exception {
138
139 private String message;
140
141 public InvalidOptionsException(final String msg) {
142 this.message = msg;
143 }
144
145 public String getMessage() {
146 return message;
147 }
148
149 public String toString() {
150 return getMessage();
151 }
152 }
153
154 // SQOOP-2333 please do not remove this field as plugins may rely on it.
155 @StoredAsProperty("customtool.options.jsonmap")
156 private Map<String, String> customToolOptions;
157
158 // TODO(aaron): Adding something here? Add a setter and a getter. Add a
159 // default value in initDefaults() if you need one. If this value needs to
160 // be serialized in the metastore, it should be marked with
161 // @StoredAsProperty(), if it is an int, long, boolean, String, or Enum.
162 // Arrays and other "special" types should be added directly to the
163 // loadProperties() and writeProperties() methods. Then add command-line
164 // arguments in the appropriate tools. The names of all command-line args
165 // are stored as constants in BaseSqoopTool.
166
167 @StoredAsProperty("verbose") private boolean verbose;
168
169 @StoredAsProperty("temporary.dirRoot") private String tempRootDir;
170
171 // If this property is set, always throw an exception during a job, do not just
172 // exit with status 1.
173 @StoredAsProperty("sqoop.throwOnError") private boolean throwOnError;
174
175 @StoredAsProperty("mapreduce.job.name") private String mapreduceJobName;
176
177 @StoredAsProperty("db.connect.string") private String connectString;
178 @StoredAsProperty("db.table") private String tableName;
179 private String [] columns; // Array stored as db.column.list.
180 @StoredAsProperty("db.username") private String username;
181 @StoredAsProperty("db.export.staging.table") private String stagingTableName;
182 @StoredAsProperty("db.clear.staging.table") private boolean clearStagingTable;
183 @StoredAsProperty("db.export.call") private String call;
184 private Properties connectionParams; //Properties stored as db.connect.params
185
186
187 // May not be serialized, based on configuration.
188 // db.require.password is used to determine whether 'some' password is
189 // used. If so, it is stored as 'db.password'.
190 private String password;
191
192 // This represents path to a file on ${user.home} containing the password
193 // with 400 permissions so its only readable by user executing the tool
194 @StoredAsProperty("db.password.file") private String passwordFilePath;
195 @StoredAsProperty("db.password.alias") private String passwordAlias;
196
197 @StoredAsProperty("null.string") private String nullStringValue;
198 @StoredAsProperty("input.null.string") private String inNullStringValue;
199 @StoredAsProperty("null.non-string") private String nullNonStringValue;
200 @StoredAsProperty("input.null.non-string")
201 private String inNullNonStringValue;
202
203 @StoredAsProperty("codegen.output.dir") private String codeOutputDir;
204 @StoredAsProperty("codegen.compile.dir") private String jarOutputDir;
205 // Boolean specifying whether jarOutputDir is a nonce tmpdir (true), or
206 // explicitly set by the user (false). If the former, disregard any value
207 // for jarOutputDir saved in the metastore.
208 @StoredAsProperty("codegen.auto.compile.dir") private boolean jarDirIsAuto;
209 private String hadoopMapRedHome; // not serialized to metastore.
210 @StoredAsProperty("db.split.column") private String splitByCol;
211 @StoredAsProperty("split.limit") private Integer splitLimit;
212 @StoredAsProperty("db.where.clause") private String whereClause;
213 @StoredAsProperty("db.query") private String sqlQuery;
214 @StoredAsProperty("db.query.boundary") private String boundaryQuery;
215 @StoredAsProperty("jdbc.driver.class") private String driverClassName;
216 @StoredAsProperty("hdfs.warehouse.dir") private String warehouseDir;
217 @StoredAsProperty("hdfs.target.dir") private String targetDir;
218 @StoredAsProperty("hdfs.append.dir") private boolean append;
219 @StoredAsProperty("hdfs.delete-target.dir") private boolean delete;
220 @StoredAsProperty("hdfs.file.format") private FileLayout layout;
221 @StoredAsProperty("direct.import") private boolean direct; // "direct mode."
222 @StoredAsProperty("db.batch") private boolean batchMode;
223 private String tmpDir; // where temp data goes; usually /tmp; not serialized.
224 private String hiveHome; // not serialized to metastore.
225 @StoredAsProperty("hive.import") private boolean hiveImport;
226 @StoredAsProperty("hive.overwrite.table") private boolean overwriteHiveTable;
227 @StoredAsProperty("hive.fail.table.exists")
228 private boolean failIfHiveTableExists;
229 @StoredAsProperty("hive.external.table.dir") private String hiveExternalTableDir;
230 @StoredAsProperty("hive.table.name") private String hiveTableName;
231 @StoredAsProperty("hive.database.name") private String hiveDatabaseName;
232 @StoredAsProperty("hive.drop.delims") private boolean hiveDropDelims;
233 @StoredAsProperty("hive.delims.replacement")
234 private String hiveDelimsReplacement;
235 @StoredAsProperty("hive.partition.key") private String hivePartitionKey;
236 @StoredAsProperty("hive.partition.value") private String hivePartitionValue;
237 @StoredAsProperty("hcatalog.table.name")
238 private String hCatTableName;
239 @StoredAsProperty("hcatalog.database.name")
240 private String hCatDatabaseName;
241 @StoredAsProperty("hcatalog.create.table")
242 private boolean hCatCreateTable;
243 @StoredAsProperty("hcatalog.drop.and.create.table")
244 private boolean hCatDropAndCreateTable;
245 @StoredAsProperty("hcatalog.storage.stanza")
246 private String hCatStorageStanza;
247 private String hCatHome; // not serialized to metastore.
248 @StoredAsProperty("skip.dist.cache")
249 private boolean skipDistCache;
250 @StoredAsProperty("hcatalog.partition.keys")
251 private String hCatalogPartitionKeys;
252 @StoredAsProperty("hcatalog.partition.values")
253 private String hCatalogPartitionValues;
254 // User explicit mapping of types
255 private Properties mapColumnJava; // stored as map.colum.java
256 private Properties mapColumnHive; // stored as map.column.hive
257 // SQOOP-3123 default enabled
258 private boolean escapeColumnMappingEnabled;
259 private Properties mapReplacedColumnJava; // used to replace special characters in columns
260
261 // An ordered list of column names denoting what order columns are
262 // serialized to a PreparedStatement from a generated record type.
263 // Not serialized to metastore.
264 private String [] dbOutColumns;
265
266 // package to prepend to auto-named classes.
267 @StoredAsProperty("codegen.java.packagename") private String packageName;
268
269 // package+class to apply to individual table import.
270 // also used as an *input* class with existingJarFile.
271 @StoredAsProperty("codegen.java.classname") private String className;
272
273 // Name of a jar containing existing table definition
274 // class to use.
275 @StoredAsProperty("codegen.jar.file") private String existingJarFile;
276
277 @StoredAsProperty("mapreduce.num.mappers") private int numMappers;
278 @StoredAsProperty("enable.compression") private boolean useCompression;
279 @StoredAsProperty("compression.codec") private String compressionCodec;
280
281 // In direct mode, open a new stream every X bytes.
282 @StoredAsProperty("import.direct.split.size") private long directSplitSize;
283
284 // Max size of an inline LOB; larger LOBs are written
285 // to external files on disk.
286 @StoredAsProperty("import.max.inline.lob.size") private long maxInlineLobSize;
287
288 // Max number 'n' of rows to fetch from the
289 // database when more rows are needed.
290 @StoredAsProperty("import.fetch.size") private Integer fetchSize;
291
292 // HDFS path to read from when performing an export
293 @StoredAsProperty("export.source.dir") private String exportDir;
294
295 // Column to use for the WHERE clause in an UPDATE-based export.
296 @StoredAsProperty("export.update.col") private String updateKeyCol;
297
298 @StoredAsProperty("export.new.update") private UpdateMode updateMode;
299
300 private DelimiterSet inputDelimiters; // codegen.input.delimiters.
301 private DelimiterSet outputDelimiters; // codegen.output.delimiters.
302
303 private boolean areOutputDelimsManuallySet;
304 private boolean areInputDelimsManuallySet;
305
306 private Configuration conf;
307 private String toolName;
308
309 public static final int DEFAULT_NUM_MAPPERS = 4;
310
311 private String [] extraArgs;
312
313 // HBase table to import into.
314 @StoredAsProperty("hbase.table") private String hbaseTable;
315
316 // Column family to prepend to inserted cols.
317 @StoredAsProperty("hbase.col.family") private String hbaseColFamily;
318
319 // Column of the input to use as the row key.
320 @StoredAsProperty("hbase.row.key.col") private String hbaseRowKeyCol;
321
322 // if true, bulk loading will be used.
323 @StoredAsProperty("hbase.bulk.load.enabled") private boolean hbaseBulkLoadEnabled;
324
325 // if true, create tables/col families.
326 @StoredAsProperty("hbase.create.table") private boolean hbaseCreateTable;
327
328 // col to filter on for incremental imports.
329 @StoredAsProperty("incremental.col") private String incrementalTestCol;
330 // incremental import mode we're using.
331 @StoredAsProperty("incremental.mode")
332 private IncrementalMode incrementalMode;
333 // What was the last-imported value of incrementalTestCol?
334 @StoredAsProperty("incremental.last.value")
335 private String incrementalLastValue;
336
337 @StoredAsProperty("hbase.null.incremental.mode")
338 private HBaseNullIncrementalMode hbaseNullIncrementalMode;
339
340 // exclude these tables when importing all tables.
341 @StoredAsProperty("import.all_tables.exclude")
342 private String allTablesExclude;
343
344 // HDFS paths for "old" and "new" datasets in merge tool.
345 @StoredAsProperty("merge.old.path") private String mergeOldPath;
346 @StoredAsProperty("merge.new.path") private String mergeNewPath;
347
348 // "key" column for the merge operation.
349 @StoredAsProperty("merge.key.col") private String mergeKeyCol;
350
351 // Dataset name for mainframe import tool
352 @StoredAsProperty("mainframe.input.dataset.name")
353 private String mainframeInputDatasetName;
354
355 // Dataset type for mainframe import tool
356 @StoredAsProperty("mainframe.input.dataset.type")
357 private String mainframeInputDatasetType;
358
359 // Indicates if the data set is on tape to use different FTP parser
360 @StoredAsProperty("mainframe.input.dataset.tape")
361 private String mainframeInputDatasetTape;
362
363 // Accumulo home directory
364 private String accumuloHome; // not serialized to metastore.
365 // Zookeeper home directory
366 private String zookeeperHome; // not serialized to metastore.
367
368 // Accumulo table to import into.
369 @StoredAsProperty("accumulo.table") private String accumuloTable;
370
371 // Column family to prepend to inserted cols.
372 @StoredAsProperty("accumulo.col.family") private String accumuloColFamily;
373
374 // Column of the input to use as the row key.
375 @StoredAsProperty("accumulo.row.key.col") private String accumuloRowKeyCol;
376 //
377 // Visibility token to be applied to each row imported.
378 @StoredAsProperty("accumulo.visibility") private String accumuloVisibility;
379
380 // Size of the write buffer.
381 @StoredAsProperty("accumulo.batch.size")
382 private long accumuloBatchSize;
383
384 // Maximum latency for batch writer.
385 @StoredAsProperty("accumulo.max.latency")
386 private long accumuloMaxLatency;
387
388 // if true, create table.
389 @StoredAsProperty("accumulo.create.table")
390 private boolean accumuloCreateTable;
391
392 // Accumulo user name
393 @StoredAsProperty("accumulo.user") private String accumuloUser;
394
395 // Accumulo password
396 @StoredAsProperty("accumulo.password") private String accumuloPassword;
397
398 // Accumulo instance
399 @StoredAsProperty("accumulo.instance") private String accumuloInstance;
400
401 // Accumulo zookeeper
402 @StoredAsProperty("accumulo.zookeepers") private String accumuloZookeepers;
403
404 // Relaxed Isolation
405 @StoredAsProperty("relaxed.isolation") private boolean relaxedIsolation;
406
407 // Use single mapper for non-primary key tables without
408 // explicit split by cols
409 @StoredAsProperty("reset.onemapper") private boolean autoResetToOneMapper;
410
411 @StoredAsProperty("sqlconnection.metadata.transaction.isolation.level") private int metadataTransactionIsolationLevel;
412
413 // These next two fields are not serialized to the metastore.
414 // If this SqoopOptions is created by reading a saved job, these will
415 // be populated by the JobStorage to facilitate updating the same
416 // job.
417 private String jobName;
418 private Map<String, String> jobStorageDescriptor;
419
420 // If we restore a job and then allow the user to apply arguments on
421 // top, we retain the version without the arguments in a reference to the
422 // 'parent' SqoopOptions instance, here.
423 private SqoopOptions parent;
424
425 // Nonce directory name. Generate one per process, lazily, if
426 // getNonceJarDir() is called. Not recorded in metadata. This is used as
427 // a temporary holding area for compilation work done by this process.
428 private static String curNonce;
429
430 // the connection manager fully qualified class name
431 @StoredAsProperty("connection.manager") private String connManagerClassName;
432
433 // The currently active tool. (Not saved in properties)
434 // Used to pass the SqoopTool instance in to mapreduce job configuration
435 // (JobBase, etc).
436 private SqoopTool activeSqoopTool;
437
438 // Flag to determine if data copied needs to be validated against the source
439 private boolean isValidationEnabled;
440 // These take FQCN as input, convert them to Class in light of failing early
441 private Class validatorClass; // Class for the validator implementation.
442 private Class validationThresholdClass; // ValidationThreshold implementation
443 private Class validationFailureHandlerClass; // FailureHandler implementation
444
445 @StoredAsProperty(ORACLE_ESCAPING_DISABLED)
446 private boolean oracleEscapingDisabled;
447
448 private String metaConnectStr;
449 private String metaUsername;
450 private String metaPassword;
451
  /**
   * Creates a SqoopOptions with default settings.
   * Delegates to initDefaults(null); the defaulting logic itself is
   * defined elsewhere in this class (not visible here).
   */
  public SqoopOptions() {
    initDefaults(null);
  }
455
  /**
   * Creates a SqoopOptions backed by the given Hadoop configuration.
   * @param conf configuration forwarded to initDefaults()
   */
  public SqoopOptions(Configuration conf) {
    initDefaults(conf);
  }
459
  /**
   * Alternate SqoopOptions interface used mostly for unit testing.
   * Initializes defaults, then pins the connect string and table name.
   * @param connect JDBC connect string to use
   * @param table Table to read
   */
  public SqoopOptions(final String connect, final String table) {
    initDefaults(null);

    this.connectString = connect;
    this.tableName = table;
  }
471
472 private boolean getBooleanProperty(Properties props, String propName,
473 boolean defaultValue) {
474 String str = props.getProperty(propName,
475 Boolean.toString(defaultValue)).toLowerCase();
476 return "true".equals(str) || "yes".equals(str) || "1".equals(str);
477 }
478
479 private long getLongProperty(Properties props, String propName,
480 long defaultValue) {
481 String str = props.getProperty(propName,
482 Long.toString(defaultValue)).toLowerCase();
483 try {
484 return Long.parseLong(str);
485 } catch (NumberFormatException nfe) {
486 LOG.warn("Could not parse integer value for config parameter "
487 + propName);
488 return defaultValue;
489 }
490 }
491
492 private int getIntProperty(Properties props, String propName,
493 int defaultVal) {
494 long longVal = getLongProperty(props, propName, defaultVal);
495 return (int) longVal;
496 }
497
498 private char getCharProperty(Properties props, String propName,
499 char defaultVal) {
500 int intVal = getIntProperty(props, propName, (int) defaultVal);
501 return (char) intVal;
502 }
503
504 private DelimiterSet getDelimiterProperties(Properties props,
505 String prefix, DelimiterSet defaults) {
506
507 if (null == defaults) {
508 defaults = new DelimiterSet();
509 }
510
511 char field = getCharProperty(props, prefix + ".field",
512 defaults.getFieldsTerminatedBy());
513 char record = getCharProperty(props, prefix + ".record",
514 defaults.getLinesTerminatedBy());
515 char enclose = getCharProperty(props, prefix + ".enclose",
516 defaults.getEnclosedBy());
517 char escape = getCharProperty(props, prefix + ".escape",
518 defaults.getEscapedBy());
519 boolean required = getBooleanProperty(props, prefix +".enclose.required",
520 defaults.isEncloseRequired());
521
522 return new DelimiterSet(field, record, enclose, escape, required);
523 }
524
525 private void setDelimiterProperties(Properties props,
526 String prefix, DelimiterSet values) {
527 putProperty(props, prefix + ".field",
528 Integer.toString((int) values.getFieldsTerminatedBy()));
529 putProperty(props, prefix + ".record",
530 Integer.toString((int) values.getLinesTerminatedBy()));
531 putProperty(props, prefix + ".enclose",
532 Integer.toString((int) values.getEnclosedBy()));
533 putProperty(props, prefix + ".escape",
534 Integer.toString((int) values.getEscapedBy()));
535 putProperty(props, prefix + ".enclose.required",
536 Boolean.toString(values.isEncloseRequired()));
537 }
538
  /** Take a comma-delimited list of input and split the elements
   * into an output array.
   * Note: String.split(",") discards trailing empty strings and has no
   * escaping, so element values containing commas are not supported. */
  private String [] listToArray(String strList) {
    return strList.split(",");
  }
544
545 private String arrayToList(String [] array) {
546 if (null == array) {
547 return null;
548 }
549
550 StringBuilder sb = new StringBuilder();
551 boolean first = true;
552 for (String elem : array) {
553 if (!first) {
554 sb.append(",");
555 }
556 sb.append(elem);
557 first = false;
558 }
559
560 return sb.toString();
561 }
562
563 /**
564 * A put() method for Properties that is tolerent of 'null' values.
565 * If a null value is specified, the property is unset.
566 */
567 private void putProperty(Properties props, String k, String v) {
568 if (null == v) {
569 props.remove(k);
570 } else {
571 props.setProperty(k, v);
572 }
573 }
574
575 /**
576 * Given a property prefix that denotes a set of numbered properties,
577 * return an array containing all the properties.
578 *
579 * For instance, if prefix is "foo", then return properties "foo.0",
580 * "foo.1", "foo.2", and so on as an array. If no such properties
581 * exist, return 'defaults'.
582 */
583 private String [] getArgArrayProperty(Properties props, String prefix,
584 String [] defaults) {
585 int cur = 0;
586 ArrayList<String> al = new ArrayList<String>();
587 while (true) {
588 String curProp = prefix + "." + cur;
589 String curStr = props.getProperty(curProp, null);
590 if (null == curStr) {
591 break;
592 }
593
594 al.add(curStr);
595 cur++;
596 }
597
598 if (cur == 0) {
599 // Couldn't find an array here; return the defaults.
600 return defaults;
601 }
602
603 return al.toArray(new String[0]);
604 }
605
606 private void setArgArrayProperties(Properties props, String prefix,
607 String [] values) {
608 if (null == values) {
609 return;
610 }
611
612 for (int i = 0; i < values.length; i++) {
613 putProperty(props, prefix + "." + i, values[i]);
614 }
615 }
616
617 /**
618 * This method encodes the property key values found in the provided
619 * properties instance <tt>values</tt> into another properties instance
620 * <tt>props</tt>. The specified <tt>prefix</tt> is used as a namespace
621 * qualifier for keys when inserting. This allows easy introspection of the
622 * property key values in <tt>props</tt> instance to later separate out all
623 * the properties that belong to the <tt>values</tt> instance.
624 * @param props the container properties instance
625 * @param prefix the prefix for qualifying contained property keys.
626 * @param values the contained properties instance, all of whose elements will
627 * be added to the container properties instance.
628 *
629 * @see #getPropertiesAsNetstedProperties(Properties, String)
630 */
631 private void setPropertiesAsNestedProperties(Properties props,
632 String prefix, Properties values) {
633 String nestedPropertyPrefix = prefix + ".";
634 if (null == values || values.size() == 0) {
635 Iterator<String> it = props.stringPropertyNames().iterator();
636 while (it.hasNext()) {
637 String name = it.next();
638 if (name.startsWith(nestedPropertyPrefix)) {
639 props.remove(name);
640 }
641 }
642 } else {
643 Iterator<String> it = values.stringPropertyNames().iterator();
644 while (it.hasNext()) {
645 String name = it.next();
646 putProperty(props,
647 nestedPropertyPrefix + name, values.getProperty(name));
648 }
649 }
650 }
651
652 /**
653 * This method decodes the property key values found in the provided
654 * properties instance <tt>props</tt> that have keys beginning with the
655 * given prefix. Matching elements from this properties instance are modified
656 * so that their prefix is dropped.
657 * @param props the properties container
658 * @param prefix the prefix qualifying properties that need to be removed
659 * @return a new properties instance that contains all matching elements from
660 * the container properties.
661 */
662 private Properties getPropertiesAsNetstedProperties(
663 Properties props, String prefix) {
664 Properties nestedProps = new Properties();
665 String nestedPropertyPrefix = prefix + ".";
666 int index = nestedPropertyPrefix.length();
667 if (props != null && props.size() > 0) {
668 Iterator<String> it = props.stringPropertyNames().iterator();
669 while (it.hasNext()) {
670 String name = it.next();
671 if (name.startsWith(nestedPropertyPrefix)){
672 String shortName = name.substring(index);
673 nestedProps.put(shortName, props.get(name));
674 }
675 }
676 }
677 return nestedProps;
678 }
679
680 @SuppressWarnings("unchecked")
681 /**
682 * Given a set of properties, load this into the current SqoopOptions
683 * instance.
684 */
685 public void loadProperties(Properties props) {
686
687 try {
688 Field [] fields = SqoopOptions.class.getDeclaredFields();
689 for (Field f : fields) {
690 if (f.isAnnotationPresent(StoredAsProperty.class)) {
691 Class typ = f.getType();
692 StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
693 String propName = storedAs.value();
694
695 if (typ.equals(int.class)) {
696 f.setInt(this,
697 getIntProperty(props, propName, f.getInt(this)));
698 } else if (typ.equals(boolean.class)) {
699 f.setBoolean(this,
700 getBooleanProperty(props, propName, f.getBoolean(this)));
701 } else if (typ.equals(long.class)) {
702 f.setLong(this,
703 getLongProperty(props, propName, f.getLong(this)));
704 } else if (typ.equals(String.class)) {
705 f.set(this, props.getProperty(propName, (String) f.get(this)));
706 } else if (typ.equals(Integer.class)) {
707 String value = props.getProperty(
708 propName,
709 f.get(this) == null ? "null" : f.get(this).toString());
710 f.set(this, value.equals("null") ? null : new Integer(value));
711 } else if (typ.isEnum()) {
712 f.set(this, Enum.valueOf(typ,
713 props.getProperty(propName, f.get(this).toString())));
714 } else if (typ.equals(Map.class)) {
715 f.set(this,
716 SqoopJsonUtil.getMapforJsonString(props.getProperty(propName)));
717 } else {
718 throw new RuntimeException("Could not retrieve property "
719 + propName + " for type: " + typ);
720 }
721 }
722 }
723 } catch (IllegalAccessException iae) {
724 throw new RuntimeException("Illegal access to field in property setter",
725 iae);
726 }
727
728 // Now load properties that were stored with special types, or require
729 // additional logic to set.
730
731 loadPasswordProperty(props);
732
733 if (this.jarDirIsAuto) {
734 // We memoized a user-specific nonce dir for compilation to the data
735 // store. Disregard that setting and create a new nonce dir.
736 String localUsername = System.getProperty("user.name", "unknown");
737 this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
738 + "/compile");
739 }
740
741 String colListStr = props.getProperty("db.column.list", null);
742 if (null != colListStr) {
743 this.columns = listToArray(colListStr);
744 }
745
746 this.inputDelimiters = getDelimiterProperties(props,
747 "codegen.input.delimiters", this.inputDelimiters);
748 this.outputDelimiters = getDelimiterProperties(props,
749 "codegen.output.delimiters", this.outputDelimiters);
750
751 this.extraArgs = getArgArrayProperty(props, "tool.arguments",
752 this.extraArgs);
753
754 this.connectionParams =
755 getPropertiesAsNetstedProperties(props, "db.connect.params");
756
757 // Loading user mapping
758 this.mapColumnHive =
759 getPropertiesAsNetstedProperties(props, "map.column.hive");
760 this.mapColumnJava =
761 getPropertiesAsNetstedProperties(props, "map.column.java");
762
763 // Delimiters were previously memoized; don't let the tool override
764 // them with defaults.
765 this.areOutputDelimsManuallySet = true;
766 this.areInputDelimsManuallySet = true;
767
768 // If we loaded true verbose flag, we need to apply it
769 if (this.verbose) {
770 LoggingUtils.setDebugLevel();
771 }
772
773 // Ensuring that oracleEscapingDisabled property is propagated to
774 // the level of Hadoop configuration as well
775 this.setOracleEscapingDisabled(this.isOracleEscapingDisabled());
776 }
777
  /**
   * Restores the database password from metastore properties.
   *
   * Precedence (first match wins): password file path, credential-store
   * alias, interactive console prompt (when db.require.password is set
   * and the password itself was stripped from the store), and finally a
   * directly stored db.password value. Do not reorder these checks.
   */
  private void loadPasswordProperty(Properties props) {
    passwordFilePath = props.getProperty("db.password.file");
    if (passwordFilePath != null) {
      try {
        // Resolve the actual password via the configured loader.
        password = CredentialsUtil.fetchPasswordFromLoader(passwordFilePath, getConf());
        return; // short-circuit
      } catch (IOException e) {
        throw new RuntimeException("Unable to fetch password from file.", e);
      }
    }

    passwordAlias = props.getProperty("db.password.alias");
    if (passwordAlias != null) {
      try {
        // Resolve via the Hadoop credential provider API.
        setPassword(CredentialProviderHelper.resolveAlias(getConf(), passwordAlias));
        return; // short-circuit
      } catch (IOException e) {
        throw new RuntimeException("Unable to resolve credentials.", e);
      }
    }

    if (getBooleanProperty(props, "db.require.password", false)) {
      // The user's password was stripped out from the metastore.
      // Require that the user enter it now.
      setPasswordFromConsole();
    } else {
      this.password = props.getProperty(DB_PASSWORD_KEY, this.password);
    }
  }
807
  /**
   * Return a Properties instance that encapsulates all the "sticky"
   * state of this SqoopOptions that should be written to a metastore
   * to restore the job later.
   *
   * This is the inverse of loadProperties(): @StoredAsProperty fields
   * are serialized reflectively by declared type, then specially-encoded
   * values (password, column list, delimiters, tool arguments, nested
   * connection/mapping properties) are appended.
   */
  public Properties writeProperties() {
    Properties props = new Properties();

    try {
      Field [] fields = SqoopOptions.class.getDeclaredFields();
      for (Field f : fields) {
        if (f.isAnnotationPresent(StoredAsProperty.class)) {
          Class typ = f.getType();
          StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
          String propName = storedAs.value();

          if (typ.equals(int.class)) {
            putProperty(props, propName, Integer.toString(f.getInt(this)));
          } else if (typ.equals(boolean.class)) {
            putProperty(props, propName, Boolean.toString(f.getBoolean(this)));
          } else if (typ.equals(long.class)) {
            putProperty(props, propName, Long.toString(f.getLong(this)));
          } else if (typ.equals(String.class)) {
            putProperty(props, propName, (String) f.get(this));
          } else if (typ.equals(Integer.class)) {
            // "null" sentinel matches what loadProperties() expects for
            // an unset Integer field.
            putProperty(
                props,
                propName,
                f.get(this) == null ? "null" : f.get(this).toString());
          } else if (typ.isEnum()) {
            putProperty(props, propName, f.get(this).toString());
          } else if (typ.equals(Map.class)) {
            putProperty(
                props,
                propName,
                SqoopJsonUtil.getJsonStringforMap((Map) f.get(this)));
          } else {
            // Unsupported field type: fail loudly rather than silently
            // dropping a @StoredAsProperty field.
            throw new RuntimeException("Could not set property "
                + propName + " for type: " + typ);
          }
        }
      }
    } catch (IllegalAccessException iae) {
      throw new RuntimeException("Illegal access to field in property setter",
          iae);
    }

    // Password handling is policy-dependent; see writePasswordProperty.
    writePasswordProperty(props);

    putProperty(props, "db.column.list", arrayToList(this.columns));
    setDelimiterProperties(props, "codegen.input.delimiters",
        this.inputDelimiters);
    setDelimiterProperties(props, "codegen.output.delimiters",
        this.outputDelimiters);
    setArgArrayProperties(props, "tool.arguments", this.extraArgs);

    setPropertiesAsNestedProperties(props,
        "db.connect.params", this.connectionParams);

    setPropertiesAsNestedProperties(props,
        "map.column.hive", this.mapColumnHive);
    setPropertiesAsNestedProperties(props,
        "map.column.java", this.mapColumnJava);
    return props;
  }
873
874 private void writePasswordProperty(Properties props) {
875 if (getPasswordFilePath() != null) { // short-circuit
876 putProperty(props, "db.password.file", getPasswordFilePath());
877 return;
878 }
879
880 if (getPasswordAlias() != null) { // short-circuit
881 putProperty(props, "db.password.alias", getPasswordAlias());
882 return;
883 }
884
885 if (this.getConf().getBoolean(
886 METASTORE_PASSWORD_KEY, METASTORE_PASSWORD_DEFAULT)) {
887 // If the user specifies, we may store the password in the metastore.
888 putProperty(props, DB_PASSWORD_KEY, this.password);
889 putProperty(props, "db.require.password", "false");
890 } else if (this.password != null) {
891 // Otherwise, if the user has set a password, we just record
892 // a flag stating that the password will need to be reentered.
893 putProperty(props, "db.require.password", "true");
894 } else {
895 // No password saved or required.
896 putProperty(props, "db.require.password", "false");
897 }
898 }
899
  /**
   * Produces a copy of this SqoopOptions whose array, delimiter,
   * Configuration, Properties and Map members are duplicated rather than
   * shared, so the copy can be mutated independently.
   *
   * @return the copied options, or null if CloneNotSupportedException is
   *         thrown (cannot happen: the superclass clone is supported)
   */
  @Override
  public Object clone() {
    try {
      SqoopOptions other = (SqoopOptions) super.clone();
      if (null != columns) {
        other.columns = Arrays.copyOf(columns, columns.length);
      }

      if (null != dbOutColumns) {
        other.dbOutColumns = Arrays.copyOf(dbOutColumns, dbOutColumns.length);
      }

      if (null != inputDelimiters) {
        other.inputDelimiters = (DelimiterSet) inputDelimiters.clone();
      }

      if (null != outputDelimiters) {
        other.outputDelimiters = (DelimiterSet) outputDelimiters.clone();
      }

      if (null != conf) {
        // Copy-construct so configuration changes do not leak between jobs.
        other.conf = new Configuration(conf);
      }

      if (null != extraArgs) {
        other.extraArgs = Arrays.copyOf(extraArgs, extraArgs.length);
      }

      if (null != connectionParams) {
        // Delegates to the setter rather than cloning directly.
        other.setConnectionParams(this.connectionParams);
      }

      if (null != mapColumnHive) {
        other.mapColumnHive = (Properties) this.mapColumnHive.clone();
      }

      if (null != mapColumnJava) {
        other.mapColumnJava = (Properties) this.mapColumnJava.clone();
      }

      if (null != mapReplacedColumnJava) {
        other.mapReplacedColumnJava = (Properties) this.mapReplacedColumnJava.clone();
      }

      if (null != jobStorageDescriptor) {
        // Shallow copy: map entries are shared with the original.
        other.jobStorageDescriptor = new HashMap<>(jobStorageDescriptor);
      }

      if (null != customToolOptions) {
        // Shallow copy: map entries are shared with the original.
        other.customToolOptions = new HashMap<>(customToolOptions);
      }

      return other;
    } catch (CloneNotSupportedException cnse) {
      // Shouldn't happen.
      return null;
    }
  }
958
959 /**
960 * @return the temp directory to use; this is guaranteed to end with
961 * the file separator character (e.g., '/').
962 */
963 public String getTempDir() {
964 return this.tmpDir;
965 }
966
967 /**
968 * Return the name of a directory that does not exist before
969 * calling this method, and does exist afterward. We should be
970 * the only client of this directory. If this directory is not
971 * used during the lifetime of the JVM, schedule it to be removed
972 * when the JVM exits.
973 */
974 private static String getNonceJarDir(String tmpBase) {
975
976 // Make sure we don't loop forever in the event of a permission error.
977 final int MAX_DIR_CREATE_ATTEMPTS = 32;
978
979 if (null != curNonce) {
980 return curNonce;
981 }
982
983 File baseDir = new File(tmpBase);
984 File hashDir = null;
985
986 for (int attempts = 0; attempts < MAX_DIR_CREATE_ATTEMPTS; attempts++) {
987 hashDir = new File(baseDir, RandomHash.generateMD5String());
988 while (hashDir.exists()) {
989 hashDir = new File(baseDir, RandomHash.generateMD5String());
990 }
991
992 if (hashDir.mkdirs()) {
993 // We created the directory. Use it.
994 // If this directory is not actually filled with files, delete it
995 // when the JVM quits.
996 hashDir.deleteOnExit();
997 break;
998 }
999 }
1000
1001 if (hashDir == null || !hashDir.exists()) {
1002 throw new RuntimeException("Could not create temporary directory: "
1003 + hashDir + "; check for a directory permissions issue on /tmp.");
1004 }
1005
1006 LOG.debug("Generated nonce dir: " + hashDir.toString());
1007 SqoopOptions.curNonce = hashDir.toString();
1008 return SqoopOptions.curNonce;
1009 }
1010
1011 /**
1012 * Reset the nonce directory and force a new one to be generated. This
1013 * method is intended to be used only by multiple unit tests that want
1014 * to isolate themselves from one another. It should not be called
1015 * during normal Sqoop execution.
1016 */
1017 public static void clearNonceDir() {
1018 LOG.warn("Clearing nonce directory");
1019 SqoopOptions.curNonce = null;
1020 }
1021
1022 public static String getHiveHomeDefault() {
1023 // Set this with $HIVE_HOME, but -Dhive.home can override.
1024 String hiveHome = System.getenv("HIVE_HOME");
1025 hiveHome = System.getProperty("hive.home", hiveHome);
1026 if (hiveHome == null) {
1027 hiveHome = DEF_HIVE_HOME;
1028 }
1029 return hiveHome;
1030 }
1031
1032 public static String getHCatHomeDefault() {
1033 // Set this with $HCAT_HOME, but -Dhcatalog.home can override.
1034 String hcatHome = System.getenv("HCAT_HOME");
1035 hcatHome = System.getProperty("hcat.home", hcatHome);
1036 if (hcatHome == null) {
1037 File file = new File(DEF_HCAT_HOME);
1038 if (file.exists()) {
1039 hcatHome = DEF_HCAT_HOME;
1040 } else {
1041 hcatHome = DEF_HCAT_HOME_OLD;
1042 }
1043 }
1044 return hcatHome;
1045 }
1046
  /**
   * Initializes every option field to its default value. Called before any
   * command-line or metastore values are applied, so later assignments
   * override these defaults.
   *
   * @param baseConfiguration the Hadoop configuration to adopt, or null to
   *        create a fresh Configuration
   */
  private void initDefaults(Configuration baseConfiguration) {
    // first, set the true defaults if nothing else happens.
    // default action is to run the full pipeline.
    this.hadoopMapRedHome = System.getenv("HADOOP_MAPRED_HOME");

    this.accumuloHome = getAccumuloHomeDefault();
    this.zookeeperHome = getZookeeperHomeDefault();
    this.hiveHome = getHiveHomeDefault();
    this.hCatHome = getHCatHomeDefault();

    // Input delimiters start fully unset (NULL_CHAR); output delimiters
    // take the DelimiterSet defaults.
    this.inputDelimiters = new DelimiterSet(
        DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR,
        DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);
    this.outputDelimiters = new DelimiterSet();

    // Set this to cwd, but -Dsqoop.src.dir can override.
    this.codeOutputDir = System.getProperty("sqoop.src.dir", ".");

    // Normalize the temp dir so it always ends with the separator.
    String myTmpDir = System.getProperty("test.build.data", "/tmp/");
    if (!myTmpDir.endsWith(File.separator)) {
      myTmpDir = myTmpDir + File.separator;
    }

    this.tmpDir = myTmpDir;
    String localUsername = System.getProperty("user.name", "unknown");
    this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
        + "/compile");
    this.jarDirIsAuto = true;
    this.layout = FileLayout.TextFile;

    this.areOutputDelimsManuallySet = false;
    this.areInputDelimsManuallySet = false;

    this.numMappers = DEFAULT_NUM_MAPPERS;
    this.useCompression = false;
    this.compressionCodec = null;
    this.directSplitSize = 0;

    this.maxInlineLobSize = LargeObjectLoader.DEFAULT_MAX_LOB_LENGTH;

    // Don't set a default value for fetchsize. This allows a JDBCManager to
    // provide a database-specific default, if no value is provided by the
    // user.
    this.fetchSize = null;

    if (null == baseConfiguration) {
      this.conf = new Configuration();
    } else {
      this.conf = baseConfiguration;
    }

    this.extraArgs = null;

    this.dbOutColumns = null;

    this.incrementalMode = IncrementalMode.None;
    this.hbaseNullIncrementalMode = HBaseNullIncrementalMode.Ignore;

    this.updateMode = UpdateMode.UpdateOnly;

    // Creating instances for user specific mapping
    this.mapColumnHive = new Properties();
    this.mapColumnJava = new Properties();

    // Set Accumulo batch size defaults, since 0 is not the same
    // as "not set"
    this.accumuloBatchSize = AccumuloConstants.DEFAULT_BATCH_SIZE;
    this.accumuloMaxLatency = AccumuloConstants.DEFAULT_LATENCY;

    // We do not want to be verbose too much if not explicitly needed
    this.verbose = false;
    //This name of the system property is intentionally OLD_SQOOP_TEST_IMPORT_ROOT_DIR
    //to support backward compatibility. Do not exchange it with
    //org.apache.sqoop.tool.BaseSqoopTool#TEMP_ROOTDIR_ARG
    this.tempRootDir = System.getProperty(OLD_SQOOP_TEST_IMPORT_ROOT_DIR, "_sqoop");

    //This default value is set intentionally according to SQOOP_RETHROW_PROPERTY system property
    //to support backward compatibility. Do not exchange it.
    this.throwOnError = isSqoopRethrowSystemPropertySet();

    setOracleEscapingDisabled(Boolean.parseBoolean(System.getProperty(ORACLE_ESCAPING_DISABLED, "true")));

    this.isValidationEnabled = false; // validation is disabled by default
    this.validatorClass = RowCountValidator.class;
    this.validationThresholdClass = AbsoluteValidationThreshold.class;
    this.validationFailureHandlerClass = AbortOnFailureHandler.class;

    // Relaxed isolation will not enabled by default which is the behavior
    // of sqoop until now.
    this.relaxedIsolation = false;

    // set default mainframe data set type to partitioned data set
    this.mainframeInputDatasetType = MainframeConfiguration.MAINFRAME_INPUT_DATASET_TYPE_PARTITIONED;

    // set default metadata transaction isolation level to TRANSACTION_READ_COMMITTED
    this.metadataTransactionIsolationLevel = Connection.TRANSACTION_READ_COMMITTED;

    // set escape column mapping to true
    this.escapeColumnMappingEnabled = true;
  }
1147
1148 /**
1149 * The SQOOP_RETHROW_PROPERTY system property is considered to be set if it is set to
1150 * any kind of String value, i.e. it is not null.
1151 */
1152 // Type of SQOOP_RETHROW_PROPERTY is String only to provide backward compatibility.
1153 public static boolean isSqoopRethrowSystemPropertySet() {
1154 return (System.getProperty(SQOOP_RETHROW_PROPERTY) != null);
1155 }
1156
1157 /**
1158 * Given a string containing a single character or an escape sequence
1159 * representing a char, return that char itself.
1160 *
1161 * Normal literal characters return themselves: "x" -&gt; 'x', etc.
1162 * Strings containing a '\' followed by one of t, r, n, or b escape to the
1163 * usual character as seen in Java: "\n" -&gt; (newline), etc.
1164 *
1165 * Strings like "\0ooo" return the character specified by the octal sequence
1166 * 'ooo'. Strings like "\0xhhh" or "\0Xhhh" return the character specified by
1167 * the hex sequence 'hhh'.
1168 *
1169 * If the input string contains leading or trailing spaces, these are
1170 * ignored.
1171 */
1172 public static char toChar(String charish) throws InvalidOptionsException {
1173 if (null == charish || charish.length() == 0) {
1174 throw new InvalidOptionsException("Character argument expected."
1175 + "\nTry --help for usage instructions.");
1176 }
1177
1178 if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
1179 if (charish.length() == 3) {
1180 throw new InvalidOptionsException(
1181 "Base-16 value expected for character argument."
1182 + "\nTry --help for usage instructions.");
1183 } else {
1184 String valStr = charish.substring(3);
1185 int val = Integer.parseInt(valStr, 16);
1186 return (char) val;
1187 }
1188 } else if (charish.startsWith("\\0")) {
1189 if (charish.equals("\\0")) {
1190 // it's just '\0', which we can take as shorthand for nul.
1191 return DelimiterSet.NULL_CHAR;
1192 } else {
1193 // it's an octal value.
1194 String valStr = charish.substring(2);
1195 int val = Integer.parseInt(valStr, 8);
1196 return (char) val;
1197 }
1198 } else if (charish.startsWith("\\")) {
1199 if (charish.length() == 1) {
1200 // it's just a '\'. Keep it literal.
1201 return '\\';
1202 } else if (charish.length() > 2) {
1203 // we don't have any 3+ char escape strings.
1204 throw new InvalidOptionsException(
1205 "Cannot understand character argument: " + charish
1206 + "\nTry --help for usage instructions.");
1207 } else {
1208 // this is some sort of normal 1-character escape sequence.
1209 char escapeWhat = charish.charAt(1);
1210 switch(escapeWhat) {
1211 case 'b':
1212 return '\b';
1213 case 'n':
1214 return '\n';
1215 case 'r':
1216 return '\r';
1217 case 't':
1218 return '\t';
1219 case '\"':
1220 return '\"';
1221 case '\'':
1222 return '\'';
1223 case '\\':
1224 return '\\';
1225 default:
1226 throw new InvalidOptionsException(
1227 "Cannot understand character argument: " + charish
1228 + "\nTry --help for usage instructions.");
1229 }
1230 }
1231 } else {
1232 // it's a normal character.
1233 if (charish.length() > 1) {
1234 LOG.warn("Character argument " + charish + " has multiple characters; "
1235 + "only the first will be used.");
1236 }
1237
1238 return charish.charAt(0);
1239 }
1240 }
1241
  /** @return true if verbose output was requested. */
  public boolean getVerbose() {
    return verbose;
  }

  /** Enable or disable verbose output. */
  public void setVerbose(boolean beVerbose) {
    this.verbose = beVerbose;
  }

  /** @return the configured temporary root directory. */
  public String getTempRootDir() {
    return tempRootDir;
  }

  /** Set the temporary root directory. */
  public void setTempRootDir(String tempRootDir) {
    this.tempRootDir = tempRootDir;
  }

  /** @return true if errors should be rethrown instead of only logged. */
  public boolean isThrowOnError() {
    return throwOnError;
  }

  /** Control whether errors are rethrown instead of only logged. */
  public void setThrowOnError(boolean throwOnError) {
    this.throwOnError = throwOnError;
  }
1265
1266 /**
1267 * Get the temporary directory; guaranteed to end in File.separator
1268 * (e.g., '/').
1269 */
1270 public String getTmpDir() {
1271 return tmpDir;
1272 }
1273
1274 public void setTmpDir(String tmp) {
1275 this.tmpDir = tmp;
1276 }
1277
1278 public String getConnectString() {
1279 return connectString;
1280 }
1281
1282 public void setConnectString(String connectStr) {
1283 this.connectString = connectStr;
1284 }
1285
1286 public String getTableName() {
1287 return tableName;
1288 }
1289
1290 public void setTableName(String table) {
1291 this.tableName = table;
1292 }
1293
1294 public String getStagingTableName() {
1295 return stagingTableName;
1296 }
1297
1298 public void setStagingTableName(String stagingTable) {
1299 this.stagingTableName = stagingTable;
1300 }
1301
1302 public boolean doClearStagingTable() {
1303 return clearStagingTable;
1304 }
1305
1306 public void setClearStagingTable(boolean clear) {
1307 clearStagingTable = clear;
1308 }
1309
1310 public String getExportDir() {
1311 return exportDir;
1312 }
1313
1314 public void setExportDir(String dir) {
1315 this.exportDir = dir;
1316 }
1317
1318 public String getExistingJarName() {
1319 return existingJarFile;
1320 }
1321
1322 public void setExistingJarName(String jarFile) {
1323 this.existingJarFile = jarFile;
1324 }
1325
1326 public String[] getColumns() {
1327 if (null == columns) {
1328 return null;
1329 } else {
1330 return Arrays.copyOf(columns, columns.length);
1331 }
1332 }
1333
1334 public String getColumnNameCaseInsensitive(String col){
1335 if (null != columns) {
1336 for(String columnName : columns) {
1337 if(columnName.equalsIgnoreCase(col)) {
1338 return columnName;
1339 }
1340 }
1341 }
1342 return null;
1343 }
1344
1345 public void setColumns(String [] cols) {
1346 if (null == cols) {
1347 this.columns = null;
1348 } else {
1349 this.columns = Arrays.copyOf(cols, cols.length);
1350 }
1351 }
1352
  /** @return the column used to split work between mappers (--split-by). */
  public String getSplitByCol() {
    return splitByCol;
  }

  /** Set the column used to split work between mappers. */
  public void setSplitByCol(String splitBy) {
    this.splitByCol = splitBy;
  }

  /** @return the configured split limit, or null when unset. */
  public Integer getSplitLimit() {
    return splitLimit;
  }

  /** Set the split limit. */
  public void setSplitLimit(Integer splitLimit) {
    this.splitLimit = splitLimit;
  }

  /** @return the WHERE clause applied to imports (--where). */
  public String getWhereClause() {
    return whereClause;
  }

  /** Set the WHERE clause applied to imports. */
  public void setWhereClause(String where) {
    this.whereClause = where;
  }

  /** @return the database username. */
  public String getUsername() {
    return username;
  }

  /** Set the database username. */
  public void setUsername(String user) {
    this.username = user;
  }

  /** @return the database password. */
  public String getPassword() {
    return password;
  }

  /** @return the path of the file holding the password, or null. */
  public String getPasswordFilePath() {
    return passwordFilePath;
  }

  /** Set the path of the file holding the password. */
  public void setPasswordFilePath(String passwdFilePath) {
    this.passwordFilePath = passwdFilePath;
  }

  /** @return the credential-provider alias for the password, or null. */
  public String getPasswordAlias() {
    return passwordAlias;
  }

  /** Set the credential-provider alias for the password. */
  public void setPasswordAlias(String alias) {
    this.passwordAlias = alias;
  }
1404
1405 protected void parseColumnMapping(String mapping,
1406 Properties output) {
1407 output.clear();
1408
1409 // replace (xx,xx) with (xx#xx), so that we can just split by "," afterwards
1410 String[] maps = mapping.replaceAll("\\(([0-9]+),([0-9]+)\\)", "($1#$2)").split(",");
1411
1412 for(String map : maps) {
1413 String[] details = map.split("=");
1414 if (details.length != 2) {
1415 throw new IllegalArgumentException("Malformed mapping. "
1416 + "Column mapping should be the form key=value[,key=value]*");
1417 }
1418
1419 try {
1420 output.put(
1421 URLDecoder.decode(details[0].replaceAll("\\(([0-9]+)#([0-9]+)\\)", "($1,$2)"), "UTF-8"),
1422 URLDecoder.decode(details[1].replaceAll("\\(([0-9]+)#([0-9]+)\\)", "($1,$2)"), "UTF-8"));
1423 } catch (UnsupportedEncodingException e) {
1424 throw new IllegalArgumentException("Encoding not supported. "
1425 + "Column mapping should be UTF-8 encoding.");
1426 }
1427 }
1428 }
1429
  /** Parse a key=value[,key=value]* string into the Hive column type map. */
  public void setMapColumnHive(String mapColumn) {
    parseColumnMapping(mapColumn, mapColumnHive);
  }

  /** Parse a key=value[,key=value]* string into the Java column type map. */
  public void setMapColumnJava(String mapColumn) {
    parseColumnMapping(mapColumn, mapColumnJava);
  }

  /** @return the user-specified column-to-Hive-type overrides. */
  public Properties getMapColumnHive() {
    return mapColumnHive;
  }

  /** @return the user-specified column-to-Java-type overrides. */
  public Properties getMapColumnJava() {
    return mapColumnJava;
  }
1445
1446 /**
1447 * Allow the user to enter his password on the console without printing
1448 * characters.
1449 * @return the password as a string
1450 */
1451 private String securePasswordEntry() {
1452 try {
1453 return new String(System.console().readPassword("Enter password: "));
1454 } catch (NullPointerException e) {
1455 LOG.error("It seems that you have launched a Sqoop metastore job via");
1456 LOG.error("Oozie with sqoop.metastore.client.record.password disabled.");
1457 LOG.error("But this configuration is not supported because Sqoop can't");
1458 LOG.error("prompt the user to enter the password while being executed");
1459 LOG.error("as Oozie tasks. Please enable sqoop.metastore.client.record");
1460 LOG.error(".password in sqoop-site.xml, or provide the password");
1461 LOG.error("explicitly using --password in the command tag of the Oozie");
1462 LOG.error("workflow file.");
1463 return null;
1464 }
1465 }
1466
1467 /**
1468 * Set the password in this SqoopOptions from the console without printing
1469 * characters.
1470 */
1471 public void setPasswordFromConsole() {
1472 this.password = securePasswordEntry();
1473 }
1474
1475 public String getMapreduceJobName() {
1476 return mapreduceJobName;
1477 }
1478
1479 public void setMapreduceJobName(String mapredJobName) {
1480 this.mapreduceJobName = mapredJobName;
1481 }
1482
1483 public void setPassword(String pass) {
1484 this.password = pass;
1485 }
1486
1487 public boolean isDirect() {
1488 return direct;
1489 }
1490
1491 public void setDirectMode(boolean isDirect) {
1492 this.direct = isDirect;
1493 }
1494
1495 /**
1496 * @return true if underlying statements to be executed in batch mode,
1497 * or false if to be executed in a single multirow statement.
1498 */
1499 public boolean isBatchMode() {
1500 return batchMode;
1501 }
1502
1503 public void setBatchMode(boolean mode) {
1504 this.batchMode = mode;
1505 }
1506
1507 /**
1508 * @return the number of map tasks to use for import.
1509 */
1510 public int getNumMappers() {
1511 return this.numMappers;
1512 }
1513
1514 public void setNumMappers(int m) {
1515 this.numMappers = m;
1516 }
1517
1518 /**
1519 * @return the user-specified absolute class name for the table.
1520 */
1521 public String getClassName() {
1522 return className;
1523 }
1524
1525 public void setClassName(String name) {
1526 this.className = name;
1527 }
1528
1529 /**
1530 * @return the user-specified package to prepend to table names via
1531 * --package-name.
1532 */
1533 public String getPackageName() {
1534 return packageName;
1535 }
1536
1537 public void setPackageName(String name) {
1538 this.packageName = name;
1539 }
1540
1541 public String getHiveHome() {
1542 return hiveHome;
1543 }
1544
1545 public void setHiveHome(String home) {
1546 this.hiveHome = home;
1547 }
1548
1549 /** @return true if we should import the table into Hive. */
1550 public boolean doHiveImport() {
1551 return hiveImport;
1552 }
1553
1554 public void setHiveImport(boolean doImport) {
1555 this.hiveImport = doImport;
1556 }
1557
  /** @return the HDFS location for the external Hive table, or null. */
  public String getHiveExternalTableDir() {
    return this.hiveExternalTableDir;
  }

  /** Set the HDFS location for the external Hive table. */
  public void setHiveExternalTableDir(String location) {
    this.hiveExternalTableDir = location;
  }

  /**
   * @return the user-specified option to overwrite existing table in hive.
   */
  public boolean doOverwriteHiveTable() {
    return overwriteHiveTable;
  }

  /** Control whether an existing Hive table is overwritten. */
  public void setOverwriteHiveTable(boolean overwrite) {
    this.overwriteHiveTable = overwrite;
  }

  /**
   * @return the user-specified option to modify fields to drop hive delimiters
   */
  public boolean doHiveDropDelims() {
    return hiveDropDelims;
  }

  /** Control whether Hive delimiter characters are dropped from fields. */
  public void setHiveDropDelims(boolean dropHiveDelims) {
    this.hiveDropDelims = dropHiveDelims;
  }

  /**
   * @return the user-specified option to specify the replacement string
   * for hive delimeters
   */
  public String getHiveDelimsReplacement() {
    return hiveDelimsReplacement;
  }

  /** Set the replacement string for Hive delimiter characters. */
  public void setHiveDelimsReplacement(String replacement) {
    this.hiveDelimsReplacement = replacement;
  }

  /**
   * @return the user-specified option to specify sqoop's behavior during
   * target table creation if the table exists.
   */
  public boolean doFailIfHiveTableExists() {
    return failIfHiveTableExists;
  }

  /** Control whether an existing Hive target table causes a failure. */
  public void setFailIfHiveTableExists(boolean fail) {
    this.failIfHiveTableExists = fail;
  }

  // HCatalog support
  /** Set the HCatalog table name. */
  public void setHCatTableName(String ht) {
    this.hCatTableName = ht;
  }

  /** @return the HCatalog table name. */
  public String getHCatTableName() {
    return this.hCatTableName;
  }

  /** Set the HCatalog database name. */
  public void setHCatDatabaseName(String hd) {
    this.hCatDatabaseName = hd;
  }

  /** @return the HCatalog database name. */
  public String getHCatDatabaseName() {
    return this.hCatDatabaseName;
  }


  /** @return the HCatalog installation directory. */
  public String getHCatHome() {
    return hCatHome;
  }

  /** Set the HCatalog installation directory. */
  public void setHCatHome(String home) {
    this.hCatHome = home;
  }

  /** @return true if the HCatalog table should be created. */
  public boolean doCreateHCatalogTable() {
    return hCatCreateTable;
  }

  /** Control whether the HCatalog table is created. */
  public void setCreateHCatalogTable(boolean create) {
    this.hCatCreateTable = create;
  }

  /** @return true if the HCatalog table should be dropped and re-created. */
  public boolean doDropAndCreateHCatalogTable() {
    return hCatDropAndCreateTable;
  }

  /** Control whether the HCatalog table is dropped and re-created. */
  public void setDropAndCreateHCatalogTable(boolean dropAndCreate) {
    this.hCatDropAndCreateTable = dropAndCreate;
  }

  /** Set the HCatalog storage stanza used at table creation. */
  public void setHCatStorageStanza(String stanza) {
    this.hCatStorageStanza = stanza;
  }

  /** @return the HCatalog storage stanza used at table creation. */
  public String getHCatStorageStanza() {
    return this.hCatStorageStanza;
  }
1661 /**
1662 * @return location where .java files go; guaranteed to end with '/'.
1663 */
1664 public String getCodeOutputDir() {
1665 if (codeOutputDir.endsWith(File.separator)) {
1666 return codeOutputDir;
1667 } else {
1668 return codeOutputDir + File.separator;
1669 }
1670 }
1671
1672 public void setCodeOutputDir(String outputDir) {
1673 this.codeOutputDir = outputDir;
1674 }
1675
1676 /**
1677 * @return location where .jar and .class files go; guaranteed to end with
1678 * '/'.
1679 */
1680 public String getJarOutputDir() {
1681 if (jarOutputDir.endsWith(File.separator)) {
1682 return jarOutputDir;
1683 } else {
1684 return jarOutputDir + File.separator;
1685 }
1686 }
1687
1688 public void setJarOutputDir(String outDir) {
1689 this.jarOutputDir = outDir;
1690 this.jarDirIsAuto = false;
1691 }
1692
1693 /**
1694 * Return the value of $HADOOP_MAPRED_HOME.
1695 * @return $HADOOP_MAPRED_HOME, or null if it's not set.
1696 */
1697 public String getHadoopMapRedHome() {
1698 return hadoopMapRedHome;
1699 }
1700
1701 public void setHadoopMapRedHome(String home) {
1702 this.hadoopMapRedHome = home;
1703 }
1704
1705 /**
1706 * @return a sql command to execute and exit with.
1707 */
1708 public String getSqlQuery() {
1709 return sqlQuery;
1710 }
1711
1712 public void setSqlQuery(String sqlStatement) {
1713 this.sqlQuery = sqlStatement;
1714 }
1715
1716 public String getBoundaryQuery() {
1717 return boundaryQuery;
1718 }
1719
1720 public void setBoundaryQuery(String sqlStatement) {
1721 boundaryQuery = sqlStatement;
1722 }
1723
1724 /**
1725 * @return The JDBC driver class name specified with --driver.
1726 */
1727 public String getDriverClassName() {
1728 return driverClassName;
1729 }
1730
1731 public void setDriverClassName(String driverClass) {
1732 this.driverClassName = driverClass;
1733 }
1734
1735 /**
1736 * @return the base destination path for table uploads.
1737 */
1738 public String getWarehouseDir() {
1739 return warehouseDir;
1740 }
1741
1742 public void setWarehouseDir(String warehouse) {
1743 this.warehouseDir = warehouse;
1744 }
1745
1746 public String getTargetDir() {
1747 return this.targetDir;
1748 }
1749
1750 public void setTargetDir(String dir) {
1751 this.targetDir = dir;
1752 }
1753
1754 public void setAppendMode(boolean doAppend) {
1755 this.append = doAppend;
1756 }
1757
1758 public boolean isAppendMode() {
1759 return this.append;
1760 }
1761
1762 public void setDeleteMode(boolean doDelete) {
1763 this.delete = doDelete;
1764 }
1765
1766 public boolean isDeleteMode() {
1767 return this.delete;
1768 }
1769
1770 /**
1771 * @return the destination file format
1772 */
1773 public FileLayout getFileLayout() {
1774 return this.layout;
1775 }
1776
1777 public void setFileLayout(FileLayout fileLayout) {
1778 this.layout = fileLayout;
1779 }
1780
1781 /**
1782 * @return the field delimiter to use when parsing lines. Defaults to the
1783 * field delim to use when printing lines.
1784 */
1785 public char getInputFieldDelim() {
1786 char f = inputDelimiters.getFieldsTerminatedBy();
1787 if (f == DelimiterSet.NULL_CHAR) {
1788 return this.outputDelimiters.getFieldsTerminatedBy();
1789 } else {
1790 return f;
1791 }
1792 }
1793
1794 /**
1795 * Set the field delimiter to use when parsing lines.
1796 */
1797 public void setInputFieldsTerminatedBy(char c) {
1798 this.inputDelimiters.setFieldsTerminatedBy(c);
1799 }
1800
1801 /**
1802 * @return the record delimiter to use when parsing lines. Defaults to the
1803 * record delim to use when printing lines.
1804 */
1805 public char getInputRecordDelim() {
1806 char r = inputDelimiters.getLinesTerminatedBy();
1807 if (r == DelimiterSet.NULL_CHAR) {
1808 return this.outputDelimiters.getLinesTerminatedBy();
1809 } else {
1810 return r;
1811 }
1812 }
1813
1814 /**
1815 * Set the record delimiter to use when parsing lines.
1816 */
1817 public void setInputLinesTerminatedBy(char c) {
1818 this.inputDelimiters.setLinesTerminatedBy(c);
1819 }
1820
1821 /**
1822 * @return the character that may enclose fields when parsing lines.
1823 * Defaults to the enclosing-char to use when printing lines.
1824 */
1825 public char getInputEnclosedBy() {
1826 char c = inputDelimiters.getEnclosedBy();
1827 if (c == DelimiterSet.NULL_CHAR) {
1828 return this.outputDelimiters.getEnclosedBy();
1829 } else {
1830 return c;
1831 }
1832 }
1833
1834 /**
1835 * Set the enclosed-by character to use when parsing lines.
1836 */
1837 public void setInputEnclosedBy(char c) {
1838 this.inputDelimiters.setEnclosedBy(c);
1839 }
1840
1841 /**
1842 * @return the escape character to use when parsing lines. Defaults to the
1843 * escape character used when printing lines.
1844 */
1845 public char getInputEscapedBy() {
1846 char c = inputDelimiters.getEscapedBy();
1847 if (c == DelimiterSet.NULL_CHAR) {
1848 return this.outputDelimiters.getEscapedBy();
1849 } else {
1850 return c;
1851 }
1852 }
1853
1854 /**
1855 * Set the escaped-by character to use when parsing lines.
1856 */
1857 public void setInputEscapedBy(char c) {
1858 this.inputDelimiters.setEscapedBy(c);
1859 }
1860
1861 /**
1862 * @return true if fields must be enclosed by the --enclosed-by character
1863 * when parsing. Defaults to false. Set true when --input-enclosed-by is
1864 * used.
1865 */
1866 public boolean isInputEncloseRequired() {
1867 char c = this.inputDelimiters.getEnclosedBy();
1868 if (c == DelimiterSet.NULL_CHAR) {
1869 return this.outputDelimiters.isEncloseRequired();
1870 } else {
1871 return this.inputDelimiters.isEncloseRequired();
1872 }
1873 }
1874
1875 /**
1876 * If true, then all input fields are expected to be enclosed by the
1877 * enclosed-by character when parsing.
1878 */
1879 public void setInputEncloseRequired(boolean required) {
1880 this.inputDelimiters.setEncloseRequired(required);
1881 }
1882
1883 /**
1884 * @return the character to print between fields when importing them to
1885 * text.
1886 */
1887 public char getOutputFieldDelim() {
1888 return this.outputDelimiters.getFieldsTerminatedBy();
1889 }
1890
1891 /**
1892 * Set the field delimiter to use when formatting lines.
1893 */
1894 public void setFieldsTerminatedBy(char c) {
1895 this.outputDelimiters.setFieldsTerminatedBy(c);
1896 }
1897
1898
1899 /**
1900 * @return the character to print between records when importing them to
1901 * text.
1902 */
1903 public char getOutputRecordDelim() {
1904 return this.outputDelimiters.getLinesTerminatedBy();
1905 }
1906
1907 /**
1908 * Set the record delimiter to use when formatting lines.
1909 */
1910 public void setLinesTerminatedBy(char c) {
1911 this.outputDelimiters.setLinesTerminatedBy(c);
1912 }
1913
1914 /**
1915 * @return a character which may enclose the contents of fields when
1916 * imported to text.
1917 */
1918 public char getOutputEnclosedBy() {
1919 return this.outputDelimiters.getEnclosedBy();
1920 }
1921
1922 /**
1923 * Set the enclosed-by character to use when formatting lines.
1924 */
1925 public void setEnclosedBy(char c) {
1926 this.outputDelimiters.setEnclosedBy(c);
1927 }
1928
1929 /**
1930 * @return a character which signifies an escape sequence when importing to
1931 * text.
1932 */
1933 public char getOutputEscapedBy() {
1934 return this.outputDelimiters.getEscapedBy();
1935 }
1936
1937 /**
1938 * Set the escaped-by character to use when formatting lines.
1939 */
1940 public void setEscapedBy(char c) {
1941 this.outputDelimiters.setEscapedBy(c);
1942 }
1943
1944 /**
1945 * @return true if fields imported to text must be enclosed by the
1946 * EnclosedBy char. default is false; set to true if --enclosed-by is used
1947 * instead of --optionally-enclosed-by.
1948 */
1949 public boolean isOutputEncloseRequired() {
1950 return this.outputDelimiters.isEncloseRequired();
1951 }
1952
1953 /**
1954 * If true, then the enclosed-by character will be applied to all fields,
1955 * even if internal characters do not need enclosed-by protection.
1956 */
1957 public void setOutputEncloseRequired(boolean required) {
1958 this.outputDelimiters.setEncloseRequired(required);
1959 }
1960
1961 /**
1962 * @return the set of delimiters used for formatting output records.
1963 */
1964 public DelimiterSet getOutputDelimiters() {
1965 return this.outputDelimiters.copy();
1966 }
1967
1968 /**
1969 * Set the complete set of delimiters to use for output formatting.
1970 */
1971 public void setOutputDelimiters(DelimiterSet delimiters) {
1972 this.outputDelimiters = delimiters.copy();
1973 }
1974
  /**
   * @return the set of delimiters used for parsing the input.
   * This may include values implicitly set by the output delimiters.
   *
   * A fresh DelimiterSet is assembled from the individual input-delimiter
   * accessors, which presumably fall back to the corresponding output
   * delimiters when no input-specific value was set — confirm in those
   * getters (defined earlier in this class).
   */
  public DelimiterSet getInputDelimiters() {
    return new DelimiterSet(
        getInputFieldDelim(),
        getInputRecordDelim(),
        getInputEnclosedBy(),
        getInputEscapedBy(),
        isInputEncloseRequired());
  }
1987
1988 /**
1989 * @return true if the user wants imported results to be compressed.
1990 */
1991 public boolean shouldUseCompression() {
1992 return this.useCompression || compressionCodec != null;
1993 }
1994
1995 public void setUseCompression(boolean compress) {
1996 this.useCompression = compress;
1997 }
1998
1999 /**
2000 * @return the name of the compression codec to use when importing.
2001 * E.g. <code>org.apache.hadoop.io.compress.GzipCodec</code>.
2002 */
2003 public String getCompressionCodec() {
2004 return compressionCodec;
2005 }
2006
2007 public void setCompressionCodec(String codec) {
2008 this.compressionCodec = codec;
2009 }
2010 /**
2011 * @return the name of the destination table when importing to Hive.
2012 */
2013 public String getHiveTableName() {
2014 if (null != this.hiveTableName) {
2015 return this.hiveTableName;
2016 } else {
2017 return this.tableName;
2018 }
2019 }
2020
2021 public void setHiveTableName(String name) {
2022 this.hiveTableName = name;
2023 }
2024
2025 public String getHiveDatabaseName() {
2026 return this.hiveDatabaseName;
2027 }
2028
2029 public void setHiveDatabaseName(String name) {
2030 this.hiveDatabaseName = name;
2031 }
2032
2033 public String getHivePartitionKey() {
2034 return hivePartitionKey;
2035 }
2036
2037 public void setHivePartitionKey(String hpk) {
2038 this.hivePartitionKey = hpk;
2039 }
2040
2041 public String getHivePartitionValue() {
2042 return hivePartitionValue;
2043 }
2044
2045 public void setHivePartitionValue(String hpv) {
2046 this.hivePartitionValue = hpv;
2047 }
2048
2049 /**
2050 * @return the file size to split by when using --direct mode.
2051 */
2052 public long getDirectSplitSize() {
2053 return this.directSplitSize;
2054 }
2055
2056 public void setDirectSplitSize(long splitSize) {
2057 this.directSplitSize = splitSize;
2058 }
2059
2060 /**
2061 * @return the max size of a LOB before we spill to a separate file.
2062 */
2063 public long getInlineLobLimit() {
2064 return this.maxInlineLobSize;
2065 }
2066
2067 public void setInlineLobLimit(long limit) {
2068 this.maxInlineLobSize = limit;
2069 }
2070
2071 public Integer getFetchSize() {
2072 return this.fetchSize;
2073 }
2074
2075 public void setFetchSize(Integer size) {
2076 this.fetchSize = size;
2077 }
2078
  /**
   * @return true if the output delimiters have been explicitly set by the
   * user.
   */
  public boolean explicitOutputDelims() {
    return areOutputDelimsManuallySet;
  }
2085
2086 /**
2087 * Flag the output delimiter settings as explicit user settings, or implicit.
2088 */
2089 public void setExplicitOutputDelims(boolean explicit) {
2090 this.areOutputDelimsManuallySet = explicit;
2091 }
2092
2093 /**
2094 * @return true if the input delimiters have been explicitly set by the user.
2095 */
2096 public boolean explicitInputDelims() {
2097 return areInputDelimsManuallySet;
2098 }
2099
2100 /**
2101 * Flag the input delimiter settings as explicit user settings, or implicit.
2102 */
2103 public void setExplicitInputDelims(boolean explicit) {
2104 this.areInputDelimsManuallySet = explicit;
2105 }
2106
2107 public Configuration getConf() {
2108 return conf;
2109 }
2110
2111 public void setConf(Configuration config) {
2112 this.conf = config;
2113 }
2114
2115 /**
2116 * @return command-line arguments after a '-'.
2117 */
2118 public String [] getExtraArgs() {
2119 if (extraArgs == null) {
2120 return null;
2121 }
2122
2123 String [] out = new String[extraArgs.length];
2124 for (int i = 0; i < extraArgs.length; i++) {
2125 out[i] = extraArgs[i];
2126 }
2127 return out;
2128 }
2129
2130 public void setExtraArgs(String [] args) {
2131 if (null == args) {
2132 this.extraArgs = null;
2133 return;
2134 }
2135
2136 this.extraArgs = new String[args.length];
2137 for (int i = 0; i < args.length; i++) {
2138 this.extraArgs[i] = args[i];
2139 }
2140 }
2141
2142 /**
2143 * Set the name of the column to be used in the WHERE clause of an
2144 * UPDATE-based export process.
2145 */
2146 public void setUpdateKeyCol(String colName) {
2147 this.updateKeyCol = colName;
2148 }
2149
2150 /**
2151 * @return the column which is the key column in a table to be exported
2152 * in update mode.
2153 */
2154 public String getUpdateKeyCol() {
2155 return this.updateKeyCol;
2156 }
2157
2158 /**
2159 * Set "UpdateOnly" to silently ignore new rows during update export.
2160 * Set "AllowInsert" to insert new rows during update export.
2161 */
2162 public void setUpdateMode(UpdateMode mode) {
2163 this.updateMode = mode;
2164 }
2165
2166 /**
2167 * @return how to handle new rows found in update export.
2168 */
2169 public UpdateMode getUpdateMode() {
2170 return updateMode;
2171 }
2172
2173 /**
2174 * @return an ordered list of column names. The code generator should
2175 * generate the DBWritable.write(PreparedStatement) method with columns
2176 * exporting in this order, if it is non-null.
2177 */
2178 public String [] getDbOutputColumns() {
2179 if (null != dbOutColumns) {
2180 return Arrays.copyOf(this.dbOutColumns, dbOutColumns.length);
2181 } else {
2182 return null;
2183 }
2184 }
2185
2186 /**
2187 * Set the order in which columns should be serialized by the generated
2188 * DBWritable.write(PreparedStatement) method. Setting this to null will use
2189 * the "natural order" of the database table.
2190 *
2191 * TODO: Expose this setter via the command-line arguments for the codegen
2192 * module. That would allow users to export to tables with columns in a
2193 * different physical order than the file layout in HDFS.
2194 */
2195 public void setDbOutputColumns(String [] outCols) {
2196 if (null == outCols) {
2197 this.dbOutColumns = null;
2198 } else {
2199 this.dbOutColumns = Arrays.copyOf(outCols, outCols.length);
2200 }
2201 }
2202
2203 /**
2204 * Set whether we should create missing HBase tables.
2205 */
2206 public void setCreateHBaseTable(boolean create) {
2207 this.hbaseCreateTable = create;
2208 }
2209
2210 /**
2211 * Returns true if we should create HBase tables/column families
2212 * that are missing.
2213 */
2214 public boolean getCreateHBaseTable() {
2215 return this.hbaseCreateTable;
2216 }
2217
2218 /**
2219 * Sets the HBase target column family.
2220 */
2221 public void setHBaseColFamily(String colFamily) {
2222 this.hbaseColFamily = colFamily;
2223 }
2224
2225 /**
2226 * Gets the HBase import target column family.
2227 */
2228 public String getHBaseColFamily() {
2229 return this.hbaseColFamily;
2230 }
2231
2232 /**
2233 * Gets the column to use as the row id in an hbase import.
2234 * If null, use the primary key column.
2235 */
2236 public String getHBaseRowKeyColumn() {
2237 return this.hbaseRowKeyCol;
2238 }
2239
2240 /**
2241 * Sets the column to use as the row id in an hbase import.
2242 */
2243 public void setHBaseRowKeyColumn(String col) {
2244 this.hbaseRowKeyCol = col;
2245 }
2246
2247 /**
2248 * @return true if bulk load is enabled and false otherwise.
2249 */
2250 public boolean isBulkLoadEnabled() {
2251 return this.hbaseBulkLoadEnabled;
2252 }
2253
2254 /**
2255 * Sets the temp dir to use as the bulk load dir in an hbase import.
2256 */
2257 public void setHBaseBulkLoadEnabled(boolean hbaseBulkLoadEnabled) {
2258 this.hbaseBulkLoadEnabled = hbaseBulkLoadEnabled;
2259 }
2260
2261 /**
2262 * Gets the target HBase table name, if any.
2263 */
2264 public String getHBaseTable() {
2265 return this.hbaseTable;
2266 }
2267
2268 /**
2269 * Sets the target HBase table name for an import.
2270 */
2271 public void setHBaseTable(String table) {
2272 this.hbaseTable = table;
2273 }
2274
2275 /**
2276 * Set the column of the import source table to check for incremental import
2277 * state.
2278 */
2279 public void setIncrementalTestColumn(String colName) {
2280 this.incrementalTestCol = colName;
2281 }
2282
2283 /**
2284 * Return the name of the column of the import source table
2285 * to check for incremental import state.
2286 */
2287 public String getIncrementalTestColumn() {
2288 return this.incrementalTestCol;
2289 }
2290
2291 /**
2292 * Set the incremental import mode to use.
2293 */
2294 public void setIncrementalMode(IncrementalMode mode) {
2295 this.incrementalMode = mode;
2296 }
2297
2298 /**
2299 * Get the incremental import mode to use.
2300 */
2301 public IncrementalMode getIncrementalMode() {
2302 return this.incrementalMode;
2303 }
2304
2305 /**
2306 * Set the last imported value of the incremental import test column.
2307 */
2308 public void setIncrementalLastValue(String lastVal) {
2309 this.incrementalLastValue = lastVal;
2310 }
2311
2312 /**
2313 * Get the last imported value of the incremental import test column.
2314 */
2315 public String getIncrementalLastValue() {
2316 return this.incrementalLastValue;
2317 }
2318
2319 /**
2320 * Get HBase null incremental mode to use.
2321 */
2322 public HBaseNullIncrementalMode getHbaseNullIncrementalMode() {
2323 return hbaseNullIncrementalMode;
2324 }
2325
2326 /**
2327 * Set HBase null incremental mode to use.
2328 */
2329 public void setHbaseNullIncrementalMode(HBaseNullIncrementalMode hbaseNullIncrementalMode) {
2330 this.hbaseNullIncrementalMode = hbaseNullIncrementalMode;
2331 }
2332
2333 /**
2334 * Set the tables to be excluded when doing all table import.
2335 */
2336 public void setAllTablesExclude(String exclude) {
2337 this.allTablesExclude = exclude;
2338 }
2339
2340 /**
2341 * Get the tables to be excluded when doing all table import.
2342 */
2343 public String getAllTablesExclude() {
2344 return this.allTablesExclude;
2345 }
2346
2347 /**
2348 * Set the name of the saved job this SqoopOptions belongs to.
2349 */
2350 public void setJobName(String job) {
2351 this.jobName = job;
2352 }
2353
2354 /**
2355 * Get the name of the saved job this SqoopOptions belongs to.
2356 */
2357 public String getJobName() {
2358 return this.jobName;
2359 }
2360
2361 /**
2362 * Set the JobStorage descriptor used to open the saved job
2363 * this SqoopOptions belongs to.
2364 */
2365 public void setStorageDescriptor(Map<String, String> descriptor) {
2366 this.jobStorageDescriptor = descriptor;
2367 }
2368
2369 /**
2370 * Get the JobStorage descriptor used to open the saved job
2371 * this SqoopOptions belongs to.
2372 */
2373 public Map<String, String> getStorageDescriptor() {
2374 return this.jobStorageDescriptor;
2375 }
2376
2377 /**
2378 * Return the parent instance this SqoopOptions is derived from.
2379 */
2380 public SqoopOptions getParent() {
2381 return this.parent;
2382 }
2383
2384 /**
2385 * Set the parent instance this SqoopOptions is derived from.
2386 */
2387 public void setParent(SqoopOptions options) {
2388 this.parent = options;
2389 }
2390
2391 /**
2392 * Set the path name used to do an incremental import of old data
2393 * which will be combined with an "new" dataset.
2394 */
2395 public void setMergeOldPath(String path) {
2396 this.mergeOldPath = path;
2397 }
2398
2399 /**
2400 * Return the path name used to do an incremental import of old data
2401 * which will be combined with an "new" dataset.
2402 */
2403 public String getMergeOldPath() {
2404 return this.mergeOldPath;
2405 }
2406
2407 /**
2408 * Set the path name used to do an incremental import of new data
2409 * which will be combined with an "old" dataset.
2410 */
2411 public void setMergeNewPath(String path) {
2412 this.mergeNewPath = path;
2413 }
2414
2415 /**
2416 * Return the path name used to do an incremental import of new data
2417 * which will be combined with an "old" dataset.
2418 */
2419 public String getMergeNewPath() {
2420 return this.mergeNewPath;
2421 }
2422
2423 /**
2424 * Set the name of the column used to merge an old and new dataset.
2425 */
2426 public void setMergeKeyCol(String col) {
2427 this.mergeKeyCol = col;
2428 }
2429
2430 /** Return the name of the column used to merge an old and new dataset. */
2431 public String getMergeKeyCol() {
2432 return this.mergeKeyCol;
2433 }
2434
  /**
   * Set the mainframe dataset name.
   *
   * NOTE(review): this also overwrites tableName with the dataset name, so
   * any table-based logic downstream sees the dataset name — confirm that
   * all callers expect this side effect.
   */
  public void setMainframeInputDatasetName(String name) {
    mainframeInputDatasetName = name;
    tableName = name;
    // may need to set this in the conf variable otherwise it gets lost.
  }
2443
2444 public void setMainframeInputDatasetType(String name) {
2445 mainframeInputDatasetType = name;
2446 }
2447
2448 /**
2449 * Return the mainframe dataset name.
2450 */
2451 public String getMainframeInputDatasetName() {
2452 return mainframeInputDatasetName;
2453 }
2454
  /**
   * @return the mainframe dataset type (presumably one of the
   * MainframeConfiguration dataset-type constants — confirm against the
   * corresponding setter's callers).
   */
  public String getMainframeInputDatasetType() {
    return mainframeInputDatasetType;
  }
2461
2462 // return whether the dataset is on tape
2463 public Boolean getMainframeInputDatasetTape() {
2464 if (mainframeInputDatasetTape == null) { return false; }
2465 return Boolean.parseBoolean(mainframeInputDatasetTape);
2466 }
2467
2468 // sets whether the dataset is on tape
2469 public void setMainframeInputDatasetTape(String txtIsFromTape) {
2470 mainframeInputDatasetTape = Boolean.valueOf(Boolean.parseBoolean(txtIsFromTape)).toString();
2471 }
2472
2473 public static String getAccumuloHomeDefault() {
2474 // Set this with $ACCUMULO_HOME, but -Daccumulo.home can override.
2475 String accumuloHome = System.getenv("ACCUMULO_HOME");
2476 accumuloHome = System.getProperty("accumulo.home", accumuloHome);
2477 return accumuloHome;
2478 }
2479
2480 public static String getZookeeperHomeDefault() {
2481 // Set this with $ZOOKEEPER_HOME, but -Dzookeeper.home can override.
2482 String zookeeperHome = System.getenv("ZOOKEEPER_HOME");
2483 zookeeperHome = System.getProperty("zookeeper.home", zookeeperHome);
2484 return zookeeperHome;
2485 }
2486
2487 public String getAccumuloHome() {
2488 return accumuloHome;
2489 }
2490
2491 public void setAccumuloHome(String home) {
2492 this.accumuloHome = home;
2493 }
2494
2495 public String getZookeeperHome() {
2496 return zookeeperHome;
2497 }
2498
2499 public void setZookeeperHome(String home) {
2500 this.zookeeperHome = home;
2501 }
2502
2503 /**
2504 * Set whether we should create missing Accumulo tables.
2505 */
2506 public void setCreateAccumuloTable(boolean create) {
2507 this.accumuloCreateTable = create;
2508 }
2509
2510 /**
2511 * Returns true if we should create Accumulo tables/column families
2512 * that are missing.
2513 */
2514 public boolean getCreateAccumuloTable() {
2515 return this.accumuloCreateTable;
2516 }
2517
2518 /**
2519 * Sets the Accumulo batch size (in bytes).
2520 */
2521 public void setAccumuloBatchSize(long batchSize) {
2522 this.accumuloBatchSize = batchSize;
2523 }
2524
2525 /**
2526 * Gets the Accumulo batch size (in bytes).
2527 */
2528 public long getAccumuloBatchSize() {
2529 return this.accumuloBatchSize;
2530 }
2531
2532 /**
2533 * Sets the Accumulo target column family.
2534 */
2535 public void setAccumuloColFamily(String colFamily) {
2536 this.accumuloColFamily = colFamily;
2537 }
2538
2539 /**
2540 * Gets the Accumulo import target column family.
2541 */
2542 public String getAccumuloColFamily() {
2543 return this.accumuloColFamily;
2544 }
2545
2546 /**
2547 * Sets the Accumulo max latency.
2548 */
2549 public void setAccumuloMaxLatency(long maxLatency) {
2550 this.accumuloMaxLatency = maxLatency;
2551 }
2552
2553 /**
2554 * Gets the Accumulo max latency.
2555 */
2556 public long getAccumuloMaxLatency() {
2557 return this.accumuloMaxLatency;
2558 }
2559
2560 /**
2561 * Gets the column to use as the row id in an Accumulo import.
2562 * If null, use the primary key column.
2563 */
2564 public String getAccumuloRowKeyColumn() {
2565 return this.accumuloRowKeyCol;
2566 }
2567
2568 /**
2569 * Sets the column to use as the row id in an Accumulo import.
2570 *
2571 */
2572 public void setAccumuloRowKeyColumn(String col) {
2573 this.accumuloRowKeyCol = col;
2574 }
2575
2576 /**
2577 * Gets the visibility token to use.
2578 * If null, don't assign a visibility.
2579 */
2580 public String getAccumuloVisibility() {
2581 return this.accumuloVisibility;
2582 }
2583
2584 /**
2585 * Sets the visibility token to use.
2586 *
2587 */
2588 public void setAccumuloVisibility(String vis) {
2589 this.accumuloVisibility = vis;
2590 }
2591
2592 /**
2593 * Gets the target Accumulo table name, if any.
2594 */
2595 public String getAccumuloTable() {
2596 return this.accumuloTable;
2597 }
2598
2599 /**
2600 * Sets the target Accumulo table name.
2601 */
2602 public void setAccumuloTable(String table) {
2603 this.accumuloTable = table;
2604 }
2605
2606 /**
2607 * Gets the target Accumulo user name, if any.
2608 */
2609 public String getAccumuloUser() {
2610 return this.accumuloUser;
2611 }
2612
2613 /**
2614 * Sets the target Accumulo user name for an import.
2615 */
2616 public void setAccumuloUser(String user) {
2617 this.accumuloUser = user;
2618 }
2619
2620 /**
2621 * Gets the target Accumulo password, if any.
2622 */
2623 public String getAccumuloPassword() {
2624 return this.accumuloPassword;
2625 }
2626
2627 /**
2628 * Sets the target Accumulo password for an import.
2629 */
2630 public void setAccumuloPassword(String passwd) {
2631 this.accumuloPassword = passwd;
2632 }
2633
2634 /**
2635 * Gets the target Accumulo instance, if any.
2636 */
2637 public String getAccumuloInstance() {
2638 return this.accumuloInstance;
2639 }
2640
2641 /**
2642 * Sets the target Accumulo instance for an import.
2643 */
2644 public void setAccumuloInstance(String instance) {
2645 this.accumuloInstance = instance;
2646 }
2647
2648 /**
2649 * Gets the target Accumulo zookeeper instance, if any.
2650 */
2651 public String getAccumuloZookeepers() {
2652 return this.accumuloZookeepers;
2653 }
2654
2655 /**
2656 ** Sets the target Accumulo zookeeper instance for an import.
2657 **/
2658 public void setAccumuloZookeepers(String zookeepers) {
2659 this.accumuloZookeepers = zookeepers;
2660 }
2661
2662 public void setConnManagerClassName(String connManagerClass) {
2663 this.connManagerClassName = connManagerClass;
2664 }
2665
2666 public String getConnManagerClassName() {
2667 return connManagerClassName;
2668 }
2669
2670 /** @return the SqoopTool that is operating this session. */
2671 public SqoopTool getActiveSqoopTool() {
2672 return activeSqoopTool;
2673 }
2674
2675 public void setActiveSqoopTool(SqoopTool tool) {
2676 activeSqoopTool = tool;
2677 }
2678
2679 public void setNullStringValue(String nullString) {
2680 this.nullStringValue = nullString;
2681 }
2682
2683 public String getNullStringValue() {
2684 return nullStringValue;
2685 }
2686
2687 public void setInNullStringValue(String inNullString) {
2688 this.inNullStringValue = inNullString;
2689 }
2690
2691 public String getInNullStringValue() {
2692 return inNullStringValue;
2693 }
2694
2695 public void setNullNonStringValue(String nullNonString) {
2696 this.nullNonStringValue = nullNonString;
2697 }
2698
2699 public String getNullNonStringValue() {
2700 return nullNonStringValue;
2701 }
2702
2703 public void setInNullNonStringValue(String inNullNonString) {
2704 this.inNullNonStringValue = inNullNonString;
2705 }
2706
2707 public String getInNullNonStringValue() {
2708 return inNullNonStringValue;
2709 }
2710
  /**
   * Stores a defensive copy of the supplied JDBC connection parameters.
   *
   * NOTE(review): Properties.putAll(null) throws NullPointerException, so
   * passing null here fails — confirm callers never pass null.
   */
  public void setConnectionParams(Properties params) {
    connectionParams = new Properties();
    connectionParams.putAll(params);
  }
2715
2716 public Properties getConnectionParams() {
2717 return connectionParams;
2718 }
2719
2720 public void setValidationEnabled(boolean validationEnabled) {
2721 isValidationEnabled = validationEnabled;
2722 }
2723
2724 public boolean isValidationEnabled() {
2725 return isValidationEnabled;
2726 }
2727
2728 public Class getValidatorClass() {
2729 return validatorClass;
2730 }
2731
2732 public void setValidatorClass(Class validatorClazz) {
2733 this.validatorClass = validatorClazz;
2734 }
2735
2736 public Class getValidationThresholdClass() {
2737 return validationThresholdClass;
2738 }
2739
2740 public void setValidationThresholdClass(Class validationThresholdClazz) {
2741 this.validationThresholdClass = validationThresholdClazz;
2742 }
2743
2744 public Class getValidationFailureHandlerClass() {
2745 return validationFailureHandlerClass;
2746 }
2747
2748 public void setValidationFailureHandlerClass(
2749 Class validationFailureHandlerClazz) {
2750 this.validationFailureHandlerClass = validationFailureHandlerClazz;
2751 }
2752
2753 public String getCall() {
2754 return call;
2755 }
2756
2757 public void setCall(String theCall) {
2758 this.call = theCall;
2759 }
2760
2761 public void setSkipDistCache(boolean skip) {
2762 this.skipDistCache = skip;
2763 }
2764
2765 public boolean isSkipDistCache() {
2766 return this.skipDistCache;
2767 }
2768
2769 public void setRelaxedIsolation(boolean b) {
2770 this.relaxedIsolation = true;
2771
2772 }
2773
2774 public boolean getRelaxedIsolation() {
2775 return this.relaxedIsolation;
2776 }
2777
2778 public void setAutoResetToOneMapper(boolean b) {
2779 this.autoResetToOneMapper = b;
2780 }
2781
2782 public boolean getAutoResetToOneMapper() {
2783 return this.autoResetToOneMapper;
2784 }
2785
2786 public String getHCatalogPartitionKeys() {
2787 return hCatalogPartitionKeys;
2788 }
2789
2790
2791 public void setHCatalogPartitionKeys(String hpks) {
2792 this.hCatalogPartitionKeys = hpks;
2793 }
2794
2795 public String getHCatalogPartitionValues() {
2796 return hCatalogPartitionValues;
2797 }
2798
2799 public void setHCatalogPartitionValues(String hpvs) {
2800 this.hCatalogPartitionValues = hpvs;
2801 }
2802
2803 public Map<String, String> getCustomToolOptions() {
2804 return customToolOptions;
2805 }
2806
2807 public void setCustomToolOptions(Map<String, String> customToolOptions) {
2808 this.customToolOptions = customToolOptions;
2809 }
2810
2811 public String getToolName() {
2812 return this.toolName;
2813 }
2814
2815 public void setToolName(String toolName) {
2816 this.toolName = toolName;
2817 }
2818
2819 public int getMetadataTransactionIsolationLevel() {
2820 return this.metadataTransactionIsolationLevel;
2821 }
2822
2823 public void setMetadataTransactionIsolationLevel(int transactionIsolationLevel) {
2824 this.metadataTransactionIsolationLevel = transactionIsolationLevel;
2825 }
2826
2827 public boolean isOracleEscapingDisabled() {
2828 return oracleEscapingDisabled;
2829 }
2830
  /**
   * Enables or disables escaping of Oracle identifiers.
   *
   * The value is also mirrored into the Hadoop Configuration so it remains
   * visible in code paths where this SqoopOptions instance is not
   * reachable.
   */
  public void setOracleEscapingDisabled(boolean escapingDisabled) {
    this.oracleEscapingDisabled = escapingDisabled;
    // important to have custom setter to ensure option is available through
    // Hadoop configuration on those places where SqoopOptions is not reachable
    getConf().setBoolean(ORACLE_ESCAPING_DISABLED, escapingDisabled);
  }
2837
  /**
   * Enables or disables escaping of column names when mapping them to Java
   * identifiers.
   *
   * The value is also mirrored into the Hadoop Configuration so it remains
   * visible in code paths where this SqoopOptions instance is not
   * reachable.
   */
  public void setEscapeMappingColumnNamesEnabled(boolean escapingEnabled) {
    this.escapeColumnMappingEnabled = escapingEnabled;
    // important to have custom setter to ensure option is available through
    // Hadoop configuration on those places where SqoopOptions is not reachable
    getConf().setBoolean(BaseSqoopTool.ESCAPE_MAPPING_COLUMN_NAMES_ENABLED, escapingEnabled);
  }
2844
2845 public boolean getEscapeMappingColumnNamesEnabled() {
2846 return escapeColumnMappingEnabled;
2847 }
2848
  /**
   * Returns the Java column-name mapping, lazily building an escaped copy
   * the first time it is needed.
   *
   * When column-name escaping is enabled, the keys of mapColumnJava are
   * converted to legal Java identifiers (see doCleanColumnMapping) and the
   * converted copy is cached in mapReplacedColumnJava.
   *
   * NOTE(review): the lazy initialization is unsynchronized — presumably
   * SqoopOptions is confined to a single thread; confirm before sharing
   * instances across threads.
   */
  public Properties getColumnNames() {
    if (escapeColumnMappingEnabled && null == mapReplacedColumnJava) {
      return doCleanColumnMapping();
    }
    return escapeColumnMappingEnabled ? mapReplacedColumnJava : mapColumnJava;
  }
2855
2856 private Properties doCleanColumnMapping() {
2857 mapReplacedColumnJava = new Properties();
2858
2859 if (!mapColumnJava.isEmpty()) {
2860 for (Map.Entry<Object, Object> entry : mapColumnJava.entrySet()) {
2861 String candidate = toJavaIdentifier((String)entry.getKey());
2862 mapReplacedColumnJava.put(candidate, mapColumnJava.getProperty((String)entry.getKey()));
2863 }
2864 return mapReplacedColumnJava;
2865 }
2866
2867 return mapColumnJava;
2868 }
2869
2870
2871 public String getMetaConnectStr() {
2872 return metaConnectStr;
2873 }
2874
2875 public void setMetaConnectStr(String metaConnectStr) {
2876 this.metaConnectStr = metaConnectStr;
2877 }
2878
2879 public String getMetaUsername() {
2880 return metaUsername;
2881 }
2882
2883 public void setMetaUsername(String metaUsername) {
2884 this.metaUsername = metaUsername;
2885 }
2886
2887 public String getMetaPassword() {
2888 return metaPassword;
2889 }
2890
2891 public void setMetaPassword(String metaPassword) {
2892 this.metaPassword = metaPassword;
2893 }
2894
2895 }
2896