SQOOP-931: Integrate HCatalog with Sqoop
src/java/org/apache/sqoop/SqoopOptions.java
1 /**
2 * Licensed to the Apache Software Foundation (ASF) under one
3 * or more contributor license agreements. See the NOTICE file
4 * distributed with this work for additional information
5 * regarding copyright ownership. The ASF licenses this file
6 * to you under the Apache License, Version 2.0 (the
7 * "License"); you may not use this file except in compliance
8 * with the License. You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19 package org.apache.sqoop;
20
21 import com.cloudera.sqoop.SqoopOptions.FileLayout;
22 import com.cloudera.sqoop.SqoopOptions.IncrementalMode;
23 import com.cloudera.sqoop.SqoopOptions.UpdateMode;
24 import java.io.File;
25 import java.io.IOException;
26 import java.lang.reflect.Field;
27 import java.util.ArrayList;
28 import java.util.Arrays;
29 import java.util.Iterator;
30 import java.util.Map;
31 import java.util.Properties;
32
33 import org.apache.commons.logging.Log;
34 import org.apache.commons.logging.LogFactory;
35 import org.apache.hadoop.conf.Configuration;
36
37 import com.cloudera.sqoop.lib.DelimiterSet;
38 import com.cloudera.sqoop.lib.LargeObjectLoader;
39 import com.cloudera.sqoop.tool.SqoopTool;
40 import com.cloudera.sqoop.util.RandomHash;
41 import com.cloudera.sqoop.util.StoredAsProperty;
42 import org.apache.sqoop.util.CredentialsUtil;
43 import org.apache.sqoop.util.LoggingUtils;
44 import org.apache.sqoop.validation.AbortOnFailureHandler;
45 import org.apache.sqoop.validation.AbsoluteValidationThreshold;
46 import org.apache.sqoop.validation.RowCountValidator;
47
48 /**
49 * Configurable state used by Sqoop tools.
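 *
 * <p>A minimal usage sketch (the connect string, table name, and mapper count
 * below are illustrative values, not defaults):
 * <pre>
 *   SqoopOptions options = new SqoopOptions(new Configuration());
 *   options.setConnectString("jdbc:mysql://localhost/mydb");
 *   options.setTableName("employees");
 *   options.setNumMappers(4);
 * </pre>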
50 */
51 public class SqoopOptions implements Cloneable {
52
53 public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName());
54
55 /**
56 * Set to true in configuration if you want to put db passwords
57 * in the metastore.
58 */
59 public static final String METASTORE_PASSWORD_KEY =
60 "sqoop.metastore.client.record.password";
61
62 // Default hive and hcat locations.
63 public static final String DEF_HIVE_HOME = "/usr/lib/hive";
64 public static final String DEF_HCAT_HOME = "/usr/lib/hcatalog";
65
66 public static final boolean METASTORE_PASSWORD_DEFAULT = false;
67
68 /**
69 * Thrown when invalid cmdline options are given.
70 */
71 @SuppressWarnings("serial")
72 public static class InvalidOptionsException extends Exception {
73
74 private String message;
75
76 public InvalidOptionsException(final String msg) {
77 this.message = msg;
78 }
79
80 public String getMessage() {
81 return message;
82 }
83
84 public String toString() {
85 return getMessage();
86 }
87 }
88
89 // TODO(aaron): Adding something here? Add a setter and a getter. Add a
90 // default value in initDefaults() if you need one. If this value needs to
91 // be serialized in the metastore, it should be marked with
92 // @StoredAsProperty(), if it is an int, long, boolean, String, or Enum.
93 // Arrays and other "special" types should be added directly to the
94 // loadProperties() and writeProperties() methods. Then add command-line
95 // arguments in the appropriate tools. The names of all command-line args
96 // are stored as constants in BaseSqoopTool.
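// Illustrative sketch of the pattern described above (the option name
// "my.new.option" is hypothetical):
//   @StoredAsProperty("my.new.option") private String myNewOption;
// plus a getter/setter, and a default in initDefaults() if one is needed.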
97
98 @StoredAsProperty("verbose") private boolean verbose;
99
100 @StoredAsProperty("mapreduce.job.name") private String mapreduceJobName;
101
102 @StoredAsProperty("db.connect.string") private String connectString;
103 @StoredAsProperty("db.table") private String tableName;
104 private String [] columns; // Array stored as db.column.list.
105 @StoredAsProperty("db.username") private String username;
106 @StoredAsProperty("db.export.staging.table") private String stagingTableName;
107 @StoredAsProperty("db.clear.staging.table") private boolean clearStagingTable;
108 @StoredAsProperty("db.export.call") private String call;
109 private Properties connectionParams; //Properties stored as db.connect.params
110
111
112 // May not be serialized, based on configuration.
113 // db.require.password is used to determine whether 'some' password is
114 // used. If so, it is stored as 'db.password'.
115 private String password;
116
117   // This represents the path to a file on ${user.home} containing the password,
118   // with 400 permissions so it's only readable by the user executing the tool.
119 @StoredAsProperty("db.password.file") private String passwordFilePath;
120
121 @StoredAsProperty("null.string") private String nullStringValue;
122 @StoredAsProperty("input.null.string") private String inNullStringValue;
123 @StoredAsProperty("null.non-string") private String nullNonStringValue;
124 @StoredAsProperty("input.null.non-string")
125 private String inNullNonStringValue;
126
127 @StoredAsProperty("codegen.output.dir") private String codeOutputDir;
128 @StoredAsProperty("codegen.compile.dir") private String jarOutputDir;
129 // Boolean specifying whether jarOutputDir is a nonce tmpdir (true), or
130 // explicitly set by the user (false). If the former, disregard any value
131 // for jarOutputDir saved in the metastore.
132 @StoredAsProperty("codegen.auto.compile.dir") private boolean jarDirIsAuto;
133 private String hadoopMapRedHome; // not serialized to metastore.
134 @StoredAsProperty("db.split.column") private String splitByCol;
135 @StoredAsProperty("db.where.clause") private String whereClause;
136 @StoredAsProperty("db.query") private String sqlQuery;
137 @StoredAsProperty("db.query.boundary") private String boundaryQuery;
138 @StoredAsProperty("jdbc.driver.class") private String driverClassName;
139 @StoredAsProperty("hdfs.warehouse.dir") private String warehouseDir;
140 @StoredAsProperty("hdfs.target.dir") private String targetDir;
141 @StoredAsProperty("hdfs.append.dir") private boolean append;
142 @StoredAsProperty("hdfs.file.format") private FileLayout layout;
143 @StoredAsProperty("direct.import") private boolean direct; // "direct mode."
144 @StoredAsProperty("db.batch") private boolean batchMode;
145 private String tmpDir; // where temp data goes; usually /tmp; not serialized.
146 private String hiveHome; // not serialized to metastore.
147 @StoredAsProperty("hive.import") private boolean hiveImport;
148 @StoredAsProperty("hive.overwrite.table") private boolean overwriteHiveTable;
149 @StoredAsProperty("hive.fail.table.exists")
150 private boolean failIfHiveTableExists;
151 @StoredAsProperty("hive.table.name") private String hiveTableName;
152 @StoredAsProperty("hive.database.name") private String hiveDatabaseName;
153 @StoredAsProperty("hive.drop.delims") private boolean hiveDropDelims;
154 @StoredAsProperty("hive.delims.replacement")
155 private String hiveDelimsReplacement;
156 @StoredAsProperty("hive.partition.key") private String hivePartitionKey;
157 @StoredAsProperty("hive.partition.value") private String hivePartitionValue;
158 @StoredAsProperty("hcatalog.table.name")
159 private String hCatTableName;
160 @StoredAsProperty("hcatalog.database.name")
161 private String hCatDatabaseName;
162 @StoredAsProperty("hcatalog.create.table")
163 private boolean hCatCreateTable;
164 @StoredAsProperty("hcatalog.storage.stanza")
165 private String hCatStorageStanza;
166 private String hCatHome; // not serialized to metastore.
167
168 // User explicit mapping of types
169   private Properties mapColumnJava; // stored as map.column.java
170 private Properties mapColumnHive; // stored as map.column.hive
171
172 // An ordered list of column names denoting what order columns are
173 // serialized to a PreparedStatement from a generated record type.
174 // Not serialized to metastore.
175 private String [] dbOutColumns;
176
177 // package to prepend to auto-named classes.
178 @StoredAsProperty("codegen.java.packagename") private String packageName;
179
180 // package+class to apply to individual table import.
181 // also used as an *input* class with existingJarFile.
182 @StoredAsProperty("codegen.java.classname") private String className;
183
184 // Name of a jar containing existing table definition
185 // class to use.
186 @StoredAsProperty("codegen.jar.file") private String existingJarFile;
187
188 @StoredAsProperty("mapreduce.num.mappers") private int numMappers;
189 @StoredAsProperty("enable.compression") private boolean useCompression;
190 @StoredAsProperty("compression.codec") private String compressionCodec;
191
192 // In direct mode, open a new stream every X bytes.
193 @StoredAsProperty("import.direct.split.size") private long directSplitSize;
194
195 // Max size of an inline LOB; larger LOBs are written
196 // to external files on disk.
197 @StoredAsProperty("import.max.inline.lob.size") private long maxInlineLobSize;
198
199 // Max number 'n' of rows to fetch from the
200 // database when more rows are needed.
201 @StoredAsProperty("import.fetch.size") private Integer fetchSize;
202
203 // HDFS path to read from when performing an export
204 @StoredAsProperty("export.source.dir") private String exportDir;
205
206 // Column to use for the WHERE clause in an UPDATE-based export.
207 @StoredAsProperty("export.update.col") private String updateKeyCol;
208
209 @StoredAsProperty("export.new.update") private UpdateMode updateMode;
210
211 private DelimiterSet inputDelimiters; // codegen.input.delimiters.
212 private DelimiterSet outputDelimiters; // codegen.output.delimiters.
213
214 private boolean areOutputDelimsManuallySet;
215 private boolean areInputDelimsManuallySet;
216
217 private Configuration conf;
218
219 public static final int DEFAULT_NUM_MAPPERS = 4;
220
221 private String [] extraArgs;
222
223 // HBase table to import into.
224 @StoredAsProperty("hbase.table") private String hbaseTable;
225
226 // Column family to prepend to inserted cols.
227 @StoredAsProperty("hbase.col.family") private String hbaseColFamily;
228
229 // Column of the input to use as the row key.
230 @StoredAsProperty("hbase.row.key.col") private String hbaseRowKeyCol;
231
232 // if true, create tables/col families.
233 @StoredAsProperty("hbase.create.table") private boolean hbaseCreateTable;
234
235 // col to filter on for incremental imports.
236 @StoredAsProperty("incremental.col") private String incrementalTestCol;
237 // incremental import mode we're using.
238 @StoredAsProperty("incremental.mode")
239 private IncrementalMode incrementalMode;
240 // What was the last-imported value of incrementalTestCol?
241 @StoredAsProperty("incremental.last.value")
242 private String incrementalLastValue;
243
244 // exclude these tables when importing all tables.
245 @StoredAsProperty("import.all_tables.exclude")
246 private String allTablesExclude;
247
248 // HDFS paths for "old" and "new" datasets in merge tool.
249 @StoredAsProperty("merge.old.path") private String mergeOldPath;
250 @StoredAsProperty("merge.new.path") private String mergeNewPath;
251
252 // "key" column for the merge operation.
253 @StoredAsProperty("merge.key.col") private String mergeKeyCol;
254
255
256 // These next two fields are not serialized to the metastore.
257 // If this SqoopOptions is created by reading a saved job, these will
258 // be populated by the JobStorage to facilitate updating the same
259 // job.
260 private String jobName;
261 private Map<String, String> jobStorageDescriptor;
262
263 // If we restore a job and then allow the user to apply arguments on
264 // top, we retain the version without the arguments in a reference to the
265 // 'parent' SqoopOptions instance, here.
266 private com.cloudera.sqoop.SqoopOptions parent;
267
268 // Nonce directory name. Generate one per process, lazily, if
269 // getNonceJarDir() is called. Not recorded in metadata. This is used as
270 // a temporary holding area for compilation work done by this process.
271 private static String curNonce;
272
273 // the connection manager fully qualified class name
274 @StoredAsProperty("connection.manager") private String connManagerClassName;
275
276 // The currently active tool. (Not saved in properties)
277 // Used to pass the SqoopTool instance in to mapreduce job configuration
278 // (JobBase, etc).
279 private SqoopTool activeSqoopTool;
280
281 // Flag to determine if data copied needs to be validated against the source
282 private boolean isValidationEnabled;
283   // These take an FQCN as input; they are resolved to Class objects up front
      // so that failures surface early (fail fast).
284 private Class validatorClass; // Class for the validator implementation.
285 private Class validationThresholdClass; // ValidationThreshold implementation
286 private Class validationFailureHandlerClass; // FailureHandler implementation
287
288 public SqoopOptions() {
289 initDefaults(null);
290 }
291
292 public SqoopOptions(Configuration conf) {
293 initDefaults(conf);
294 }
295
296 /**
297 * Alternate SqoopOptions interface used mostly for unit testing.
298 * @param connect JDBC connect string to use
299 * @param table Table to read
300 */
301 public SqoopOptions(final String connect, final String table) {
302 initDefaults(null);
303
304 this.connectString = connect;
305 this.tableName = table;
306 }
307
308 private boolean getBooleanProperty(Properties props, String propName,
309 boolean defaultValue) {
310 String str = props.getProperty(propName,
311 Boolean.toString(defaultValue)).toLowerCase();
312 return "true".equals(str) || "yes".equals(str) || "1".equals(str);
313 }
314
315 private long getLongProperty(Properties props, String propName,
316 long defaultValue) {
317 String str = props.getProperty(propName,
318 Long.toString(defaultValue)).toLowerCase();
319 try {
320 return Long.parseLong(str);
321 } catch (NumberFormatException nfe) {
322 LOG.warn("Could not parse integer value for config parameter "
323 + propName);
324 return defaultValue;
325 }
326 }
327
328 private int getIntProperty(Properties props, String propName,
329 int defaultVal) {
330 long longVal = getLongProperty(props, propName, defaultVal);
331 return (int) longVal;
332 }
333
334 private char getCharProperty(Properties props, String propName,
335 char defaultVal) {
336 int intVal = getIntProperty(props, propName, (int) defaultVal);
337 return (char) intVal;
338 }
339
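// The delimiter characters are persisted under <prefix>.field, <prefix>.record,
// <prefix>.enclose and <prefix>.escape as integer code points, plus
// <prefix>.enclose.required as a boolean; see setDelimiterProperties() below.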
340 private DelimiterSet getDelimiterProperties(Properties props,
341 String prefix, DelimiterSet defaults) {
342
343 if (null == defaults) {
344 defaults = new DelimiterSet();
345 }
346
347 char field = getCharProperty(props, prefix + ".field",
348 defaults.getFieldsTerminatedBy());
349 char record = getCharProperty(props, prefix + ".record",
350 defaults.getLinesTerminatedBy());
351 char enclose = getCharProperty(props, prefix + ".enclose",
352 defaults.getEnclosedBy());
353 char escape = getCharProperty(props, prefix + ".escape",
354 defaults.getEscapedBy());
355 boolean required = getBooleanProperty(props, prefix +".enclose.required",
356 defaults.isEncloseRequired());
357
358 return new DelimiterSet(field, record, enclose, escape, required);
359 }
360
361 private void setDelimiterProperties(Properties props,
362 String prefix, DelimiterSet values) {
363 putProperty(props, prefix + ".field",
364 Integer.toString((int) values.getFieldsTerminatedBy()));
365 putProperty(props, prefix + ".record",
366 Integer.toString((int) values.getLinesTerminatedBy()));
367 putProperty(props, prefix + ".enclose",
368 Integer.toString((int) values.getEnclosedBy()));
369 putProperty(props, prefix + ".escape",
370 Integer.toString((int) values.getEscapedBy()));
371 putProperty(props, prefix + ".enclose.required",
372 Boolean.toString(values.isEncloseRequired()));
373 }
374
375 /** Take a comma-delimited list of input and split the elements
376 * into an output array. */
377 private String [] listToArray(String strList) {
378 return strList.split(",");
379 }
380
381 private String arrayToList(String [] array) {
382 if (null == array) {
383 return null;
384 }
385
386 StringBuilder sb = new StringBuilder();
387 boolean first = true;
388 for (String elem : array) {
389 if (!first) {
390 sb.append(",");
391 }
392 sb.append(elem);
393 first = false;
394 }
395
396 return sb.toString();
397 }
398
399 /**
400    * A put() method for Properties that is tolerant of 'null' values.
401 * If a null value is specified, the property is unset.
402 */
403 private void putProperty(Properties props, String k, String v) {
404 if (null == v) {
405 props.remove(k);
406 } else {
407 props.setProperty(k, v);
408 }
409 }
410
411 /**
412 * Given a property prefix that denotes a set of numbered properties,
413 * return an array containing all the properties.
414 *
415 * For instance, if prefix is "foo", then return properties "foo.0",
416 * "foo.1", "foo.2", and so on as an array. If no such properties
417 * exist, return 'defaults'.
418 */
419 private String [] getArgArrayProperty(Properties props, String prefix,
420 String [] defaults) {
421 int cur = 0;
422 ArrayList<String> al = new ArrayList<String>();
423 while (true) {
424 String curProp = prefix + "." + cur;
425 String curStr = props.getProperty(curProp, null);
426 if (null == curStr) {
427 break;
428 }
429
430 al.add(curStr);
431 cur++;
432 }
433
434 if (cur == 0) {
435 // Couldn't find an array here; return the defaults.
436 return defaults;
437 }
438
439 return al.toArray(new String[0]);
440 }
441
442 private void setArgArrayProperties(Properties props, String prefix,
443 String [] values) {
444 if (null == values) {
445 return;
446 }
447
448 for (int i = 0; i < values.length; i++) {
449 putProperty(props, prefix + "." + i, values[i]);
450 }
451 }
452
453 /**
454    * This method encodes the key/value pairs found in the provided
455    * properties instance <tt>values</tt> into another properties instance
456    * <tt>props</tt>. The specified <tt>prefix</tt> is used as a namespace
457    * qualifier for keys when inserting. This makes it easy to later inspect
458    * the keys in the <tt>props</tt> instance and separate out all
459    * the properties that belong to the <tt>values</tt> instance.
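   *
   * <p>For example, with prefix "db.connect.params", a contained property
   * "characterEncoding=utf8" is stored in <tt>props</tt> under the key
   * "db.connect.params.characterEncoding" (the property name here is
   * illustrative).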
460 * @param props the container properties instance
461 * @param prefix the prefix for qualifying contained property keys.
462 * @param values the contained properties instance, all of whose elements will
463 * be added to the container properties instance.
464 *
465 * @see #getPropertiesAsNetstedProperties(Properties, String)
466 */
467 private void setPropertiesAsNestedProperties(Properties props,
468 String prefix, Properties values) {
469 String nestedPropertyPrefix = prefix + ".";
470 if (null == values || values.size() == 0) {
471 Iterator<String> it = props.stringPropertyNames().iterator();
472 while (it.hasNext()) {
473 String name = it.next();
474 if (name.startsWith(nestedPropertyPrefix)) {
475 props.remove(name);
476 }
477 }
478 } else {
479 Iterator<String> it = values.stringPropertyNames().iterator();
480 while (it.hasNext()) {
481 String name = it.next();
482 putProperty(props,
483 nestedPropertyPrefix + name, values.getProperty(name));
484 }
485 }
486 }
487
488 /**
489    * This method decodes the key/value pairs found in the provided
490    * properties instance <tt>props</tt> whose keys begin with the
491    * given prefix. Matching entries are copied into the returned instance
492    * with their prefix dropped; the given <tt>props</tt> is not modified.
493 * @param props the properties container
494 * @param prefix the prefix qualifying properties that need to be removed
495 * @return a new properties instance that contains all matching elements from
496 * the container properties.
497 */
498 private Properties getPropertiesAsNetstedProperties(
499 Properties props, String prefix) {
500 Properties nestedProps = new Properties();
501 String nestedPropertyPrefix = prefix + ".";
502 int index = nestedPropertyPrefix.length();
503 if (props != null && props.size() > 0) {
504 Iterator<String> it = props.stringPropertyNames().iterator();
505 while (it.hasNext()) {
506 String name = it.next();
507 if (name.startsWith(nestedPropertyPrefix)){
508 String shortName = name.substring(index);
509 nestedProps.put(shortName, props.get(name));
510 }
511 }
512 }
513 return nestedProps;
514 }
515
516 @SuppressWarnings("unchecked")
517 /**
518 * Given a set of properties, load this into the current SqoopOptions
519 * instance.
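   * Fields annotated with {@code StoredAsProperty} are populated reflectively;
   * fields with special types (arrays, delimiters, nested properties) are
   * handled explicitly after the reflective pass.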
520 */
521 public void loadProperties(Properties props) {
522
523 try {
524 Field [] fields = SqoopOptions.class.getDeclaredFields();
525 for (Field f : fields) {
526 if (f.isAnnotationPresent(StoredAsProperty.class)) {
527 Class typ = f.getType();
528 StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
529 String propName = storedAs.value();
530
531 if (typ.equals(int.class)) {
532 f.setInt(this,
533 getIntProperty(props, propName, f.getInt(this)));
534 } else if (typ.equals(boolean.class)) {
535 f.setBoolean(this,
536 getBooleanProperty(props, propName, f.getBoolean(this)));
537 } else if (typ.equals(long.class)) {
538 f.setLong(this,
539 getLongProperty(props, propName, f.getLong(this)));
540 } else if (typ.equals(String.class)) {
541 f.set(this, props.getProperty(propName, (String) f.get(this)));
542 } else if (typ.equals(Integer.class)) {
543 String value = props.getProperty(
544 propName,
545 f.get(this) == null ? "null" : f.get(this).toString());
546 f.set(this, value.equals("null") ? null : new Integer(value));
547 } else if (typ.isEnum()) {
548 f.set(this, Enum.valueOf(typ,
549 props.getProperty(propName, f.get(this).toString())));
550 } else {
551 throw new RuntimeException("Could not retrieve property "
552 + propName + " for type: " + typ);
553 }
554 }
555 }
556 } catch (IllegalAccessException iae) {
557 throw new RuntimeException("Illegal access to field in property setter",
558 iae);
559 }
560
561 // Now load properties that were stored with special types, or require
562 // additional logic to set.
563
564 loadPasswordProperty(props);
565
566 if (this.jarDirIsAuto) {
567 // We memoized a user-specific nonce dir for compilation to the data
568 // store. Disregard that setting and create a new nonce dir.
569 String localUsername = System.getProperty("user.name", "unknown");
570 this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
571 + "/compile");
572 }
573
574 String colListStr = props.getProperty("db.column.list", null);
575 if (null != colListStr) {
576 this.columns = listToArray(colListStr);
577 }
578
579 this.inputDelimiters = getDelimiterProperties(props,
580 "codegen.input.delimiters", this.inputDelimiters);
581 this.outputDelimiters = getDelimiterProperties(props,
582 "codegen.output.delimiters", this.outputDelimiters);
583
584 this.extraArgs = getArgArrayProperty(props, "tool.arguments",
585 this.extraArgs);
586
587 this.connectionParams =
588 getPropertiesAsNetstedProperties(props, "db.connect.params");
589
590 // Loading user mapping
591 this.mapColumnHive =
592 getPropertiesAsNetstedProperties(props, "map.column.hive");
593 this.mapColumnJava =
594 getPropertiesAsNetstedProperties(props, "map.column.java");
595
596 // Delimiters were previously memoized; don't let the tool override
597 // them with defaults.
598 this.areOutputDelimsManuallySet = true;
599 this.areInputDelimsManuallySet = true;
600
601 // If we loaded true verbose flag, we need to apply it
602 if (this.verbose) {
603 LoggingUtils.setDebugLevel();
604 }
605 }
606
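  // Loads the password, preferring an explicit password file, then a console
  // prompt when db.require.password is set, and finally any password stored
  // directly in the properties.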
607 private void loadPasswordProperty(Properties props) {
608 passwordFilePath = props.getProperty("db.password.file");
609 if (passwordFilePath != null) {
610 try {
611 password = CredentialsUtil.fetchPasswordFromFile(
612 getConf(), passwordFilePath);
613 return; // short-circuit
614 } catch (IOException e) {
615 throw new RuntimeException("Unable to fetch password from file.", e);
616 }
617 }
618
619 if (getBooleanProperty(props, "db.require.password", false)) {
620 // The user's password was stripped out from the metastore.
621 // Require that the user enter it now.
622 setPasswordFromConsole();
623 } else {
624 this.password = props.getProperty("db.password", this.password);
625 }
626 }
627
628 /**
629 * Return a Properties instance that encapsulates all the "sticky"
630 * state of this SqoopOptions that should be written to a metastore
631 * to restore the job later.
632 */
633 public Properties writeProperties() {
634 Properties props = new Properties();
635
636 try {
637 Field [] fields = SqoopOptions.class.getDeclaredFields();
638 for (Field f : fields) {
639 if (f.isAnnotationPresent(StoredAsProperty.class)) {
640 Class typ = f.getType();
641 StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
642 String propName = storedAs.value();
643
644 if (typ.equals(int.class)) {
645 putProperty(props, propName, Integer.toString(f.getInt(this)));
646 } else if (typ.equals(boolean.class)) {
647 putProperty(props, propName, Boolean.toString(f.getBoolean(this)));
648 } else if (typ.equals(long.class)) {
649 putProperty(props, propName, Long.toString(f.getLong(this)));
650 } else if (typ.equals(String.class)) {
651 putProperty(props, propName, (String) f.get(this));
652 } else if (typ.equals(Integer.class)) {
653 putProperty(
654 props,
655 propName,
656 f.get(this) == null ? "null" : f.get(this).toString());
657 } else if (typ.isEnum()) {
658 putProperty(props, propName, f.get(this).toString());
659 } else {
660 throw new RuntimeException("Could not set property "
661 + propName + " for type: " + typ);
662 }
663 }
664 }
665 } catch (IllegalAccessException iae) {
666 throw new RuntimeException("Illegal access to field in property setter",
667 iae);
668 }
669
670 writePasswordProperty(props);
671
672 putProperty(props, "db.column.list", arrayToList(this.columns));
673 setDelimiterProperties(props, "codegen.input.delimiters",
674 this.inputDelimiters);
675 setDelimiterProperties(props, "codegen.output.delimiters",
676 this.outputDelimiters);
677 setArgArrayProperties(props, "tool.arguments", this.extraArgs);
678
679 setPropertiesAsNestedProperties(props,
680 "db.connect.params", this.connectionParams);
681
682 setPropertiesAsNestedProperties(props,
683 "map.column.hive", this.mapColumnHive);
684 setPropertiesAsNestedProperties(props,
685 "map.column.java", this.mapColumnJava);
686 return props;
687 }
688
689 private void writePasswordProperty(Properties props) {
690 if (getPasswordFilePath() != null) { // short-circuit
691 putProperty(props, "db.password.file", getPasswordFilePath());
692 return;
693 }
694
695 if (this.getConf().getBoolean(
696 METASTORE_PASSWORD_KEY, METASTORE_PASSWORD_DEFAULT)) {
697 // If the user specifies, we may store the password in the metastore.
698 putProperty(props, "db.password", this.password);
699 putProperty(props, "db.require.password", "false");
700 } else if (this.password != null) {
701 // Otherwise, if the user has set a password, we just record
702 // a flag stating that the password will need to be reentered.
703 putProperty(props, "db.require.password", "true");
704 } else {
705 // No password saved or required.
706 putProperty(props, "db.require.password", "false");
707 }
708 }
709
710 @Override
711 public Object clone() {
712 try {
713 SqoopOptions other = (SqoopOptions) super.clone();
714 if (null != columns) {
715 other.columns = Arrays.copyOf(columns, columns.length);
716 }
717
718 if (null != dbOutColumns) {
719 other.dbOutColumns = Arrays.copyOf(dbOutColumns, dbOutColumns.length);
720 }
721
722 if (null != inputDelimiters) {
723 other.inputDelimiters = (DelimiterSet) inputDelimiters.clone();
724 }
725
726 if (null != outputDelimiters) {
727 other.outputDelimiters = (DelimiterSet) outputDelimiters.clone();
728 }
729
730 if (null != conf) {
731 other.conf = new Configuration(conf);
732 }
733
734 if (null != extraArgs) {
735 other.extraArgs = Arrays.copyOf(extraArgs, extraArgs.length);
736 }
737
738 if (null != connectionParams) {
739 other.setConnectionParams(this.connectionParams);
740 }
741
742 if (null != mapColumnHive) {
743 other.mapColumnHive = (Properties) this.mapColumnHive.clone();
744 }
745
746 if (null != mapColumnJava) {
747 other.mapColumnJava = (Properties) this.mapColumnJava.clone();
748 }
749
750 return other;
751 } catch (CloneNotSupportedException cnse) {
752 // Shouldn't happen.
753 return null;
754 }
755 }
756
757 /**
758 * @return the temp directory to use; this is guaranteed to end with
759 * the file separator character (e.g., '/').
760 */
761 public String getTempDir() {
762 return this.tmpDir;
763 }
764
765 /**
766 * Return the name of a directory that does not exist before
767 * calling this method, and does exist afterward. We should be
768 * the only client of this directory. If this directory is not
769 * used during the lifetime of the JVM, schedule it to be removed
770 * when the JVM exits.
771 */
772 private static String getNonceJarDir(String tmpBase) {
773
774 // Make sure we don't loop forever in the event of a permission error.
775 final int MAX_DIR_CREATE_ATTEMPTS = 32;
776
777 if (null != curNonce) {
778 return curNonce;
779 }
780
781 File baseDir = new File(tmpBase);
782 File hashDir = null;
783
784 for (int attempts = 0; attempts < MAX_DIR_CREATE_ATTEMPTS; attempts++) {
785 hashDir = new File(baseDir, RandomHash.generateMD5String());
786 while (hashDir.exists()) {
787 hashDir = new File(baseDir, RandomHash.generateMD5String());
788 }
789
790 if (hashDir.mkdirs()) {
791 // We created the directory. Use it.
792 // If this directory is not actually filled with files, delete it
793 // when the JVM quits.
794 hashDir.deleteOnExit();
795 break;
796 }
797 }
798
799 if (hashDir == null || !hashDir.exists()) {
800 throw new RuntimeException("Could not create temporary directory: "
801 + hashDir + "; check for a directory permissions issue on /tmp.");
802 }
803
804 LOG.debug("Generated nonce dir: " + hashDir.toString());
805 SqoopOptions.curNonce = hashDir.toString();
806 return SqoopOptions.curNonce;
807 }
808
809 /**
810 * Reset the nonce directory and force a new one to be generated. This
811 * method is intended to be used only by multiple unit tests that want
812 * to isolate themselves from one another. It should not be called
813 * during normal Sqoop execution.
814 */
815 public static void clearNonceDir() {
816 LOG.warn("Clearing nonce directory");
817 SqoopOptions.curNonce = null;
818 }
819
820 public static String getHiveHomeDefault() {
821 // Set this with $HIVE_HOME, but -Dhive.home can override.
822 String hiveHome = System.getenv("HIVE_HOME");
823 hiveHome = System.getProperty("hive.home", hiveHome);
824 if (hiveHome == null) {
825 hiveHome = DEF_HIVE_HOME;
826 }
827 return hiveHome;
828 }
829
830 public static String getHCatHomeDefault() {
831     // Set this with $HCAT_HOME, but -Dhcat.home can override.
832 String hcatHome = System.getenv("HCAT_HOME");
833 hcatHome = System.getProperty("hcat.home", hcatHome);
834 if (hcatHome == null) {
835 hcatHome = DEF_HCAT_HOME;
836 }
837 return hcatHome;
838 }
839
840 private void initDefaults(Configuration baseConfiguration) {
841 // first, set the true defaults if nothing else happens.
842 // default action is to run the full pipeline.
843 this.hadoopMapRedHome = System.getenv("HADOOP_MAPRED_HOME");
844
845 this.hiveHome = getHiveHomeDefault();
846 this.hCatHome = getHCatHomeDefault();
847
848 this.inputDelimiters = new DelimiterSet(
849 DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR,
850 DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);
851 this.outputDelimiters = new DelimiterSet();
852
853 // Set this to cwd, but -Dsqoop.src.dir can override.
854 this.codeOutputDir = System.getProperty("sqoop.src.dir", ".");
855
856 String myTmpDir = System.getProperty("test.build.data", "/tmp/");
857 if (!myTmpDir.endsWith(File.separator)) {
858 myTmpDir = myTmpDir + File.separator;
859 }
860
861 this.tmpDir = myTmpDir;
862 String localUsername = System.getProperty("user.name", "unknown");
863 this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
864 + "/compile");
865 this.jarDirIsAuto = true;
866 this.layout = FileLayout.TextFile;
867
868 this.areOutputDelimsManuallySet = false;
869 this.areInputDelimsManuallySet = false;
870
871 this.numMappers = DEFAULT_NUM_MAPPERS;
872 this.useCompression = false;
873 this.compressionCodec = null;
874 this.directSplitSize = 0;
875
876 this.maxInlineLobSize = LargeObjectLoader.DEFAULT_MAX_LOB_LENGTH;
877
878 // Don't set a default value for fetchsize. This allows a JDBCManager to
879 // provide a database-specific default, if no value is provided by the
880 // user.
881 this.fetchSize = null;
882
883 if (null == baseConfiguration) {
884 this.conf = new Configuration();
885 } else {
886 this.conf = baseConfiguration;
887 }
888
889 this.extraArgs = null;
890
891 this.dbOutColumns = null;
892
893 this.incrementalMode = IncrementalMode.None;
894
895 this.updateMode = UpdateMode.UpdateOnly;
896
897 // Creating instances for user specific mapping
898 this.mapColumnHive = new Properties();
899 this.mapColumnJava = new Properties();
900
901     // Be verbose only when explicitly requested.
902 this.verbose = false;
903 this.isValidationEnabled = false; // validation is disabled by default
904 this.validatorClass = RowCountValidator.class;
905 this.validationThresholdClass = AbsoluteValidationThreshold.class;
906 this.validationFailureHandlerClass = AbortOnFailureHandler.class;
907 }
908
909 /**
910 * Given a string containing a single character or an escape sequence
911 * representing a char, return that char itself.
912 *
913 * Normal literal characters return themselves: "x" -&gt; 'x', etc.
914 * Strings containing a '\' followed by one of t, r, n, or b escape to the
915 * usual character as seen in Java: "\n" -&gt; (newline), etc.
916 *
917 * Strings like "\0ooo" return the character specified by the octal sequence
918 * 'ooo'. Strings like "\0xhhh" or "\0Xhhh" return the character specified by
919 * the hex sequence 'hhh'.
920 *
921 * If the input string contains leading or trailing spaces, these are
922 * ignored.
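   *
   * For example, "x" yields 'x', "\t" yields the tab character, "\0" yields
   * the NUL character, "\054" yields ',' (octal), and "\0x2c" also yields ','
   * (hex).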
923 */
924 public static char toChar(String charish) throws InvalidOptionsException {
925 if (null == charish || charish.length() == 0) {
926 throw new InvalidOptionsException("Character argument expected."
927 + "\nTry --help for usage instructions.");
928 }
929
930 if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
931 if (charish.length() == 3) {
932 throw new InvalidOptionsException(
933 "Base-16 value expected for character argument."
934 + "\nTry --help for usage instructions.");
935 } else {
936 String valStr = charish.substring(3);
937 int val = Integer.parseInt(valStr, 16);
938 return (char) val;
939 }
940 } else if (charish.startsWith("\\0")) {
941 if (charish.equals("\\0")) {
942 // it's just '\0', which we can take as shorthand for nul.
943 return DelimiterSet.NULL_CHAR;
944 } else {
945 // it's an octal value.
946 String valStr = charish.substring(2);
947 int val = Integer.parseInt(valStr, 8);
948 return (char) val;
949 }
950 } else if (charish.startsWith("\\")) {
951 if (charish.length() == 1) {
952 // it's just a '\'. Keep it literal.
953 return '\\';
954 } else if (charish.length() > 2) {
955 // we don't have any 3+ char escape strings.
956 throw new InvalidOptionsException(
957 "Cannot understand character argument: " + charish
958 + "\nTry --help for usage instructions.");
959 } else {
960 // this is some sort of normal 1-character escape sequence.
961 char escapeWhat = charish.charAt(1);
962 switch(escapeWhat) {
963 case 'b':
964 return '\b';
965 case 'n':
966 return '\n';
967 case 'r':
968 return '\r';
969 case 't':
970 return '\t';
971 case '\"':
972 return '\"';
973 case '\'':
974 return '\'';
975 case '\\':
976 return '\\';
977 default:
978 throw new InvalidOptionsException(
979 "Cannot understand character argument: " + charish
980 + "\nTry --help for usage instructions.");
981 }
982 }
983 } else {
984 // it's a normal character.
985 if (charish.length() > 1) {
986 LOG.warn("Character argument " + charish + " has multiple characters; "
987 + "only the first will be used.");
988 }
989
990 return charish.charAt(0);
991 }
992 }
993
994 public boolean getVerbose() {
995 return verbose;
996 }
997
998 public void setVerbose(boolean beVerbose) {
999 this.verbose = beVerbose;
1000 }
1001
1002 /**
1003 * Get the temporary directory; guaranteed to end in File.separator
1004 * (e.g., '/').
1005 */
1006 public String getTmpDir() {
1007 return tmpDir;
1008 }
1009
1010 public void setTmpDir(String tmp) {
1011 this.tmpDir = tmp;
1012 }
1013
1014 public String getConnectString() {
1015 return connectString;
1016 }
1017
1018 public void setConnectString(String connectStr) {
1019 this.connectString = connectStr;
1020 }
1021
1022 public String getTableName() {
1023 return tableName;
1024 }
1025
1026 public void setTableName(String table) {
1027 this.tableName = table;
1028 }
1029
1030 public String getStagingTableName() {
1031 return stagingTableName;
1032 }
1033
1034 public void setStagingTableName(String stagingTable) {
1035 this.stagingTableName = stagingTable;
1036 }
1037
1038 public boolean doClearStagingTable() {
1039 return clearStagingTable;
1040 }
1041
1042 public void setClearStagingTable(boolean clear) {
1043 clearStagingTable = clear;
1044 }
1045
1046 public String getExportDir() {
1047 return exportDir;
1048 }
1049
1050 public void setExportDir(String dir) {
1051 this.exportDir = dir;
1052 }
1053
1054 public String getExistingJarName() {
1055 return existingJarFile;
1056 }
1057
1058 public void setExistingJarName(String jarFile) {
1059 this.existingJarFile = jarFile;
1060 }
1061
1062 public String[] getColumns() {
1063 if (null == columns) {
1064 return null;
1065 } else {
1066 return Arrays.copyOf(columns, columns.length);
1067 }
1068 }
1069
1070 public void setColumns(String [] cols) {
1071 if (null == cols) {
1072 this.columns = null;
1073 } else {
1074 this.columns = Arrays.copyOf(cols, cols.length);
1075 }
1076 }
1077
1078 public String getSplitByCol() {
1079 return splitByCol;
1080 }
1081
1082 public void setSplitByCol(String splitBy) {
1083 this.splitByCol = splitBy;
1084 }
1085
1086 public String getWhereClause() {
1087 return whereClause;
1088 }
1089
1090 public void setWhereClause(String where) {
1091 this.whereClause = where;
1092 }
1093
1094 public String getUsername() {
1095 return username;
1096 }
1097
1098 public void setUsername(String user) {
1099 this.username = user;
1100 }
1101
1102 public String getPassword() {
1103 return password;
1104 }
1105
1106 public String getPasswordFilePath() {
1107 return passwordFilePath;
1108 }
1109
1110 public void setPasswordFilePath(String passwdFilePath) {
1111 this.passwordFilePath = passwdFilePath;
1112 }
1113
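  /**
   * Parses a user-supplied column mapping of the form "col1=Type1,col2=Type2"
   * into the given Properties instance, e.g. "id=Long,name=String" (the column
   * and type names in this example are illustrative).
   */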
1114 protected void parseColumnMapping(String mapping,
1115 Properties output) {
1116 output.clear();
1117 String[] maps = mapping.split(",");
1118 for(String map : maps) {
1119 String[] details = map.split("=");
1120 output.put(details[0], details[1]);
1121 }
1122 }
1123
1124 public void setMapColumnHive(String mapColumn) {
1125 parseColumnMapping(mapColumn, mapColumnHive);
1126 }
1127
1128 public void setMapColumnJava(String mapColumn) {
1129 parseColumnMapping(mapColumn, mapColumnJava);
1130 }
1131
1132 public Properties getMapColumnHive() {
1133 return mapColumnHive;
1134 }
1135
1136 public Properties getMapColumnJava() {
1137 return mapColumnJava;
1138 }
1139
1140 /**
1141 * Allow the user to enter his password on the console without printing
1142 * characters.
1143 * @return the password as a string
1144 */
1145 private String securePasswordEntry() {
1146 try {
1147 return new String(System.console().readPassword("Enter password: "));
1148 } catch (NullPointerException e) {
1149 LOG.error("It seems that you have launched a Sqoop metastore job via");
1150 LOG.error("Oozie with sqoop.metastore.client.record.password disabled.");
1151 LOG.error("But this configuration is not supported because Sqoop can't");
1152 LOG.error("prompt the user to enter the password while being executed");
1153 LOG.error("as Oozie tasks. Please enable sqoop.metastore.client.record");
1154 LOG.error(".password in sqoop-site.xml, or provide the password");
1155 LOG.error("explicitly using --password in the command tag of the Oozie");
1156 LOG.error("workflow file.");
1157 return null;
1158 }
1159 }
1160
1161 /**
1162 * Set the password in this SqoopOptions from the console without printing
1163 * characters.
1164 */
1165 public void setPasswordFromConsole() {
1166 this.password = securePasswordEntry();
1167 }
1168
1169 public String getMapreduceJobName() {
1170 return mapreduceJobName;
1171 }
1172
1173 public void setMapreduceJobName(String mapredJobName) {
1174 this.mapreduceJobName = mapredJobName;
1175 }
1176
1177 public void setPassword(String pass) {
1178 this.password = pass;
1179 }
1180
1181 public boolean isDirect() {
1182 return direct;
1183 }
1184
1185 public void setDirectMode(boolean isDirect) {
1186 this.direct = isDirect;
1187 }
1188
1189 /**
1190    * @return true if the underlying statements are to be executed in batch
1191    * mode, or false if they are to be executed in a single multirow statement.
1192 */
1193 public boolean isBatchMode() {
1194 return batchMode;
1195 }
1196
1197 public void setBatchMode(boolean mode) {
1198 this.batchMode = mode;
1199 }
1200
1201 /**
1202 * @return the number of map tasks to use for import.
1203 */
1204 public int getNumMappers() {
1205 return this.numMappers;
1206 }
1207
1208 public void setNumMappers(int m) {
1209 this.numMappers = m;
1210 }
1211
1212 /**
1213 * @return the user-specified absolute class name for the table.
1214 */
1215 public String getClassName() {
1216 return className;
1217 }
1218
1219 public void setClassName(String name) {
1220 this.className = name;
1221 }
1222
1223 /**
1224 * @return the user-specified package to prepend to table names via
1225 * --package-name.
1226 */
1227 public String getPackageName() {
1228 return packageName;
1229 }
1230
1231 public void setPackageName(String name) {
1232 this.packageName = name;
1233 }
1234
1235 public String getHiveHome() {
1236 return hiveHome;
1237 }
1238
1239 public void setHiveHome(String home) {
1240 this.hiveHome = home;
1241 }
1242
1243 /** @return true if we should import the table into Hive. */
1244 public boolean doHiveImport() {
1245 return hiveImport;
1246 }
1247
1248 public void setHiveImport(boolean doImport) {
1249 this.hiveImport = doImport;
1250 }
1251
1252 /**
1253 * @return the user-specified option to overwrite existing table in hive.
1254 */
1255 public boolean doOverwriteHiveTable() {
1256 return overwriteHiveTable;
1257 }
1258
1259 public void setOverwriteHiveTable(boolean overwrite) {
1260 this.overwriteHiveTable = overwrite;
1261 }
1262
1263 /**
1264 * @return the user-specified option to modify fields to drop hive delimiters
1265 */
1266 public boolean doHiveDropDelims() {
1267 return hiveDropDelims;
1268 }
1269
1270 public void setHiveDropDelims(boolean dropHiveDelims) {
1271 this.hiveDropDelims = dropHiveDelims;
1272 }
1273
1274 /**
1275    * @return the user-specified replacement string for
1276    * Hive delimiters.
1277 */
1278 public String getHiveDelimsReplacement() {
1279 return hiveDelimsReplacement;
1280 }
1281
1282 public void setHiveDelimsReplacement(String replacement) {
1283 this.hiveDelimsReplacement = replacement;
1284 }
1285
1286 /**
1287    * @return true if Sqoop should fail during Hive table creation
1288    * when the target table already exists.
1289 */
1290 public boolean doFailIfHiveTableExists() {
1291 return failIfHiveTableExists;
1292 }
1293
1294 public void setFailIfHiveTableExists(boolean fail) {
1295 this.failIfHiveTableExists = fail;
1296 }
1297
1298 // HCatalog support
1299 public void setHCatTableName(String ht) {
1300 this.hCatTableName = ht;
1301 }
1302
1303 public String getHCatTableName() {
1304 return this.hCatTableName;
1305 }
1306
1307 public void setHCatDatabaseName(String hd) {
1308 this.hCatDatabaseName = hd;
1309 }
1310
1311 public String getHCatDatabaseName() {
1312 return this.hCatDatabaseName;
1313 }
1314
1315
1316 public String getHCatHome() {
1317 return hCatHome;
1318 }
1319
1320 public void setHCatHome(String home) {
1321 this.hCatHome = home;
1322 }
1323
1324 public boolean doCreateHCatalogTable() {
1325 return hCatCreateTable;
1326 }
1327
1328 public void setCreateHCatalogTable(boolean create) {
1329 this.hCatCreateTable = create;
1330 }
1331
1332 public void setHCatStorageStanza(String stanza) {
1333 this.hCatStorageStanza = stanza;
1334 }
1335
1336 public String getHCatStorageStanza() {
1337 return this.hCatStorageStanza;
1338 }
1339 /**
1340 * @return location where .java files go; guaranteed to end with '/'.
1341 */
1342 public String getCodeOutputDir() {
1343 if (codeOutputDir.endsWith(File.separator)) {
1344 return codeOutputDir;
1345 } else {
1346 return codeOutputDir + File.separator;
1347 }
1348 }
1349
1350 public void setCodeOutputDir(String outputDir) {
1351 this.codeOutputDir = outputDir;
1352 }
1353
1354 /**
1355 * @return location where .jar and .class files go; guaranteed to end with
1356 * '/'.
1357 */
1358 public String getJarOutputDir() {
1359 if (jarOutputDir.endsWith(File.separator)) {
1360 return jarOutputDir;
1361 } else {
1362 return jarOutputDir + File.separator;
1363 }
1364 }
1365
1366 public void setJarOutputDir(String outDir) {
1367 this.jarOutputDir = outDir;
1368 this.jarDirIsAuto = false;
1369 }
1370
1371 /**
1372 * Return the value of $HADOOP_MAPRED_HOME.
1373 * @return $HADOOP_MAPRED_HOME, or null if it's not set.
1374 */
1375 public String getHadoopMapRedHome() {
1376 return hadoopMapRedHome;
1377 }
1378
1379 public void setHadoopMapRedHome(String home) {
1380 this.hadoopMapRedHome = home;
1381 }
1382
1383 /**
1384 * @return a sql command to execute and exit with.
1385 */
1386 public String getSqlQuery() {
1387 return sqlQuery;
1388 }
1389
1390 public void setSqlQuery(String sqlStatement) {
1391 this.sqlQuery = sqlStatement;
1392 }
1393
1394 public String getBoundaryQuery() {
1395 return boundaryQuery;
1396 }
1397
1398 public void setBoundaryQuery(String sqlStatement) {
1399 boundaryQuery = sqlStatement;
1400 }
1401
1402 /**
1403 * @return The JDBC driver class name specified with --driver.
1404 */
1405 public String getDriverClassName() {
1406 return driverClassName;
1407 }
1408
1409 public void setDriverClassName(String driverClass) {
1410 this.driverClassName = driverClass;
1411 }
1412
1413 /**
1414 * @return the base destination path for table uploads.
1415 */
1416 public String getWarehouseDir() {
1417 return warehouseDir;
1418 }
1419
1420 public void setWarehouseDir(String warehouse) {
1421 this.warehouseDir = warehouse;
1422 }
1423
1424 public String getTargetDir() {
1425 return this.targetDir;
1426 }
1427
1428 public void setTargetDir(String dir) {
1429 this.targetDir = dir;
1430 }
1431
1432 public void setAppendMode(boolean doAppend) {
1433 this.append = doAppend;
1434 }
1435
1436 public boolean isAppendMode() {
1437 return this.append;
1438 }
1439
1440 /**
1441 * @return the destination file format
1442 */
1443 public FileLayout getFileLayout() {
1444 return this.layout;
1445 }
1446
1447 public void setFileLayout(FileLayout fileLayout) {
1448 this.layout = fileLayout;
1449 }
1450
1451 /**
1452 * @return the field delimiter to use when parsing lines. Defaults to the
1453 * field delim to use when printing lines.
1454 */
1455 public char getInputFieldDelim() {
1456 char f = inputDelimiters.getFieldsTerminatedBy();
1457 if (f == DelimiterSet.NULL_CHAR) {
1458 return this.outputDelimiters.getFieldsTerminatedBy();
1459 } else {
1460 return f;
1461 }
1462 }
1463
1464 /**
1465 * Set the field delimiter to use when parsing lines.
1466 */
1467 public void setInputFieldsTerminatedBy(char c) {
1468 this.inputDelimiters.setFieldsTerminatedBy(c);
1469 }
1470
1471 /**
1472 * @return the record delimiter to use when parsing lines. Defaults to the
1473 * record delim to use when printing lines.
1474 */
1475 public char getInputRecordDelim() {
1476 char r = inputDelimiters.getLinesTerminatedBy();
1477 if (r == DelimiterSet.NULL_CHAR) {
1478 return this.outputDelimiters.getLinesTerminatedBy();
1479 } else {
1480 return r;
1481 }
1482 }
1483
1484 /**
1485 * Set the record delimiter to use when parsing lines.
1486 */
1487 public void setInputLinesTerminatedBy(char c) {
1488 this.inputDelimiters.setLinesTerminatedBy(c);
1489 }
1490
1491 /**
1492 * @return the character that may enclose fields when parsing lines.
1493 * Defaults to the enclosing-char to use when printing lines.
1494 */
1495 public char getInputEnclosedBy() {
1496 char c = inputDelimiters.getEnclosedBy();
1497 if (c == DelimiterSet.NULL_CHAR) {
1498 return this.outputDelimiters.getEnclosedBy();
1499 } else {
1500 return c;
1501 }
1502 }
1503
1504 /**
1505 * Set the enclosed-by character to use when parsing lines.
1506 */
1507 public void setInputEnclosedBy(char c) {
1508 this.inputDelimiters.setEnclosedBy(c);
1509 }
1510
1511 /**
1512 * @return the escape character to use when parsing lines. Defaults to the
1513 * escape character used when printing lines.
1514 */
1515 public char getInputEscapedBy() {
1516 char c = inputDelimiters.getEscapedBy();
1517 if (c == DelimiterSet.NULL_CHAR) {
1518 return this.outputDelimiters.getEscapedBy();
1519 } else {
1520 return c;
1521 }
1522 }
1523
1524 /**
1525 * Set the escaped-by character to use when parsing lines.
1526 */
1527 public void setInputEscapedBy(char c) {
1528 this.inputDelimiters.setEscapedBy(c);
1529 }
1530
1531 /**
1532 * @return true if fields must be enclosed by the --enclosed-by character
1533 * when parsing. Defaults to false. Set true when --input-enclosed-by is
1534 * used.
1535 */
1536 public boolean isInputEncloseRequired() {
1537 char c = this.inputDelimiters.getEnclosedBy();
1538 if (c == DelimiterSet.NULL_CHAR) {
1539 return this.outputDelimiters.isEncloseRequired();
1540 } else {
1541 return this.inputDelimiters.isEncloseRequired();
1542 }
1543 }
1544
1545 /**
1546 * If true, then all input fields are expected to be enclosed by the
1547 * enclosed-by character when parsing.
1548 */
1549 public void setInputEncloseRequired(boolean required) {
1550 this.inputDelimiters.setEncloseRequired(required);
1551 }
1552
1553 /**
1554 * @return the character to print between fields when importing them to
1555 * text.
1556 */
1557 public char getOutputFieldDelim() {
1558 return this.outputDelimiters.getFieldsTerminatedBy();
1559 }
1560
1561 /**
1562 * Set the field delimiter to use when formatting lines.
1563 */
1564 public void setFieldsTerminatedBy(char c) {
1565 this.outputDelimiters.setFieldsTerminatedBy(c);
1566 }
1567
1568
1569 /**
1570 * @return the character to print between records when importing them to
1571 * text.
1572 */
1573 public char getOutputRecordDelim() {
1574 return this.outputDelimiters.getLinesTerminatedBy();
1575 }
1576
1577 /**
1578 * Set the record delimiter to use when formatting lines.
1579 */
1580 public void setLinesTerminatedBy(char c) {
1581 this.outputDelimiters.setLinesTerminatedBy(c);
1582 }
1583
1584 /**
1585 * @return a character which may enclose the contents of fields when
1586 * imported to text.
1587 */
1588 public char getOutputEnclosedBy() {
1589 return this.outputDelimiters.getEnclosedBy();
1590 }
1591
1592 /**
1593 * Set the enclosed-by character to use when formatting lines.
1594 */
1595 public void setEnclosedBy(char c) {
1596 this.outputDelimiters.setEnclosedBy(c);
1597 }
1598
1599 /**
1600 * @return a character which signifies an escape sequence when importing to
1601 * text.
1602 */
1603 public char getOutputEscapedBy() {
1604 return this.outputDelimiters.getEscapedBy();
1605 }
1606
1607 /**
1608 * Set the escaped-by character to use when formatting lines.
1609 */
1610 public void setEscapedBy(char c) {
1611 this.outputDelimiters.setEscapedBy(c);
1612 }
1613
1614 /**
1615 * @return true if fields imported to text must be enclosed by the
1616    * EnclosedBy char. Default is false; set to true if --enclosed-by is used
1617 * instead of --optionally-enclosed-by.
1618 */
1619 public boolean isOutputEncloseRequired() {
1620 return this.outputDelimiters.isEncloseRequired();
1621 }
1622
1623 /**
1624 * If true, then the enclosed-by character will be applied to all fields,
1625 * even if internal characters do not need enclosed-by protection.
1626 */
1627 public void setOutputEncloseRequired(boolean required) {
1628 this.outputDelimiters.setEncloseRequired(required);
1629 }
1630
1631 /**
1632 * @return the set of delimiters used for formatting output records.
1633 */
1634 public DelimiterSet getOutputDelimiters() {
1635 return this.outputDelimiters.copy();
1636 }
1637
1638 /**
1639 * Set the complete set of delimiters to use for output formatting.
1640 */
1641 public void setOutputDelimiters(DelimiterSet delimiters) {
1642 this.outputDelimiters = delimiters.copy();
1643 }
1644
1645 /**
1646 * @return the set of delimiters used for parsing the input.
1647 * This may include values implicitly set by the output delimiters.
1648 */
1649 public DelimiterSet getInputDelimiters() {
1650 return new DelimiterSet(
1651 getInputFieldDelim(),
1652 getInputRecordDelim(),
1653 getInputEnclosedBy(),
1654 getInputEscapedBy(),
1655 isInputEncloseRequired());
1656 }
1657
1658 /**
1659 * @return true if the user wants imported results to be compressed.
1660 */
1661 public boolean shouldUseCompression() {
1662 return this.useCompression || compressionCodec != null;
1663 }
1664
1665 public void setUseCompression(boolean compress) {
1666 this.useCompression = compress;
1667 }
1668
1669 /**
1670 * @return the name of the compression codec to use when importing.
1671 * E.g. <code>org.apache.hadoop.io.compress.GzipCodec</code>.
1672 */
1673 public String getCompressionCodec() {
1674 return compressionCodec;
1675 }
1676
1677 public void setCompressionCodec(String codec) {
1678 this.compressionCodec = codec;
1679 }
1680 /**
1681 * @return the name of the destination table when importing to Hive.
1682 */
1683 public String getHiveTableName() {
1684 if (null != this.hiveTableName) {
1685 return this.hiveTableName;
1686 } else {
1687 return this.tableName;
1688 }
1689 }
1690
1691 public void setHiveTableName(String name) {
1692 this.hiveTableName = name;
1693 }
1694
1695 public String getHiveDatabaseName() {
1696 return this.hiveDatabaseName;
1697 }
1698
1699 public void setHiveDatabaseName(String name) {
1700 this.hiveDatabaseName = name;
1701 }
1702
1703 public String getHivePartitionKey() {
1704 return hivePartitionKey;
1705 }
1706
1707 public void setHivePartitionKey(String hpk) {
1708 this.hivePartitionKey = hpk;
1709 }
1710
1711 public String getHivePartitionValue() {
1712 return hivePartitionValue;
1713 }
1714
1715 public void setHivePartitionValue(String hpv) {
1716 this.hivePartitionValue = hpv;
1717 }
1718
1719 /**
1720 * @return the file size to split by when using --direct mode.
1721 */
1722 public long getDirectSplitSize() {
1723 return this.directSplitSize;
1724 }
1725
1726 public void setDirectSplitSize(long splitSize) {
1727 this.directSplitSize = splitSize;
1728 }
1729
1730 /**
1731 * @return the max size of a LOB before we spill to a separate file.
1732 */
1733 public long getInlineLobLimit() {
1734 return this.maxInlineLobSize;
1735 }
1736
1737 public void setInlineLobLimit(long limit) {
1738 this.maxInlineLobSize = limit;
1739 }
1740
1741 public Integer getFetchSize() {
1742 return this.fetchSize;
1743 }
1744
1745 public void setFetchSize(Integer size) {
1746 this.fetchSize = size;
1747 }
1748
1749   /**
1750 * @return true if the output delimiters have been explicitly set by the user
1751 */
1752 public boolean explicitOutputDelims() {
1753 return areOutputDelimsManuallySet;
1754 }
1755
1756 /**
1757 * Flag the output delimiter settings as explicit user settings, or implicit.
1758 */
1759 public void setExplicitOutputDelims(boolean explicit) {
1760 this.areOutputDelimsManuallySet = explicit;
1761 }
1762
1763 /**
1764 * @return true if the input delimiters have been explicitly set by the user.
1765 */
1766 public boolean explicitInputDelims() {
1767 return areInputDelimsManuallySet;
1768 }
1769
1770 /**
1771 * Flag the input delimiter settings as explicit user settings, or implicit.
1772 */
1773 public void setExplicitInputDelims(boolean explicit) {
1774 this.areInputDelimsManuallySet = explicit;
1775 }
1776
1777 public Configuration getConf() {
1778 return conf;
1779 }
1780
1781 public void setConf(Configuration config) {
1782 this.conf = config;
1783 }
1784
1785 /**
1786 * @return command-line arguments after a '-'.
1787 */
1788 public String [] getExtraArgs() {
1789 if (extraArgs == null) {
1790 return null;
1791 }
1792
1793 String [] out = new String[extraArgs.length];
1794 for (int i = 0; i < extraArgs.length; i++) {
1795 out[i] = extraArgs[i];
1796 }
1797 return out;
1798 }
1799
1800 public void setExtraArgs(String [] args) {
1801 if (null == args) {
1802 this.extraArgs = null;
1803 return;
1804 }
1805
1806 this.extraArgs = new String[args.length];
1807 for (int i = 0; i < args.length; i++) {
1808 this.extraArgs[i] = args[i];
1809 }
1810 }
1811
1812 /**
1813 * Set the name of the column to be used in the WHERE clause of an
1814 * UPDATE-based export process.
1815 */
1816 public void setUpdateKeyCol(String colName) {
1817 this.updateKeyCol = colName;
1818 }
1819
1820 /**
1821 * @return the column which is the key column in a table to be exported
1822 * in update mode.
1823 */
1824 public String getUpdateKeyCol() {
1825 return this.updateKeyCol;
1826 }
1827
1828 /**
1829 * Set "UpdateOnly" to silently ignore new rows during update export.
1830 * Set "AllowInsert" to insert new rows during update export.
1831 */
1832 public void setUpdateMode(UpdateMode mode) {
1833 this.updateMode = mode;
1834 }
1835
1836 /**
1837 * @return how to handle new rows found in update export.
1838 */
1839 public UpdateMode getUpdateMode() {
1840 return updateMode;
1841 }
1842
1843 /**
1844 * @return an ordered list of column names. The code generator should
1845 * generate the DBWritable.write(PreparedStatement) method with columns
1846 * exporting in this order, if it is non-null.
1847 */
1848 public String [] getDbOutputColumns() {
1849 if (null != dbOutColumns) {
1850 return Arrays.copyOf(this.dbOutColumns, dbOutColumns.length);
1851 } else {
1852 return null;
1853 }
1854 }
1855
1856 /**
1857 * Set the order in which columns should be serialized by the generated
1858 * DBWritable.write(PreparedStatement) method. Setting this to null will use
1859 * the "natural order" of the database table.
1860 *
1861 * TODO: Expose this setter via the command-line arguments for the codegen
1862 * module. That would allow users to export to tables with columns in a
1863 * different physical order than the file layout in HDFS.
1864 */
1865 public void setDbOutputColumns(String [] outCols) {
1866 if (null == outCols) {
1867 this.dbOutColumns = null;
1868 } else {
1869 this.dbOutColumns = Arrays.copyOf(outCols, outCols.length);
1870 }
1871 }
1872
1873 /**
1874 * Set whether we should create missing HBase tables.
1875 */
1876 public void setCreateHBaseTable(boolean create) {
1877 this.hbaseCreateTable = create;
1878 }
1879
1880 /**
1881 * Returns true if we should create HBase tables/column families
1882 * that are missing.
1883 */
1884 public boolean getCreateHBaseTable() {
1885 return this.hbaseCreateTable;
1886 }
1887
1888 /**
1889 * Sets the HBase target column family.
1890 */
1891 public void setHBaseColFamily(String colFamily) {
1892 this.hbaseColFamily = colFamily;
1893 }
1894
1895 /**
1896 * Gets the HBase import target column family.
1897 */
1898 public String getHBaseColFamily() {
1899 return this.hbaseColFamily;
1900 }
1901
1902 /**
1903 * Gets the column to use as the row key in an HBase import.
1904 * If null, use the primary key column.
1905 */
1906 public String getHBaseRowKeyColumn() {
1907 return this.hbaseRowKeyCol;
1908 }
1909
1910 /**
1911 * Sets the column to use as the row key in an HBase import.
1912 */
1913 public void setHBaseRowKeyColumn(String col) {
1914 this.hbaseRowKeyCol = col;
1915 }
1916
1917 /**
1918 * Gets the target HBase table name, if any.
1919 */
1920 public String getHBaseTable() {
1921 return this.hbaseTable;
1922 }
1923
1924 /**
1925 * Sets the target HBase table name for an import.
1926 */
1927 public void setHBaseTable(String table) {
1928 this.hbaseTable = table;
1929 }
1930
1931 /**
1932 * Set the column of the import source table to check for incremental import
1933 * state.
1934 */
1935 public void setIncrementalTestColumn(String colName) {
1936 this.incrementalTestCol = colName;
1937 }
1938
1939 /**
1940 * Return the name of the column of the import source table
1941 * to check for incremental import state.
1942 */
1943 public String getIncrementalTestColumn() {
1944 return this.incrementalTestCol;
1945 }
1946
1947 /**
1948 * Set the incremental import mode to use.
1949 */
1950 public void setIncrementalMode(IncrementalMode mode) {
1951 this.incrementalMode = mode;
1952 }
1953
1954 /**
1955 * Get the incremental import mode to use.
1956 */
1957 public IncrementalMode getIncrementalMode() {
1958 return this.incrementalMode;
1959 }
1960
1961 /**
1962 * Set the last imported value of the incremental import test column.
1963 */
1964 public void setIncrementalLastValue(String lastVal) {
1965 this.incrementalLastValue = lastVal;
1966 }
1967
1968 /**
1969 * Get the last imported value of the incremental import test column.
1970 */
1971 public String getIncrementalLastValue() {
1972 return this.incrementalLastValue;
1973 }
1974
1975 /**
1976 * Set the tables to be excluded when doing all table import.
1977 */
1978 public void setAllTablesExclude(String exclude) {
1979 this.allTablesExclude = exclude;
1980 }
1981
1982 /**
1983 * Get the tables to be excluded when doing all table import.
1984 */
1985 public String getAllTablesExclude() {
1986 return this.allTablesExclude;
1987 }
1988
1989 /**
1990 * Set the name of the saved job this SqoopOptions belongs to.
1991 */
1992 public void setJobName(String job) {
1993 this.jobName = job;
1994 }
1995
1996 /**
1997 * Get the name of the saved job this SqoopOptions belongs to.
1998 */
1999 public String getJobName() {
2000 return this.jobName;
2001 }
2002
2003 /**
2004 * Set the JobStorage descriptor used to open the saved job
2005 * this SqoopOptions belongs to.
2006 */
2007 public void setStorageDescriptor(Map<String, String> descriptor) {
2008 this.jobStorageDescriptor = descriptor;
2009 }
2010
2011 /**
2012 * Get the JobStorage descriptor used to open the saved job
2013 * this SqoopOptions belongs to.
2014 */
2015 public Map<String, String> getStorageDescriptor() {
2016 return this.jobStorageDescriptor;
2017 }
2018
2019 /**
2020 * Return the parent instance this SqoopOptions is derived from.
2021 */
2022 public com.cloudera.sqoop.SqoopOptions getParent() {
2023 return this.parent;
2024 }
2025
2026 /**
2027 * Set the parent instance this SqoopOptions is derived from.
2028 */
2029 public void setParent(com.cloudera.sqoop.SqoopOptions options) {
2030 this.parent = options;
2031 }
2032
2033 /**
2034 * Set the path name used to do an incremental import of old data
2035 * which will be combined with a "new" dataset.
2036 */
2037 public void setMergeOldPath(String path) {
2038 this.mergeOldPath = path;
2039 }
2040
2041 /**
2042 * Return the path name used to do an incremental import of old data
2043 * which will be combined with a "new" dataset.
2044 */
2045 public String getMergeOldPath() {
2046 return this.mergeOldPath;
2047 }
2048
2049 /**
2050 * Set the path name used to do an incremental import of new data
2051 * which will be combined with an "old" dataset.
2052 */
2053 public void setMergeNewPath(String path) {
2054 this.mergeNewPath = path;
2055 }
2056
2057 /**
2058 * Return the path name used to do an incremental import of new data
2059 * which will be combined with an "old" dataset.
2060 */
2061 public String getMergeNewPath() {
2062 return this.mergeNewPath;
2063 }
2064
2065 /**
2066 * Set the name of the column used to merge an old and new dataset.
2067 */
2068 public void setMergeKeyCol(String col) {
2069 this.mergeKeyCol = col;
2070 }
2071
2072 /** Return the name of the column used to merge an old and new dataset. */
2073 public String getMergeKeyCol() {
2074 return this.mergeKeyCol;
2075 }
2076
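/**
* Set the fully qualified class name of the connection manager to use,
* overriding the one that would otherwise be chosen automatically.
*/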
2077 public void setConnManagerClassName(String connManagerClass) {
2078 this.connManagerClassName = connManagerClass;
2079 }
2080
2081 public String getConnManagerClassName() {
2082 return connManagerClassName;
2083 }
2084
2085 /** @return the SqoopTool that is operating this session. */
2086 public SqoopTool getActiveSqoopTool() {
2087 return activeSqoopTool;
2088 }
2089
2090 public void setActiveSqoopTool(SqoopTool tool) {
2091 activeSqoopTool = tool;
2092 }
2093
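/**
* Set the string to be written for a SQL NULL value in string columns.
*/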
2094 public void setNullStringValue(String nullString) {
2095 this.nullStringValue = nullString;
2096 }
2097
2098 public String getNullStringValue() {
2099 return nullStringValue;
2100 }
2101
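/**
* Set the string to be interpreted as a SQL NULL value in string columns
* of the input data.
*/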
2102 public void setInNullStringValue(String inNullString) {
2103 this.inNullStringValue = inNullString;
2104 }
2105
2106 public String getInNullStringValue() {
2107 return inNullStringValue;
2108 }
2109
2110 public void setNullNonStringValue(String nullNonString) {
2111 this.nullNonStringValue = nullNonString;
2112 }
2113
2114 public String getNullNonStringValue() {
2115 return nullNonStringValue;
2116 }
2117
2118 public void setInNullNonStringValue(String inNullNonString) {
2119 this.inNullNonStringValue = inNullNonString;
2120 }
2121
2122 public String getInNullNonStringValue() {
2123 return inNullNonStringValue;
2124 }
2125
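/**
* Set extra parameters to pass on the JDBC connection; the supplied
* Properties object is copied.
*/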
2126 public void setConnectionParams(Properties params) {
2127 connectionParams = new Properties();
2128 connectionParams.putAll(params);
2129 }
2130
2131 public Properties getConnectionParams() {
2132 return connectionParams;
2133 }
2134
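/**
* Enable or disable validation of the transferred data.
*/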
2135 public void setValidationEnabled(boolean validationEnabled) {
2136 isValidationEnabled = validationEnabled;
2137 }
2138
2139 public boolean isValidationEnabled() {
2140 return isValidationEnabled;
2141 }
2142
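/**
* @return the Validator implementation class to use when validation
* is enabled.
*/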
2143 public Class getValidatorClass() {
2144 return validatorClass;
2145 }
2146
2147 public void setValidatorClass(Class validatorClazz) {
2148 this.validatorClass = validatorClazz;
2149 }
2150
2151 public Class getValidationThresholdClass() {
2152 return validationThresholdClass;
2153 }
2154
2155 public void setValidationThresholdClass(Class validationThresholdClazz) {
2156 this.validationThresholdClass = validationThresholdClazz;
2157 }
2158
2159 public Class getValidationFailureHandlerClass() {
2160 return validationFailureHandlerClass;
2161 }
2162
2163 public void setValidationFailureHandlerClass(
2164 Class validationFailureHandlerClazz) {
2165 this.validationFailureHandlerClass = validationFailureHandlerClazz;
2166 }
2167
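/**
* @return the name of the stored procedure to call during export, if any.
*/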
2168 public String getCall() {
2169 return call;
2170 }
2171
2172 public void setCall(String theCall) {
2173 this.call = theCall;
2174 }
2175 }