SQOOP-319. Support for replacing Hive delimiters.
src/java/com/cloudera/sqoop/SqoopOptions.java
1 /**
2 * Licensed to Cloudera, Inc. under one
3 * or more contributor license agreements. See the NOTICE file
4 * distributed with this work for additional information
5 * regarding copyright ownership. Cloudera, Inc. licenses this file
6 * to you under the Apache License, Version 2.0 (the
7 * "License"); you may not use this file except in compliance
8 * with the License. You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19
20 package com.cloudera.sqoop;
21
22 import java.io.File;
23 import java.lang.reflect.Field;
24 import java.util.ArrayList;
25 import java.util.Arrays;
26 import java.util.Iterator;
27 import java.util.Map;
28 import java.util.Properties;
29
30 import org.apache.commons.logging.Log;
31 import org.apache.commons.logging.LogFactory;
32 import org.apache.hadoop.conf.Configuration;
33
34 import com.cloudera.sqoop.lib.DelimiterSet;
35 import com.cloudera.sqoop.lib.LargeObjectLoader;
36 import com.cloudera.sqoop.tool.SqoopTool;
37 import com.cloudera.sqoop.util.RandomHash;
38 import com.cloudera.sqoop.util.StoredAsProperty;
39
40 /**
41 * Configurable state used by Sqoop tools.
42 */
43 public class SqoopOptions implements Cloneable {
44
45 public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName());
46
47 /**
48 * Set to true in configuration if you want to put db passwords
49 * in the metastore.
50 */
51 public static final String METASTORE_PASSWORD_KEY =
52 "sqoop.metastore.client.record.password";
53
54 public static final boolean METASTORE_PASSWORD_DEFAULT = false;
55
56 /**
57 * Thrown when invalid cmdline options are given.
58 */
59 @SuppressWarnings("serial")
60 public static class InvalidOptionsException extends Exception {
61
62 private String message;
63
64 public InvalidOptionsException(final String msg) {
65 this.message = msg;
66 }
67
68 public String getMessage() {
69 return message;
70 }
71
72 public String toString() {
73 return getMessage();
74 }
75 }
76
77 /** Selects in-HDFS destination file format. */
78 public enum FileLayout {
79 TextFile,
80 SequenceFile,
81 AvroDataFile
82 }
83
84 /**
85 * Incremental imports support two modes:
86 * <ul>
87 * <li>new rows being appended to the end of a table with an
88 * incrementing id</li>
89 * <li>new data results in a date-last-modified column being
90 * updated to NOW(); Sqoop will pull all dirty rows in the next
91 * incremental import.</li>
92 * </ul>
93 */
94 public enum IncrementalMode {
95 None,
96 AppendRows,
97 DateLastModified,
98 }
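  // For example (property values illustrative only): an AppendRows import
  // might track an auto-increment "id" column via incremental.col=id and
  // incremental.last.value=42, pulling only rows with id > 42 on the next
  // run; DateLastModified does the same against a last-updated timestamp
  // column, pulling rows modified since the saved value.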
99
100
101 // TODO(aaron): Adding something here? Add a setter and a getter. Add a
102 // default value in initDefaults() if you need one. If this value needs to
103 // be serialized in the metastore, it should be marked with
104 // @StoredAsProperty(), if it is an int, long, boolean, String, or Enum.
105 // Arrays and other "special" types should be added directly to the
106 // loadProperties() and writeProperties() methods. Then add command-line
107 // arguments in the appropriate tools. The names of all command-line args
108 // are stored as constants in BaseSqoopTool.
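  // For instance, a hypothetical new option would follow that recipe roughly
  // like this (the field name and property key below are illustrative only):
  //
  //   @StoredAsProperty("db.example.timeout") private int exampleTimeout;
  //
  //   public int getExampleTimeout() { return this.exampleTimeout; }
  //   public void setExampleTimeout(int timeout) {
  //     this.exampleTimeout = timeout;
  //   }
  //
  //   // ...and in initDefaults():  this.exampleTimeout = 0;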
109
110 @StoredAsProperty("db.connect.string") private String connectString;
111 @StoredAsProperty("db.table") private String tableName;
112 private String [] columns; // Array stored as db.column.list.
113 @StoredAsProperty("db.username") private String username;
114 @StoredAsProperty("db.export.staging.table") private String stagingTableName;
115 @StoredAsProperty("db.clear.staging.table") private boolean clearStagingTable;
116 private Properties connectionParams; //Properties stored as db.connect.params
117
118
119 // May not be serialized, based on configuration.
120 // db.require.password is used to determine whether 'some' password is
121 // used. If so, it is stored as 'db.password'.
122 private String password;
123
124 @StoredAsProperty("null.string") private String nullStringValue;
125 @StoredAsProperty("input.null.string") private String inNullStringValue;
126 @StoredAsProperty("null.non-string") private String nullNonStringValue;
127 @StoredAsProperty("input.null.non-string")
128 private String inNullNonStringValue;
129
130 @StoredAsProperty("codegen.output.dir") private String codeOutputDir;
131 @StoredAsProperty("codegen.compile.dir") private String jarOutputDir;
132 // Boolean specifying whether jarOutputDir is a nonce tmpdir (true), or
133 // explicitly set by the user (false). If the former, disregard any value
134 // for jarOutputDir saved in the metastore.
135 @StoredAsProperty("codegen.auto.compile.dir") private boolean jarDirIsAuto;
136 private String hadoopHome; // not serialized to metastore.
137 @StoredAsProperty("db.split.column") private String splitByCol;
138 @StoredAsProperty("db.where.clause") private String whereClause;
139 @StoredAsProperty("db.query") private String sqlQuery;
140 @StoredAsProperty("jdbc.driver.class") private String driverClassName;
141 @StoredAsProperty("hdfs.warehouse.dir") private String warehouseDir;
142 @StoredAsProperty("hdfs.target.dir") private String targetDir;
143 @StoredAsProperty("hdfs.append.dir") private boolean append;
144 @StoredAsProperty("hdfs.file.format") private FileLayout layout;
145 @StoredAsProperty("direct.import") private boolean direct; // "direct mode."
146 @StoredAsProperty("db.batch") private boolean batchMode;
147 private String tmpDir; // where temp data goes; usually /tmp; not serialized.
148 private String hiveHome; // not serialized to metastore.
149 @StoredAsProperty("hive.import") private boolean hiveImport;
150 @StoredAsProperty("hive.overwrite.table") private boolean overwriteHiveTable;
151 @StoredAsProperty("hive.fail.table.exists")
152 private boolean failIfHiveTableExists;
153 @StoredAsProperty("hive.table.name") private String hiveTableName;
154 @StoredAsProperty("hive.drop.delims") private boolean hiveDropDelims;
155 @StoredAsProperty("hive.delims.replacement")
156 private String hiveDelimsReplacement;
157 @StoredAsProperty("hive.partition.key") private String hivePartitionKey;
158 @StoredAsProperty("hive.partition.value") private String hivePartitionValue;
159
160 // An ordered list of column names denoting what order columns are
161 // serialized to a PreparedStatement from a generated record type.
162 // Not serialized to metastore.
163 private String [] dbOutColumns;
164
165 // package to prepend to auto-named classes.
166 @StoredAsProperty("codegen.java.packagename") private String packageName;
167
168 // package+class to apply to individual table import.
169 // also used as an *input* class with existingJarFile.
170 @StoredAsProperty("codegen.java.classname") private String className;
171
172 // Name of a jar containing existing table definition
173 // class to use.
174 @StoredAsProperty("codegen.jar.file") private String existingJarFile;
175
176 @StoredAsProperty("mapreduce.num.mappers") private int numMappers;
177 @StoredAsProperty("enable.compression") private boolean useCompression;
178 @StoredAsProperty("compression.codec") private String compressionCodec;
179
180 // In direct mode, open a new stream every X bytes.
181 @StoredAsProperty("import.direct.split.size") private long directSplitSize;
182
183 // Max size of an inline LOB; larger LOBs are written
184 // to external files on disk.
185 @StoredAsProperty("import.max.inline.lob.size") private long maxInlineLobSize;
186
187 // Max number 'n' of rows to fetch from the
188 // database when more rows are needed.
189 @StoredAsProperty("import.fetch.size") private Integer fetchSize;
190
191 // HDFS path to read from when performing an export
192 @StoredAsProperty("export.source.dir") private String exportDir;
193
194 // Column to use for the WHERE clause in an UPDATE-based export.
195 @StoredAsProperty("export.update.col") private String updateKeyCol;
196
197 private DelimiterSet inputDelimiters; // codegen.input.delimiters.
198 private DelimiterSet outputDelimiters; // codegen.output.delimiters.
199 private boolean areDelimsManuallySet;
200
201 private Configuration conf;
202
203 public static final int DEFAULT_NUM_MAPPERS = 4;
204
205 private String [] extraArgs;
206
207 // HBase table to import into.
208 @StoredAsProperty("hbase.table") private String hbaseTable;
209
210 // Column family to prepend to inserted cols.
211 @StoredAsProperty("hbase.col.family") private String hbaseColFamily;
212
213 // Column of the input to use as the row key.
214 @StoredAsProperty("hbase.row.key.col") private String hbaseRowKeyCol;
215
216 // if true, create tables/col families.
217 @StoredAsProperty("hbase.create.table") private boolean hbaseCreateTable;
218
219 // col to filter on for incremental imports.
220 @StoredAsProperty("incremental.col") private String incrementalTestCol;
221 // incremental import mode we're using.
222 @StoredAsProperty("incremental.mode")
223 private IncrementalMode incrementalMode;
224 // What was the last-imported value of incrementalTestCol?
225 @StoredAsProperty("incremental.last.value")
226 private String incrementalLastValue;
227
228 // HDFS paths for "old" and "new" datasets in merge tool.
229 @StoredAsProperty("merge.old.path") private String mergeOldPath;
230 @StoredAsProperty("merge.new.path") private String mergeNewPath;
231
232 // "key" column for the merge operation.
233 @StoredAsProperty("merge.key.col") private String mergeKeyCol;
234
235
236 // These next two fields are not serialized to the metastore.
237 // If this SqoopOptions is created by reading a saved job, these will
238 // be populated by the JobStorage to facilitate updating the same
239 // job.
240 private String jobName;
241 private Map<String, String> jobStorageDescriptor;
242
243 // If we restore a job and then allow the user to apply arguments on
244 // top, we retain the version without the arguments in a reference to the
245 // 'parent' SqoopOptions instance, here.
246 private SqoopOptions parent;
247
248 // Nonce directory name. Generate one per process, lazily, if
249 // getNonceJarDir() is called. Not recorded in metadata. This is used as
250 // a temporary holding area for compilation work done by this process.
251 private static String curNonce;
252
253 // the connection manager fully qualified class name
254 @StoredAsProperty("connection.manager") private String connManagerClassName;
255
256 // The currently active tool. (Not saved in properties)
257 // Used to pass the SqoopTool instance in to mapreduce job configuration
258 // (JobBase, etc).
259 private SqoopTool activeSqoopTool;
260
261 public SqoopOptions() {
262 initDefaults(null);
263 }
264
265 public SqoopOptions(Configuration conf) {
266 initDefaults(conf);
267 }
268
269 /**
270 * Alternate SqoopOptions interface used mostly for unit testing.
271 * @param connect JDBC connect string to use
272 * @param table Table to read
273 */
274 public SqoopOptions(final String connect, final String table) {
275 initDefaults(null);
276
277 this.connectString = connect;
278 this.tableName = table;
279 }
280
281 private boolean getBooleanProperty(Properties props, String propName,
282 boolean defaultValue) {
283 String str = props.getProperty(propName,
284 Boolean.toString(defaultValue)).toLowerCase();
285 return "true".equals(str) || "yes".equals(str) || "1".equals(str);
286 }
287
288 private long getLongProperty(Properties props, String propName,
289 long defaultValue) {
290 String str = props.getProperty(propName,
291 Long.toString(defaultValue)).toLowerCase();
292 try {
293 return Long.parseLong(str);
294 } catch (NumberFormatException nfe) {
295 LOG.warn("Could not parse long value for config parameter "
296 + propName);
297 return defaultValue;
298 }
299 }
300
301 private int getIntProperty(Properties props, String propName,
302 int defaultVal) {
303 long longVal = getLongProperty(props, propName, defaultVal);
304 return (int) longVal;
305 }
306
307 private char getCharProperty(Properties props, String propName,
308 char defaultVal) {
309 int intVal = getIntProperty(props, propName, (int) defaultVal);
310 return (char) intVal;
311 }
312
313 private DelimiterSet getDelimiterProperties(Properties props,
314 String prefix, DelimiterSet defaults) {
315
316 if (null == defaults) {
317 defaults = new DelimiterSet();
318 }
319
320 char field = getCharProperty(props, prefix + ".field",
321 defaults.getFieldsTerminatedBy());
322 char record = getCharProperty(props, prefix + ".record",
323 defaults.getLinesTerminatedBy());
324 char enclose = getCharProperty(props, prefix + ".enclose",
325 defaults.getEnclosedBy());
326 char escape = getCharProperty(props, prefix + ".escape",
327 defaults.getEscapedBy());
328 boolean required = getBooleanProperty(props, prefix + ".enclose.required",
329 defaults.isEncloseRequired());
330
331 return new DelimiterSet(field, record, enclose, escape, required);
332 }
333
334 private void setDelimiterProperties(Properties props,
335 String prefix, DelimiterSet values) {
336 putProperty(props, prefix + ".field",
337 Integer.toString((int) values.getFieldsTerminatedBy()));
338 putProperty(props, prefix + ".record",
339 Integer.toString((int) values.getLinesTerminatedBy()));
340 putProperty(props, prefix + ".enclose",
341 Integer.toString((int) values.getEnclosedBy()));
342 putProperty(props, prefix + ".escape",
343 Integer.toString((int) values.getEscapedBy()));
344 putProperty(props, prefix + ".enclose.required",
345 Boolean.toString(values.isEncloseRequired()));
346 }
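  // A rough illustration of the entries produced by the two methods above:
  // with Sqoop's default output delimiters (',' between fields, '\n' between
  // records, no enclosing or escaping), the metastore would hold the numeric
  // character codes:
  //
  //   codegen.output.delimiters.field=44
  //   codegen.output.delimiters.record=10
  //   codegen.output.delimiters.enclose=0
  //   codegen.output.delimiters.escape=0
  //   codegen.output.delimiters.enclose.required=false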
347
348 /** Take a comma-delimited list of input and split the elements
349 * into an output array. */
350 private String [] listToArray(String strList) {
351 return strList.split(",");
352 }
353
354 private String arrayToList(String [] array) {
355 if (null == array) {
356 return null;
357 }
358
359 StringBuilder sb = new StringBuilder();
360 boolean first = true;
361 for (String elem : array) {
362 if (!first) {
363 sb.append(",");
364 }
365 sb.append(elem);
366 first = false;
367 }
368
369 return sb.toString();
370 }
371
372 /**
373 * A put() method for Properties that is tolerant of 'null' values.
374 * If a null value is specified, the property is unset.
375 */
376 private void putProperty(Properties props, String k, String v) {
377 if (null == v) {
378 props.remove(k);
379 } else {
380 props.setProperty(k, v);
381 }
382 }
383
384 /**
385 * Given a property prefix that denotes a set of numbered properties,
386 * return an array containing all the properties.
387 *
388 * For instance, if prefix is "foo", then return properties "foo.0",
389 * "foo.1", "foo.2", and so on as an array. If no such properties
390 * exist, return 'defaults'.
391 */
392 private String [] getArgArrayProperty(Properties props, String prefix,
393 String [] defaults) {
394 int cur = 0;
395 ArrayList<String> al = new ArrayList<String>();
396 while (true) {
397 String curProp = prefix + "." + cur;
398 String curStr = props.getProperty(curProp, null);
399 if (null == curStr) {
400 break;
401 }
402
403 al.add(curStr);
404 cur++;
405 }
406
407 if (cur == 0) {
408 // Couldn't find an array here; return the defaults.
409 return defaults;
410 }
411
412 return al.toArray(new String[0]);
413 }
414
415 private void setArgArrayProperties(Properties props, String prefix,
416 String [] values) {
417 if (null == values) {
418 return;
419 }
420
421 for (int i = 0; i < values.length; i++) {
422 putProperty(props, prefix + "." + i, values[i]);
423 }
424 }
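  // For instance, extra tool arguments such as {"--lock-tables", "--quick"}
  // (values illustrative only) would round-trip through the two methods
  // above as numbered keys:
  //
  //   tool.arguments.0=--lock-tables
  //   tool.arguments.1=--quick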
425
426 /**
427 * This method encodes the property key values found in the provided
428 * properties instance <tt>values</tt> into another properties instance
429 * <tt>props</tt>. The specified <tt>prefix</tt> is used as a namespace
430 * qualifier for keys when inserting. This allows easy introspection of the
431 * property key values in <tt>props</tt> instance to later separate out all
432 * the properties that belong to the <tt>values</tt> instance.
433 * @param props the container properties instance
434 * @param prefix the prefix for qualifying contained property keys.
435 * @param values the contained properties instance, all of whose elements will
436 * be added to the container properties instance.
437 *
438 * @see #getPropertiesAsNestedProperties(Properties, String)
439 */
440 private void setPropertiesAsNestedProperties(Properties props,
441 String prefix, Properties values) {
442 String nestedPropertyPrefix = prefix + ".";
443 if (null == values || values.size() == 0) {
444 Iterator<String> it = props.stringPropertyNames().iterator();
445 while (it.hasNext()) {
446 String name = it.next();
447 if (name.startsWith(nestedPropertyPrefix)) {
448 props.remove(name);
449 }
450 }
451 } else {
452 Iterator<String> it = values.stringPropertyNames().iterator();
453 while (it.hasNext()) {
454 String name = it.next();
455 putProperty(props,
456 nestedPropertyPrefix + name, values.getProperty(name));
457 }
458 }
459 }
460
461 /**
462 * This method decodes the property key values found in the provided
463 * properties instance <tt>props</tt> that have keys beginning with the
464 * given prefix. Matching elements from this properties instance are modified
465 * so that their prefix is dropped.
466 * @param props the properties container
467 * @param prefix the prefix qualifying properties that need to be removed
468 * @return a new properties instance that contains all matching elements from
469 * the container properties.
470 */
471 private Properties getPropertiesAsNestedProperties(
472 Properties props, String prefix) {
473 Properties nestedProps = new Properties();
474 String nestedPropertyPrefix = prefix + ".";
475 int index = nestedPropertyPrefix.length();
476 if (props != null && props.size() > 0) {
477 Iterator<String> it = props.stringPropertyNames().iterator();
478 while (it.hasNext()) {
479 String name = it.next();
480 if (name.startsWith(nestedPropertyPrefix)) {
481 String shortName = name.substring(index);
482 nestedProps.put(shortName, props.get(name));
483 }
484 }
485 }
486 return nestedProps;
487 }
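  // Sketch of the nesting scheme implemented by the two methods above: a set
  // of connection parameters such as {characterEncoding=utf8,
  // autoReconnect=true} (keys illustrative only) is flattened into the
  // container under the "db.connect.params" prefix and recovered by
  // stripping that prefix:
  //
  //   db.connect.params.characterEncoding=utf8
  //   db.connect.params.autoReconnect=true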
488
489 /**
490 * Given a set of properties, load this into the current SqoopOptions
491 * instance.
492 */
493 @SuppressWarnings("unchecked")
494 public void loadProperties(Properties props) {
495
496 try {
497 Field [] fields = getClass().getDeclaredFields();
498 for (Field f : fields) {
499 if (f.isAnnotationPresent(StoredAsProperty.class)) {
500 Class typ = f.getType();
501 StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
502 String propName = storedAs.value();
503
504 if (typ.equals(int.class)) {
505 f.setInt(this,
506 getIntProperty(props, propName, f.getInt(this)));
507 } else if (typ.equals(boolean.class)) {
508 f.setBoolean(this,
509 getBooleanProperty(props, propName, f.getBoolean(this)));
510 } else if (typ.equals(long.class)) {
511 f.setLong(this,
512 getLongProperty(props, propName, f.getLong(this)));
513 } else if (typ.equals(String.class)) {
514 f.set(this, props.getProperty(propName, (String) f.get(this)));
515 } else if (typ.equals(Integer.class)) {
516 String value = props.getProperty(
517 propName,
518 f.get(this) == null ? "null" : f.get(this).toString());
519 f.set(this, value.equals("null") ? null : new Integer(value));
520 } else if (typ.isEnum()) {
521 f.set(this, Enum.valueOf(typ,
522 props.getProperty(propName, f.get(this).toString())));
523 } else {
524 throw new RuntimeException("Could not retrieve property "
525 + propName + " for type: " + typ);
526 }
527 }
528 }
529 } catch (IllegalAccessException iae) {
530 throw new RuntimeException("Illegal access to field in property setter",
531 iae);
532 }
533
534 // Now load properties that were stored with special types, or require
535 // additional logic to set.
536
537 if (getBooleanProperty(props, "db.require.password", false)) {
538 // The user's password was stripped out from the metastore.
539 // Require that the user enter it now.
540 setPasswordFromConsole();
541 } else {
542 this.password = props.getProperty("db.password", this.password);
543 }
544
545 if (this.jarDirIsAuto) {
546 // We memoized a user-specific nonce dir for compilation to the data
547 // store. Disregard that setting and create a new nonce dir.
548 String localUsername = System.getProperty("user.name", "unknown");
549 this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
550 + "/compile");
551 }
552
553 String colListStr = props.getProperty("db.column.list", null);
554 if (null != colListStr) {
555 this.columns = listToArray(colListStr);
556 }
557
558 this.inputDelimiters = getDelimiterProperties(props,
559 "codegen.input.delimiters", this.inputDelimiters);
560 this.outputDelimiters = getDelimiterProperties(props,
561 "codegen.output.delimiters", this.outputDelimiters);
562
563 this.extraArgs = getArgArrayProperty(props, "tool.arguments",
564 this.extraArgs);
565
566 this.connectionParams =
567 getPropertiesAsNestedProperties(props, "db.connect.params");
568
569 // Delimiters were previously memoized; don't let the tool override
570 // them with defaults.
571 this.areDelimsManuallySet = true;
572 }
573
574 /**
575 * Return a Properties instance that encapsulates all the "sticky"
576 * state of this SqoopOptions that should be written to a metastore
577 * to restore the job later.
578 */
579 public Properties writeProperties() {
580 Properties props = new Properties();
581
582 try {
583 Field [] fields = getClass().getDeclaredFields();
584 for (Field f : fields) {
585 if (f.isAnnotationPresent(StoredAsProperty.class)) {
586 Class typ = f.getType();
587 StoredAsProperty storedAs = f.getAnnotation(StoredAsProperty.class);
588 String propName = storedAs.value();
589
590 if (typ.equals(int.class)) {
591 putProperty(props, propName, Integer.toString(f.getInt(this)));
592 } else if (typ.equals(boolean.class)) {
593 putProperty(props, propName, Boolean.toString(f.getBoolean(this)));
594 } else if (typ.equals(long.class)) {
595 putProperty(props, propName, Long.toString(f.getLong(this)));
596 } else if (typ.equals(String.class)) {
597 putProperty(props, propName, (String) f.get(this));
598 } else if (typ.equals(Integer.class)) {
599 putProperty(
600 props,
601 propName,
602 f.get(this) == null ? "null" : f.get(this).toString());
603 } else if (typ.isEnum()) {
604 putProperty(props, propName, f.get(this).toString());
605 } else {
606 throw new RuntimeException("Could not set property "
607 + propName + " for type: " + typ);
608 }
609 }
610 }
611 } catch (IllegalAccessException iae) {
612 throw new RuntimeException("Illegal access to field in property setter",
613 iae);
614 }
615
616
617 if (this.getConf().getBoolean(
618 METASTORE_PASSWORD_KEY, METASTORE_PASSWORD_DEFAULT)) {
619 // If the user specifies, we may store the password in the metastore.
620 putProperty(props, "db.password", this.password);
621 putProperty(props, "db.require.password", "false");
622 } else if (this.password != null) {
623 // Otherwise, if the user has set a password, we just record
624 // a flag stating that the password will need to be reentered.
625 putProperty(props, "db.require.password", "true");
626 } else {
627 // No password saved or required.
628 putProperty(props, "db.require.password", "false");
629 }
630
631 putProperty(props, "db.column.list", arrayToList(this.columns));
632 setDelimiterProperties(props, "codegen.input.delimiters",
633 this.inputDelimiters);
634 setDelimiterProperties(props, "codegen.output.delimiters",
635 this.outputDelimiters);
636 setArgArrayProperties(props, "tool.arguments", this.extraArgs);
637
638 setPropertiesAsNestedProperties(props,
639 "db.connect.params", this.connectionParams);
640
641 return props;
642 }
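  /*
   * A minimal usage sketch of loadProperties()/writeProperties() as the
   * metastore's JobStorage implementations use them (connect string and
   * table name below are illustrative only):
   *
   *   SqoopOptions opts = new SqoopOptions("jdbc:hsqldb:mem:example", "foo");
   *   Properties saved = opts.writeProperties();  // persist in the metastore
   *
   *   SqoopOptions restored = new SqoopOptions();
   *   restored.loadProperties(saved);             // sticky state re-applied
   */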
643
644 @Override
645 public Object clone() {
646 try {
647 SqoopOptions other = (SqoopOptions) super.clone();
648 if (null != columns) {
649 other.columns = Arrays.copyOf(columns, columns.length);
650 }
651
652 if (null != dbOutColumns) {
653 other.dbOutColumns = Arrays.copyOf(dbOutColumns, dbOutColumns.length);
654 }
655
656 if (null != inputDelimiters) {
657 other.inputDelimiters = (DelimiterSet) inputDelimiters.clone();
658 }
659
660 if (null != outputDelimiters) {
661 other.outputDelimiters = (DelimiterSet) outputDelimiters.clone();
662 }
663
664 if (null != conf) {
665 other.conf = new Configuration(conf);
666 }
667
668 if (null != extraArgs) {
669 other.extraArgs = Arrays.copyOf(extraArgs, extraArgs.length);
670 }
671
672 if (null != connectionParams) {
673 other.setConnectionParams(this.connectionParams);
674 }
675
676 return other;
677 } catch (CloneNotSupportedException cnse) {
678 // Shouldn't happen.
679 return null;
680 }
681 }
682
683 /**
684 * @return the temp directory to use; this is guaranteed to end with
685 * the file separator character (e.g., '/').
686 */
687 public String getTempDir() {
688 return this.tmpDir;
689 }
690
691 /**
692 * Return the name of a directory that does not exist before
693 * calling this method, and does exist afterward. We should be
694 * the only client of this directory. If this directory is not
695 * used during the lifetime of the JVM, schedule it to be removed
696 * when the JVM exits.
697 */
698 private static String getNonceJarDir(String tmpBase) {
699
700 // Make sure we don't loop forever in the event of a permission error.
701 final int MAX_DIR_CREATE_ATTEMPTS = 32;
702
703 if (null != curNonce) {
704 return curNonce;
705 }
706
707 File baseDir = new File(tmpBase);
708 File hashDir = null;
709
710 for (int attempts = 0; attempts < MAX_DIR_CREATE_ATTEMPTS; attempts++) {
711 hashDir = new File(baseDir, RandomHash.generateMD5String());
712 while (hashDir.exists()) {
713 hashDir = new File(baseDir, RandomHash.generateMD5String());
714 }
715
716 if (hashDir.mkdirs()) {
717 // We created the directory. Use it.
718 // If this directory is not actually filled with files, delete it
719 // when the JVM quits.
720 hashDir.deleteOnExit();
721 break;
722 }
723 }
724
725 if (hashDir == null || !hashDir.exists()) {
726 throw new RuntimeException("Could not create temporary directory: "
727 + hashDir + "; check for a directory permissions issue on /tmp.");
728 }
729
730 LOG.debug("Generated nonce dir: " + hashDir.toString());
731 SqoopOptions.curNonce = hashDir.toString();
732 return SqoopOptions.curNonce;
733 }
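  // With the default tmpDir, the resulting path looks roughly like
  //   /tmp/sqoop-<username>/compile/<random md5 hash>/
  // One such directory is generated lazily per JVM and reused for all
  // compilation work done by that process.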
734
735 /**
736 * Reset the nonce directory and force a new one to be generated. This
737 * method is intended to be used only by multiple unit tests that want
738 * to isolate themselves from one another. It should not be called
739 * during normal Sqoop execution.
740 */
741 public static void clearNonceDir() {
742 LOG.warn("Clearing nonce directory");
743 SqoopOptions.curNonce = null;
744 }
745
746 private void initDefaults(Configuration baseConfiguration) {
747 // first, set the true defaults if nothing else happens.
748 // default action is to run the full pipeline.
749 this.hadoopHome = System.getenv("HADOOP_HOME");
750
751 // Set this with $HIVE_HOME, but -Dhive.home can override.
752 this.hiveHome = System.getenv("HIVE_HOME");
753 this.hiveHome = System.getProperty("hive.home", this.hiveHome);
754
755 this.inputDelimiters = new DelimiterSet(
756 DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR,
757 DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);
758 this.outputDelimiters = new DelimiterSet();
759
760 // Set this to cwd, but -Dsqoop.src.dir can override.
761 this.codeOutputDir = System.getProperty("sqoop.src.dir", ".");
762
763 String myTmpDir = System.getProperty("test.build.data", "/tmp/");
764 if (!myTmpDir.endsWith(File.separator)) {
765 myTmpDir = myTmpDir + File.separator;
766 }
767
768 this.tmpDir = myTmpDir;
769 String localUsername = System.getProperty("user.name", "unknown");
770 this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
771 + "/compile");
772 this.jarDirIsAuto = true;
773 this.layout = FileLayout.TextFile;
774
775 this.areDelimsManuallySet = false;
776
777 this.numMappers = DEFAULT_NUM_MAPPERS;
778 this.useCompression = false;
779 this.compressionCodec = null;
780 this.directSplitSize = 0;
781
782 this.maxInlineLobSize = LargeObjectLoader.DEFAULT_MAX_LOB_LENGTH;
783
784 // Don't set a default value for fetchsize. This allows a JDBCManager to
785 // provide a database-specific default, if no value is provided by the
786 // user.
787 this.fetchSize = null;
788
789 if (null == baseConfiguration) {
790 this.conf = new Configuration();
791 } else {
792 this.conf = baseConfiguration;
793 }
794
795 this.extraArgs = null;
796
797 this.dbOutColumns = null;
798
799 this.incrementalMode = IncrementalMode.None;
800 }
801
802 /**
803 * Given a string containing a single character or an escape sequence
804 * representing a char, return that char itself.
805 *
806 * Normal literal characters return themselves: "x" -&gt; 'x', etc.
807 * Strings containing a '\' followed by one of t, r, n, or b escape to the
808 * usual character as seen in Java: "\n" -&gt; (newline), etc.
809 *
810 * Strings like "\0ooo" return the character specified by the octal sequence
811 * 'ooo'. Strings like "\0xhhh" or "\0Xhhh" return the character specified by
812 * the hex sequence 'hhh'.
813 *
814 * If the input string contains leading or trailing spaces, these are
815 * ignored.
816 */
817 public static char toChar(String charish) throws InvalidOptionsException {
818 if (null == charish || charish.length() == 0) {
819 throw new InvalidOptionsException("Character argument expected."
820 + "\nTry --help for usage instructions.");
821 }
822
823 if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
824 if (charish.length() == 3) {
825 throw new InvalidOptionsException(
826 "Base-16 value expected for character argument."
827 + "\nTry --help for usage instructions.");
828 } else {
829 String valStr = charish.substring(3);
830 int val = Integer.parseInt(valStr, 16);
831 return (char) val;
832 }
833 } else if (charish.startsWith("\\0")) {
834 if (charish.equals("\\0")) {
835 // it's just '\0', which we can take as shorthand for nul.
836 return DelimiterSet.NULL_CHAR;
837 } else {
838 // it's an octal value.
839 String valStr = charish.substring(2);
840 int val = Integer.parseInt(valStr, 8);
841 return (char) val;
842 }
843 } else if (charish.startsWith("\\")) {
844 if (charish.length() == 1) {
845 // it's just a '\'. Keep it literal.
846 return '\\';
847 } else if (charish.length() > 2) {
848 // we don't have any 3+ char escape strings.
849 throw new InvalidOptionsException(
850 "Cannot understand character argument: " + charish
851 + "\nTry --help for usage instructions.");
852 } else {
853 // this is some sort of normal 1-character escape sequence.
854 char escapeWhat = charish.charAt(1);
855 switch(escapeWhat) {
856 case 'b':
857 return '\b';
858 case 'n':
859 return '\n';
860 case 'r':
861 return '\r';
862 case 't':
863 return '\t';
864 case '\"':
865 return '\"';
866 case '\'':
867 return '\'';
868 case '\\':
869 return '\\';
870 default:
871 throw new InvalidOptionsException(
872 "Cannot understand character argument: " + charish
873 + "\nTry --help for usage instructions.");
874 }
875 }
876 } else {
877 // it's a normal character.
878 if (charish.length() > 1) {
879 LOG.warn("Character argument " + charish + " has multiple characters; "
880 + "only the first will be used.");
881 }
882
883 return charish.charAt(0);
884 }
885 }
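  // A few example conversions performed by toChar(), following the rules in
  // the javadoc above (arguments shown as the raw strings a user would type,
  // e.g. on the command line):
  //
  //   "x"     -> 'x'
  //   "\t"    -> tab
  //   "\0"    -> NUL (DelimiterSet.NULL_CHAR)
  //   "\054"  -> ',' (octal 54)
  //   "\0x2c" -> ',' (hex 2c)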
886
887 /**
888 * Get the temporary directory; guaranteed to end in File.separator
889 * (e.g., '/').
890 */
891 public String getTmpDir() {
892 return tmpDir;
893 }
894
895 public void setTmpDir(String tmp) {
896 this.tmpDir = tmp;
897 }
898
899 public String getConnectString() {
900 return connectString;
901 }
902
903 public void setConnectString(String connectStr) {
904 this.connectString = connectStr;
905 }
906
907 public String getTableName() {
908 return tableName;
909 }
910
911 public void setTableName(String table) {
912 this.tableName = table;
913 }
914
915 public String getStagingTableName() {
916 return stagingTableName;
917 }
918
919 public void setStagingTableName(String stagingTable) {
920 this.stagingTableName = stagingTable;
921 }
922
923 public boolean doClearStagingTable() {
924 return clearStagingTable;
925 }
926
927 public void setClearStagingTable(boolean clear) {
928 clearStagingTable = clear;
929 }
930
931 public String getExportDir() {
932 return exportDir;
933 }
934
935 public void setExportDir(String dir) {
936 this.exportDir = dir;
937 }
938
939 public String getExistingJarName() {
940 return existingJarFile;
941 }
942
943 public void setExistingJarName(String jarFile) {
944 this.existingJarFile = jarFile;
945 }
946
947 public String[] getColumns() {
948 if (null == columns) {
949 return null;
950 } else {
951 return Arrays.copyOf(columns, columns.length);
952 }
953 }
954
955 public void setColumns(String [] cols) {
956 if (null == cols) {
957 this.columns = null;
958 } else {
959 this.columns = Arrays.copyOf(cols, cols.length);
960 }
961 }
962
963 public String getSplitByCol() {
964 return splitByCol;
965 }
966
967 public void setSplitByCol(String splitBy) {
968 this.splitByCol = splitBy;
969 }
970
971 public String getWhereClause() {
972 return whereClause;
973 }
974
975 public void setWhereClause(String where) {
976 this.whereClause = where;
977 }
978
979 public String getUsername() {
980 return username;
981 }
982
983 public void setUsername(String user) {
984 this.username = user;
985 }
986
987 public String getPassword() {
988 return password;
989 }
990
991 /**
992 * Allow the user to enter a password on the console without printing
993 * characters.
994 * @return the password as a string
995 */
996 private String securePasswordEntry() {
997 return new String(System.console().readPassword("Enter password: "));
998 }
999
1000 /**
1001 * Set the password in this SqoopOptions from the console without printing
1002 * characters.
1003 */
1004 public void setPasswordFromConsole() {
1005 this.password = securePasswordEntry();
1006 }
1007
1008 public void setPassword(String pass) {
1009 this.password = pass;
1010 }
1011
1012 public boolean isDirect() {
1013 return direct;
1014 }
1015
1016 public void setDirectMode(boolean isDirect) {
1017 this.direct = isDirect;
1018 }
1019
1020 /**
1021 * @return true if underlying statements are to be executed in batch mode,
1022 * or false if they are to be executed in a single multirow statement.
1023 */
1024 public boolean isBatchMode() {
1025 return batchMode;
1026 }
1027
1028 public void setBatchMode(boolean mode) {
1029 this.batchMode = mode;
1030 }
1031
1032 /**
1033 * @return the number of map tasks to use for import.
1034 */
1035 public int getNumMappers() {
1036 return this.numMappers;
1037 }
1038
1039 public void setNumMappers(int m) {
1040 this.numMappers = m;
1041 }
1042
1043 /**
1044 * @return the user-specified absolute class name for the table.
1045 */
1046 public String getClassName() {
1047 return className;
1048 }
1049
1050 public void setClassName(String name) {
1051 this.className = name;
1052 }
1053
1054 /**
1055 * @return the user-specified package to prepend to table names via
1056 * --package-name.
1057 */
1058 public String getPackageName() {
1059 return packageName;
1060 }
1061
1062 public void setPackageName(String name) {
1063 this.packageName = name;
1064 }
1065
1066 public String getHiveHome() {
1067 return hiveHome;
1068 }
1069
1070 public void setHiveHome(String home) {
1071 this.hiveHome = home;
1072 }
1073
1074 /** @return true if we should import the table into Hive. */
1075 public boolean doHiveImport() {
1076 return hiveImport;
1077 }
1078
1079 public void setHiveImport(boolean doImport) {
1080 this.hiveImport = doImport;
1081 }
1082
1083 /**
1084 * @return the user-specified option to overwrite the existing table in Hive.
1085 */
1086 public boolean doOverwriteHiveTable() {
1087 return overwriteHiveTable;
1088 }
1089
1090 public void setOverwriteHiveTable(boolean overwrite) {
1091 this.overwriteHiveTable = overwrite;
1092 }
1093
1094 /**
1095 * @return the user-specified option to drop Hive delimiters from string fields.
1096 */
1097 public boolean doHiveDropDelims() {
1098 return hiveDropDelims;
1099 }
1100
1101 public void setHiveDropDelims(boolean dropHiveDelims) {
1102 this.hiveDropDelims = dropHiveDelims;
1103 }
1104
1105 /**
1106 * @return the user-specified option to specify the replacement string
1107 * for Hive delimiters.
1108 */
1109 public String getHiveDelimsReplacement() {
1110 return hiveDelimsReplacement;
1111 }
1112
1113 public void setHiveDelimsReplacement(String replacement) {
1114 this.hiveDelimsReplacement = replacement;
1115 }
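  /*
   * Sketch of how the two Hive delimiter options above are intended to
   * interact (the command-line flag names are assumed here to be
   * --hive-drop-import-delims and --hive-delims-replacement; see
   * BaseSqoopTool for the authoritative names): with the former, Hive's row
   * and field delimiter characters (\n, \r and \01) occurring inside string
   * fields are dropped during a Hive import; with the latter, they are
   * replaced by the supplied string instead. Only one of the two should be
   * specified for a given import.
   */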
1116
1117 /**
1118 * @return the user-specified option controlling Sqoop's behavior during
1119 * Hive table creation if the target table already exists.
1120 */
1121 public boolean doFailIfHiveTableExists() {
1122 return failIfHiveTableExists;
1123 }
1124
1125 public void setFailIfHiveTableExists(boolean fail) {
1126 this.failIfHiveTableExists = fail;
1127 }
1128
1129 /**
1130 * @return location where .java files go; guaranteed to end with '/'.
1131 */
1132 public String getCodeOutputDir() {
1133 if (codeOutputDir.endsWith(File.separator)) {
1134 return codeOutputDir;
1135 } else {
1136 return codeOutputDir + File.separator;
1137 }
1138 }
1139
1140 public void setCodeOutputDir(String outputDir) {
1141 this.codeOutputDir = outputDir;
1142 }
1143
1144 /**
1145 * @return location where .jar and .class files go; guaranteed to end with
1146 * '/'.
1147 */
1148 public String getJarOutputDir() {
1149 if (jarOutputDir.endsWith(File.separator)) {
1150 return jarOutputDir;
1151 } else {
1152 return jarOutputDir + File.separator;
1153 }
1154 }
1155
1156 public void setJarOutputDir(String outDir) {
1157 this.jarOutputDir = outDir;
1158 this.jarDirIsAuto = false;
1159 }
1160
1161 /**
1162 * Return the value of $HADOOP_HOME.
1163 * @return $HADOOP_HOME, or null if it's not set.
1164 */
1165 public String getHadoopHome() {
1166 return hadoopHome;
1167 }
1168
1169 public void setHadoopHome(String home) {
1170 this.hadoopHome = home;
1171 }
1172
1173 /**
1174 * @return a sql command to execute and exit with.
1175 */
1176 public String getSqlQuery() {
1177 return sqlQuery;
1178 }
1179
1180 public void setSqlQuery(String sqlStatement) {
1181 this.sqlQuery = sqlStatement;
1182 }
1183
1184 /**
1185 * @return The JDBC driver class name specified with --driver.
1186 */
1187 public String getDriverClassName() {
1188 return driverClassName;
1189 }
1190
1191 public void setDriverClassName(String driverClass) {
1192 this.driverClassName = driverClass;
1193 }
1194
1195 /**
1196 * @return the base destination path for table uploads.
1197 */
1198 public String getWarehouseDir() {
1199 return warehouseDir;
1200 }
1201
1202 public void setWarehouseDir(String warehouse) {
1203 this.warehouseDir = warehouse;
1204 }
1205
1206 public String getTargetDir() {
1207 return this.targetDir;
1208 }
1209
1210 public void setTargetDir(String dir) {
1211 this.targetDir = dir;
1212 }
1213
1214 public void setAppendMode(boolean doAppend) {
1215 this.append = doAppend;
1216 }
1217
1218 public boolean isAppendMode() {
1219 return this.append;
1220 }
1221
1222 /**
1223 * @return the destination file format
1224 */
1225 public FileLayout getFileLayout() {
1226 return this.layout;
1227 }
1228
1229 public void setFileLayout(FileLayout fileLayout) {
1230 this.layout = fileLayout;
1231 }
1232
1233 /**
1234 * @return the field delimiter to use when parsing lines. Defaults to the
1235 * field delim to use when printing lines.
1236 */
1237 public char getInputFieldDelim() {
1238 char f = inputDelimiters.getFieldsTerminatedBy();
1239 if (f == DelimiterSet.NULL_CHAR) {
1240 return this.outputDelimiters.getFieldsTerminatedBy();
1241 } else {
1242 return f;
1243 }
1244 }
1245
1246 /**
1247 * Set the field delimiter to use when parsing lines.
1248 */
1249 public void setInputFieldsTerminatedBy(char c) {
1250 this.inputDelimiters.setFieldsTerminatedBy(c);
1251 }
1252
1253 /**
1254 * @return the record delimiter to use when parsing lines. Defaults to the
1255 * record delim to use when printing lines.
1256 */
1257 public char getInputRecordDelim() {
1258 char r = inputDelimiters.getLinesTerminatedBy();
1259 if (r == DelimiterSet.NULL_CHAR) {
1260 return this.outputDelimiters.getLinesTerminatedBy();
1261 } else {
1262 return r;
1263 }
1264 }
1265
1266 /**
1267 * Set the record delimiter to use when parsing lines.
1268 */
1269 public void setInputLinesTerminatedBy(char c) {
1270 this.inputDelimiters.setLinesTerminatedBy(c);
1271 }
1272
1273 /**
1274 * @return the character that may enclose fields when parsing lines.
1275 * Defaults to the enclosing-char to use when printing lines.
1276 */
1277 public char getInputEnclosedBy() {
1278 char c = inputDelimiters.getEnclosedBy();
1279 if (c == DelimiterSet.NULL_CHAR) {
1280 return this.outputDelimiters.getEnclosedBy();
1281 } else {
1282 return c;
1283 }
1284 }
1285
1286 /**
1287 * Set the enclosed-by character to use when parsing lines.
1288 */
1289 public void setInputEnclosedBy(char c) {
1290 this.inputDelimiters.setEnclosedBy(c);
1291 }
1292
1293 /**
1294 * @return the escape character to use when parsing lines. Defaults to the
1295 * escape character used when printing lines.
1296 */
1297 public char getInputEscapedBy() {
1298 char c = inputDelimiters.getEscapedBy();
1299 if (c == DelimiterSet.NULL_CHAR) {
1300 return this.outputDelimiters.getEscapedBy();
1301 } else {
1302 return c;
1303 }
1304 }
1305
1306 /**
1307 * Set the escaped-by character to use when parsing lines.
1308 */
1309 public void setInputEscapedBy(char c) {
1310 this.inputDelimiters.setEscapedBy(c);
1311 }
1312
1313 /**
1314 * @return true if fields must be enclosed by the --enclosed-by character
1315 * when parsing. Defaults to false. Set true when --input-enclosed-by is
1316 * used.
1317 */
1318 public boolean isInputEncloseRequired() {
1319 char c = this.inputDelimiters.getEnclosedBy();
1320 if (c == DelimiterSet.NULL_CHAR) {
1321 return this.outputDelimiters.isEncloseRequired();
1322 } else {
1323 return this.inputDelimiters.isEncloseRequired();
1324 }
1325 }
1326
1327 /**
1328 * If true, then all input fields are expected to be enclosed by the
1329 * enclosed-by character when parsing.
1330 */
1331 public void setInputEncloseRequired(boolean required) {
1332 this.inputDelimiters.setEncloseRequired(required);
1333 }
1334
1335 /**
1336 * @return the character to print between fields when importing them to
1337 * text.
1338 */
1339 public char getOutputFieldDelim() {
1340 return this.outputDelimiters.getFieldsTerminatedBy();
1341 }
1342
1343 /**
1344 * Set the field delimiter to use when formatting lines.
1345 */
1346 public void setFieldsTerminatedBy(char c) {
1347 this.outputDelimiters.setFieldsTerminatedBy(c);
1348 }
1349
1350
1351 /**
1352 * @return the character to print between records when importing them to
1353 * text.
1354 */
1355 public char getOutputRecordDelim() {
1356 return this.outputDelimiters.getLinesTerminatedBy();
1357 }
1358
1359 /**
1360 * Set the record delimiter to use when formatting lines.
1361 */
1362 public void setLinesTerminatedBy(char c) {
1363 this.outputDelimiters.setLinesTerminatedBy(c);
1364 }
1365
1366 /**
1367 * @return a character which may enclose the contents of fields when
1368 * imported to text.
1369 */
1370 public char getOutputEnclosedBy() {
1371 return this.outputDelimiters.getEnclosedBy();
1372 }
1373
1374 /**
1375 * Set the enclosed-by character to use when formatting lines.
1376 */
1377 public void setEnclosedBy(char c) {
1378 this.outputDelimiters.setEnclosedBy(c);
1379 }
1380
1381 /**
1382 * @return a character which signifies an escape sequence when importing to
1383 * text.
1384 */
1385 public char getOutputEscapedBy() {
1386 return this.outputDelimiters.getEscapedBy();
1387 }
1388
1389 /**
1390 * Set the escaped-by character to use when formatting lines.
1391 */
1392 public void setEscapedBy(char c) {
1393 this.outputDelimiters.setEscapedBy(c);
1394 }
1395
1396 /**
1397 * @return true if fields imported to text must be enclosed by the
1398 * EnclosedBy char. default is false; set to true if --enclosed-by is used
1399 * instead of --optionally-enclosed-by.
1400 */
1401 public boolean isOutputEncloseRequired() {
1402 return this.outputDelimiters.isEncloseRequired();
1403 }
1404
1405 /**
1406 * If true, then the enclosed-by character will be applied to all fields,
1407 * even if internal characters do not need enclosed-by protection.
1408 */
1409 public void setOutputEncloseRequired(boolean required) {
1410 this.outputDelimiters.setEncloseRequired(required);
1411 }
1412
1413 /**
1414 * @return the set of delimiters used for formatting output records.
1415 */
1416 public DelimiterSet getOutputDelimiters() {
1417 return this.outputDelimiters.copy();
1418 }
1419
1420 /**
1421 * Set the complete set of delimiters to use for output formatting.
1422 */
1423 public void setOutputDelimiters(DelimiterSet delimiters) {
1424 this.outputDelimiters = delimiters.copy();
1425 }
1426
1427 /**
1428 * @return the set of delimiters used for parsing the input.
1429 * This may include values implicitly set by the output delimiters.
1430 */
1431 public DelimiterSet getInputDelimiters() {
1432 return new DelimiterSet(
1433 getInputFieldDelim(),
1434 getInputRecordDelim(),
1435 getInputEnclosedBy(),
1436 getInputEscapedBy(),
1437 isInputEncloseRequired());
1438 }
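  // Illustration of the fall-through behavior above (method calls refer to
  // this class): if only the output side is configured, the input side
  // mirrors it for any value left at DelimiterSet.NULL_CHAR.
  //
  //   opts.setFieldsTerminatedBy('\t');    // output field delimiter only
  //   char c = opts.getInputFieldDelim();  // also '\t', via fall-through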
1439
1440 /**
1441 * @return true if the user wants imported results to be compressed.
1442 */
1443 public boolean shouldUseCompression() {
1444 return this.useCompression || compressionCodec != null;
1445 }
1446
1447 public void setUseCompression(boolean compress) {
1448 this.useCompression = compress;
1449 }
1450
1451 /**
1452 * @return the name of the compression codec to use when importing.
1453 * E.g. <code>org.apache.hadoop.io.compress.GzipCodec</code>.
1454 */
1455 public String getCompressionCodec() {
1456 return compressionCodec;
1457 }
1458
1459 public void setCompressionCodec(String codec) {
1460 this.compressionCodec = codec;
1461 }
1462 /**
1463 * @return the name of the destination table when importing to Hive.
1464 */
1465 public String getHiveTableName() {
1466 if (null != this.hiveTableName) {
1467 return this.hiveTableName;
1468 } else {
1469 return this.tableName;
1470 }
1471 }
1472
1473 public void setHiveTableName(String name) {
1474 this.hiveTableName = name;
1475 }
1476
1477 public String getHivePartitionKey() {
1478 return hivePartitionKey;
1479 }
1480
1481 public void setHivePartitionKey(String hpk) {
1482 this.hivePartitionKey = hpk;
1483 }
1484
1485 public String getHivePartitionValue() {
1486 return hivePartitionValue;
1487 }
1488
1489 public void setHivePartitionValue(String hpv) {
1490 this.hivePartitionValue = hpv;
1491 }
1492
1493 /**
1494 * @return the file size to split by when using --direct mode.
1495 */
1496 public long getDirectSplitSize() {
1497 return this.directSplitSize;
1498 }
1499
1500 public void setDirectSplitSize(long splitSize) {
1501 this.directSplitSize = splitSize;
1502 }
1503
1504 /**
1505 * @return the max size of a LOB before we spill to a separate file.
1506 */
1507 public long getInlineLobLimit() {
1508 return this.maxInlineLobSize;
1509 }
1510
1511 public void setInlineLobLimit(long limit) {
1512 this.maxInlineLobSize = limit;
1513 }
1514
1515 public Integer getFetchSize() {
1516 return this.fetchSize;
1517 }
1518
1519 public void setFetchSize(Integer size) {
1520 this.fetchSize = size;
1521 }
1522
1523 /**
1524 * @return true if the delimiters have been explicitly set by the user.
1525 */
1526 public boolean explicitDelims() {
1527 return areDelimsManuallySet;
1528 }
1529
1530 /**
1531 * Flag the delimiter settings as explicit user settings, or implicit.
1532 */
1533 public void setExplicitDelims(boolean explicit) {
1534 this.areDelimsManuallySet = explicit;
1535 }
1536
1537 public Configuration getConf() {
1538 return conf;
1539 }
1540
1541 public void setConf(Configuration config) {
1542 this.conf = config;
1543 }
1544
1545 /**
1546 * @return command-line arguments after a '-'.
1547 */
1548 public String [] getExtraArgs() {
1549 if (extraArgs == null) {
1550 return null;
1551 }
1552
1553 String [] out = new String[extraArgs.length];
1554 for (int i = 0; i < extraArgs.length; i++) {
1555 out[i] = extraArgs[i];
1556 }
1557 return out;
1558 }
1559
1560 public void setExtraArgs(String [] args) {
1561 if (null == args) {
1562 this.extraArgs = null;
1563 return;
1564 }
1565
1566 this.extraArgs = new String[args.length];
1567 for (int i = 0; i < args.length; i++) {
1568 this.extraArgs[i] = args[i];
1569 }
1570 }
1571
1572 /**
1573 * Set the name of the column to be used in the WHERE clause of an
1574 * UPDATE-based export process.
1575 */
1576 public void setUpdateKeyCol(String colName) {
1577 this.updateKeyCol = colName;
1578 }
1579
1580 /**
1581 * @return the column which is the key column in a table to be exported
1582 * in update mode.
1583 */
1584 public String getUpdateKeyCol() {
1585 return this.updateKeyCol;
1586 }
1587
1588 /**
1589 * @return an ordered list of column names. The code generator should
1590 * generate the DBWritable.write(PreparedStatement) method with columns
1591 * exporting in this order, if it is non-null.
1592 */
1593 public String [] getDbOutputColumns() {
1594 if (null != dbOutColumns) {
1595 return Arrays.copyOf(this.dbOutColumns, dbOutColumns.length);
1596 } else {
1597 return null;
1598 }
1599 }
1600
1601 /**
1602 * Set the order in which columns should be serialized by the generated
1603 * DBWritable.write(PreparedStatement) method. Setting this to null will use
1604 * the "natural order" of the database table.
1605 *
1606 * TODO: Expose this setter via the command-line arguments for the codegen
1607 * module. That would allow users to export to tables with columns in a
1608 * different physical order than the file layout in HDFS.
1609 */
1610 public void setDbOutputColumns(String [] outCols) {
1611 if (null == outCols) {
1612 this.dbOutColumns = null;
1613 } else {
1614 this.dbOutColumns = Arrays.copyOf(outCols, outCols.length);
1615 }
1616 }
1617
1618 /**
1619 * Set whether we should create missing HBase tables.
1620 */
1621 public void setCreateHBaseTable(boolean create) {
1622 this.hbaseCreateTable = create;
1623 }
1624
1625 /**
1626 * Returns true if we should create HBase tables/column families
1627 * that are missing.
1628 */
1629 public boolean getCreateHBaseTable() {
1630 return this.hbaseCreateTable;
1631 }
1632
1633 /**
1634 * Sets the HBase target column family.
1635 */
1636 public void setHBaseColFamily(String colFamily) {
1637 this.hbaseColFamily = colFamily;
1638 }
1639
1640 /**
1641 * Gets the HBase import target column family.
1642 */
1643 public String getHBaseColFamily() {
1644 return this.hbaseColFamily;
1645 }
1646
1647 /**
1648 * Gets the column to use as the row id in an hbase import.
1649 * If null, use the primary key column.
1650 */
1651 public String getHBaseRowKeyColumn() {
1652 return this.hbaseRowKeyCol;
1653 }
1654
1655 /**
1656 * Sets the column to use as the row id in an hbase import.
1657 */
1658 public void setHBaseRowKeyColumn(String col) {
1659 this.hbaseRowKeyCol = col;
1660 }
1661
1662 /**
1663 * Gets the target HBase table name, if any.
1664 */
1665 public String getHBaseTable() {
1666 return this.hbaseTable;
1667 }
1668
1669 /**
1670 * Sets the target HBase table name for an import.
1671 */
1672 public void setHBaseTable(String table) {
1673 this.hbaseTable = table;
1674 }
1675
1676 /**
1677 * Set the column of the import source table to check for incremental import
1678 * state.
1679 */
1680 public void setIncrementalTestColumn(String colName) {
1681 this.incrementalTestCol = colName;
1682 }
1683
1684 /**
1685 * Return the name of the column of the import source table
1686 * to check for incremental import state.
1687 */
1688 public String getIncrementalTestColumn() {
1689 return this.incrementalTestCol;
1690 }
1691
1692 /**
1693 * Set the incremental import mode to use.
1694 */
1695 public void setIncrementalMode(IncrementalMode mode) {
1696 this.incrementalMode = mode;
1697 }
1698
1699 /**
1700 * Get the incremental import mode to use.
1701 */
1702 public IncrementalMode getIncrementalMode() {
1703 return this.incrementalMode;
1704 }
1705
1706 /**
1707 * Set the last imported value of the incremental import test column.
1708 */
1709 public void setIncrementalLastValue(String lastVal) {
1710 this.incrementalLastValue = lastVal;
1711 }
1712
1713 /**
1714 * Get the last imported value of the incremental import test column.
1715 */
1716 public String getIncrementalLastValue() {
1717 return this.incrementalLastValue;
1718 }
1719
1720 /**
1721 * Set the name of the saved job this SqoopOptions belongs to.
1722 */
1723 public void setJobName(String job) {
1724 this.jobName = job;
1725 }
1726
1727 /**
1728 * Get the name of the saved job this SqoopOptions belongs to.
1729 */
1730 public String getJobName() {
1731 return this.jobName;
1732 }
1733
1734 /**
1735 * Set the JobStorage descriptor used to open the saved job
1736 * this SqoopOptions belongs to.
1737 */
1738 public void setStorageDescriptor(Map<String, String> descriptor) {
1739 this.jobStorageDescriptor = descriptor;
1740 }
1741
1742 /**
1743 * Get the JobStorage descriptor used to open the saved job
1744 * this SqoopOptions belongs to.
1745 */
1746 public Map<String, String> getStorageDescriptor() {
1747 return this.jobStorageDescriptor;
1748 }
1749
1750 /**
1751 * Return the parent instance this SqoopOptions is derived from.
1752 */
1753 public SqoopOptions getParent() {
1754 return this.parent;
1755 }
1756
1757 /**
1758 * Set the parent instance this SqoopOptions is derived from.
1759 */
1760 public void setParent(SqoopOptions options) {
1761 this.parent = options;
1762 }
1763
1764 /**
1765 * Set the path name used to do an incremental import of old data
1766 * which will be combined with a "new" dataset.
1767 */
1768 public void setMergeOldPath(String path) {
1769 this.mergeOldPath = path;
1770 }
1771
1772 /**
1773 * Return the path name used to do an incremental import of old data
1774 * which will be combined with a "new" dataset.
1775 */
1776 public String getMergeOldPath() {
1777 return this.mergeOldPath;
1778 }
1779
1780 /**
1781 * Set the path name used to do an incremental import of new data
1782 * which will be combined with an "old" dataset.
1783 */
1784 public void setMergeNewPath(String path) {
1785 this.mergeNewPath = path;
1786 }
1787
1788 /**
1789 * Return the path name used to do an incremental import of new data
1790 * which will be combined with an "old" dataset.
1791 */
1792 public String getMergeNewPath() {
1793 return this.mergeNewPath;
1794 }
1795
1796 /**
1797 * Set the name of the column used to merge an old and new dataset.
1798 */
1799 public void setMergeKeyCol(String col) {
1800 this.mergeKeyCol = col;
1801 }
1802
1803 /**
1804 * Return the name of the column used to merge an old and new dataset.
1805 */
1806 public String getMergeKeyCol() {
1807 return this.mergeKeyCol;
1808 }
1809
1810 public void setConnManagerClassName(String connManagerClass) {
1811 this.connManagerClassName = connManagerClass;
1812 }
1813
1814 public String getConnManagerClassName() {
1815 return connManagerClassName;
1816 }
1817
1818 /** @return the SqoopTool that is operating this session. */
1819 public SqoopTool getActiveSqoopTool() {
1820 return activeSqoopTool;
1821 }
1822
1823 public void setActiveSqoopTool(SqoopTool tool) {
1824 activeSqoopTool = tool;
1825 }
1826
1827 public void setNullStringValue(String nullString) {
1828 this.nullStringValue = nullString;
1829 }
1830
1831 public String getNullStringValue() {
1832 return nullStringValue;
1833 }
1834
1835 public void setInNullStringValue(String inNullString) {
1836 this.inNullStringValue = inNullString;
1837 }
1838
1839 public String getInNullStringValue() {
1840 return inNullStringValue;
1841 }
1842
1843 public void setNullNonStringValue(String nullNonString) {
1844 this.nullNonStringValue = nullNonString;
1845 }
1846
1847 public String getNullNonStringValue() {
1848 return nullNonStringValue;
1849 }
1850
1851 public void setInNullNonStringValue(String inNullNonString) {
1852 this.inNullNonStringValue = inNullNonString;
1853 }
1854
1855 public String getInNullNonStringValue() {
1856 return inNullNonStringValue;
1857 }
1858
1859 public void setConnectionParams(Properties params) {
1860 connectionParams = new Properties();
1861 connectionParams.putAll(params);
1862 }
1863
1864 public Properties getConnectionParams() {
1865 return connectionParams;
1866 }
1867 }
1868