[sqoop.git] / src / java / org / apache / sqoop / tool / BaseSqoopTool.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.tool;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Properties;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;
import org.apache.sqoop.util.CredentialsUtil;
import org.apache.sqoop.util.LoggingUtils;
import org.apache.sqoop.util.password.CredentialProviderHelper;

import com.cloudera.sqoop.ConnFactory;
import com.cloudera.sqoop.Sqoop;
import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.SqoopOptions.IncrementalMode;
import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
import com.cloudera.sqoop.cli.RelatedOptions;
import com.cloudera.sqoop.cli.ToolOptions;
import com.cloudera.sqoop.lib.DelimiterSet;
import com.cloudera.sqoop.manager.ConnManager;
import com.cloudera.sqoop.metastore.JobData;

/**
 * Layer on top of SqoopTool that provides some basic common code
 * that most SqoopTool implementations will use.
 *
 * Subclasses should call init() at the top of their run() method,
 * and call destroy() at the end in a finally block.
 */
public abstract class BaseSqoopTool extends com.cloudera.sqoop.tool.SqoopTool {

  public static final Log LOG = LogFactory.getLog(
      BaseSqoopTool.class.getName());

  public static final String HELP_STR = "\nTry --help for usage instructions.";

  // Here are all the arguments that are used by the standard sqoop tools.
  // Their names are recorded here so that tools can share them and use
  // them consistently. The argument parser applies the leading '--' to
  // each string.
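  // For example, CONNECT_STRING_ARG = "connect" is exposed on the command
  // line as "--connect"; single-character arguments such as "P" and "m"
  // are registered as short options ("-P", "-m") instead.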
  public static final String CONNECT_STRING_ARG = "connect";
  public static final String CONN_MANAGER_CLASS_NAME =
      "connection-manager";
  public static final String CONNECT_PARAM_FILE = "connection-param-file";
  public static final String DRIVER_ARG = "driver";
  public static final String USERNAME_ARG = "username";
  public static final String PASSWORD_ARG = "password";
  public static final String PASSWORD_PROMPT_ARG = "P";
  public static final String PASSWORD_PATH_ARG = "password-file";
  public static final String PASSWORD_ALIAS_ARG = "password-alias";
  public static final String DIRECT_ARG = "direct";
  public static final String BATCH_ARG = "batch";
  public static final String TABLE_ARG = "table";
  public static final String STAGING_TABLE_ARG = "staging-table";
  public static final String CLEAR_STAGING_TABLE_ARG = "clear-staging-table";
  public static final String COLUMNS_ARG = "columns";
  public static final String SPLIT_BY_ARG = "split-by";
  public static final String SPLIT_LIMIT_ARG = "split-limit";
  public static final String WHERE_ARG = "where";
  public static final String HADOOP_HOME_ARG = "hadoop-home";
  public static final String HADOOP_MAPRED_HOME_ARG = "hadoop-mapred-home";
  public static final String HIVE_HOME_ARG = "hive-home";
  public static final String WAREHOUSE_DIR_ARG = "warehouse-dir";
  public static final String TARGET_DIR_ARG = "target-dir";
  public static final String APPEND_ARG = "append";
  public static final String DELETE_ARG = "delete-target-dir";
  public static final String NULL_STRING = "null-string";
  public static final String INPUT_NULL_STRING = "input-null-string";
  public static final String NULL_NON_STRING = "null-non-string";
  public static final String INPUT_NULL_NON_STRING = "input-null-non-string";
  public static final String MAP_COLUMN_JAVA = "map-column-java";
  public static final String MAP_COLUMN_HIVE = "map-column-hive";

  public static final String FMT_SEQUENCEFILE_ARG = "as-sequencefile";
  public static final String FMT_TEXTFILE_ARG = "as-textfile";
  public static final String FMT_AVRODATAFILE_ARG = "as-avrodatafile";
  public static final String FMT_PARQUETFILE_ARG = "as-parquetfile";
  public static final String HIVE_IMPORT_ARG = "hive-import";
  public static final String HIVE_TABLE_ARG = "hive-table";
  public static final String HIVE_DATABASE_ARG = "hive-database";
  public static final String HIVE_OVERWRITE_ARG = "hive-overwrite";
  public static final String HIVE_DROP_DELIMS_ARG = "hive-drop-import-delims";
  public static final String HIVE_DELIMS_REPLACEMENT_ARG =
      "hive-delims-replacement";
  public static final String HIVE_PARTITION_KEY_ARG = "hive-partition-key";
  public static final String HIVE_PARTITION_VALUE_ARG = "hive-partition-value";
  public static final String HCATCALOG_PARTITION_KEYS_ARG =
      "hcatalog-partition-keys";
  public static final String HCATALOG_PARTITION_VALUES_ARG =
      "hcatalog-partition-values";
  public static final String CREATE_HIVE_TABLE_ARG =
      "create-hive-table";
  public static final String HCATALOG_TABLE_ARG = "hcatalog-table";
  public static final String HCATALOG_DATABASE_ARG = "hcatalog-database";
  public static final String CREATE_HCATALOG_TABLE_ARG =
      "create-hcatalog-table";
  public static final String DROP_AND_CREATE_HCATALOG_TABLE =
      "drop-and-create-hcatalog-table";
  public static final String HCATALOG_STORAGE_STANZA_ARG =
      "hcatalog-storage-stanza";
  public static final String HCATALOG_HOME_ARG = "hcatalog-home";
  public static final String MAPREDUCE_JOB_NAME = "mapreduce-job-name";
  public static final String NUM_MAPPERS_ARG = "num-mappers";
  public static final String NUM_MAPPERS_SHORT_ARG = "m";
  public static final String COMPRESS_ARG = "compress";
  public static final String COMPRESSION_CODEC_ARG = "compression-codec";
  public static final String COMPRESS_SHORT_ARG = "z";
  public static final String DIRECT_SPLIT_SIZE_ARG = "direct-split-size";
  public static final String INLINE_LOB_LIMIT_ARG = "inline-lob-limit";
  public static final String FETCH_SIZE_ARG = "fetch-size";
  public static final String EXPORT_PATH_ARG = "export-dir";
  public static final String FIELDS_TERMINATED_BY_ARG = "fields-terminated-by";
  public static final String LINES_TERMINATED_BY_ARG = "lines-terminated-by";
  public static final String OPTIONALLY_ENCLOSED_BY_ARG =
      "optionally-enclosed-by";
  public static final String ENCLOSED_BY_ARG = "enclosed-by";
  public static final String ESCAPED_BY_ARG = "escaped-by";
  public static final String MYSQL_DELIMITERS_ARG = "mysql-delimiters";
  public static final String INPUT_FIELDS_TERMINATED_BY_ARG =
      "input-fields-terminated-by";
  public static final String INPUT_LINES_TERMINATED_BY_ARG =
      "input-lines-terminated-by";
  public static final String INPUT_OPTIONALLY_ENCLOSED_BY_ARG =
      "input-optionally-enclosed-by";
  public static final String INPUT_ENCLOSED_BY_ARG = "input-enclosed-by";
  public static final String INPUT_ESCAPED_BY_ARG = "input-escaped-by";
  public static final String CODE_OUT_DIR_ARG = "outdir";
  public static final String BIN_OUT_DIR_ARG = "bindir";
  public static final String PACKAGE_NAME_ARG = "package-name";
  public static final String CLASS_NAME_ARG = "class-name";
  public static final String JAR_FILE_NAME_ARG = "jar-file";
  public static final String SQL_QUERY_ARG = "query";
  public static final String SQL_QUERY_BOUNDARY = "boundary-query";
  public static final String SQL_QUERY_SHORT_ARG = "e";
  public static final String VERBOSE_ARG = "verbose";
  public static final String HELP_ARG = "help";
  public static final String TEMP_ROOTDIR_ARG = "temporary-rootdir";
  public static final String UPDATE_KEY_ARG = "update-key";
  public static final String UPDATE_MODE_ARG = "update-mode";
  public static final String CALL_ARG = "call";
  public static final String SKIP_DISTCACHE_ARG = "skip-dist-cache";
  public static final String RELAXED_ISOLATION = "relaxed-isolation";

  // Arguments for validation.
  public static final String VALIDATE_ARG = "validate";
  public static final String VALIDATOR_CLASS_ARG = "validator";
  public static final String VALIDATION_THRESHOLD_CLASS_ARG =
      "validation-threshold";
  public static final String VALIDATION_FAILURE_HANDLER_CLASS_ARG =
      "validation-failurehandler";

  // Arguments for incremental imports.
  public static final String INCREMENT_TYPE_ARG = "incremental";
  public static final String INCREMENT_COL_ARG = "check-column";
  public static final String INCREMENT_LAST_VAL_ARG = "last-value";

  // Arguments for all table imports.
  public static final String ALL_TABLE_EXCLUDES_ARG = "exclude-tables";

  // HBase arguments.
  public static final String HBASE_TABLE_ARG = "hbase-table";
  public static final String HBASE_COL_FAM_ARG = "column-family";
  public static final String HBASE_ROW_KEY_ARG = "hbase-row-key";
  public static final String HBASE_BULK_LOAD_ENABLED_ARG =
      "hbase-bulkload";
  public static final String HBASE_CREATE_TABLE_ARG = "hbase-create-table";

  // Accumulo arguments.
  public static final String ACCUMULO_TABLE_ARG = "accumulo-table";
  public static final String ACCUMULO_COL_FAM_ARG = "accumulo-column-family";
  public static final String ACCUMULO_ROW_KEY_ARG = "accumulo-row-key";
  public static final String ACCUMULO_VISIBILITY_ARG = "accumulo-visibility";
  public static final String ACCUMULO_CREATE_TABLE_ARG
      = "accumulo-create-table";
  public static final String ACCUMULO_BATCH_SIZE_ARG = "accumulo-batch-size";
  public static final String ACCUMULO_MAX_LATENCY_ARG = "accumulo-max-latency";
  public static final String ACCUMULO_ZOOKEEPERS_ARG = "accumulo-zookeepers";
  public static final String ACCUMULO_INSTANCE_ARG = "accumulo-instance";
  public static final String ACCUMULO_USER_ARG = "accumulo-user";
  public static final String ACCUMULO_PASSWORD_ARG = "accumulo-password";


  // Arguments for the saved job management system.
  public static final String STORAGE_METASTORE_ARG = "meta-connect";
  public static final String JOB_CMD_CREATE_ARG = "create";
  public static final String JOB_CMD_DELETE_ARG = "delete";
  public static final String JOB_CMD_EXEC_ARG = "exec";
  public static final String JOB_CMD_LIST_ARG = "list";
  public static final String JOB_CMD_SHOW_ARG = "show";

  // Arguments for the metastore.
  public static final String METASTORE_SHUTDOWN_ARG = "shutdown";


  // Arguments for merging datasets.
  public static final String NEW_DATASET_ARG = "new-data";
  public static final String OLD_DATASET_ARG = "onto";
  public static final String MERGE_KEY_ARG = "merge-key";

  // Reset the number of mappers to one if there is no primary key available
  // and a split-by column is not explicitly provided.

  public static final String AUTORESET_TO_ONE_MAPPER = "autoreset-to-one-mapper";


  public BaseSqoopTool() {
  }

  public BaseSqoopTool(String toolName) {
    super(toolName);
  }

  protected ConnManager manager;

  public ConnManager getManager() {
    return manager;
  }

  public void setManager(ConnManager mgr) {
    this.manager = mgr;
  }

  /**
   * Should be called at the beginning of the run() method to initialize
   * the connection manager, etc. If this succeeds (returns true), it should
   * be paired with a call to destroy().
   * @return true on success, false on failure.
   */
  protected boolean init(SqoopOptions sqoopOpts) {
    // Get the connection to the database.
    // Set the tool name in sqoop options.
    sqoopOpts.setToolName(getToolName());
    try {
      JobData data = new JobData(sqoopOpts, this);
      this.manager = new ConnFactory(sqoopOpts.getConf()).getManager(data);
      return true;
    } catch (Exception e) {
      LOG.error("Got error creating database manager: "
          + StringUtils.stringifyException(e));
      if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
        throw new RuntimeException(e);
      }
    }

    return false;
  }

  /**
   * Should be called in a 'finally' block at the end of the run() method.
   */
  protected void destroy(SqoopOptions sqoopOpts) {
    if (null != manager) {
      try {
        manager.close();
      } catch (SQLException sqlE) {
        LOG.warn("Error while closing connection: " + sqlE);
      }
    }
  }
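
  // A minimal sketch of the init()/destroy() contract described above
  // (illustrative only; the actual body depends on the concrete tool):
  //
  //   public int run(SqoopOptions options) {
  //     if (!init(options)) {
  //       return 1;
  //     }
  //     try {
  //       // ... tool-specific work, typically via getManager() ...
  //       return 0;
  //     } finally {
  //       destroy(options);
  //     }
  //   }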

  /**
   * Examines a subset of the array presented, and determines if it
   * contains any non-empty arguments. If so, logs the arguments
   * and returns true.
   *
   * @param argv an array of strings to check.
   * @param offset the first element of the array to check
   * @param len the number of elements to check
   * @return true if there are any non-null, non-empty argument strings
   * present.
   */
  protected boolean hasUnrecognizedArgs(String [] argv, int offset, int len) {
    if (argv == null) {
      return false;
    }

    boolean unrecognized = false;
    boolean printedBanner = false;
    for (int i = offset; i < Math.min(argv.length, offset + len); i++) {
      if (argv[i] != null && argv[i].length() > 0) {
        if (!printedBanner) {
          LOG.error("Error parsing arguments for " + getToolName() + ":");
          printedBanner = true;
        }
        LOG.error("Unrecognized argument: " + argv[i]);
        unrecognized = true;
      }
    }

    return unrecognized;
  }

  protected boolean hasUnrecognizedArgs(String [] argv) {
    if (null == argv) {
      return false;
    }
    return hasUnrecognizedArgs(argv, 0, argv.length);
  }


  /**
   * If argv contains an entry "--", return an array containing all elements
   * after the "--" separator. Otherwise, return null.
   * @param argv a set of arguments to scan for the subcommand arguments.
   */
  protected String [] getSubcommandArgs(String [] argv) {
    if (null == argv) {
      return null;
    }

    for (int i = 0; i < argv.length; i++) {
      if (argv[i].equals("--")) {
        return Arrays.copyOfRange(argv, i + 1, argv.length);
      }
    }

    return null;
  }
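
  // For example (illustrative values): given argv = {"--table", "foo", "--",
  // "-D", "k=v"}, getSubcommandArgs() returns {"-D", "k=v"}; an argv with no
  // "--" entry returns null.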

  /**
   * @return RelatedOptions used by job management tools.
   */
  protected RelatedOptions getJobOptions() {
    RelatedOptions relatedOpts = new RelatedOptions(
        "Job management arguments");
    relatedOpts.addOption(OptionBuilder.withArgName("jdbc-uri")
        .hasArg()
        .withDescription("Specify JDBC connect string for the metastore")
        .withLongOpt(STORAGE_METASTORE_ARG)
        .create());

    // Create an option-group surrounding the operations a user
    // can perform on jobs.
    OptionGroup group = new OptionGroup();
    group.addOption(OptionBuilder.withArgName("job-id")
        .hasArg()
        .withDescription("Create a new saved job")
        .withLongOpt(JOB_CMD_CREATE_ARG)
        .create());
    group.addOption(OptionBuilder.withArgName("job-id")
        .hasArg()
        .withDescription("Delete a saved job")
        .withLongOpt(JOB_CMD_DELETE_ARG)
        .create());
    group.addOption(OptionBuilder.withArgName("job-id")
        .hasArg()
        .withDescription("Show the parameters for a saved job")
        .withLongOpt(JOB_CMD_SHOW_ARG)
        .create());

    Option execOption = OptionBuilder.withArgName("job-id")
        .hasArg()
        .withDescription("Run a saved job")
        .withLongOpt(JOB_CMD_EXEC_ARG)
        .create();
    group.addOption(execOption);

    group.addOption(OptionBuilder
        .withDescription("List saved jobs")
        .withLongOpt(JOB_CMD_LIST_ARG)
        .create());

    relatedOpts.addOptionGroup(group);

    // Since the "common" options aren't used in the job tool,
    // add these settings here.
    relatedOpts.addOption(OptionBuilder
        .withDescription("Print more information while working")
        .withLongOpt(VERBOSE_ARG)
        .create());
    relatedOpts.addOption(OptionBuilder
        .withDescription("Print usage instructions")
        .withLongOpt(HELP_ARG)
        .create());

    return relatedOpts;
  }
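
  // These options back command lines such as the following (an illustrative
  // sketch; the metastore URI, job name, and table are placeholders):
  //
  //   sqoop job --meta-connect jdbc:hsqldb:hsql://meta-host:16000/sqoop \
  //       --create myjob -- import --connect jdbc:mysql://db/corp --table t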

  /**
   * @return RelatedOptions used by most/all Sqoop tools.
   */
  protected RelatedOptions getCommonOptions() {
    // Connection args (common)
    RelatedOptions commonOpts = new RelatedOptions("Common arguments");
    commonOpts.addOption(OptionBuilder.withArgName("jdbc-uri")
        .hasArg().withDescription("Specify JDBC connect string")
        .withLongOpt(CONNECT_STRING_ARG)
        .create());
    commonOpts.addOption(OptionBuilder.withArgName("class-name")
        .hasArg().withDescription("Specify connection manager class name")
        .withLongOpt(CONN_MANAGER_CLASS_NAME)
        .create());
    commonOpts.addOption(OptionBuilder.withArgName("properties-file")
        .hasArg().withDescription("Specify connection parameters file")
        .withLongOpt(CONNECT_PARAM_FILE)
        .create());
    commonOpts.addOption(OptionBuilder.withArgName("class-name")
        .hasArg().withDescription("Manually specify JDBC driver class to use")
        .withLongOpt(DRIVER_ARG)
        .create());
    commonOpts.addOption(OptionBuilder.withArgName("username")
        .hasArg().withDescription("Set authentication username")
        .withLongOpt(USERNAME_ARG)
        .create());
    commonOpts.addOption(OptionBuilder.withArgName("password")
        .hasArg().withDescription("Set authentication password")
        .withLongOpt(PASSWORD_ARG)
        .create());
    commonOpts.addOption(OptionBuilder.withArgName(PASSWORD_PATH_ARG)
        .hasArg().withDescription("Set authentication password file path")
        .withLongOpt(PASSWORD_PATH_ARG)
        .create());
    commonOpts.addOption(OptionBuilder
        .withDescription("Read password from console")
        .create(PASSWORD_PROMPT_ARG));
    commonOpts.addOption(OptionBuilder.withArgName(PASSWORD_ALIAS_ARG)
        .hasArg().withDescription("Credential provider password alias")
        .withLongOpt(PASSWORD_ALIAS_ARG)
        .create());
    commonOpts.addOption(OptionBuilder.withArgName("dir")
        .hasArg().withDescription("Override $HADOOP_MAPRED_HOME")
        .withLongOpt(HADOOP_MAPRED_HOME_ARG)
        .create());

    commonOpts.addOption(OptionBuilder.withArgName("hdir")
        .hasArg().withDescription("Override $HADOOP_HOME")
        .withLongOpt(HADOOP_HOME_ARG)
        .create());
    commonOpts.addOption(OptionBuilder
        .withDescription("Skip copying jars to distributed cache")
        .withLongOpt(SKIP_DISTCACHE_ARG)
        .create());

    // misc (common)
    commonOpts.addOption(OptionBuilder
        .withDescription("Print more information while working")
        .withLongOpt(VERBOSE_ARG)
        .create());
    commonOpts.addOption(OptionBuilder
        .withDescription("Print usage instructions")
        .withLongOpt(HELP_ARG)
        .create());
    commonOpts.addOption(OptionBuilder
        .withDescription("Defines the temporary root directory for the import")
        .withLongOpt(TEMP_ROOTDIR_ARG)
        .hasArg()
        .withArgName("rootdir")
        .create());
    // relax isolation requirements
    commonOpts.addOption(OptionBuilder
        .withDescription("Use read-uncommitted isolation for imports")
        .withLongOpt(RELAXED_ISOLATION)
        .create());

    return commonOpts;
  }
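
  // An illustrative invocation exercising several of these common arguments
  // (connect string, username, and table are placeholders; -P prompts for
  // the password on the console):
  //
  //   sqoop import --connect jdbc:mysql://db.example.com/corp \
  //       --username sqoopuser -P --table employees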

  /**
   * @param explicitHiveImport true if the user has an explicit --hive-import
   * available, or false if this is implied by the tool.
   * @return options governing interaction with Hive
   */
  protected RelatedOptions getHiveOptions(boolean explicitHiveImport) {
    RelatedOptions hiveOpts = new RelatedOptions("Hive arguments");
    if (explicitHiveImport) {
      hiveOpts.addOption(OptionBuilder
          .withDescription("Import tables into Hive "
              + "(Uses Hive's default delimiters if none are set.)")
          .withLongOpt(HIVE_IMPORT_ARG)
          .create());
    }

    hiveOpts.addOption(OptionBuilder.withArgName("dir")
        .hasArg().withDescription("Override $HIVE_HOME")
        .withLongOpt(HIVE_HOME_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder
        .withDescription("Overwrite existing data in the Hive table")
        .withLongOpt(HIVE_OVERWRITE_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder
        .withDescription("Fail if the target hive table exists")
        .withLongOpt(CREATE_HIVE_TABLE_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder.withArgName("table-name")
        .hasArg()
        .withDescription("Sets the table name to use when importing to hive")
        .withLongOpt(HIVE_TABLE_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder.withArgName("database-name")
        .hasArg()
        .withDescription("Sets the database name to use when importing to hive")
        .withLongOpt(HIVE_DATABASE_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder
        .withDescription("Drop Hive record \\0x01 and row delimiters "
            + "(\\n\\r) from imported string fields")
        .withLongOpt(HIVE_DROP_DELIMS_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder
        .hasArg()
        .withDescription("Replace Hive record \\0x01 and row delimiters "
            + "(\\n\\r) from imported string fields with user-defined string")
        .withLongOpt(HIVE_DELIMS_REPLACEMENT_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder.withArgName("partition-key")
        .hasArg()
        .withDescription("Sets the partition key to use when importing to hive")
        .withLongOpt(HIVE_PARTITION_KEY_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder.withArgName("partition-value")
        .hasArg()
        .withDescription("Sets the partition value to use when importing "
            + "to hive")
        .withLongOpt(HIVE_PARTITION_VALUE_ARG)
        .create());
    hiveOpts.addOption(OptionBuilder
        .hasArg()
547 .withDescription("Override mapping for specific column to hive"
548 + " types.")
        .withLongOpt(MAP_COLUMN_HIVE)
        .create());

    return hiveOpts;
  }

  /**
   * @return options governing interaction with HCatalog.
   */
  protected RelatedOptions getHCatalogOptions() {
    RelatedOptions hCatOptions = new RelatedOptions("HCatalog arguments");
    hCatOptions.addOption(OptionBuilder
        .hasArg()
        .withDescription("HCatalog table name")
        .withLongOpt(HCATALOG_TABLE_ARG)
        .create());
    hCatOptions.addOption(OptionBuilder
        .hasArg()
        .withDescription("HCatalog database name")
        .withLongOpt(HCATALOG_DATABASE_ARG)
        .create());

    hCatOptions.addOption(OptionBuilder.withArgName("dir")
        .hasArg().withDescription("Override $HIVE_HOME")
        .withLongOpt(HIVE_HOME_ARG)
        .create());
    hCatOptions.addOption(OptionBuilder.withArgName("hdir")
        .hasArg().withDescription("Override $HCAT_HOME")
        .withLongOpt(HCATALOG_HOME_ARG)
        .create());
    hCatOptions.addOption(OptionBuilder.withArgName("partition-key")
        .hasArg()
        .withDescription("Sets the partition key to use when importing to hive")
        .withLongOpt(HIVE_PARTITION_KEY_ARG)
        .create());
    hCatOptions.addOption(OptionBuilder.withArgName("partition-value")
        .hasArg()
        .withDescription("Sets the partition value to use when importing "
            + "to hive")
        .withLongOpt(HIVE_PARTITION_VALUE_ARG)
        .create());
    hCatOptions.addOption(OptionBuilder
        .hasArg()
592 .withDescription("Override mapping for specific column to hive"
593 + " types.")
        .withLongOpt(MAP_COLUMN_HIVE)
        .create());
    hCatOptions.addOption(OptionBuilder.withArgName("partition-key")
        .hasArg()
        .withDescription("Sets the partition keys to use when importing to hive")
        .withLongOpt(HCATCALOG_PARTITION_KEYS_ARG)
        .create());
    hCatOptions.addOption(OptionBuilder.withArgName("partition-value")
        .hasArg()
        .withDescription("Sets the partition values to use when importing "
            + "to hive")
        .withLongOpt(HCATALOG_PARTITION_VALUES_ARG)
        .create());
    return hCatOptions;
  }

  protected RelatedOptions getHCatImportOnlyOptions() {
    RelatedOptions hCatOptions = new RelatedOptions(
        "HCatalog import specific options");
    hCatOptions.addOption(OptionBuilder
        .withDescription("Create an HCatalog table before import")
        .withLongOpt(CREATE_HCATALOG_TABLE_ARG)
        .create());
    hCatOptions.addOption(OptionBuilder
        .withDescription("Drop and recreate the HCatalog table before import")
        .withLongOpt(DROP_AND_CREATE_HCATALOG_TABLE)
        .create());
    hCatOptions.addOption(OptionBuilder
        .hasArg()
        .withDescription("HCatalog storage stanza for table creation")
        .withLongOpt(HCATALOG_STORAGE_STANZA_ARG)
        .create());
    return hCatOptions;
  }

  /**
   * @return options governing output format delimiters
   */
  protected RelatedOptions getOutputFormatOptions() {
    RelatedOptions formatOpts = new RelatedOptions(
        "Output line formatting arguments");
    formatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets the field separator character")
        .withLongOpt(FIELDS_TERMINATED_BY_ARG)
        .create());
    formatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets the end-of-line character")
        .withLongOpt(LINES_TERMINATED_BY_ARG)
        .create());
    formatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets a field enclosing character")
        .withLongOpt(OPTIONALLY_ENCLOSED_BY_ARG)
        .create());
    formatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets a required field enclosing character")
        .withLongOpt(ENCLOSED_BY_ARG)
        .create());
    formatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets the escape character")
        .withLongOpt(ESCAPED_BY_ARG)
        .create());
    formatOpts.addOption(OptionBuilder
        .withDescription("Uses MySQL's default delimiter set: "
            + "fields: , lines: \\n escaped-by: \\ optionally-enclosed-by: '")
        .withLongOpt(MYSQL_DELIMITERS_ARG)
        .create());

    return formatOpts;
  }

  /**
   * @return options governing input format delimiters.
   */
  protected RelatedOptions getInputFormatOptions() {
    RelatedOptions inputFormatOpts =
        new RelatedOptions("Input parsing arguments");
    inputFormatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets the input field separator")
        .withLongOpt(INPUT_FIELDS_TERMINATED_BY_ARG)
        .create());
    inputFormatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets the input end-of-line char")
        .withLongOpt(INPUT_LINES_TERMINATED_BY_ARG)
        .create());
    inputFormatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets a field enclosing character")
        .withLongOpt(INPUT_OPTIONALLY_ENCLOSED_BY_ARG)
        .create());
    inputFormatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets a required field encloser")
        .withLongOpt(INPUT_ENCLOSED_BY_ARG)
        .create());
    inputFormatOpts.addOption(OptionBuilder.withArgName("char")
        .hasArg()
        .withDescription("Sets the input escape character")
        .withLongOpt(INPUT_ESCAPED_BY_ARG)
        .create());

    return inputFormatOpts;
  }

  /**
   * @param multiTable true if these options will be used for bulk code-gen.
   * @return options related to code generation.
   */
  protected RelatedOptions getCodeGenOpts(boolean multiTable) {
    RelatedOptions codeGenOpts =
        new RelatedOptions("Code generation arguments");
    codeGenOpts.addOption(OptionBuilder.withArgName("dir")
        .hasArg()
        .withDescription("Output directory for generated code")
        .withLongOpt(CODE_OUT_DIR_ARG)
        .create());
    codeGenOpts.addOption(OptionBuilder.withArgName("dir")
        .hasArg()
        .withDescription("Output directory for compiled objects")
        .withLongOpt(BIN_OUT_DIR_ARG)
        .create());
    codeGenOpts.addOption(OptionBuilder.withArgName("name")
        .hasArg()
        .withDescription("Put auto-generated classes in this package")
        .withLongOpt(PACKAGE_NAME_ARG)
        .create());
    codeGenOpts.addOption(OptionBuilder.withArgName("null-str")
        .hasArg()
        .withDescription("Null string representation")
        .withLongOpt(NULL_STRING)
        .create());
    codeGenOpts.addOption(OptionBuilder.withArgName("null-str")
        .hasArg()
        .withDescription("Input null string representation")
        .withLongOpt(INPUT_NULL_STRING)
        .create());
    codeGenOpts.addOption(OptionBuilder.withArgName("null-str")
        .hasArg()
        .withDescription("Null non-string representation")
        .withLongOpt(NULL_NON_STRING)
        .create());
    codeGenOpts.addOption(OptionBuilder.withArgName("null-str")
        .hasArg()
        .withDescription("Input null non-string representation")
        .withLongOpt(INPUT_NULL_NON_STRING)
        .create());
    codeGenOpts.addOption(OptionBuilder
        .hasArg()
        .withDescription("Override mapping for specific columns to java types")
        .withLongOpt(MAP_COLUMN_JAVA)
        .create());

    if (!multiTable) {
      codeGenOpts.addOption(OptionBuilder.withArgName("name")
          .hasArg()
          .withDescription("Sets the generated class name. "
              + "This overrides --" + PACKAGE_NAME_ARG + ". When combined "
              + "with --" + JAR_FILE_NAME_ARG + ", sets the input class.")
          .withLongOpt(CLASS_NAME_ARG)
          .create());
    }
    return codeGenOpts;
  }

  protected RelatedOptions getHBaseOptions() {
    RelatedOptions hbaseOpts =
        new RelatedOptions("HBase arguments");
    hbaseOpts.addOption(OptionBuilder.withArgName("table")
        .hasArg()
        .withDescription("Import to <table> in HBase")
        .withLongOpt(HBASE_TABLE_ARG)
        .create());
    hbaseOpts.addOption(OptionBuilder.withArgName("family")
        .hasArg()
        .withDescription("Sets the target column family for the import")
        .withLongOpt(HBASE_COL_FAM_ARG)
        .create());
    hbaseOpts.addOption(OptionBuilder.withArgName("col")
        .hasArg()
        .withDescription("Specifies which input column to use as the row key")
        .withLongOpt(HBASE_ROW_KEY_ARG)
        .create());
    hbaseOpts.addOption(OptionBuilder
        .withDescription("Enables HBase bulk loading")
        .withLongOpt(HBASE_BULK_LOAD_ENABLED_ARG)
        .create());
    hbaseOpts.addOption(OptionBuilder
        .withDescription("If specified, create missing HBase tables")
        .withLongOpt(HBASE_CREATE_TABLE_ARG)
        .create());

    return hbaseOpts;
  }

  protected RelatedOptions getAccumuloOptions() {
    RelatedOptions accumuloOpts =
        new RelatedOptions("Accumulo arguments");
    accumuloOpts.addOption(OptionBuilder.withArgName("table")
        .hasArg()
        .withDescription("Import to <table> in Accumulo")
        .withLongOpt(ACCUMULO_TABLE_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("family")
        .hasArg()
        .withDescription("Sets the target column family for the import")
        .withLongOpt(ACCUMULO_COL_FAM_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("col")
        .hasArg()
        .withDescription("Specifies which input column to use as the row key")
        .withLongOpt(ACCUMULO_ROW_KEY_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("vis")
        .hasArg()
        .withDescription("Visibility token to be applied to all rows imported")
        .withLongOpt(ACCUMULO_VISIBILITY_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder
        .withDescription("If specified, create missing Accumulo tables")
        .withLongOpt(ACCUMULO_CREATE_TABLE_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("size")
        .hasArg()
        .withDescription("Batch size in bytes")
        .withLongOpt(ACCUMULO_BATCH_SIZE_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("latency")
        .hasArg()
        .withDescription("Max write latency in milliseconds")
        .withLongOpt(ACCUMULO_MAX_LATENCY_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("zookeepers")
        .hasArg()
        .withDescription("Comma-separated list of zookeepers (host:port)")
        .withLongOpt(ACCUMULO_ZOOKEEPERS_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("instance")
        .hasArg()
        .withDescription("Accumulo instance name.")
        .withLongOpt(ACCUMULO_INSTANCE_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("user")
        .hasArg()
        .withDescription("Accumulo user name.")
        .withLongOpt(ACCUMULO_USER_ARG)
        .create());
    accumuloOpts.addOption(OptionBuilder.withArgName("password")
        .hasArg()
        .withDescription("Accumulo password.")
        .withLongOpt(ACCUMULO_PASSWORD_ARG)
        .create());

    return accumuloOpts;
  }

  protected void applyAccumuloOptions(CommandLine in, SqoopOptions out) {
    if (in.hasOption(ACCUMULO_TABLE_ARG)) {
      out.setAccumuloTable(in.getOptionValue(ACCUMULO_TABLE_ARG));
    }

    if (in.hasOption(ACCUMULO_COL_FAM_ARG)) {
      out.setAccumuloColFamily(in.getOptionValue(ACCUMULO_COL_FAM_ARG));
    }

    if (in.hasOption(ACCUMULO_ROW_KEY_ARG)) {
      out.setAccumuloRowKeyColumn(in.getOptionValue(ACCUMULO_ROW_KEY_ARG));
    }

    if (in.hasOption(ACCUMULO_VISIBILITY_ARG)) {
      out.setAccumuloVisibility(in.getOptionValue(ACCUMULO_VISIBILITY_ARG));
    }

    if (in.hasOption(ACCUMULO_CREATE_TABLE_ARG)) {
      out.setCreateAccumuloTable(true);
    }

    if (in.hasOption(ACCUMULO_BATCH_SIZE_ARG)) {
      out.setAccumuloBatchSize(Long.parseLong(
          in.getOptionValue(ACCUMULO_BATCH_SIZE_ARG)));
    }

    if (in.hasOption(ACCUMULO_MAX_LATENCY_ARG)) {
      out.setAccumuloMaxLatency(Long.parseLong(
          in.getOptionValue(ACCUMULO_MAX_LATENCY_ARG)));
    }

    if (in.hasOption(ACCUMULO_ZOOKEEPERS_ARG)) {
      out.setAccumuloZookeepers(in.getOptionValue(ACCUMULO_ZOOKEEPERS_ARG));
    }

    if (in.hasOption(ACCUMULO_INSTANCE_ARG)) {
      out.setAccumuloInstance(in.getOptionValue(ACCUMULO_INSTANCE_ARG));
    }

    if (in.hasOption(ACCUMULO_USER_ARG)) {
      out.setAccumuloUser(in.getOptionValue(ACCUMULO_USER_ARG));
    }

    if (in.hasOption(ACCUMULO_PASSWORD_ARG)) {
      out.setAccumuloPassword(in.getOptionValue(ACCUMULO_PASSWORD_ARG));
    }
  }


  @SuppressWarnings("static-access")
  protected void addValidationOpts(RelatedOptions validationOptions) {
    validationOptions.addOption(OptionBuilder
        .withDescription("Validate the copy using the configured validator")
        .withLongOpt(VALIDATE_ARG)
        .create());
    validationOptions.addOption(OptionBuilder
        .withArgName(VALIDATOR_CLASS_ARG).hasArg()
        .withDescription("Fully qualified class name for the Validator")
        .withLongOpt(VALIDATOR_CLASS_ARG)
        .create());
    validationOptions.addOption(OptionBuilder
        .withArgName(VALIDATION_THRESHOLD_CLASS_ARG).hasArg()
        .withDescription("Fully qualified class name for ValidationThreshold")
        .withLongOpt(VALIDATION_THRESHOLD_CLASS_ARG)
        .create());
    validationOptions.addOption(OptionBuilder
        .withArgName(VALIDATION_FAILURE_HANDLER_CLASS_ARG).hasArg()
        .withDescription("Fully qualified class name for "
            + "ValidationFailureHandler")
        .withLongOpt(VALIDATION_FAILURE_HANDLER_CLASS_ARG)
        .create());
  }

  /**
   * Apply the common command-line options to the given SqoopOptions state.
   */
  protected void applyCommonOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {

    // common options.
    if (in.hasOption(VERBOSE_ARG)) {
      // Immediately switch into DEBUG logging.
      out.setVerbose(true);
      LoggingUtils.setDebugLevel();
      LOG.debug("Enabled debug logging.");
    }

    if (in.hasOption(HELP_ARG)) {
      ToolOptions toolOpts = new ToolOptions();
      configureOptions(toolOpts);
      printHelp(toolOpts);
      throw new InvalidOptionsException("");
    }

    if (in.hasOption(TEMP_ROOTDIR_ARG)) {
      out.setTempRootDir(in.getOptionValue(TEMP_ROOTDIR_ARG));
    }

    if (in.hasOption(CONNECT_STRING_ARG)) {
      out.setConnectString(in.getOptionValue(CONNECT_STRING_ARG));
    }

    if (in.hasOption(CONN_MANAGER_CLASS_NAME)) {
      out.setConnManagerClassName(in.getOptionValue(CONN_MANAGER_CLASS_NAME));
    }

    if (in.hasOption(CONNECT_PARAM_FILE)) {
      File paramFile = new File(in.getOptionValue(CONNECT_PARAM_FILE));
      if (!paramFile.exists()) {
        throw new InvalidOptionsException(
            "Specified connection parameter file not found: " + paramFile);
      }
      InputStream inStream = null;
      Properties connectionParams = new Properties();
      try {
        inStream = new FileInputStream(
            new File(in.getOptionValue(CONNECT_PARAM_FILE)));
        connectionParams.load(inStream);
      } catch (IOException ex) {
        LOG.warn("Failed to load connection parameter file", ex);
        throw new InvalidOptionsException(
            "Error while loading connection parameter file: "
            + ex.getMessage());
      } finally {
        if (inStream != null) {
          try {
            inStream.close();
          } catch (IOException ex) {
            LOG.warn("Failed to close input stream", ex);
          }
        }
      }
      LOG.debug("Loaded connection parameters: " + connectionParams);
      out.setConnectionParams(connectionParams);
    }
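
    // The connection-param-file loaded above is a standard
    // java.util.Properties file. An illustrative example (the actual
    // property names depend on the JDBC driver in use):
    //
    //   useUnicode=true
    //   characterEncoding=utf8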

    if (in.hasOption(NULL_STRING)) {
      out.setNullStringValue(in.getOptionValue(NULL_STRING));
    }

    if (in.hasOption(INPUT_NULL_STRING)) {
      out.setInNullStringValue(in.getOptionValue(INPUT_NULL_STRING));
    }

    if (in.hasOption(NULL_NON_STRING)) {
      out.setNullNonStringValue(in.getOptionValue(NULL_NON_STRING));
    }

    if (in.hasOption(INPUT_NULL_NON_STRING)) {
      out.setInNullNonStringValue(in.getOptionValue(INPUT_NULL_NON_STRING));
    }

    if (in.hasOption(DRIVER_ARG)) {
      out.setDriverClassName(in.getOptionValue(DRIVER_ARG));
    }

    if (in.hasOption(SKIP_DISTCACHE_ARG)) {
      LOG.debug("Disabling dist cache");
      out.setSkipDistCache(true);
    }

    applyCredentialsOptions(in, out);

    if (in.hasOption(HADOOP_MAPRED_HOME_ARG)) {
      out.setHadoopMapRedHome(in.getOptionValue(HADOOP_MAPRED_HOME_ARG));
      // Only consider HADOOP_HOME if HADOOP_MAPRED_HOME is not set.
    } else if (in.hasOption(HADOOP_HOME_ARG)) {
      out.setHadoopMapRedHome(in.getOptionValue(HADOOP_HOME_ARG));
    }
    if (in.hasOption(RELAXED_ISOLATION)) {
      out.setRelaxedIsolation(true);
    }
  }

  private void applyCredentialsOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {
    if (in.hasOption(USERNAME_ARG)) {
      out.setUsername(in.getOptionValue(USERNAME_ARG));
      if (null == out.getPassword()) {
        // Set password to empty if the username is set first,
        // to ensure that they're either both null or neither is.
        out.setPassword("");
      }
    }

    if (in.hasOption(PASSWORD_ARG)) {
      LOG.warn("Setting your password on the command-line is insecure. "
          + "Consider using -" + PASSWORD_PROMPT_ARG + " instead.");
      out.setPassword(in.getOptionValue(PASSWORD_ARG));
    }

    if (in.hasOption(PASSWORD_PROMPT_ARG)) {
      out.setPasswordFromConsole();
    }

    if (in.hasOption(PASSWORD_PATH_ARG)) {
      if (in.hasOption(PASSWORD_ARG) || in.hasOption(PASSWORD_PROMPT_ARG)
          || in.hasOption(PASSWORD_ALIAS_ARG)) {
        throw new InvalidOptionsException("Only one of a password, a password "
            + "alias, or a path to a password file may be specified.");
      }

      try {
        out.setPasswordFilePath(in.getOptionValue(PASSWORD_PATH_ARG));
        // apply password from file into password in options
        out.setPassword(CredentialsUtil.fetchPassword(out));
        // And allow the PasswordLoader to clean up any sensitive properties
        CredentialsUtil.cleanUpSensitiveProperties(out.getConf());
      } catch (IOException ex) {
        LOG.warn("Failed to load password file", ex);
        throw (InvalidOptionsException)
            new InvalidOptionsException("Error while loading password file: "
                + ex.getMessage()).initCause(ex);
      }
    }
    if (in.hasOption(PASSWORD_ALIAS_ARG)) {
      if (in.hasOption(PASSWORD_ARG) || in.hasOption(PASSWORD_PROMPT_ARG)
          || in.hasOption(PASSWORD_PATH_ARG)) {
        throw new InvalidOptionsException("Only one of a password, a password "
            + "alias, or a path to a password file may be specified.");
      }
      out.setPasswordAlias(in.getOptionValue(PASSWORD_ALIAS_ARG));
      if (!CredentialProviderHelper.isProviderAvailable()) {
        throw new InvalidOptionsException(
            "CredentialProvider facility not available in the hadoop "
            + "environment used");
      }
      try {
        out.setPassword(CredentialProviderHelper
            .resolveAlias(out.getConf(), in.getOptionValue(PASSWORD_ALIAS_ARG)));
      } catch (IOException ioe) {
        throw (InvalidOptionsException)
            new InvalidOptionsException("Unable to process alias")
                .initCause(ioe);
      }
    }
  }

  protected void applyHiveOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {

    if (in.hasOption(HIVE_HOME_ARG)) {
      out.setHiveHome(in.getOptionValue(HIVE_HOME_ARG));
    }

    if (in.hasOption(HIVE_IMPORT_ARG)) {
      out.setHiveImport(true);
    }

    if (in.hasOption(HIVE_OVERWRITE_ARG)) {
      out.setOverwriteHiveTable(true);
    }

    if (in.hasOption(CREATE_HIVE_TABLE_ARG)) {
      out.setFailIfHiveTableExists(true);
    }

    if (in.hasOption(HIVE_TABLE_ARG)) {
      out.setHiveTableName(in.getOptionValue(HIVE_TABLE_ARG));
    }

    if (in.hasOption(HIVE_DATABASE_ARG)) {
      out.setHiveDatabaseName(in.getOptionValue(HIVE_DATABASE_ARG));
    }

    if (in.hasOption(HIVE_DROP_DELIMS_ARG)) {
      out.setHiveDropDelims(true);
    }

    if (in.hasOption(HIVE_DELIMS_REPLACEMENT_ARG)) {
      out.setHiveDelimsReplacement(
          in.getOptionValue(HIVE_DELIMS_REPLACEMENT_ARG));
    }

    if (in.hasOption(HIVE_PARTITION_KEY_ARG)) {
      out.setHivePartitionKey(in.getOptionValue(HIVE_PARTITION_KEY_ARG));
    }

    if (in.hasOption(HIVE_PARTITION_VALUE_ARG)) {
      out.setHivePartitionValue(in.getOptionValue(HIVE_PARTITION_VALUE_ARG));
    }

    if (in.hasOption(MAP_COLUMN_HIVE)) {
      out.setMapColumnHive(in.getOptionValue(MAP_COLUMN_HIVE));
    }
  }

  protected void applyHCatalogOptions(CommandLine in, SqoopOptions out) {
    if (in.hasOption(HCATALOG_TABLE_ARG)) {
      out.setHCatTableName(in.getOptionValue(HCATALOG_TABLE_ARG));
    }

    if (in.hasOption(HCATALOG_DATABASE_ARG)) {
      out.setHCatDatabaseName(in.getOptionValue(HCATALOG_DATABASE_ARG));
    }

    if (in.hasOption(HCATALOG_STORAGE_STANZA_ARG)) {
      out.setHCatStorageStanza(in.getOptionValue(HCATALOG_STORAGE_STANZA_ARG));
    }

    if (in.hasOption(CREATE_HCATALOG_TABLE_ARG)) {
      out.setCreateHCatalogTable(true);
    }

    if (in.hasOption(DROP_AND_CREATE_HCATALOG_TABLE)) {
      out.setDropAndCreateHCatalogTable(true);
    }

    if (in.hasOption(HCATALOG_HOME_ARG)) {
      out.setHCatHome(in.getOptionValue(HCATALOG_HOME_ARG));
    }

    // Allow some of the hive options also.

    if (in.hasOption(HIVE_HOME_ARG)) {
      out.setHiveHome(in.getOptionValue(HIVE_HOME_ARG));
    }

    if (in.hasOption(HCATCALOG_PARTITION_KEYS_ARG)) {
      out.setHCatalogPartitionKeys(
          in.getOptionValue(HCATCALOG_PARTITION_KEYS_ARG));
    }

    if (in.hasOption(HCATALOG_PARTITION_VALUES_ARG)) {
      out.setHCatalogPartitionValues(
          in.getOptionValue(HCATALOG_PARTITION_VALUES_ARG));
    }

    if (in.hasOption(HIVE_PARTITION_KEY_ARG)) {
      out.setHivePartitionKey(in.getOptionValue(HIVE_PARTITION_KEY_ARG));
    }

    if (in.hasOption(HIVE_PARTITION_VALUE_ARG)) {
      out.setHivePartitionValue(in.getOptionValue(HIVE_PARTITION_VALUE_ARG));
    }

    if (in.hasOption(MAP_COLUMN_HIVE)) {
      out.setMapColumnHive(in.getOptionValue(MAP_COLUMN_HIVE));
    }
  }

  protected void applyOutputFormatOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {
    if (in.hasOption(FIELDS_TERMINATED_BY_ARG)) {
      out.setFieldsTerminatedBy(SqoopOptions.toChar(
          in.getOptionValue(FIELDS_TERMINATED_BY_ARG)));
      out.setExplicitOutputDelims(true);
    }

    if (in.hasOption(LINES_TERMINATED_BY_ARG)) {
      out.setLinesTerminatedBy(SqoopOptions.toChar(
          in.getOptionValue(LINES_TERMINATED_BY_ARG)));
      out.setExplicitOutputDelims(true);
    }

    if (in.hasOption(OPTIONALLY_ENCLOSED_BY_ARG)) {
      out.setEnclosedBy(SqoopOptions.toChar(
          in.getOptionValue(OPTIONALLY_ENCLOSED_BY_ARG)));
      out.setOutputEncloseRequired(false);
      out.setExplicitOutputDelims(true);
    }

    if (in.hasOption(ENCLOSED_BY_ARG)) {
      out.setEnclosedBy(SqoopOptions.toChar(
          in.getOptionValue(ENCLOSED_BY_ARG)));
      out.setOutputEncloseRequired(true);
      out.setExplicitOutputDelims(true);
    }

    if (in.hasOption(ESCAPED_BY_ARG)) {
      out.setEscapedBy(SqoopOptions.toChar(
          in.getOptionValue(ESCAPED_BY_ARG)));
      out.setExplicitOutputDelims(true);
    }

    if (in.hasOption(MYSQL_DELIMITERS_ARG)) {
      out.setOutputEncloseRequired(false);
      out.setFieldsTerminatedBy(',');
      out.setLinesTerminatedBy('\n');
      out.setEscapedBy('\\');
      out.setEnclosedBy('\'');
      out.setExplicitOutputDelims(true);
    }
  }
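
  // For example, with --mysql-delimiters a record is rendered in
  // mysqldump-compatible form; an illustrative row (placeholder values,
  // quotes escaped with backslash) might look like:
  //
  //   1,'O\'Brien','2009-01-01'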

  protected void applyInputFormatOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {
    if (in.hasOption(INPUT_FIELDS_TERMINATED_BY_ARG)) {
      out.setInputFieldsTerminatedBy(SqoopOptions.toChar(
          in.getOptionValue(INPUT_FIELDS_TERMINATED_BY_ARG)));
      out.setExplicitInputDelims(true);
    }

    if (in.hasOption(INPUT_LINES_TERMINATED_BY_ARG)) {
      out.setInputLinesTerminatedBy(SqoopOptions.toChar(
          in.getOptionValue(INPUT_LINES_TERMINATED_BY_ARG)));
      out.setExplicitInputDelims(true);
    }

    if (in.hasOption(INPUT_OPTIONALLY_ENCLOSED_BY_ARG)) {
      out.setInputEnclosedBy(SqoopOptions.toChar(
          in.getOptionValue(INPUT_OPTIONALLY_ENCLOSED_BY_ARG)));
      out.setInputEncloseRequired(false);
      out.setExplicitInputDelims(true);
    }

    if (in.hasOption(INPUT_ENCLOSED_BY_ARG)) {
      out.setInputEnclosedBy(SqoopOptions.toChar(
          in.getOptionValue(INPUT_ENCLOSED_BY_ARG)));
      out.setInputEncloseRequired(true);
      out.setExplicitInputDelims(true);
    }

    if (in.hasOption(INPUT_ESCAPED_BY_ARG)) {
      out.setInputEscapedBy(SqoopOptions.toChar(
          in.getOptionValue(INPUT_ESCAPED_BY_ARG)));
      out.setExplicitInputDelims(true);
    }
  }

  protected void applyCodeGenOptions(CommandLine in, SqoopOptions out,
      boolean multiTable) throws InvalidOptionsException {
    if (in.hasOption(CODE_OUT_DIR_ARG)) {
      out.setCodeOutputDir(in.getOptionValue(CODE_OUT_DIR_ARG));
    }

    if (in.hasOption(BIN_OUT_DIR_ARG)) {
      out.setJarOutputDir(in.getOptionValue(BIN_OUT_DIR_ARG));
    }

    if (in.hasOption(PACKAGE_NAME_ARG)) {
      out.setPackageName(in.getOptionValue(PACKAGE_NAME_ARG));
    }

    if (in.hasOption(MAP_COLUMN_JAVA)) {
      out.setMapColumnJava(in.getOptionValue(MAP_COLUMN_JAVA));
    }

    if (!multiTable && in.hasOption(CLASS_NAME_ARG)) {
      out.setClassName(in.getOptionValue(CLASS_NAME_ARG));
    }
  }

  protected void applyHBaseOptions(CommandLine in, SqoopOptions out) {
    if (in.hasOption(HBASE_TABLE_ARG)) {
      out.setHBaseTable(in.getOptionValue(HBASE_TABLE_ARG));
    }

    if (in.hasOption(HBASE_COL_FAM_ARG)) {
      out.setHBaseColFamily(in.getOptionValue(HBASE_COL_FAM_ARG));
    }

    if (in.hasOption(HBASE_ROW_KEY_ARG)) {
      out.setHBaseRowKeyColumn(in.getOptionValue(HBASE_ROW_KEY_ARG));
    }

    out.setHBaseBulkLoadEnabled(in.hasOption(HBASE_BULK_LOAD_ENABLED_ARG));

    if (in.hasOption(HBASE_CREATE_TABLE_ARG)) {
      out.setCreateHBaseTable(true);
    }
  }

  protected void applyValidationOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {
    if (in.hasOption(VALIDATE_ARG)) {
      out.setValidationEnabled(true);
    }

    // Class names are converted to Class objects here so that invalid
    // names fail early.
    if (in.hasOption(VALIDATOR_CLASS_ARG)) {
      out.setValidatorClass(
          getClassByName(in.getOptionValue(VALIDATOR_CLASS_ARG)));
    }

    if (in.hasOption(VALIDATION_THRESHOLD_CLASS_ARG)) {
      out.setValidationThresholdClass(
          getClassByName(in.getOptionValue(VALIDATION_THRESHOLD_CLASS_ARG)));
    }

    if (in.hasOption(VALIDATION_FAILURE_HANDLER_CLASS_ARG)) {
      out.setValidationFailureHandlerClass(getClassByName(
          in.getOptionValue(VALIDATION_FAILURE_HANDLER_CLASS_ARG)));
    }
  }

  protected Class<?> getClassByName(String className)
      throws InvalidOptionsException {
    try {
      return Class.forName(className, true,
          Thread.currentThread().getContextClassLoader());
    } catch (ClassNotFoundException e) {
      throw new InvalidOptionsException(e.getMessage());
    }
  }

  protected void validateCommonOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if (options.getConnectString() == null) {
      throw new InvalidOptionsException(
          "Error: Required argument --connect is missing."
          + HELP_STR);
    }
  }

  protected void validateCodeGenOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if (options.getClassName() != null && options.getPackageName() != null) {
      throw new InvalidOptionsException(
          "--class-name overrides --package-name. You cannot use both."
          + HELP_STR);
    }
  }

  protected void validateOutputFormatOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if (options.doHiveImport()) {
      if (!options.explicitOutputDelims()) {
        // user hasn't manually specified delimiters, and wants to import
        // straight to Hive. Use Hive-style delimiters.
        LOG.info("Using Hive-specific delimiters for output. You can override");
        LOG.info("delimiters with --fields-terminated-by, etc.");
        options.setOutputDelimiters(DelimiterSet.HIVE_DELIMITERS);
      }

      if (options.getOutputEscapedBy() != DelimiterSet.NULL_CHAR) {
        LOG.warn("Hive does not support escape characters in fields;");
        LOG.warn("parse errors in Hive may result from using --escaped-by.");
      }

      if (options.getOutputEnclosedBy() != DelimiterSet.NULL_CHAR) {
        LOG.warn("Hive does not support quoted strings; parse errors");
        LOG.warn("in Hive may result from using --enclosed-by.");
      }
    }
  }

  protected void validateHiveOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if (options.getHiveDelimsReplacement() != null
        && options.doHiveDropDelims()) {
      throw new InvalidOptionsException("The " + HIVE_DROP_DELIMS_ARG
          + " option conflicts with the " + HIVE_DELIMS_REPLACEMENT_ARG
          + " option." + HELP_STR);
    }

    // Make sure that HCatalog and Hive import jobs are not used together.
    String hCatTable = options.getHCatTableName();
    if (hCatTable != null && options.doHiveImport()) {
      throw new InvalidOptionsException("The " + HCATALOG_TABLE_ARG
          + " option conflicts with the " + HIVE_IMPORT_ARG
          + " option." + HELP_STR);
    }

    if (options.doHiveImport()
        && options.getFileLayout() == SqoopOptions.FileLayout.AvroDataFile) {
      throw new InvalidOptionsException("Hive import is not compatible with "
          + "importing into AVRO format.");
    }

    if (options.doHiveImport()
        && options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
      throw new InvalidOptionsException("Hive import is not compatible with "
          + "importing into SequenceFile format.");
    }

    if (options.doHiveImport()
        && options.isAppendMode()
        && !options.getIncrementalMode().equals(IncrementalMode.AppendRows)) {
1427 throw new InvalidOptionsException("Append mode for hive imports is not "
1428 + " yet supported. Please remove the parameter --append-mode");
    }

    // Many users report issues when trying to import data directly into the
    // Hive warehouse directory. This should prevent users from doing so
    // when the default warehouse location is used.
    String defaultHiveWarehouse = "/user/hive/warehouse";
    if (options.doHiveImport()
        && ((
            options.getWarehouseDir() != null
            && options.getWarehouseDir().startsWith(defaultHiveWarehouse)
            ) || (
            options.getTargetDir() != null
            && options.getTargetDir().startsWith(defaultHiveWarehouse)
            ))) {
      LOG.warn("It seems that you're doing a hive import directly into the");
      LOG.warn("default hive warehouse directory, which is not supported.");
      LOG.warn("Sqoop first imports data into a separate directory and then");
      LOG.warn("loads it into hive. Please consider pointing --target-dir or");
      LOG.warn("--warehouse-dir outside of /user/hive/warehouse if you");
      LOG.warn("run into any issues.");
    }

    // Warn about using hive-specific arguments without hive import itself.
    // In HCatalog support, some of the Hive options are reused.
    if (!options.doHiveImport()
        && (((options.getHiveHome() != null
            && !options.getHiveHome().
                equals(SqoopOptions.getHiveHomeDefault())
            && hCatTable == null))
        || options.doOverwriteHiveTable()
        || options.doFailIfHiveTableExists()
        || (options.getHiveTableName() != null
            && !options.getHiveTableName().equals(options.getTableName()))
        || (options.getHivePartitionKey() != null && hCatTable == null)
        || (options.getHivePartitionValue() != null && hCatTable == null)
        || (options.getMapColumnHive().size() > 0 && hCatTable == null))) {
      LOG.warn("It seems that you've specified at least one of the following:");
      LOG.warn("\t--hive-home");
      LOG.warn("\t--hive-overwrite");
      LOG.warn("\t--create-hive-table");
      LOG.warn("\t--hive-table");
      LOG.warn("\t--hive-partition-key");
      LOG.warn("\t--hive-partition-value");
      LOG.warn("\t--map-column-hive");
      LOG.warn("without specifying the --hive-import parameter. Please note");
      LOG.warn("that those arguments will not be used in this session. Either");
      LOG.warn("specify --hive-import to apply them correctly, or remove them");
      LOG.warn("from the command line to suppress this warning.");
      LOG.info("Please note that the --hive-home, --hive-partition-key,");
      LOG.info("\t --hive-partition-value and --map-column-hive options");
      LOG.info("\t are also valid for HCatalog imports and exports");
    }
  }

  protected void validateAccumuloOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if ((options.getAccumuloColFamily() != null
        && options.getAccumuloTable() == null)
        || (options.getAccumuloColFamily() == null
        && options.getAccumuloTable() != null)) {
      throw new InvalidOptionsException(
          "Both --accumulo-table and --accumulo-column-family must be set."
          + HELP_STR);
    }

    if (options.getAccumuloTable() != null
        && options.getHBaseTable() != null) {
      throw new InvalidOptionsException("HBase import is incompatible with "
          + "Accumulo import.");
    }
    if (options.getAccumuloTable() != null
        && options.getFileLayout() != SqoopOptions.FileLayout.TextFile) {
1501 throw new InvalidOptionsException("Accumulo import is not compatible "
1502 + "with importing into file format.");
    }
    if (options.getAccumuloTable() != null
        && options.getHBaseColFamily() != null) {
      throw new InvalidOptionsException("Use --accumulo-column-family with "
          + "Accumulo import.");
    }
    if (options.getAccumuloTable() != null
        && options.getAccumuloUser() == null) {
      throw new InvalidOptionsException("Must specify Accumulo user.");
    }
    if (options.getAccumuloTable() != null
        && options.getAccumuloInstance() == null) {
      throw new InvalidOptionsException("Must specify Accumulo instance.");
    }
    if (options.getAccumuloTable() != null
        && options.getAccumuloZookeepers() == null) {
      throw new InvalidOptionsException("Must specify Zookeeper server(s).");
    }
  }

  protected void validateHCatalogOptions(SqoopOptions options)
      throws InvalidOptionsException {
    // Make sure that HCatalog and Hive import jobs are not used together.
    String hCatTable = options.getHCatTableName();
    if (hCatTable == null) {
      if (options.getHCatHome() != null && !options.getHCatHome().
          equals(SqoopOptions.getHCatHomeDefault())) {
        LOG.warn("--hcatalog-home option will be ignored in "
            + "non-HCatalog jobs");
      }
      if (options.getHCatDatabaseName() != null) {
        LOG.warn("--hcatalog-database option will be ignored "
            + "without --hcatalog-table");
      }

      if (options.getHCatStorageStanza() != null) {
        LOG.warn("--hcatalog-storage-stanza option will be ignored "
            + "without --hcatalog-table");
      }
      return;
    }

    if (options.explicitInputDelims()) {
      LOG.warn("Input field/record delimiter options are not used in"
          + " HCatalog jobs unless the format is text. It is better to use"
          + " --hive-import in those cases.");
    }
    if (options.explicitOutputDelims()
        || options.getHiveDelimsReplacement() != null
        || options.doHiveDropDelims()) {
      LOG.warn("Output field/record delimiter options are not useful in"
          + " HCatalog jobs for most output types except text-based formats."
          + " It is better to use --hive-import in those cases.");
    }
    if (options.doHiveImport()) {
      throw new InvalidOptionsException("The " + HCATALOG_TABLE_ARG
          + " option conflicts with the " + HIVE_IMPORT_ARG
          + " option." + HELP_STR);
    }
    if (options.getTargetDir() != null) {
      throw new InvalidOptionsException("The " + TARGET_DIR_ARG
          + " option conflicts with the " + HCATALOG_TABLE_ARG
          + " option." + HELP_STR);
    }
    if (options.getWarehouseDir() != null) {
      throw new InvalidOptionsException("The " + WAREHOUSE_DIR_ARG
          + " option conflicts with the " + HCATALOG_TABLE_ARG
          + " option." + HELP_STR);
    }

    if (options.isAppendMode()) {
      throw new InvalidOptionsException("Append mode for imports is not "
          + "compatible with HCatalog. Please remove the --append "
          + "parameter.");
    }
    if (options.getExportDir() != null) {
      throw new InvalidOptionsException("The " + EXPORT_PATH_ARG
          + " option conflicts with the " + HCATALOG_TABLE_ARG
          + " option." + HELP_STR);
    }

    if (options.getFileLayout() == SqoopOptions.FileLayout.AvroDataFile) {
      throw new InvalidOptionsException("HCatalog job is not compatible with "
          + "the AVRO format option " + FMT_AVRODATAFILE_ARG
          + "." + HELP_STR);
    }

    if (options.getFileLayout() == SqoopOptions.FileLayout.SequenceFile) {
      throw new InvalidOptionsException("HCatalog job is not compatible with "
          + "the SequenceFile format option " + FMT_SEQUENCEFILE_ARG
          + "." + HELP_STR);
    }

    if (options.getHCatalogPartitionKeys() != null
        && options.getHCatalogPartitionValues() == null) {
      throw new InvalidOptionsException("Either both --hcatalog-partition-keys"
          + " and --hcatalog-partition-values should be provided or both of"
          + " these options should be omitted.");
    }

    if (options.getHCatalogPartitionKeys() != null) {
      if (options.getHivePartitionKey() != null) {
        LOG.warn("Both --hcatalog-partition-keys and --hive-partition-key "
            + "options are provided. The --hive-partition-key option will "
            + "be ignored.");
      }

      String[] keys = options.getHCatalogPartitionKeys().split(",");
      String[] vals = options.getHCatalogPartitionValues().split(",");

      if (keys.length != vals.length) {
        throw new InvalidOptionsException("The number of static partition "
            + "keys provided does not match the number of partition values.");
      }

      for (int i = 0; i < keys.length; ++i) {
        String k = keys[i].trim();
        if (k.isEmpty()) {
          throw new InvalidOptionsException(
              "Invalid HCatalog static partition key at position " + i);
        }
      }
1630 for (int i = 0; i < vals.length; ++i) {
1631 String v = vals[i].trim();
1632 if (v.isEmpty()) {
1633 throw new InvalidOptionsException(
1634 "Invalid HCatalog static partition key at position " + v);
1635 }
1636 }
    } else {
      if (options.getHivePartitionKey() != null
          && options.getHivePartitionValue() == null) {
        throw new InvalidOptionsException("Either both --hive-partition-key and"
            + " --hive-partition-value options should be provided or both of "
            + "these options should be omitted");
      }
    }
1645 if (options.doCreateHCatalogTable() &&
1646 options.doDropAndCreateHCatalogTable()) {
1647 throw new InvalidOptionsException("Options --create-hcatalog-table" +
1648 " and --drop-and-create-hcatalog-table are mutually exclusive." +
1649 " Use any one of them");
1650 }
  }

  protected void validateHBaseOptions(SqoopOptions options)
      throws InvalidOptionsException {
    if ((options.getHBaseColFamily() != null && options.getHBaseTable() == null)
        || (options.getHBaseColFamily() == null
        && options.getHBaseTable() != null)) {
      throw new InvalidOptionsException(
          "Both --hbase-table and --column-family must be set together."
          + HELP_STR);
    }

    if (options.isBulkLoadEnabled() && options.getHBaseTable() == null) {
      String validationMessage = String.format("Can't run import with %s "
          + "without %s",
          BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG,
          BaseSqoopTool.HBASE_TABLE_ARG);
      throw new InvalidOptionsException(validationMessage);
    }
  }

  /**
   * Given an array of extra arguments (usually populated via
   * this.extraArguments), determine the offset of the first '--'
   * argument in the list. Return 'extra.length' if there is none.
   */
  protected int getDashPosition(String [] extra) {
    int dashPos = extra.length;
    for (int i = 0; i < extra.length; i++) {
      if (extra[i].equals("--")) {
        dashPos = i;
        break;
      }
    }

    return dashPos;
  }
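
  // For example (illustrative values): getDashPosition(new String[] {"-D",
  // "k=v", "--", "--direct"}) returns 2, while an array containing no "--"
  // entry returns its length.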
}