src/java/org/apache/sqoop/tool/ImportAllTablesTool.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.sqoop.tool;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.SqoopOptions.InvalidOptionsException;
import org.apache.sqoop.cli.RelatedOptions;
import org.apache.sqoop.hive.HiveImport;
import org.apache.sqoop.util.ImportException;

/**
 * Tool that performs database imports of all tables in a database to HDFS.
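 *
 * <p>Illustrative command-line use (connect string and table names are
 * placeholders; {@code --exclude-tables} is the long form of the option
 * registered below):</p>
 * <pre>
 *   sqoop import-all-tables \
 *       --connect jdbc:mysql://db.example.com/corp \
 *       --exclude-tables audit_log,tmp_staging
 * </pre>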
 */
public class ImportAllTablesTool extends ImportTool {

  public static final Log LOG = LogFactory.getLog(
      ImportAllTablesTool.class.getName());

  public ImportAllTablesTool() {
    super("import-all-tables", true);
  }

  /** {@inheritDoc} */
  @Override
  @SuppressWarnings("static-access")
  protected RelatedOptions getImportOptions() {
    // Imports
    RelatedOptions importOpts = super.getImportOptions();

    importOpts.addOption(OptionBuilder.withArgName("tables")
        .hasArg().withDescription("Tables to exclude when importing all tables")
        .withLongOpt(ALL_TABLE_EXCLUDES_ARG)
        .create());

    return importOpts;
  }

  /** {@inheritDoc} */
  @Override
  public void applyOptions(CommandLine in, SqoopOptions out)
      throws InvalidOptionsException {
    super.applyOptions(in, out);

    if (in.hasOption(ALL_TABLE_EXCLUDES_ARG)) {
      out.setAllTablesExclude(in.getOptionValue(ALL_TABLE_EXCLUDES_ARG));
    }
  }

  /** {@inheritDoc} */
  @Override
  public int run(SqoopOptions options) {
    HiveImport hiveImport = null;
    Set<String> excludes = new HashSet<String>();

    if (!init(options)) {
      return 1;
    }

    try {
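      // A single HiveImport helper is created up front and reused for every
      // table when a Hive import was requested on the command line.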
      if (options.doHiveImport()) {
        hiveImport = new HiveImport(options, manager, options.getConf(), false);
      }

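      // --exclude-tables takes a single comma-separated list; split it into a
      // set so membership checks in the loop below are straightforward.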
      if (options.getAllTablesExclude() != null) {
        excludes.addAll(Arrays.asList(options.getAllTablesExclude().split(",")));
      }

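      // Ask the connection manager for the list of tables to import.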
      String[] tables = manager.listTables();
      if (null == tables) {
        System.err.println("Could not retrieve tables list from server");
        LOG.error("manager.listTables() returned null");
        return 1;
      } else {
        for (String tableName : tables) {
          if (excludes.contains(tableName)) {
            System.out.println("Skipping table: " + tableName);
          } else {
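            // Import with a per-table copy of the options so changes made
            // while importing one table do not carry over to the next.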
            SqoopOptions clonedOptions = (SqoopOptions) options.clone();
            importTable(clonedOptions, tableName, hiveImport);
          }
        }
      }
    } catch (IOException ioe) {
      LOG.error("Encountered IOException running import job: "
          + ioe.toString());
      rethrowIfRequired(options, ioe);
      return 1;
    } catch (ImportException ie) {
      LOG.error("Error during import: " + ie.toString());
      rethrowIfRequired(options, ie);
      return 1;
    } finally {
      destroy(options);
    }

    return 0;
  }

}