/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.cloudera.sqoop.hive;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.avro.generic.GenericRecord;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.SqoopOptions.InvalidOptionsException;
import com.cloudera.sqoop.testutil.CommonArgs;
import com.cloudera.sqoop.testutil.ImportJobTestCase;
import com.cloudera.sqoop.tool.BaseSqoopTool;
import com.cloudera.sqoop.tool.CodeGenTool;
import com.cloudera.sqoop.tool.CreateHiveTableTool;
import com.cloudera.sqoop.tool.ImportTool;
import com.cloudera.sqoop.tool.SqoopTool;
import org.apache.commons.cli.ParseException;
import org.kitesdk.data.Dataset;
import org.kitesdk.data.DatasetReader;
import org.kitesdk.data.Datasets;

/**
 * Test HiveImport capability after an import to HDFS.
 */
public class TestHiveImport extends ImportJobTestCase {

  public static final Log LOG = LogFactory.getLog(
      TestHiveImport.class.getName());

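  /**
   * Every test runs HiveImport in test mode: instead of invoking a real Hive
   * install, a mock hive shell checks the generated script against the
   * expected script configured by runImportTest().
   */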
  @Before
  public void setUp() {
    super.setUp();
    HiveImport.setTestMode(true);
  }

  @After
  public void tearDown() {
    super.tearDown();
    HiveImport.setTestMode(false);
  }

  /**
   * Sets the expected number of columns in the table being manipulated
   * by the test. Under the hood, this sets the expected column names
   * to DATA_COLi for 0 <= i < numCols.
   * @param numCols the number of columns to be created.
   */
  protected void setNumCols(int numCols) {
    String [] cols = new String[numCols];
    for (int i = 0; i < numCols; i++) {
      cols[i] = "DATA_COL" + i;
    }

    setColNames(cols);
  }

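  /**
   * @return the column types used by the tests that exercise newline and
   * delimiter handling.
   */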
  protected String[] getTypesNewLineTest() {
    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    return types;
  }

  /**
   * Create the argv to pass to Sqoop.
   * @return the argv as an array of strings.
   */
  protected String [] getArgv(boolean includeHadoopFlags, String [] moreArgs) {
    ArrayList<String> args = new ArrayList<String>();

    if (includeHadoopFlags) {
      CommonArgs.addHadoopFlags(args);
    }

    if (null != moreArgs) {
      for (String arg : moreArgs) {
        args.add(arg);
      }
    }

    args.add("--table");
    args.add(getTableName());
    args.add("--warehouse-dir");
    args.add(getWarehouseDir());
    args.add("--connect");
    args.add(getConnectString());
    args.add("--hive-import");
    String [] colNames = getColNames();
    if (null != colNames) {
      args.add("--split-by");
      args.add(colNames[0]);
    } else {
      fail("Could not determine column names.");
    }

    args.add("--num-mappers");
    args.add("1");

    for (String a : args) {
      LOG.debug("ARG : " + a);
    }

    return args.toArray(new String[0]);
  }

  /**
   * @return the argv to supply to a create-table only job for Hive imports.
   */
  protected String [] getCreateTableArgv(boolean includeHadoopFlags,
      String [] moreArgs) {

    ArrayList<String> args = new ArrayList<String>();

    if (null != moreArgs) {
      for (String arg : moreArgs) {
        args.add(arg);
      }
    }

    args.add("--table");
    args.add(getTableName());
    args.add("--connect");
    args.add(getConnectString());

    return args.toArray(new String[0]);
  }

  /**
   * @return the argv to supply to a code-gen only job for Hive imports.
   */
  protected String [] getCodeGenArgs() {
    ArrayList<String> args = new ArrayList<String>();

    args.add("--table");
    args.add(getTableName());
    args.add("--connect");
    args.add(getConnectString());
    args.add("--hive-import");

    return args.toArray(new String[0]);
  }

  /**
   * @return the argv to supply to a ddl-executing-only job for Hive imports.
   */
  protected String [] getCreateHiveTableArgs(String [] extraArgs) {
    ArrayList<String> args = new ArrayList<String>();

    args.add("--table");
    args.add(getTableName());
    args.add("--connect");
    args.add(getConnectString());

    if (null != extraArgs) {
      for (String arg : extraArgs) {
        args.add(arg);
      }
    }

    return args.toArray(new String[0]);
  }

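  /**
   * Parse the given arguments with the given Sqoop tool, failing the test if
   * the options are invalid.
   */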
  private SqoopOptions getSqoopOptions(String [] args, SqoopTool tool) {
    SqoopOptions opts = null;
    try {
      opts = tool.parseArguments(args, null, null, true);
    } catch (Exception e) {
      fail("Invalid options: " + e.toString());
    }

    return opts;
  }

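  /**
   * Create a table with one row of the given types and values, point the mock
   * hive shell at the expected verification script, and run the given Sqoop
   * tool against it.
   */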
  private void runImportTest(String tableName, String [] types,
      String [] values, String verificationScript, String [] args,
      SqoopTool tool) throws IOException {

    // create a table and populate it with a row...
    createTableWithColTypes(types, values);

    // set up our mock hive shell to compare our generated script
    // against the correct expected one.
    SqoopOptions options = getSqoopOptions(args, tool);
    String hiveHome = options.getHiveHome();
    assertNotNull("hive.home was not set", hiveHome);
    String testDataPath = new Path(new Path(hiveHome),
        "scripts/" + verificationScript).toString();
    System.setProperty("expected.script",
        new File(testDataPath).getAbsolutePath());

    // verify that we can import it correctly into hive.
    runImport(tool, args);
  }

  /** Test that we can generate a file containing the DDL and not import. */
  @Test
  public void testGenerateOnly() throws IOException {
    final String TABLE_NAME = "GenerateOnly";
    setCurTableName(TABLE_NAME);
    setNumCols(1);

    // Figure out where our target generated .q file is going to be.
    SqoopOptions options = getSqoopOptions(getArgv(false, null),
        new ImportTool());
    Path ddlFile = new Path(new Path(options.getCodeOutputDir()),
        TABLE_NAME + ".q");
    FileSystem fs = FileSystem.getLocal(new Configuration());

    // If it's already there, remove it before running the test to ensure
    // that it's the current test that generated the file.
    if (fs.exists(ddlFile)) {
      if (!fs.delete(ddlFile, false)) {
        LOG.warn("Could not delete previous ddl file: " + ddlFile);
      }
    }

    // Run a basic import, but specify that we're just generating definitions.
    String [] types = { "INTEGER" };
    String [] vals = { "42" };
    runImportTest(TABLE_NAME, types, vals, null, getCodeGenArgs(),
        new CodeGenTool());

    // Test that the generated definition file exists.
    assertTrue("Couldn't find expected ddl file", fs.exists(ddlFile));

    Path hiveImportPath = new Path(new Path(options.getWarehouseDir()),
        TABLE_NAME);
    assertFalse("Import actually happened!", fs.exists(hiveImportPath));
  }


  /** Test that strings and ints are handled in the normal fashion. */
  @Test
  public void testNormalHiveImport() throws IOException {
    final String TABLE_NAME = "NORMAL_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    runImportTest(TABLE_NAME, types, vals, "normalImport.q",
        getArgv(false, null), new ImportTool());
  }

  /** Test that strings and ints are handled in the normal fashion as parquet
   * file. */
  @Test
  public void testNormalHiveImportAsParquet() throws IOException {
    final String TABLE_NAME = "NORMAL_HIVE_IMPORT_AS_PARQUET";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = {"--as-parquetfile"};

    runImportTest(TABLE_NAME, types, vals, "", getArgv(false, extraArgs),
        new ImportTool());
    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});
  }

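  /**
   * Assert that the Kite dataset behind the given Hive table exists and
   * contains exactly the rows in valsArray, in any order.
   */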
  private void verifyHiveDataset(String tableName, Object[][] valsArray) {
    String datasetUri = String.format("dataset:hive:default/%s",
        tableName.toLowerCase());
    assertTrue(Datasets.exists(datasetUri));
    Dataset dataset = Datasets.load(datasetUri);
    assertFalse(dataset.isEmpty());

    DatasetReader<GenericRecord> reader = dataset.newReader();
    try {
      List<String> expectations = new ArrayList<String>();
      if (valsArray != null) {
        for (Object[] vals : valsArray) {
          expectations.add(Arrays.toString(vals));
        }
      }

      while (reader.hasNext() && expectations.size() > 0) {
        String actual = Arrays.toString(
            convertGenericRecordToArray(reader.next()));
        assertTrue("Expect record: " + actual, expectations.remove(actual));
      }
      assertFalse(reader.hasNext());
      assertEquals(0, expectations.size());
    } finally {
      reader.close();
    }
  }

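  /** Flatten an Avro GenericRecord's fields into an Object array. */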
  private static Object[] convertGenericRecordToArray(GenericRecord record) {
    Object[] result = new Object[record.getSchema().getFields().size()];
    for (int i = 0; i < result.length; i++) {
      result[i] = record.get(i);
    }
    return result;
  }

  /** Test that table is created in hive with no data import. */
  @Test
  public void testCreateOnlyHiveImport() throws IOException {
    final String TABLE_NAME = "CREATE_ONLY_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    runImportTest(TABLE_NAME, types, vals,
        "createOnlyImport.q", getCreateHiveTableArgs(null),
        new CreateHiveTableTool());
  }

  /**
   * Test that table is created in hive and replaces the existing table if
   * any.
   */
  @Test
  public void testCreateOverwriteHiveImport() throws IOException {
    final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = {"--hive-overwrite", "--create-hive-table"};
    runImportTest(TABLE_NAME, types, vals,
        "createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
        new CreateHiveTableTool());
    runImportTest(TABLE_NAME, types, vals,
        "createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
        new CreateHiveTableTool());
  }

  /**
   * Test that table is created in hive and replaces the existing table if
   * any.
   */
  @Test
  public void testCreateOverwriteHiveImportAsParquet() throws IOException {
    final String TABLE_NAME = "CREATE_OVERWRITE_HIVE_IMPORT_AS_PARQUET";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = {"--as-parquetfile"};
    ImportTool tool = new ImportTool();

    runImportTest(TABLE_NAME, types, vals, "", getArgv(false, extraArgs), tool);
    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});

    String [] valsToOverwrite = { "'test2'", "24", "'somestring2'" };
    String [] extraArgsForOverwrite = {"--as-parquetfile", "--hive-overwrite"};
    runImportTest(TABLE_NAME, types, valsToOverwrite, "",
        getArgv(false, extraArgsForOverwrite), tool);
    verifyHiveDataset(TABLE_NAME, new Object[][] {{"test2", 24, "somestring2"}});
  }

  /**
   * Test that records are appended to an existing table.
   */
  @Test
  public void testAppendHiveImportAsParquet() throws IOException {
    final String TABLE_NAME = "APPEND_HIVE_IMPORT_AS_PARQUET";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = {"--as-parquetfile"};
    String [] args = getArgv(false, extraArgs);
    ImportTool tool = new ImportTool();

    runImportTest(TABLE_NAME, types, vals, "", args, tool);
    verifyHiveDataset(TABLE_NAME, new Object[][]{{"test", 42, "somestring"}});

    String [] valsToAppend = { "'test2'", "4242", "'somestring2'" };
    runImportTest(TABLE_NAME, types, valsToAppend, "", args, tool);
    verifyHiveDataset(TABLE_NAME, new Object[][] {
        {"test2", 4242, "somestring2"}, {"test", 42, "somestring"}});
  }

  /** Test that dates are coerced properly to strings. */
  @Test
  public void testDate() throws IOException {
    final String TABLE_NAME = "DATE_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(2);
    String [] types = { "VARCHAR(32)", "DATE" };
    String [] vals = { "'test'", "'2009-05-12'" };
    runImportTest(TABLE_NAME, types, vals, "dateImport.q",
        getArgv(false, null), new ImportTool());
  }

  /** Test that NUMERICs are coerced to doubles. */
  @Test
  public void testNumeric() throws IOException {
    final String TABLE_NAME = "NUMERIC_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(2);
    String [] types = { "NUMERIC", "CHAR(64)" };
    String [] vals = { "3.14159", "'foo'" };
    runImportTest(TABLE_NAME, types, vals, "numericImport.q",
        getArgv(false, null), new ImportTool());
  }

  /** If bin/hive returns an error exit status, we should get an IOException. */
  @Test
  public void testHiveExitFails() {
    // The expected script is different than the one which would be generated
    // by this, so we expect an IOException out.
    final String TABLE_NAME = "FAILING_HIVE_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(2);
    String [] types = { "NUMERIC", "CHAR(64)" };
    String [] vals = { "3.14159", "'foo'" };
    try {
      runImportTest(TABLE_NAME, types, vals, "failingImport.q",
          getArgv(false, null), new ImportTool());
      // If we get here, then the run succeeded -- which is incorrect.
      fail("FAILING_HIVE_IMPORT test should have thrown IOException");
    } catch (IOException ioe) {
      // expected; ok.
    }
  }

  /** Test that we can set delimiters how we want them. */
  @Test
  public void testCustomDelimiters() throws IOException {
    final String TABLE_NAME = "CUSTOM_DELIM_IMPORT";
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
    String [] vals = { "'test'", "42", "'somestring'" };
    String [] extraArgs = {
      "--fields-terminated-by", ",",
      "--lines-terminated-by", "|",
    };
    runImportTest(TABLE_NAME, types, vals, "customDelimImport.q",
        getArgv(false, extraArgs), new ImportTool());
  }

  /**
   * Test hive import with a row that has a new line in it, dropping the Hive
   * delimiters from the imported data.
   */
  @Test
  public void testFieldWithHiveDelims() throws IOException,
      InterruptedException {
    final String TABLE_NAME = "FIELD_WITH_NL_HIVE_IMPORT";

    LOG.info("Doing import of single row into FIELD_WITH_NL_HIVE_IMPORT table");
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String[] types = getTypesNewLineTest();
    String[] vals = { "'test with \n new lines \n'", "42",
        "'oh no " + '\01' + " field delims " + '\01' + "'", };
    String[] moreArgs = { "--" + BaseSqoopTool.HIVE_DROP_DELIMS_ARG };

    runImportTest(TABLE_NAME, types, vals, "fieldWithNewlineImport.q",
        getArgv(false, moreArgs), new ImportTool());

    LOG.info("Validating data in single row is present in: "
        + "FIELD_WITH_NL_HIVE_IMPORT table");

    // Ideally, we would actually invoke hive code to verify that records with
    // record and field delimiters have their values replaced and that we have
    // the proper number of hive records. Unfortunately, this is a non-trivial
    // task, better dealt with at the integration-test level.
    //
    // Instead, this assumes the path of the generated table and just validates
    // the map job output.

    // Get and read the raw output file
    String whDir = getWarehouseDir();
    File p = new File(new File(whDir, TABLE_NAME), "part-m-00000");
    File f = new File(p.toString());
    FileReader fr = new FileReader(f);
    BufferedReader br = new BufferedReader(fr);
    try {
      // verify the output
      assertEquals(br.readLine(), "test with new lines " + '\01' + "42"
          + '\01' + "oh no field delims ");
      assertEquals(br.readLine(), null); // should only be one line
    } catch (IOException ioe) {
      fail("Unable to read files generated from hive");
    } finally {
      br.close();
    }
  }

  /**
   * Test hive import with a row that has a new line in it, replacing the Hive
   * delimiters in the imported data.
   */
  @Test
  public void testFieldWithHiveDelimsReplacement() throws IOException,
      InterruptedException {
    final String TABLE_NAME = "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT";

    LOG.info("Doing import of single row into "
        + "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT table");
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String[] types = getTypesNewLineTest();
    String[] vals = { "'test with\nnew lines\n'", "42",
        "'oh no " + '\01' + " field delims " + '\01' + "'", };
    String[] moreArgs = { "--" + BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG, " " };

    runImportTest(TABLE_NAME, types, vals,
        "fieldWithNewlineReplacementImport.q", getArgv(false, moreArgs),
        new ImportTool());

    LOG.info("Validating data in single row is present in: "
        + "FIELD_WITH_NL_REPLACEMENT_HIVE_IMPORT table");

    // Ideally, we would actually invoke hive code to verify that records with
    // record and field delimiters have their values replaced and that we have
    // the proper number of hive records. Unfortunately, this is a non-trivial
    // task, better dealt with at the integration-test level.
    //
    // Instead, this assumes the path of the generated table and just validates
    // the map job output.

    // Get and read the raw output file
    String whDir = getWarehouseDir();
    File p = new File(new File(whDir, TABLE_NAME), "part-m-00000");
    File f = new File(p.toString());
    FileReader fr = new FileReader(f);
    BufferedReader br = new BufferedReader(fr);
    try {
      // verify the output
      assertEquals(br.readLine(), "test with new lines " + '\01' + "42"
          + '\01' + "oh no field delims ");
      assertEquals(br.readLine(), null); // should only be one line
    } catch (IOException ioe) {
      fail("Unable to read files generated from hive");
    } finally {
      br.close();
    }
  }

  /**
   * Test hive drop and replace option validation.
   */
  @Test
  public void testHiveDropAndReplaceOptionValidation() throws ParseException {
    LOG.info("Testing conflicting Hive delimiter drop/replace options");

    setNumCols(3);
    String[] moreArgs = { "--" + BaseSqoopTool.HIVE_DELIMS_REPLACEMENT_ARG, " ",
        "--" + BaseSqoopTool.HIVE_DROP_DELIMS_ARG, };

    ImportTool tool = new ImportTool();
    try {
      tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
          null, true));
      fail("Expected InvalidOptionsException");
    } catch (InvalidOptionsException ex) {
      /* success */
    }
  }

  /**
   * Test hive import into a partitioned Hive table.
   */
  @Test
  public void testImportHiveWithPartitions() throws IOException,
      InterruptedException {
    final String TABLE_NAME = "PARTITION_HIVE_IMPORT";

    LOG.info("Doing import of single row into PARTITION_HIVE_IMPORT table");
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
    String[] vals = { "'whoop'", "42", "'I am a row in a partition'", };
    String[] moreArgs = { "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG, "ds",
        "--" + BaseSqoopTool.HIVE_PARTITION_VALUE_ARG, "20110413", };

    runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
        getArgv(false, moreArgs), new ImportTool());
  }

  /**
   * If the partition key is set to one of the imported columns, we should get
   * an IOException.
   */
  @Test
  public void testImportWithBadPartitionKey() {
    final String TABLE_NAME = "FAILING_PARTITION_HIVE_IMPORT";

    LOG.info("Doing import of single row into " + TABLE_NAME + " table");
    setCurTableName(TABLE_NAME);
    setNumCols(3);
    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
    String[] vals = { "'key'", "42", "'I am a row in a partition'", };

    String partitionKey = getColNames()[0];

    // Specify 1st column as partition key and import every column of the
    // table by default (i.e. no --columns option).
    String[] moreArgs1 = {
        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
        partitionKey,
    };

    // Specify 1st column as both partition key and importing column.
    String[] moreArgs2 = {
        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
        partitionKey,
        "--" + BaseSqoopTool.COLUMNS_ARG,
        partitionKey,
    };

    // Test hive-import with the 1st args.
    try {
      runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
          getArgv(false, moreArgs1), new ImportTool());
      fail(TABLE_NAME + " test should have thrown IOException");
    } catch (IOException ioe) {
      // expected; ok.
    }

    // Test hive-import with the 2nd args.
    try {
      runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
          getArgv(false, moreArgs2), new ImportTool());
      fail(TABLE_NAME + " test should have thrown IOException");
    } catch (IOException ioe) {
      // expected; ok.
    }

    // Test create-hive-table with the 1st args.
    try {
      runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
          getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
      fail(TABLE_NAME + " test should have thrown IOException");
    } catch (IOException ioe) {
      // expected; ok.
    }
  }
}
662 }