Skip to content

Commit 45d5837

Browse files
committed
MAPREDUCE-5102. fix coverage org.apache.hadoop.mapreduce.lib.db and org.apache.hadoop.mapred.lib.db. Contributed by Aleksey Gorshkov, Andrey Klochkov, and Nathan Roberts
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1530816 13f79535-47bb-0310-9956-ffa450edef68
1 parent 6ee5440 commit 45d5837

File tree

5 files changed

+607
-0
lines changed

5 files changed

+607
-0
lines changed

hadoop-mapreduce-project/CHANGES.txt

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -187,6 +187,10 @@ Release 2.3.0 - UNRELEASED
187187

188188
MAPREDUCE-5514. Fix TestRMContainerAllocator. (Zhijie Shen via acmurthy)
189189

190+
MAPREDUCE-5102. fix coverage org.apache.hadoop.mapreduce.lib.db and
191+
org.apache.hadoop.mapred.lib.db (Aleksey Gorshkov, Andrey Klochkov, and
192+
Nathan Roberts via jlowe)
193+
190194
Release 2.2.1 - UNRELEASED
191195

192196
INCOMPATIBLE CHANGES
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,157 @@
1+
/**
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package org.apache.hadoop.mapred.lib.db;
20+
21+
import java.sql.DriverManager;
22+
23+
import org.apache.hadoop.io.LongWritable;
24+
import org.apache.hadoop.mapred.JobConf;
25+
import org.apache.hadoop.mapred.RecordReader;
26+
import org.apache.hadoop.mapred.Reporter;
27+
import org.apache.hadoop.mapred.lib.db.DBInputFormat.DBInputSplit;
28+
import org.apache.hadoop.mapred.lib.db.DBInputFormat.DBRecordReader;
29+
import org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable;
30+
import org.apache.hadoop.mapred.InputSplit;
31+
import org.apache.hadoop.mapreduce.MRJobConfig;
32+
import org.apache.hadoop.mapred.lib.db.DBConfiguration;
33+
import org.apache.hadoop.mapreduce.lib.db.DriverForTest;
34+
import org.junit.Test;
35+
36+
import static org.junit.Assert.*;
37+
import static org.mockito.Mockito.*;
38+
39+
/**
 * Unit tests for the old-API {@link DBInputFormat} (org.apache.hadoop.mapred.lib.db):
 * split generation, record-reader basics, and the DBConfiguration property wiring.
 * Uses {@link DriverForTest}, a mock JDBC driver, so no real database is needed.
 */
public class TestDBInputFormat {

  /**
   * Tests that DBInputFormat splits the (mocked) result set into chunks.
   * The mock driver reports 15 rows (see DriverForTest); with NUM_MAPS = 3
   * the format is expected to yield 3 splits — presumably 15 / 3 = 5 rows
   * each, hence the getLength() == 5 assertion (split-sizing is internal to
   * DBInputFormat; confirm against its getSplits implementation).
   *
   * @throws Exception on any test failure
   */
  @Test(timeout = 10000)
  public void testDBInputFormat() throws Exception {
    JobConf configuration = new JobConf();
    setupDriver(configuration);

    DBInputFormat<NullDBWritable> format = new DBInputFormat<NullDBWritable>();
    format.setConf(configuration);
    // NOTE(review): setConf is invoked a second time — presumably deliberate,
    // to cover the re-configuration path where a connection already exists;
    // confirm intent before removing.
    format.setConf(configuration);
    DBInputFormat.DBInputSplit splitter = new DBInputFormat.DBInputSplit(1, 10);
    Reporter reporter = mock(Reporter.class);
    RecordReader<LongWritable, NullDBWritable> reader = format.getRecordReader(
        splitter, configuration, reporter);

    configuration.setInt(MRJobConfig.NUM_MAPS, 3);
    InputSplit[] lSplits = format.getSplits(configuration, 3);
    assertEquals(5, lSplits[0].getLength());
    assertEquals(3, lSplits.length);

    // Basic record-reader sanity checks: key type, initial position/progress.
    assertEquals(LongWritable.class, reader.createKey().getClass());
    assertEquals(0, reader.getPos());
    assertEquals(0, reader.getProgress(), 0.001);
    reader.close();
  }

  /**
   * Tests that DBInputFormat.setInput and DBConfiguration.configureDB
   * store their arguments under the expected DBConfiguration.* property keys,
   * for both the (table, conditions, orderBy, fields) and the
   * (query, countQuery) variants, with and without credentials.
   */
  @Test (timeout = 5000)
  public void testSetInput() {
    JobConf configuration = new JobConf();

    // Variant 1: table + conditions + orderBy + explicit field names.
    String[] fieldNames = { "field1", "field2" };
    DBInputFormat.setInput(configuration, NullDBWritable.class, "table",
        "conditions", "orderBy", fieldNames);
    assertEquals(
        "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
        configuration.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null)
            .getName());
    assertEquals("table",
        configuration.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));

    String[] fields = configuration
        .getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
    assertEquals("field1", fields[0]);
    assertEquals("field2", fields[1]);

    assertEquals("conditions",
        configuration.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
    assertEquals("orderBy",
        configuration.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

    // Variant 2: free-form query + count query on a fresh configuration.
    configuration = new JobConf();

    DBInputFormat.setInput(configuration, NullDBWritable.class, "query",
        "countQuery");
    assertEquals("query", configuration.get(DBConfiguration.INPUT_QUERY, null));
    assertEquals("countQuery",
        configuration.get(DBConfiguration.INPUT_COUNT_QUERY, null));

    // configureDB with full credentials: all four properties must be set.
    JobConf jConfiguration = new JobConf();
    DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl", "user",
        "password");
    assertEquals("driverClass",
        jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
    assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
    assertEquals("user", jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
    assertEquals("password",
        jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
    // configureDB without credentials: username/password must stay unset.
    jConfiguration = new JobConf();
    DBConfiguration.configureDB(jConfiguration, "driverClass", "dbUrl");
    assertEquals("driverClass",
        jConfiguration.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
    assertEquals("dbUrl", jConfiguration.get(DBConfiguration.URL_PROPERTY));
    assertNull(jConfiguration.get(DBConfiguration.USERNAME_PROPERTY));
    assertNull(jConfiguration.get(DBConfiguration.PASSWORD_PROPERTY));
  }

  /**
   * Tests DBRecordReader construction against the mock connection: it should
   * create a zero-valued key, a NullDBWritable value, start at position 0,
   * and report no records (the mock result set's next() returns false).
   *
   * @throws Exception on any test failure
   */
  @SuppressWarnings("unchecked")
  @Test (timeout = 5000)
  public void testDBRecordReader() throws Exception {

    JobConf job = mock(JobConf.class);
    DBConfiguration dbConfig = mock(DBConfiguration.class);
    // NOTE(review): "filed2" looks like a typo for "field2", but the field
    // names are only passed through to the mocked connection here, so the
    // spelling does not affect the assertions.
    String[] fields = { "field1", "filed2" };

    @SuppressWarnings("rawtypes")
    DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(
        new DBInputSplit(), NullDBWritable.class, job,
        DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
    LongWritable key = reader.createKey();
    assertEquals(0, key.get());
    DBWritable value = reader.createValue();
    assertEquals(
        "org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value
            .getClass().getName());
    assertEquals(0, reader.getPos());
    assertFalse(reader.next(key, value));

  }

  /**
   * Registers {@link DriverForTest} with java.sql.DriverManager and points the
   * job configuration's URL/driver-class properties at it, so DBInputFormat
   * connects to the mock driver ("testUrl" is the URL DriverForTest accepts).
   */
  private void setupDriver(JobConf configuration) throws Exception {
    configuration.set(DBConfiguration.URL_PROPERTY, "testUrl");
    DriverManager.registerDriver(new DriverForTest());
    configuration.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
        DriverForTest.class.getCanonicalName());
  }

}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,112 @@
1+
/**
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.mapreduce.lib.db;
19+
20+
import java.sql.Connection;
21+
import java.sql.DatabaseMetaData;
22+
import java.sql.Driver;
23+
import java.sql.DriverPropertyInfo;
24+
import java.sql.PreparedStatement;
25+
import java.sql.ResultSet;
26+
import java.sql.SQLException;
27+
import java.sql.SQLFeatureNotSupportedException;
28+
import java.sql.Statement;
29+
import java.util.Properties;
30+
import java.util.logging.Logger;
31+
32+
import static org.mockito.Matchers.any;
33+
import static org.mockito.Mockito.*;
34+
35+
/**
36+
* class emulates a connection to database
37+
*
38+
*/
39+
public class DriverForTest implements Driver {
40+
41+
public static Connection getConnection() {
42+
Connection connection = mock(FakeConnection.class);
43+
try {
44+
Statement statement = mock(Statement.class);
45+
ResultSet results = mock(ResultSet.class);
46+
when(results.getLong(1)).thenReturn(15L);
47+
when(statement.executeQuery(any(String.class))).thenReturn(results);
48+
when(connection.createStatement()).thenReturn(statement);
49+
50+
DatabaseMetaData metadata = mock(DatabaseMetaData.class);
51+
when(metadata.getDatabaseProductName()).thenReturn("Test");
52+
when(connection.getMetaData()).thenReturn(metadata);
53+
54+
PreparedStatement reparedStatement0= mock(PreparedStatement.class);
55+
when(connection.prepareStatement(anyString())).thenReturn(
56+
reparedStatement0);
57+
58+
PreparedStatement preparedStatement = mock(PreparedStatement.class);
59+
ResultSet resultSet = mock(ResultSet.class);
60+
when(resultSet.next()).thenReturn(false);
61+
when(preparedStatement.executeQuery()).thenReturn(resultSet);
62+
63+
when(connection.prepareStatement(anyString(), anyInt(), anyInt()))
64+
.thenReturn(preparedStatement);
65+
} catch (SQLException e) {
66+
;
67+
}
68+
return connection;
69+
}
70+
71+
@Override
72+
public boolean acceptsURL(String arg0) throws SQLException {
73+
return "testUrl".equals(arg0);
74+
}
75+
76+
@Override
77+
public Connection connect(String arg0, Properties arg1) throws SQLException {
78+
79+
return getConnection();
80+
}
81+
82+
@Override
83+
public int getMajorVersion() {
84+
return 1;
85+
}
86+
87+
@Override
88+
public int getMinorVersion() {
89+
return 1;
90+
}
91+
92+
@Override
93+
public DriverPropertyInfo[] getPropertyInfo(String arg0, Properties arg1)
94+
throws SQLException {
95+
96+
return null;
97+
}
98+
99+
@Override
100+
public boolean jdbcCompliant() {
101+
return true;
102+
}
103+
104+
public Logger getParentLogger() throws SQLFeatureNotSupportedException {
105+
throw new SQLFeatureNotSupportedException();
106+
}
107+
108+
private interface FakeConnection extends Connection{
109+
public void setSessionTimeZone(String arg);
110+
}
111+
112+
}

0 commit comments

Comments
 (0)