/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.sakserv.minicluster.impl;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.HttpClients;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.github.sakserv.minicluster.config.ConfigVars;
import com.github.sakserv.propertyparser.PropertyParser;

import static org.junit.Assert.assertEquals;
public class HdfsLocalClusterIntegrationTest {
// Logger
private static final Logger LOG = LoggerFactory.getLogger(HdfsLocalClusterIntegrationTest.class);
// Setup the property parser
private static PropertyParser propertyParser;
static {
try {
propertyParser = new PropertyParser(ConfigVars.DEFAULT_PROPS_FILE);
propertyParser.parsePropsFile();
} catch(IOException e) {
LOG.error("Unable to load property file: {}", propertyParser.getProperty(ConfigVars.DEFAULT_PROPS_FILE));
}
}
private static HdfsLocalCluster dfsCluster;
@BeforeClass
public static void setUp() throws Exception {
dfsCluster = new HdfsLocalCluster.Builder()
.setHdfsNamenodePort(Integer.parseInt(propertyParser.getProperty(ConfigVars.HDFS_NAMENODE_PORT_KEY)))
.setHdfsNamenodeHttpPort( Integer.parseInt( propertyParser.getProperty( ConfigVars.HDFS_NAMENODE_HTTP_PORT_KEY ) ) )
.setHdfsTempDir(propertyParser.getProperty(ConfigVars.HDFS_TEMP_DIR_KEY))
.setHdfsNumDatanodes(Integer.parseInt(propertyParser.getProperty(ConfigVars.HDFS_NUM_DATANODES_KEY)))
.setHdfsEnablePermissions(
Boolean.parseBoolean(propertyParser.getProperty(ConfigVars.HDFS_ENABLE_PERMISSIONS_KEY)))
.setHdfsFormat(Boolean.parseBoolean(propertyParser.getProperty(ConfigVars.HDFS_FORMAT_KEY)))
.setHdfsEnableRunningUserAsProxyUser(Boolean.parseBoolean(
propertyParser.getProperty(ConfigVars.HDFS_ENABLE_RUNNING_USER_AS_PROXY_USER)))
.setHdfsConfig(new Configuration())
.build();
dfsCluster.start();
}
@AfterClass
public static void tearDown() throws Exception {
dfsCluster.stop();
}
@Test
public void testDfsClusterStart() throws Exception {
// Write a file to HDFS containing the test string
FileSystem hdfsFsHandle = dfsCluster.getHdfsFileSystemHandle();
FSDataOutputStream writer = hdfsFsHandle.create(
new Path(propertyParser.getProperty(ConfigVars.HDFS_TEST_FILE_KEY)));
writer.writeUTF(propertyParser.getProperty(ConfigVars.HDFS_TEST_STRING_KEY));
writer.close();
// Read the file and compare to test string
FSDataInputStream reader = hdfsFsHandle.open(
new Path(propertyParser.getProperty(ConfigVars.HDFS_TEST_FILE_KEY)));
assertEquals(reader.readUTF(), propertyParser.getProperty(ConfigVars.HDFS_TEST_STRING_KEY));
reader.close();
hdfsFsHandle.close();
URL url = new URL(
String.format( "http://localhost:%s/webhdfs/v1?op=GETHOMEDIRECTORY&user.name=guest",
propertyParser.getProperty( ConfigVars.HDFS_NAMENODE_HTTP_PORT_KEY ) ) );
URLConnection connection = url.openConnection();
connection.setRequestProperty( "Accept-Charset", "UTF-8" );
BufferedReader response = new BufferedReader( new InputStreamReader( connection.getInputStream() ) );
String line = response.readLine();
response.close();
assertEquals( "{\"Path\":\"/user/guest\"}", line );
}
}