MapReduce + HBase: NullPointerException. Я новичок в big data. Для начала я хочу попробовать, как MapReduce работает с HBase. Сценарий: суммирование поля uas в моей таблице HBase с группировкой по дате, которая является частью ключа строки (row key). Вот моя таблица:
HBase, таблица test (вывод scan 'test'):
ROW          COLUMN+CELL
 10102010#1  column=cf:nama, timestamp=1418267197429, value=jonru
 10102010#1  column=cf:quiz, timestamp=1418267197429, value=\x00\x00\x00d
 10102010#1  column=cf:uas,  timestamp=1418267197429, value=\x00\x00\x00d
 10102010#1  column=cf:uts,  timestamp=1418267197429, value=\x00\x00\x00d
 10102010#2  column=cf:nama, timestamp=1418267180874, value=jonru
 10102010#2  column=cf:quiz, timestamp=1418267180874, value=\x00\x00\x00d
 10102010#2  column=cf:uas,  timestamp=1418267180874, value=\x00\x00\x00d
 10102010#2  column=cf:uts,  timestamp=1418267180874, value=\x00\x00\x00d
 10102012#1  column=cf:nama, timestamp=1418267156542, value=jonru
 10102012#1  column=cf:quiz, timestamp=1418267156542, value=\x00\x00\x00\x0A
 10102012#1  column=cf:uas,  timestamp=1418267156542, value=\x00\x00\x00\x0A
 10102012#1  column=cf:uts,  timestamp=1418267156542, value=\x00\x00\x00\x0A
 10102012#2  column=cf:nama, timestamp=1418267166524, value=jonru
 10102012#2  column=cf:quiz, timestamp=1418267166524, value=\x00\x00\x00\x0A
 10102012#2  column=cf:uas,  timestamp=1418267166524, value=\x00\x00\x00\x0A
 10102012#2  column=cf:uts,  timestamp=1418267166524, value=\x00\x00\x00\x0A
Мой код выглядит так:
public class TestMapReduce {
/**
 * Driver: configures and submits the MapReduce job that scans HBase table
 * "test", groups rows by the date prefix of the row key, and writes the
 * per-date sum of cf:uas back into the same table.
 *
 * NOTE(review): the NullPointerException in the posted stack trace comes from
 * Shell.runCommand during local job staging (typically a missing
 * winutils.exe / HADOOP_HOME on Windows), not from this code itself.
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration config = HBaseConfiguration.create();
    // FIX: Job.getInstance(...) replaces the deprecated Job(Configuration, String) constructor.
    Job job = Job.getInstance(config, "Test");
    // FIX: point setJarByClass at the driver class; it only needs *a* class
    // from the job jar, and the enclosing class is the conventional choice.
    job.setJarByClass(TestMapReduce.class);

    Scan scan = new Scan();
    scan.setCaching(500);       // recommended scanner caching for MR scans
    scan.setCacheBlocks(false); // don't pollute the block cache with a full scan

    TableMapReduceUtil.initTableMapperJob(
            "test",                          // input table
            scan,
            TestMapReduce.TestMapper.class,
            Text.class,                      // mapper output key
            IntWritable.class,               // mapper output value
            job);
    TableMapReduceUtil.initTableReducerJob(
            "test",                          // output table
            TestReducer.class,
            job);

    // FIX: propagate job success/failure as the process exit code
    // (the original discarded the result of waitForCompletion).
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
/**
 * Mapper: for each HBase row, emits (date-part-of-row-key, value of cf:uas).
 * Row keys look like "10102010#1", so splitting on '#' yields the date.
 */
public static class TestMapper extends TableMapper<Text, IntWritable> {

    @Override
    protected void map(ImmutableBytesWritable rowKey, Result columns, Context context)
            throws IOException, InterruptedException {
        String inKey = new String(rowKey.get());
        // Keep only the date portion of "date#n" as the grouping key.
        String onKey = inKey.split("#")[0];

        byte[] bUas = columns.getValue(Bytes.toBytes("cf"), Bytes.toBytes("uas"));
        if (bUas == null) {
            // Row has no cf:uas cell — skip it instead of throwing an NPE.
            return;
        }
        // BUG FIX: the cell stores a 4-byte binary int (e.g. \x00\x00\x00d == 100),
        // not decimal text. The original new Integer(new String(bUas)) threw
        // NumberFormatException, which the old catch-all silently swallowed,
        // so no records were ever emitted. Decode the bytes directly instead.
        int uas = Bytes.toInt(bUas);

        context.write(new Text(onKey), new IntWritable(uas));
    }
}
public class TestReducer extends TableReducer {
public void reduce(Text key, Iterable values, Reducer.Context context) throws IOException, InterruptedException {
try {
int sum = 0;
for (Object test : values) {
System.out.println(test.toString());
sum += Integer.parseInt(test.toString());
}
Put inHbase = new Put(key.getBytes());
inHbase.add(Bytes.toBytes("cf"), Bytes.toBytes("sum"), Bytes.toBytes(sum));
context.write(null, inHbase);
} catch (Exception e) {
e.printStackTrace();
}
}
}
Я получаю вот такую ошибку:
Exception in thread "main" java.lang.NullPointerException
at java.lang.ProcessBuilder.start(ProcessBuilder.java:1010)
at org.apache.hadoop.util.Shell.runCommand(Shell.java:451)
at org.apache.hadoop.util.Shell.run(Shell.java:424)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:656)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:745)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:728)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:633)
at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:421)
at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java:281)
at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:125)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:348)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1295)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1292)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1554)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1292)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1313)
at TestMapReduce.main(TestMapReduce.java:97)
Java Result: 1
Помогите мне пожалуйста:)