
I get the following error when I run my Storm topology, which contains an HBase bolt: No FileSystem for scheme: hdfs

java.io.IOException: No FileSystem for scheme: hdfs 
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2298) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2305) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:89) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2344) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2326) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:353) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:194) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:104) ~[hbase-common-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:201) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:857) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:662) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) [na:1.7.0_72] 
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) [na:1.7.0_72] 
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) [na:1.7.0_72] 
at java.lang.reflect.Constructor.newInstance(Constructor.java:526) [na:1.7.0_72] 
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:414) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:393) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.client.HConnectionManager.getConnection(HConnectionManager.java:274) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:194) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:156) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2] 
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:97) [storm-hbase-0.1.2.jar:na] 
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na] 
at java.security.AccessController.doPrivileged(Native Method) [na:1.7.0_72] 
at javax.security.auth.Subject.doAs(Subject.java:415) [na:1.7.0_72] 
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1438) [hadoop-common-2.0.0-cdh4.7.0.jar:na] 
at org.apache.storm.hbase.bolt.HBaseBolt.prepare(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na] 
at backtype.storm.daemon.executor$fn__3352$fn__3364.invoke(executor.clj:690) [storm-core-0.9.2-incubating.jar:0.9.2-incubating] 
at backtype.storm.util$async_loop$fn__452.invoke(util.clj:429) [storm-core-0.9.2-incubating.jar:0.9.2-incubating] 
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na] 
at java.lang.Thread.run(Thread.java:745) [na:1.7.0_72] 

16:44:32,839 [Thread-31-HbasePersistorBolt] INFO backtype.storm.daemon.executor - Prepared bolt HbasePersistorBolt:(5)
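
The message means that Hadoop's FileSystem factory found no implementation registered for the hdfs:// scheme on the worker's classpath; the implementation class, org.apache.hadoop.hdfs.DistributedFileSystem, ships in the hadoop-hdfs artifact. One commonly cited workaround, sketched here against a hypothetical core-site.xml on the topology classpath (the file and its location are an assumption, and DistributedFileSystem must still be on the classpath), is to bind the scheme to its class explicitly:

<!-- Sketch only: explicitly map the hdfs:// scheme to its FileSystem class 
     so the lookup does not depend on the META-INF/services descriptors. --> 
<configuration> 
    <property> 
     <name>fs.hdfs.impl</name> 
     <value>org.apache.hadoop.hdfs.DistributedFileSystem</value> 
    </property> 
</configuration> 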

Here is my pom.xml:

<?xml version="1.0" encoding="UTF-8"?> 

<project xmlns="http://maven.apache.org/POM/4.0.0" 
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 

    <!-- Parent and module coordinates reconstructed from the garbled, machine-translated header --> 
    <parent> 
     <groupId>fr.aid.cim</groupId> 
     <artifactId>aid-cim</artifactId> 
     <version>0.9-SNAPSHOT</version> 
    </parent> 
    <modelVersion>4.0.0</modelVersion> 
    <artifactId>events-framework-topology</artifactId> 

<dependencies> 
    <!-- Hadoop Hbase Storm Kafka dependencies--> 
    <dependency> 
     <groupId>org.apache.hadoop</groupId> 
     <artifactId>hadoop-client</artifactId> 
     <version>${org.apache.hadoop.version}</version> 
     <exclusions> 
      <exclusion> 
       <artifactId>com.google.protobuf</artifactId> 
       <groupId>protobuf-java</groupId> 
      </exclusion> 
     </exclusions> 
    </dependency> 

    <dependency> 
     <groupId>com.google.protobuf</groupId> 
     <artifactId>protobuf-java</artifactId> 
     <version>2.5.0</version> 
    </dependency> 

    <dependency> 
     <groupId>com.github.ptgoetz</groupId> 
     <artifactId>storm-hbase</artifactId> 
     <version>${storm-hbase.version}</version> 
    </dependency> 

    <dependency> 
     <groupId>org.apache.storm</groupId> 
     <artifactId>storm-kafka</artifactId> 
     <version>0.9.2-incubating</version> 
    </dependency> 

    <dependency> 
     <groupId>org.apache.kafka</groupId> 
     <artifactId>kafka_2.10</artifactId> 
     <version>0.8.1.1</version> 
     <exclusions> 
      <exclusion> 
       <groupId>org.apache.zookeeper</groupId> 
       <artifactId>zookeeper</artifactId> 
      </exclusion> 
      <exclusion> 
       <groupId>log4j</groupId> 
       <artifactId>log4j</artifactId> 
      </exclusion> 
     </exclusions> 
    </dependency> 
    <!-- END Hadoop Hbase Storm Kafka dependencies--> 

    <!-- Project Dependencies --> 
    <dependency> 
     <groupId>fr.aid.cim</groupId> 
     <artifactId>commons</artifactId> 
     <version>${project.version}</version> 
    </dependency> 

    <dependency> 
     <groupId>fr.aid.cim</groupId> 
     <artifactId>storm-hazelcast</artifactId> 
     <version>${project.version}</version> 
    </dependency> 
    <!-- END Project Dependencies --> 

    <!-- Integration TEST Dependencies --> 
    <dependency> 
     <groupId>org.apache.hadoop</groupId> 
     <artifactId>hadoop-core</artifactId> 
     <scope>test</scope> 
    </dependency> 

    <dependency> 
     <groupId>org.apache.hadoop</groupId> 
     <artifactId>hadoop-common</artifactId> 
     <type>test-jar</type> 
     <scope>test</scope> 
    </dependency> 

    <dependency> 
     <groupId>org.apache.hbase</groupId> 
     <artifactId>hbase</artifactId> 
     <version>${org.apache.hbase.version}</version> 
     <type>test-jar</type> 
     <scope>test</scope> 
    </dependency> 

    <dependency> 
     <groupId>org.apache.hadoop</groupId> 
     <artifactId>hadoop-hdfs</artifactId> 
     <version>${org.apache.hadoop.version}</version> 
     <type>test-jar</type> 
     <scope>test</scope> 
    </dependency> 

    <dependency> 
     <groupId>org.apache.hadoop</groupId> 
     <artifactId>hadoop-hdfs</artifactId> 
     <version>${org.apache.hadoop.version}</version> 
     <scope>test</scope> 
    </dependency> 

    <dependency> 
     <groupId>org.apache.zookeeper</groupId> 
     <artifactId>zookeeper</artifactId> 
     <scope>test</scope> 
    </dependency> 
    <!-- END TEST Dependencies --> 

    <!-- Other Dependencies --> 
    <dependency> 
     <groupId>org.json</groupId> 
     <artifactId>json</artifactId> 
     <version>20140107</version> 
    </dependency> 

    <dependency> 
     <groupId>com.google.guava</groupId> 
     <artifactId>guava</artifactId> 
     <version>11.0</version> 
    </dependency> 

    <dependency> 
     <groupId>com.fasterxml.jackson.core</groupId> 
     <artifactId>jackson-databind</artifactId> 
    </dependency> 
    <!-- END Other Dependencies --> 

</dependencies> 

<build> 
    <plugins> 
     <plugin> 
      <artifactId>maven-assembly-plugin</artifactId> 
      <configuration> 
       <descriptorRefs> 
        <descriptorRef>jar-with-dependencies</descriptorRef> 
       </descriptorRefs> 
      </configuration> 
      <executions> 
       <execution> 
        <id>make-assembly</id> 
        <phase>package</phase> 
        <goals> 
         <goal>single</goal> 
        </goals> 
       </execution> 
      </executions> 
     </plugin> 
     <plugin> 
      <groupId>org.apache.maven.plugins</groupId> 
      <artifactId>maven-dependency-plugin</artifactId> 
     </plugin> 
    </plugins> 
</build> 

<profiles> 
    <profile> 
     <id>local</id> 
     <activation> 
      <activeByDefault>true</activeByDefault> 
     </activation> 
     <dependencies> 
      <dependency> 
       <groupId>org.apache.storm</groupId> 
       <artifactId>storm-core</artifactId> 
      </dependency> 
      <dependency> 
       <groupId>org.apache.zookeeper</groupId> 
       <artifactId>zookeeper</artifactId> 
      </dependency> 
     </dependencies> 
    </profile> 
    <profile> 
     <id>cluster</id> 
     <dependencies> 
      <dependency> 
       <groupId>org.apache.storm</groupId> 
       <artifactId>storm-core</artifactId> 
       <scope>provided</scope> 
      </dependency> 
      <dependency> 
       <groupId>org.apache.zookeeper</groupId> 
       <artifactId>zookeeper</artifactId> 
       <scope>provided</scope> 
      </dependency> 
     </dependencies> 
    </profile> 

</profiles> 
</project> 

Any ideas? Thanks.

Answer


Try adding hadoop-hdfs as a compile-scope dependency:

<dependency> 
    <groupId>org.apache.hadoop</groupId> 
    <artifactId>hadoop-hdfs</artifactId> 
    <version>${org.apache.hadoop.version}</version> 
</dependency> 
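
If hadoop-hdfs is already on the classpath but the error still appears when running the jar-with-dependencies assembly, a common cause is that the META-INF/services/org.apache.hadoop.fs.FileSystem descriptors from hadoop-common and hadoop-hdfs overwrite each other during packaging, so the hdfs entry is lost. One alternative, shown here as a sketch (the plugin version is an assumption, not taken from the build above), is to package with maven-shade-plugin and its ServicesResourceTransformer, which concatenates those descriptors instead of dropping one:

<!-- Sketch: build the topology jar with maven-shade-plugin and merge 
     META-INF/services entries so the hdfs FileSystem stays registered. --> 
<plugin> 
    <groupId>org.apache.maven.plugins</groupId> 
    <artifactId>maven-shade-plugin</artifactId> 
    <version>2.3</version> 
    <executions> 
     <execution> 
      <phase>package</phase> 
      <goals> 
       <goal>shade</goal> 
      </goals> 
      <configuration> 
       <transformers> 
        <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/> 
       </transformers> 
      </configuration> 
     </execution> 
    </executions> 
</plugin> 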