【问题标题】:java.lang.IllegalArgumentException: Wrong FS: hdfs://localhost:9000 expected: file:///java.lang.IllegalArgumentException:错误的 FS:hdfs://localhost:9000 预期:file:///
【发布时间】:2017-05-08 09:15:48
【问题描述】:

我在复制、删除、重命名和移动的 hadoop 命令中苦苦挣扎。运行副本时出现以下错误。我也检查了其他类似的答案,但没有成功。

Exception in thread "AWT-EventQueue-0" java.lang.IllegalArgumentException: Wrong FS: hdfs://localhost:9000/user/new1, expected: file:///
at org.apache.hadoop.fs.FileSystem.checkPath(FileSystem.java:390)
at org.apache.hadoop.fs.RawLocalFileSystem.pathToFile(RawLocalFileSystem.java:55)
at org.apache.hadoop.fs.LocalFileSystem.pathToFile(LocalFileSystem.java:61)
at org.apache.hadoop.fs.LocalFileSystem.exists(LocalFileSystem.java:51)
at org.apache.hadoop.fs.FileUtil.checkDest(FileUtil.java:355)
at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:211)
at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:163)
at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:1257)
at org.apache.hadoop.fs.FileSystem.copyToLocalFile(FileSystem.java:1238)
at hdfstest1.HDFSTestGUI1.copyDirectory(HDFSTestGUI1.java:710)
at hdfstest1.HDFSTestGUI1.jMenuPasteItemActionPerformed(HDFSTestGUI1.java:446)
at hdfstest1.HDFSTestGUI1.access$400(HDFSTestGUI1.java:43)
at hdfstest1.HDFSTestGUI1$5.actionPerformed(HDFSTestGUI1.java:172)
at javax.swing.AbstractButton.fireActionPerformed(AbstractButton.java:2022)
at javax.swing.AbstractButton$Handler.actionPerformed(AbstractButton.java:2348)
at javax.swing.DefaultButtonModel.fireActionPerformed(DefaultButtonModel.java:402)
at javax.swing.DefaultButtonModel.setPressed(DefaultButtonModel.java:259)
at javax.swing.AbstractButton.doClick(AbstractButton.java:376)
at javax.swing.plaf.basic.BasicMenuItemUI.doClick(BasicMenuItemUI.java:833)
at javax.swing.plaf.basic.BasicMenuItemUI$Handler.mouseReleased(BasicMenuItemUI.java:877)
at java.awt.Component.processMouseEvent(Component.java:6533)
at javax.swing.JComponent.processMouseEvent(JComponent.java:3324)
at java.awt.Component.processEvent(Component.java:6298)
at java.awt.Container.processEvent(Container.java:2236)
at java.awt.Component.dispatchEventImpl(Component.java:4889)
at java.awt.Container.dispatchEventImpl(Container.java:2294)
at java.awt.Component.dispatchEvent(Component.java:4711)
at java.awt.LightweightDispatcher.retargetMouseEvent(Container.java:4888)
at java.awt.LightweightDispatcher.processMouseEvent(Container.java:4525)
at java.awt.LightweightDispatcher.dispatchEvent(Container.java:4466)
at java.awt.Container.dispatchEventImpl(Container.java:2280)
at java.awt.Window.dispatchEventImpl(Window.java:2746)
at java.awt.Component.dispatchEvent(Component.java:4711)
at java.awt.EventQueue.dispatchEventImpl(EventQueue.java:758)
at java.awt.EventQueue.access$500(EventQueue.java:97)
at java.awt.EventQueue$3.run(EventQueue.java:709)
at java.awt.EventQueue$3.run(EventQueue.java:703)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(ProtectionDomain.java:80)
at java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(ProtectionDomain.java:90)
at java.awt.EventQueue$4.run(EventQueue.java:731)
at java.awt.EventQueue$4.run(EventQueue.java:729)
at java.security.AccessController.doPrivileged(Native Method)
at java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(ProtectionDomain.java:80)
at java.awt.EventQueue.dispatchEvent(EventQueue.java:728)
at java.awt.EventDispatchThread.pumpOneEventForFilters(EventDispatchThread.java:201)
at java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:116)
at java.awt.EventDispatchThread.pumpEventsForHierarchy(EventDispatchThread.java:105)
at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:101)
at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:93)
at java.awt.EventDispatchThread.run(EventDispatchThread.java:82)

这是我的代码片段：

 //constructor
 public HDFSTestGUI1()  {
    try{
        // Load the cluster configuration BEFORE any FileSystem instance is
        // obtained. If a FileSystem is created from a Configuration without
        // these resources, fs.default.name falls back to file:/// and any
        // hdfs:// path raises "Wrong FS: ... expected: file:///".
        // NOTE(review): conf1 is a field declared elsewhere in this class.
        conf1.addResource(new Path("/usr/local/hadoop/conf/core-site.xml"));
        conf1.addResource(new Path("/usr/local/hadoop/conf/hdfs-site.xml"));
        initComponents();
        setIcon();
        showDate();
        showTime();
        homeBtn.doClick();
        writeBtn1.setVisible(false);
        readBtn.setVisible(false);
    } catch(Exception ex) {
         JOptionPane.showMessageDialog(this, "Exception::Please check core-site.xml config or " + ex);
    }
 } // closing brace was missing in the posted snippet; constructor must be closed
     //main method
    public static void main(String[] args) {
        // Prefer the Nimbus look and feel when it is installed; otherwise
        // silently keep the platform default (see the Swing L&F tutorial).
        try {
            for (javax.swing.UIManager.LookAndFeelInfo lafInfo
                    : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(lafInfo.getName())) {
                    javax.swing.UIManager.setLookAndFeel(lafInfo.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException | InstantiationException
                | IllegalAccessException
                | javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(HDFSTestGUI1.class.getName())
                    .log(java.util.logging.Level.SEVERE, null, ex);
        }

        // Create and show the main window on the Event Dispatch Thread.
        java.awt.EventQueue.invokeLater(new Runnable() {
            @Override
            public void run() {
                try {
                    new HDFSTestGUI1().setVisible(true);
                } catch (Exception ex) {
                    Logger.getLogger(HDFSTestGUI1.class.getName())
                            .log(Level.SEVERE, null, ex);
                }
            }
        });
    }


private void copyDirectory(String srcPath, String destPath) {
    try {
        Path p1, p2;
        p1 = new Path(srcPath);
        p2 = new Path(destPath);
        if (p1.getFileSystem(conf1).exists(p2)) {
            fs.copyToLocalFile(p1,p2);
            JOptionPane.showMessageDialog(this, "Copied 
 Successfully");
        } else {
            JOptionPane.showMessageDialog(this, "Destination path does 
  not Exist::" + p2);
        }

    } catch (IOException ex) {
        JOptionPane.showMessageDialog(this, ex);

Logger.getLogger(HDFSTestGUI1.class.getName()).log(Level.SEVERE, null, ex);
    }
}

core-site.xml：

<configuration>

<property>
<name>hadoop.tmp.dir</name>
<value>/app/hadoop/tmp</value>
<description>A base for other temporary directories.</description>
</property>
<property>
<name>fs.default.name</name>
<value>hdfs://localhost:9000/</value>
<description>The name of the default file system.  A URI whose
scheme and authority determine the FileSystem implementation.  The
uri's scheme determines the config property (fs.SCHEME.impl) naming
the FileSystem implementation class.  The uri's authority is used to
determine the host, port, etc. for a filesystem.</description>
</property>

</configuration>

谁能告诉我正确的方向?

【问题讨论】:

  • 代码很长。你能指出我们的错误吗?比如你到底想在哪里使用user/new1这个位置?
  • 代码很长。我正在尝试从 localhost:9000 获取列表，并通过 GUI 打开它。基本上我正在创建一个文件资源管理器，我可以在其中列出目录但不能执行其他操作。在 copyDirectory() 方法中，我得到了这个异常。如果您想了解更多信息，请告诉我。

标签: java hadoop hdfs


【解决方案1】:

在 copyDirectory 方法中,fs 未定义。您可能想使用:

FileSystem fs = FileSystem.get(conf)

其中 conf 是一个 org.apache.hadoop.conf.Configuration 对象。

【讨论】:

  • 它已定义,但正如我所说,我只放了少量代码。我已将其用作全局变量。
  • @DipeshRaichana 检查您是否可能不小心交换了 srcPath 和 destPath 的值，否则我将删除此答案。
  • 我已经将路径(绝对)与 hdfs 文件系统一起使用。感谢收看。
  • @DipeshRaichana copyToLocalFile 用于从 HDFS 复制到本地文件。所以你的destPath 应该在你的本地文件系统上。
  • @DipeshRaichana 见this
猜你喜欢
  • 1970-01-01
  • 2014-07-09
  • 2015-11-06
  • 1970-01-01
  • 2017-01-23
  • 2018-10-14
  • 2015-11-12
  • 2019-12-23
  • 2012-01-26
相关资源
最近更新 更多