在努力奋斗的崽崽 2022-11-08 23:03

Hadoop installation and configuration

Symptoms and background

I am installing Hadoop on Windows, but start-all.cmd keeps reporting errors when I run it. The log output is below; any help is appreciated.

2022-11-08 22:55:31,416 ERROR nodemanager.NodeManager: Error starting NodeManager
java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Native Method)
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access(NativeIO.java:640)
        at org.apache.hadoop.fs.FileUtil.canRead(FileUtil.java:1201)
        at org.apache.hadoop.util.DiskChecker.checkAccessByFileMethods(DiskChecker.java:118)
        at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:81)
        at org.apache.hadoop.util.BasicDiskValidator.checkStatus(BasicDiskValidator.java:32)
        at org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.testDirs(DirectoryCollection.java:473)
        at org.apache.hadoop.yarn.server.nodemanager.DirectoryCollection.checkDirs(DirectoryCollection.java:392)
        at org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService.checkDirs(LocalDirsHandlerService.java:490)
        at org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService.serviceInit(LocalDirsHandlerService.java:222)
        at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
        at org.apache.hadoop.service.CompositeService.serviceInit(CompositeService.java:108)
        at org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService.serviceInit(NodeHealthCheckerService.java:59)
        at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
        at org.apache.hadoop.service.CompositeService.serviceInit(CompositeService.java:108)
        at org.apache.hadoop.yarn.server.nodemanager.NodeManager.serviceInit(NodeManager.java:472)
        at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
        at org.apache.hadoop.yarn.server.nodemanager.NodeManager.initAndStartNodeManager(NodeManager.java:918)
        at org.apache.hadoop.yarn.server.nodemanager.NodeManager.main(NodeManager.java:979)

2022-11-08 22:55:32,088 FATAL resourcemanager.ResourceManager: Error starting ResourceManager
java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:756)
        at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
        at java.net.URLClassLoader.defineClass(URLClassLoader.java:468)
        at java.net.URLClassLoader.access$100(URLClassLoader.java:74)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:369)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:363)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:362)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:355)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        at java.lang.Class.getDeclaredMethods0(Native Method)
        at java.lang.Class.privateGetDeclaredMethods(Class.java:2701)
        at java.lang.Class.getDeclaredMethods(Class.java:1975)
        at com.google.inject.spi.InjectionPoint.getInjectionPoints(InjectionPoint.java:688)
        at com.google.inject.spi.InjectionPoint.forInstanceMethodsAndFields(InjectionPoint.java:380)
        at com.google.inject.spi.InjectionPoint.forInstanceMethodsAndFields(InjectionPoint.java:399)
        at com.google.inject.internal.BindingBuilder.toInstance(BindingBuilder.java:84)
        at org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebApp.setup(RMWebApp.java:60)
        at org.apache.hadoop.yarn.webapp.WebApp.configureServlets(WebApp.java:160)
        at com.google.inject.servlet.ServletModule.configure(ServletModule.java:55)
        at com.google.inject.AbstractModule.configure(AbstractModule.java:62)
        at com.google.inject.spi.Elements$RecordingBinder.install(Elements.java:340)
        at com.google.inject.spi.Elements.getElements(Elements.java:110)
        at com.google.inject.internal.InjectorShell$Builder.build(InjectorShell.java:138)
        at com.google.inject.internal.InternalInjectorCreator.build(InternalInjectorCreator.java:104)
        at com.google.inject.Guice.createInjector(Guice.java:96)
        at com.google.inject.Guice.createInjector(Guice.java:73)
        at com.google.inject.Guice.createInjector(Guice.java:62)
        at org.apache.hadoop.yarn.webapp.WebApps$Builder.build(WebApps.java:379)
        at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:424)
        at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.startWepApp(ResourceManager.java:1161)
        at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.serviceStart(ResourceManager.java:1271)
        at org.apache.hadoop.service.AbstractService.start(AbstractService.java:194)
        at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.main(ResourceManager.java:1467)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorManager
        at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:355)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
        ... 36 more
2022-11-08 22:55:30,005 WARN checker.StorageLocationChecker: Exception checking StorageLocation [DISK]file:/E:/hadoop-3.1.0/data/dfs/datanode
java.lang.RuntimeException: Error while running command to get file permissions : ExitCodeException exitCode=-1073741515:
        at org.apache.hadoop.util.Shell.runCommand(Shell.java:1009)
        at org.apache.hadoop.util.Shell.run(Shell.java:902)
        at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:1227)
        at org.apache.hadoop.util.Shell.execCommand(Shell.java:1321)
        at org.apache.hadoop.util.Shell.execCommand(Shell.java:1303)
        at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1321)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfoByNonNativeIO(RawLocalFileSystem.java:726)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:717)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:678)
        at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:191)
        at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:98)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:239)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:52)
        at org.apache.hadoop.hdfs.server.datanode.checker.ThrottledAsyncChecker$1.call(ThrottledAsyncChecker.java:142)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)

        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfoByNonNativeIO(RawLocalFileSystem.java:766)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:717)
        at org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:678)
        at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:191)
        at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:98)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:239)
        at org.apache.hadoop.hdfs.server.datanode.StorageLocation.check(StorageLocation.java:52)
        at org.apache.hadoop.hdfs.server.datanode.checker.ThrottledAsyncChecker$1.call(ThrottledAsyncChecker.java:142)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
2022-11-08 22:55:30,005 ERROR datanode.DataNode: Exception in secureMain
org.apache.hadoop.util.DiskChecker$DiskErrorException: Too many failed volumes - current valid volumes: 0, volumes configured: 1, volumes failed: 1, volume failures tolerated: 0
        at org.apache.hadoop.hdfs.server.datanode.checker.StorageLocationChecker.check(StorageLocationChecker.java:220)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:2762)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2677)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.createDataNode(DataNode.java:2719)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.secureMain(DataNode.java:2863)
        at org.apache.hadoop.hdfs.server.datanode.DataNode.main(DataNode.java:2887)

2022-11-08 22:55:31,904 ERROR namenode.NameNode: Failed to start namenode.
java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Native Method)
        at org.apache.hadoop.io.nativeio.NativeIO$Windows.access(NativeIO.java:640)
        at org.apache.hadoop.fs.FileUtil.canWrite(FileUtil.java:1220)
        at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:667)
        at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:620)
        at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverStorageDirs(FSImage.java:371)
        at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:227)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFSImage(FSNamesystem.java:1086)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.loadFromDisk(FSNamesystem.java:714)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:669)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:731)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:968)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:947)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1674)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.main(NameNode.java:1741)
Run results and error output

[Four screenshots of the error output were attached here.]

My approach and what I have tried

I tried reconfiguring everything, but it still does not work.


3 answers

  • 此木Y 2022-11-09 13:51

    This kind of failure is usually caused by a mistake somewhere in the Hadoop configuration files; double-check them (a minimal configuration sketch follows below).
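
    A minimal sketch of the HDFS-related configuration files is shown below, for reference only. The datanode directory E:/hadoop-3.1.0/data/dfs/datanode is taken from the log above; the namenode directory, the fs.defaultFS address, and the replication factor are assumptions and should be adjusted to match the actual setup.

        <!-- core-site.xml: minimal sketch; hdfs://localhost:9000 is an assumed address -->
        <configuration>
          <property>
            <name>fs.defaultFS</name>
            <value>hdfs://localhost:9000</value>
          </property>
        </configuration>

        <!-- hdfs-site.xml: the datanode path comes from the log above;
             the namenode path and replication factor are assumptions -->
        <configuration>
          <property>
            <name>dfs.replication</name>
            <value>1</value>
          </property>
          <property>
            <name>dfs.namenode.name.dir</name>
            <value>/E:/hadoop-3.1.0/data/dfs/namenode</value>
          </property>
          <property>
            <name>dfs.datanode.data.dir</name>
            <value>/E:/hadoop-3.1.0/data/dfs/datanode</value>
          </property>
        </configuration>

    If these directories are changed, the NameNode typically needs to be re-formatted with hdfs namenode -format before running start-all.cmd again.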



