libhdfs - 无法打开共享库 libhdfs.so.0.0.0
libhdfs - cannot open shared library libhdfs.so.0.0.0
我有一个运行中的 hdfs 实例,证明如下 -
reikdas@reikdas-HP-Pavilion-x360-Convertible-14-dh1xxx:~$ jps
16083 Jps
12389 NameNode
12774 SecondaryNameNode
11083
和
reikdas@reikdas-HP-Pavilion-x360-Convertible-14-dh1xxx:~$ hadoop fs -ls /
2021-09-27 12:06:59,520 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Found 1 items
drwxr-xr-x - reikdas supergroup 0 2021-09-27 00:31 /test
我从文档中复制了在 C 中使用 libhdfs 的规范示例 -
#include "hdfs.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
/*
 * Canonical libhdfs write example: connect to HDFS, create /testfile.txt,
 * write "Hello, World!" (including the trailing NUL), flush, and close.
 *
 * Returns 0 on success; exits with a failure status on any error.
 */
int main(int argc, char **argv) {
(void)argc;  /* command-line arguments are unused */
(void)argv;
/* "default" resolves the namenode from the loaded Hadoop configuration
 * (fs.default.name in core-site.xml). */
hdfsFS fs = hdfsConnect("default", 0);
// Also tested - hdfsConnect("127.0.0.1", 9000)
if (!fs) {
fprintf(stderr, "Failed to connect to HDFS!\n");
exit(EXIT_FAILURE);
}
const char* writePath = "/testfile.txt";
hdfsFile writeFile = hdfsOpenFile(fs, writePath, O_WRONLY |O_CREAT, 0, 0, 0);
if(!writeFile) {
fprintf(stderr, "Failed to open %s for writing!\n", writePath);
hdfsDisconnect(fs);  /* release the connection before exiting */
exit(EXIT_FAILURE);
}
const char* buffer = "Hello, World!";
/* strlen(buffer)+1 deliberately writes the NUL terminator too. */
tSize num_written_bytes = hdfsWrite(fs, writeFile, (void*)buffer, strlen(buffer)+1);
if (num_written_bytes < 0 || (size_t)num_written_bytes != strlen(buffer)+1) {
fprintf(stderr, "Failed to write %s!\n", writePath);
hdfsCloseFile(fs, writeFile);
hdfsDisconnect(fs);
exit(EXIT_FAILURE);
}
if (hdfsFlush(fs, writeFile)) {
fprintf(stderr, "Failed to 'flush' %s\n", writePath);
hdfsCloseFile(fs, writeFile);  /* original leaked the handle here */
hdfsDisconnect(fs);
exit(EXIT_FAILURE);
}
hdfsCloseFile(fs, writeFile);
hdfsDisconnect(fs);  /* original never disconnected */
return 0;
}
然后我用以下命令“成功”编译了它 -
gcc testlhdfs.c -I $HADOOP_HDFS_HOME/include/ -L $HADOOP_HDFS_HOME/lib/native -lhdfs
但是我在尝试 运行 可执行文件时遇到错误 -
./a.out: error while loading shared libraries: libhdfs.so.0.0.0: cannot open shared object file: No such file or directory
当我看到 $HADOOP_HDFS_HOME/lib/native 中存在 libhdfs.so.0.0.0 时,我不知道为什么会出现此错误 -
reikdas@reikdas-HP-Pavilion-x360-Convertible-14-dh1xxx:~$ ls -l $HADOOP_HDFS_HOME/lib/native
total 166640
drwxr-xr-x 2 reikdas reikdas 4096 Jun 15 01:44 examples
-rw-r--r-- 1 reikdas reikdas 1507316 Jun 15 01:13 libhadoop.a
-rw-r--r-- 1 reikdas reikdas 1741256 Jun 15 01:44 libhadooppipes.a
lrwxrwxrwx 1 reikdas reikdas 18 Jun 15 01:13 libhadoop.so -> libhadoop.so.1.0.0
-rwxr-xr-x 1 reikdas reikdas 803040 Jun 15 01:13 libhadoop.so.1.0.0
-rw-r--r-- 1 reikdas reikdas 754382 Jun 15 01:44 libhadooputils.a
-rw-r--r-- 1 reikdas reikdas 551556 Jun 15 01:18 libhdfs.a
-rw-r--r-- 1 reikdas reikdas 106522330 Jun 15 01:20 libhdfspp.a
lrwxrwxrwx 1 reikdas reikdas 18 Jun 15 01:20 libhdfspp.so -> libhdfspp.so.0.1.0
-rwxr-xr-x 1 reikdas reikdas 44375064 Jun 15 01:20 libhdfspp.so.0.1.0
lrwxrwxrwx 1 reikdas reikdas 16 Jun 15 01:18 libhdfs.so -> libhdfs.so.0.0.0
-rwxr-xr-x 1 reikdas reikdas 333648 Jun 15 01:18 libhdfs.so.0.0.0
-rw-r--r-- 1 reikdas reikdas 10029114 Jun 15 01:39 libnativetask.a
lrwxrwxrwx 1 reikdas reikdas 22 Jun 15 01:39 libnativetask.so -> libnativetask.so.1.0.0
-rwxr-xr-x 1 reikdas reikdas 3985736 Jun 15 01:39 libnativetask.so.1.0.0
我的其他环境变量也已适当设置 -
export HADOOP_HOME=/home/reikdas/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
这可能是我在 core-site.xml
文件中设置的相关配置选项 -
<property>
<name>fs.default.name</name>
<value>hdfs://127.0.0.1:9000</value>
</property>
非常感谢在修复此错误时能得到的所有帮助。
你可以试试运行 sudo ldconfig $HADOOP_HDFS_HOME/lib/native,把该目录加入动态链接器缓存;或者在运行可执行文件前设置 export LD_LIBRARY_PATH=$HADOOP_HDFS_HOME/lib/native:$LD_LIBRARY_PATH,让运行时链接器能找到 libhdfs.so.0.0.0。
我有一个运行中的 hdfs 实例,证明如下 -
reikdas@reikdas-HP-Pavilion-x360-Convertible-14-dh1xxx:~$ jps
16083 Jps
12389 NameNode
12774 SecondaryNameNode
11083
和
reikdas@reikdas-HP-Pavilion-x360-Convertible-14-dh1xxx:~$ hadoop fs -ls /
2021-09-27 12:06:59,520 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Found 1 items
drwxr-xr-x - reikdas supergroup 0 2021-09-27 00:31 /test
我从文档中复制了在 C 中使用 libhdfs 的规范示例 -
#include "hdfs.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
/*
 * Canonical libhdfs write example: connect to HDFS, create /testfile.txt,
 * write "Hello, World!" (including the trailing NUL), flush, and close.
 *
 * Returns 0 on success; exits with a failure status on any error.
 */
int main(int argc, char **argv) {
(void)argc;  /* command-line arguments are unused */
(void)argv;
/* "default" resolves the namenode from the loaded Hadoop configuration
 * (fs.default.name in core-site.xml). */
hdfsFS fs = hdfsConnect("default", 0);
// Also tested - hdfsConnect("127.0.0.1", 9000)
if (!fs) {
fprintf(stderr, "Failed to connect to HDFS!\n");
exit(EXIT_FAILURE);
}
const char* writePath = "/testfile.txt";
hdfsFile writeFile = hdfsOpenFile(fs, writePath, O_WRONLY |O_CREAT, 0, 0, 0);
if(!writeFile) {
fprintf(stderr, "Failed to open %s for writing!\n", writePath);
hdfsDisconnect(fs);  /* release the connection before exiting */
exit(EXIT_FAILURE);
}
const char* buffer = "Hello, World!";
/* strlen(buffer)+1 deliberately writes the NUL terminator too. */
tSize num_written_bytes = hdfsWrite(fs, writeFile, (void*)buffer, strlen(buffer)+1);
if (num_written_bytes < 0 || (size_t)num_written_bytes != strlen(buffer)+1) {
fprintf(stderr, "Failed to write %s!\n", writePath);
hdfsCloseFile(fs, writeFile);
hdfsDisconnect(fs);
exit(EXIT_FAILURE);
}
if (hdfsFlush(fs, writeFile)) {
fprintf(stderr, "Failed to 'flush' %s\n", writePath);
hdfsCloseFile(fs, writeFile);  /* original leaked the handle here */
hdfsDisconnect(fs);
exit(EXIT_FAILURE);
}
hdfsCloseFile(fs, writeFile);
hdfsDisconnect(fs);  /* original never disconnected */
return 0;
}
然后我用以下命令“成功”编译了它 -
gcc testlhdfs.c -I $HADOOP_HDFS_HOME/include/ -L $HADOOP_HDFS_HOME/lib/native -lhdfs
但是我在尝试 运行 可执行文件时遇到错误 -
./a.out: error while loading shared libraries: libhdfs.so.0.0.0: cannot open shared object file: No such file or directory
当我看到 $HADOOP_HDFS_HOME/lib/native 中存在 libhdfs.so.0.0.0 时,我不知道为什么会出现此错误 -
reikdas@reikdas-HP-Pavilion-x360-Convertible-14-dh1xxx:~$ ls -l $HADOOP_HDFS_HOME/lib/native
total 166640
drwxr-xr-x 2 reikdas reikdas 4096 Jun 15 01:44 examples
-rw-r--r-- 1 reikdas reikdas 1507316 Jun 15 01:13 libhadoop.a
-rw-r--r-- 1 reikdas reikdas 1741256 Jun 15 01:44 libhadooppipes.a
lrwxrwxrwx 1 reikdas reikdas 18 Jun 15 01:13 libhadoop.so -> libhadoop.so.1.0.0
-rwxr-xr-x 1 reikdas reikdas 803040 Jun 15 01:13 libhadoop.so.1.0.0
-rw-r--r-- 1 reikdas reikdas 754382 Jun 15 01:44 libhadooputils.a
-rw-r--r-- 1 reikdas reikdas 551556 Jun 15 01:18 libhdfs.a
-rw-r--r-- 1 reikdas reikdas 106522330 Jun 15 01:20 libhdfspp.a
lrwxrwxrwx 1 reikdas reikdas 18 Jun 15 01:20 libhdfspp.so -> libhdfspp.so.0.1.0
-rwxr-xr-x 1 reikdas reikdas 44375064 Jun 15 01:20 libhdfspp.so.0.1.0
lrwxrwxrwx 1 reikdas reikdas 16 Jun 15 01:18 libhdfs.so -> libhdfs.so.0.0.0
-rwxr-xr-x 1 reikdas reikdas 333648 Jun 15 01:18 libhdfs.so.0.0.0
-rw-r--r-- 1 reikdas reikdas 10029114 Jun 15 01:39 libnativetask.a
lrwxrwxrwx 1 reikdas reikdas 22 Jun 15 01:39 libnativetask.so -> libnativetask.so.1.0.0
-rwxr-xr-x 1 reikdas reikdas 3985736 Jun 15 01:39 libnativetask.so.1.0.0
我的其他环境变量也已适当设置 -
export HADOOP_HOME=/home/reikdas/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
这可能是我在 core-site.xml
文件中设置的相关配置选项 -
<property>
<name>fs.default.name</name>
<value>hdfs://127.0.0.1:9000</value>
</property>
非常感谢在修复此错误时能得到的所有帮助。
你可以试试运行 sudo ldconfig $HADOOP_HDFS_HOME/lib/native,把该目录加入动态链接器缓存;或者在运行可执行文件前设置 export LD_LIBRARY_PATH=$HADOOP_HDFS_HOME/lib/native:$LD_LIBRARY_PATH,让运行时链接器能找到 libhdfs.so.0.0.0。