使用 Node 和 SSH2 将缓冲区数据写入 SFTP 服务器
Write buffer data to SFTP server using Node and SSH2
我正在尝试使用 SSH2 模块将文件写入 Node 中的 SFTP 服务器。对于我的用例,文件源是 Azure Blob 存储,文件相对较大(超过 5 GB),所以我的想法是从 Blob 存储中以块的形式读取数据并将它们写入服务器。我不想先下载整个文件再执行写入,因为文件很大,不想在运行时出现磁盘空间不足的问题。
我通过使用可用的 downloadToBuffer() 和 write() 函数并递增 'offset' 直到所有字节都已写入,从而实现了这一点。如代码片段所示
// Open the remote file and copy the blob into it in fixed-size chunks.
// Each chunk is downloaded into memory, written at the matching offset,
// and awaited before the next iteration so writes cannot interleave.
sftp.open('remoteFilePath', 'w', async (openError, handle) => {
  if (openError) throw openError;
  let blobOffset = 0;
  try {
    while (blobOffset < file.size) {
      // Clamp the final chunk so we never read past the end of the blob.
      const chunkLength = Math.min(length, file.size - blobOffset);
      const buffer = await client.downloadToBuffer(blobOffset, chunkLength);
      // Promisify sftp.write so the surrounding try/catch actually sees
      // write failures — a `throw` inside the callback would be uncatchable
      // here and would crash the process instead.
      await new Promise((resolve, reject) => {
        sftp.write(handle, buffer, 0, chunkLength, blobOffset, (writeError) =>
          writeError ? reject(writeError) : resolve()
        );
      });
      blobOffset += chunkLength;
    }
  } catch (e) {
    console.log(e);
  } finally {
    // Always release the remote file handle, even after an error.
    sftp.close(handle, () => {});
  }
});
此解决方案有效,但感觉对于大文件而言效率不高。有没有更好的方法来实现这个?也许使用流而不必使用循环?
针对该问题,请参考以下代码:
var Client = require("ssh2").Client;
var {
  BlobServiceClient,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

// NOTE(review): credentials are empty placeholders — load them from
// environment variables or a secrets store before shipping.
const accountName = "andyprivate";
const accountKey = "";

const creds = new StorageSharedKeyCredential(accountName, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${accountName}.blob.core.windows.net`,
  creds
);
const containerClient = blobServiceClient.getContainerClient("output");
const blob = containerClient.getBlockBlobClient("5m Sales Records.csv");

// Top-level `await` is not valid in a CommonJS (`require`) script, so the
// async work lives in main(); errors are reported via the final .catch.
async function main() {
  // Stream the blob instead of buffering it: readableStreamBody is a Node
  // Readable, so piping keeps memory usage flat even for multi-GB files.
  const read = (await blob.download(0)).readableStreamBody;

  const conn = new Client();
  conn.on("ready", () => {
    conn.sftp((err, sftp) => {
      if (err) {
        // Throwing inside an event callback would crash the process with an
        // unhandled exception; log and tear down the connection instead.
        console.error(err);
        conn.end();
        return;
      }
      const write = sftp.createWriteStream("/home/testqw/test.csv");
      // Attach handlers before piping so no early event is missed.
      write
        .on("error", (error) => {
          console.error(error);
          conn.end();
        })
        .on("finish", () => {
          console.log("All writes are now complete.");
          sftp.end();
        });
      read.pipe(write);
    });
  });
  conn.on("end", () => {
    console.log("close the connection");
  });
  conn.connect({
    host: "",
    port: 22,
    username: "",
    password: "!",
  });
}

main().catch(console.error);
我正在尝试使用 SSH2 模块将文件写入 Node 中的 SFTP 服务器。对于我的用例,文件源是 Azure Blob 存储,文件相对较大(超过 5 GB),所以我的想法是从 Blob 存储中以块的形式读取数据并将它们写入服务器。我不想先下载整个文件再执行写入,因为文件很大,不想在运行时出现磁盘空间不足的问题。
我通过使用可用的 downloadToBuffer() 和 write() 函数并递增 'offset' 直到所有字节都已写入,从而实现了这一点。如代码片段所示
// Open the remote file and copy the blob into it in fixed-size chunks.
// Each chunk is downloaded into memory, written at the matching offset,
// and awaited before the next iteration so writes cannot interleave.
sftp.open('remoteFilePath', 'w', async (openError, handle) => {
  if (openError) throw openError;
  let blobOffset = 0;
  try {
    while (blobOffset < file.size) {
      // Clamp the final chunk so we never read past the end of the blob.
      const chunkLength = Math.min(length, file.size - blobOffset);
      const buffer = await client.downloadToBuffer(blobOffset, chunkLength);
      // Promisify sftp.write so the surrounding try/catch actually sees
      // write failures — a `throw` inside the callback would be uncatchable
      // here and would crash the process instead.
      await new Promise((resolve, reject) => {
        sftp.write(handle, buffer, 0, chunkLength, blobOffset, (writeError) =>
          writeError ? reject(writeError) : resolve()
        );
      });
      blobOffset += chunkLength;
    }
  } catch (e) {
    console.log(e);
  } finally {
    // Always release the remote file handle, even after an error.
    sftp.close(handle, () => {});
  }
});
此解决方案有效,但感觉对于大文件而言效率不高。有没有更好的方法来实现这个?也许使用流而不必使用循环?
针对该问题,请参考以下代码:
var Client = require("ssh2").Client;
var {
  BlobServiceClient,
  StorageSharedKeyCredential,
} = require("@azure/storage-blob");

// NOTE(review): credentials are empty placeholders — load them from
// environment variables or a secrets store before shipping.
const accountName = "andyprivate";
const accountKey = "";

const creds = new StorageSharedKeyCredential(accountName, accountKey);
const blobServiceClient = new BlobServiceClient(
  `https://${accountName}.blob.core.windows.net`,
  creds
);
const containerClient = blobServiceClient.getContainerClient("output");
const blob = containerClient.getBlockBlobClient("5m Sales Records.csv");

// Top-level `await` is not valid in a CommonJS (`require`) script, so the
// async work lives in main(); errors are reported via the final .catch.
async function main() {
  // Stream the blob instead of buffering it: readableStreamBody is a Node
  // Readable, so piping keeps memory usage flat even for multi-GB files.
  const read = (await blob.download(0)).readableStreamBody;

  const conn = new Client();
  conn.on("ready", () => {
    conn.sftp((err, sftp) => {
      if (err) {
        // Throwing inside an event callback would crash the process with an
        // unhandled exception; log and tear down the connection instead.
        console.error(err);
        conn.end();
        return;
      }
      const write = sftp.createWriteStream("/home/testqw/test.csv");
      // Attach handlers before piping so no early event is missed.
      write
        .on("error", (error) => {
          console.error(error);
          conn.end();
        })
        .on("finish", () => {
          console.log("All writes are now complete.");
          sftp.end();
        });
      read.pipe(write);
    });
  });
  conn.on("end", () => {
    console.log("close the connection");
  });
  conn.connect({
    host: "",
    port: 22,
    username: "",
    password: "!",
  });
}

main().catch(console.error);