GCF "No Such Object" 刚刚创建有问题的对象时
GCF "No Such Object" when the Object in question was just created
我正在设置一个 Google Cloud Functions (GCF) 函数,该函数的触发频率足以同时存在多个实例 运行。
我从 readStream 中收到错误,流的源文件不存在,但此时在我的程序中我实际上刚刚创建了它。
我已通过 console.log()-ing 文件 JSON 确保文件在流开始之前存在,因此文件确实存在。我还确保我尝试访问的文件已完成由先前的流写入并等待,但没有骰子。
编辑:代码现在包含整个脚本。似乎抛出错误的部分是函数 columnDelete()。
var parse = require('fast-csv');
var Storage = require('@google-cloud/storage');
var Transform = require('readable-stream').Transform;
var storage = new Storage();
var bucket = storage.bucket('<BUCKET>');
const DMSs = ['PBS','CDK','One_Eighty','InfoBahn'];
class DeleteColumns extends Transform{
constructor(){
super({objectMode:true})
}
_transform(row, enc, done){
//create an array 2 elements shorter than received
let newRow = new Array(row.length - 2);
//write all data but the first two columns
for(let i = 0; i < newRow.length; i++){
newRow[i] = row[i+2];
}
this.push(newRow.toString() + '\n');
done();
}
}
function rename(file, originalFile, DMS){
return new Promise((resolve, reject) => {
var dealer;
var date;
var header = true;
var parser = parse({delimiter : ",", quote:'\'});
//for each row of data
var stream = originalFile.createReadStream();
stream.pipe(parser)
.on('data', (row)=>{
//if this is the first line do nothing
if(header){
header = false;
}
//otherwise record the contents of the first two columns and then destroy the stream
else {
dealer = row[0].toString().replace('"', '').replace('"', '');
date = row[1].toString().replace('"', '').replace('"', '');
stream.end();
}
})
.on('finish', function(){
var newName = dealer + ' ' + date + '_' + DMS + 'temp.csv';
//if this was not triggered by the renaming of a file
if(!file.name.includes(dealer)&&!file.name.includes(':')){
console.log('Renamed ' + file.name);
originalFile.copy(newName);
originalFile.copy(newName.replace('temp',''));
}else{
newName = 'Not Renamed';
console.log('Oops, triggered by the rename');
}
resolve(newName);
});
});
}
function columnDelete(fileName){
return new Promise((resolve, reject) =>{
console.log('Deleting Columns...');
console.log(bucket.file(fileName));
var parser = parse({delimiter : ",", quote:'\'});
var del = new DeleteColumns();
var temp = bucket.file(fileName);
var final = bucket.file(fileName.replace('temp', ''));
//for each row of data
temp.createReadStream()
//parse the csv
.pipe(parser)
//delete first two columns
.pipe(del)
//write to new file
.pipe(final.createWriteStream()
.on('finish', function(){
console.log('Columns Deleted');
temp.delete();
resolve();
})
);
});
}
exports.triggerRename = async(data, context) => {
var DMS = 'Triple';
var file = data;
//if not a temporary file
if(!file.name.includes('temp')){
//create a new File object from the name of the data passed
const originalFile = bucket.file(file.name);
//identify which database this data is from
DMSs.forEach(function(database){
if(file.name.includes(database)){
DMS = database;
}
});
//rename the file
var tempName = await rename(file, originalFile, DMS);
//if it was renamed, delete the extra columns
if (!tempName.includes('Not Renamed')){
await columnDelete(tempName);
}
} else if(file.name.includes('undefined')){
console.log(file.name + ' is invalid. Deleted.');
bucket.file(file.name).delete();
}
else {
console.log( file.name + ' is a temporary file. Did not rename.');
}
};
我期望输出如下:
Deleting Columns...
Columns Deleted
简单明了,让我们知道它何时开始和结束。
然而,我得到的是:
Deleting Columns...
ApiError: No such object: <file> at at Object.parseHttpRespMessage(......)
finished with status: 'crash'
由于显而易见的原因,这是不需要的。我的下一个想法是确保该文件没有被脚本的另一个实例中途删除,但要做到这一点,我必须检查该文件是否正在被另一个流使用,也就是说,对我来说知识,不可能。
有什么想法吗?
当我创建文件时,我调用了异步函数 copy() 并继续,这意味着在尝试访问文件时它没有完成复制。我不知道,文件对象是一个引用变量,实际上并不包含文件本身。在复制文件时,指针存在但指向未完成的文件。
因此,"No Such Object"。为了解决这个问题,我只是使用了一个回调来确保在我访问文件之前复制已经完成。
感谢 Doug Stevenson 让我知道指针!
我正在设置一个 Google Cloud Functions (GCF) 函数,该函数的触发频率足以同时存在多个实例 运行。
我从 readStream 中收到错误,流的源文件不存在,但此时在我的程序中我实际上刚刚创建了它。
我已通过 console.log()-ing 文件 JSON 确保文件在流开始之前存在,因此文件确实存在。我还确保我尝试访问的文件已完成由先前的流写入并等待,但没有骰子。
编辑:代码现在包含整个脚本。似乎抛出错误的部分是函数 columnDelete()。
var parse = require('fast-csv');
var Storage = require('@google-cloud/storage');
var Transform = require('readable-stream').Transform;
var storage = new Storage();
var bucket = storage.bucket('<BUCKET>');
const DMSs = ['PBS','CDK','One_Eighty','InfoBahn'];
class DeleteColumns extends Transform{
constructor(){
super({objectMode:true})
}
_transform(row, enc, done){
//create an array 2 elements shorter than received
let newRow = new Array(row.length - 2);
//write all data but the first two columns
for(let i = 0; i < newRow.length; i++){
newRow[i] = row[i+2];
}
this.push(newRow.toString() + '\n');
done();
}
}
function rename(file, originalFile, DMS){
return new Promise((resolve, reject) => {
var dealer;
var date;
var header = true;
var parser = parse({delimiter : ",", quote:'\'});
//for each row of data
var stream = originalFile.createReadStream();
stream.pipe(parser)
.on('data', (row)=>{
//if this is the first line do nothing
if(header){
header = false;
}
//otherwise record the contents of the first two columns and then destroy the stream
else {
dealer = row[0].toString().replace('"', '').replace('"', '');
date = row[1].toString().replace('"', '').replace('"', '');
stream.end();
}
})
.on('finish', function(){
var newName = dealer + ' ' + date + '_' + DMS + 'temp.csv';
//if this was not triggered by the renaming of a file
if(!file.name.includes(dealer)&&!file.name.includes(':')){
console.log('Renamed ' + file.name);
originalFile.copy(newName);
originalFile.copy(newName.replace('temp',''));
}else{
newName = 'Not Renamed';
console.log('Oops, triggered by the rename');
}
resolve(newName);
});
});
}
function columnDelete(fileName){
return new Promise((resolve, reject) =>{
console.log('Deleting Columns...');
console.log(bucket.file(fileName));
var parser = parse({delimiter : ",", quote:'\'});
var del = new DeleteColumns();
var temp = bucket.file(fileName);
var final = bucket.file(fileName.replace('temp', ''));
//for each row of data
temp.createReadStream()
//parse the csv
.pipe(parser)
//delete first two columns
.pipe(del)
//write to new file
.pipe(final.createWriteStream()
.on('finish', function(){
console.log('Columns Deleted');
temp.delete();
resolve();
})
);
});
}
exports.triggerRename = async(data, context) => {
var DMS = 'Triple';
var file = data;
//if not a temporary file
if(!file.name.includes('temp')){
//create a new File object from the name of the data passed
const originalFile = bucket.file(file.name);
//identify which database this data is from
DMSs.forEach(function(database){
if(file.name.includes(database)){
DMS = database;
}
});
//rename the file
var tempName = await rename(file, originalFile, DMS);
//if it was renamed, delete the extra columns
if (!tempName.includes('Not Renamed')){
await columnDelete(tempName);
}
} else if(file.name.includes('undefined')){
console.log(file.name + ' is invalid. Deleted.');
bucket.file(file.name).delete();
}
else {
console.log( file.name + ' is a temporary file. Did not rename.');
}
};
我期望输出如下:
Deleting Columns...
Columns Deleted
简单明了,让我们知道它何时开始和结束。
然而,我得到的是:
Deleting Columns...
ApiError: No such object: <file> at at Object.parseHttpRespMessage(......)
finished with status: 'crash'
由于显而易见的原因,这是不需要的。我的下一个想法是确保该文件没有被脚本的另一个实例中途删除,但要做到这一点,我必须检查该文件是否正在被另一个流使用,也就是说,对我来说知识,不可能。
有什么想法吗?
当我创建文件时,我调用了异步函数 copy() 并继续,这意味着在尝试访问文件时它没有完成复制。我不知道,文件对象是一个引用变量,实际上并不包含文件本身。在复制文件时,指针存在但指向未完成的文件。
因此,"No Such Object"。为了解决这个问题,我只是使用了一个回调来确保在我访问文件之前复制已经完成。
感谢 Doug Stevenson 让我知道指针!