使用 nodejs 将大型 json 文件迁移到 Mongodb
Migrate large json file into Mongodb using nodejs
我正在使用 nodejs 将数据迁移到 mongodb，但我尝试迁移的 json 文件太大。当我只迁移一部分数据（大约 8000 个文档）时可以成功，否则就会遇到这个问题：
MongooseError: Operation `products.insertOne()` buffering timed out after 10000ms
at Timeout.<anonymous> (\migration-
module\node_modules\mongoose\lib\drivers\node-mongodb-native\collection.js:148:23)
at listOnTimeout (internal/timers.js:557:17)
at processTimers (internal/timers.js:500:7)
MongooseError: Operation `products.insertOne()` buffering timed out after 10000ms
at Timeout.<anonymous> (\migration-
module\node_modules\mongoose\lib\drivers\node-mongodb-native\collection.js:148:23)
at listOnTimeout (internal/timers.js:557:17)
at processTimers (internal/timers.js:500:7)
脚本:
/** @format */
// NOTE(review): this is the asker's original (buggy) script, reproduced
// verbatim; the comments below only annotate the defects.
// BUG: implicit global — missing `var`/`const` before `fs`.
fs = require('fs')
var mongoose = require('mongoose')
// BUG: connect() is asynchronous and never awaited, so every save below is
// buffered by Mongoose and times out after 10s ("buffering timed out")
// when the connection is not ready in time.
mongoose.connect('mongodb://host')
mongoose.Promise = global.Promise
var schema = new mongoose.Schema({
//some fields
})
// BUG: readFileSync takes no callback — it returns the file contents
// directly; this callback is silently ignored, and it would return the
// not-yet-assigned `d` anyway.
var d = fs.readFileSync('./data/file.json', 'utf8', (err, data) => {
if (err) throw err
return d
})
var e = JSON.parse(d)
var Product = mongoose.model('product', schema)
//console.log(e)
for (var i = 0; i < e.length; i++) {
// data process
// another data process
var product = new Product(e[i])
// NOTE: fire-and-forget save — nothing waits for completion, so the
// "Migration Done" message below can print before any insert finishes.
product.save(function (err) {
if (err) return console.log(err)
})
}
e.length === i ? console.log('Migration Done successfully') : ''
请问有解决这个问题的方法吗?
禁用缓冲和其他一些修复:
/**
 * Migration script: reads a JSON array from disk and inserts each element
 * as a `product` document, connecting (and waiting) before any query runs.
 */
var fs = require('fs')
var mongoose = require('mongoose')
// Fail fast instead of buffering operations for 10s when disconnected —
// this is what turned the original error into an immediate, clear one.
mongoose.set('bufferCommands', false);
var schema = new mongoose.Schema({
//some fields
})
var Product = mongoose.model('product', schema)
// readFileSync is synchronous: it RETURNS the contents and takes no
// callback (the callback in the original snippet was silently ignored).
var d = fs.readFileSync('./data/file.json', 'utf8')
var e = JSON.parse(d);
//console.log(e)
async function main() {
  // Wait for the connection before issuing any insert.
  await mongoose.connect('mongodb://host')
  try {
    for (var i = 0; i < e.length; i++) {
      // data process
      // another data process
      var product = new Product(e[i]);
      // Await each save so inserts are not fire-and-forget.
      await product.save();
    }
  } finally {
    // Close the connection so the Node process can exit.
    await mongoose.disconnect()
  }
}
main()
  .then(() => { console.log('Migration Done successfully') })
  // Surface failures instead of leaving an unhandled rejection.
  .catch((err) => { console.error(err); process.exitCode = 1 })
我正在使用 nodejs 将数据迁移到 mongodb，但我尝试迁移的 json 文件太大。当我只迁移一部分数据（大约 8000 个文档）时可以成功，否则就会遇到这个问题：
MongooseError: Operation `products.insertOne()` buffering timed out after 10000ms
at Timeout.<anonymous> (\migration-
module\node_modules\mongoose\lib\drivers\node-mongodb-native\collection.js:148:23)
at listOnTimeout (internal/timers.js:557:17)
at processTimers (internal/timers.js:500:7)
MongooseError: Operation `products.insertOne()` buffering timed out after 10000ms
at Timeout.<anonymous> (\migration-
module\node_modules\mongoose\lib\drivers\node-mongodb-native\collection.js:148:23)
at listOnTimeout (internal/timers.js:557:17)
at processTimers (internal/timers.js:500:7)
脚本:
/** @format */
// NOTE(review): this is the asker's original (buggy) script, reproduced
// verbatim; the comments below only annotate the defects.
// BUG: implicit global — missing `var`/`const` before `fs`.
fs = require('fs')
var mongoose = require('mongoose')
// BUG: connect() is asynchronous and never awaited, so every save below is
// buffered by Mongoose and times out after 10s ("buffering timed out")
// when the connection is not ready in time.
mongoose.connect('mongodb://host')
mongoose.Promise = global.Promise
var schema = new mongoose.Schema({
//some fields
})
// BUG: readFileSync takes no callback — it returns the file contents
// directly; this callback is silently ignored, and it would return the
// not-yet-assigned `d` anyway.
var d = fs.readFileSync('./data/file.json', 'utf8', (err, data) => {
if (err) throw err
return d
})
var e = JSON.parse(d)
var Product = mongoose.model('product', schema)
//console.log(e)
for (var i = 0; i < e.length; i++) {
// data process
// another data process
var product = new Product(e[i])
// NOTE: fire-and-forget save — nothing waits for completion, so the
// "Migration Done" message below can print before any insert finishes.
product.save(function (err) {
if (err) return console.log(err)
})
}
e.length === i ? console.log('Migration Done successfully') : ''
请问有解决这个问题的方法吗?
禁用缓冲和其他一些修复:
/**
 * Migration script: reads a JSON array from disk and inserts each element
 * as a `product` document, connecting (and waiting) before any query runs.
 */
var fs = require('fs')
var mongoose = require('mongoose')
// Fail fast instead of buffering operations for 10s when disconnected —
// this is what turned the original error into an immediate, clear one.
mongoose.set('bufferCommands', false);
var schema = new mongoose.Schema({
//some fields
})
var Product = mongoose.model('product', schema)
// readFileSync is synchronous: it RETURNS the contents and takes no
// callback (the callback in the original snippet was silently ignored).
var d = fs.readFileSync('./data/file.json', 'utf8')
var e = JSON.parse(d);
//console.log(e)
async function main() {
  // Wait for the connection before issuing any insert.
  await mongoose.connect('mongodb://host')
  try {
    for (var i = 0; i < e.length; i++) {
      // data process
      // another data process
      var product = new Product(e[i]);
      // Await each save so inserts are not fire-and-forget.
      await product.save();
    }
  } finally {
    // Close the connection so the Node process can exit.
    await mongoose.disconnect()
  }
}
main()
  .then(() => { console.log('Migration Done successfully') })
  // Surface failures instead of leaving an unhandled rejection.
  .catch((err) => { console.error(err); process.exitCode = 1 })