How to upload a large file to AdonisJS in chunks?
I'm building the server side of a site on the AdonisJS framework.
My task is to upload large files, and to handle that I decided to upload them in chunks.
I found some client-side code that seems to work.
Here is the client-side code: https://codepen.io/chaly7500/pen/YzQyZNR
The server-side code:
// routes.ts
import Route from '@ioc:Adonis/Core/Route'

// apiGroup is a project-specific helper (presumably prefixing the group with /v1/files)
apiGroup('v1', 'files', Route.group(async () => {
  Route.post('upload', 'Files/UploadController.index')
}))
// UploadController.ts
import { HttpContextContract } from '@ioc:Adonis/Core/HttpContext'
import MediaRepositories from 'App/Repositories/MediaRepositories'

export default class UploadController {
  public async index({ request }: HttpContextContract) {
    const file = request.file('file')
    // console.log(file)
    return await MediaRepositories.createMedia(file)
  }
}
// MediaRepositories.ts
import Application from '@ioc:Adonis/Core/Application'

export default class MediaRepositories {
  static async createMedia(file) {
    // moves the whole uploaded file in one go: fine for small files,
    // but large uploads hit the request timeout
    await file.move(Application.publicPath('media/transientmodels'))
  }

  static async updateMediaById() {
  }

  static async updateMediaByIds() {
  }
}
After the upload reaches the server I end up with a blob file,
and when I rename that blob file to blob.png the image is broken.
Has anyone implemented large file uploads with AdonisJS?
Or how do I correctly turn the blob file back into an image or video?
Main question:
How do I upload large files to Adonis without getting a request timeout error?
I was able to solve the upload with this library:
https://www.npmjs.com/package/file-chunked
// UploadController.ts
import { HttpContextContract } from '@ioc:Adonis/Core/HttpContext'
import parseJson from 'parse-json'
import MediaRepositories from 'App/Repositories/MediaRepositories'

export default class UploadController {
  public async index({ request }: HttpContextContract) {
    // each request carries one chunk plus a JSON string describing it
    const file = request.file('file')
    const chunkMetaDataStr = request.input('chunkMetadata')
    const chunkMetaData = parseJson(chunkMetaDataStr)
    return await MediaRepositories.createMedia(file, chunkMetaData)
  }
}
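For reference, the controller above expects each request to carry one chunk in the `file` field plus a `chunkMetadata` field containing a JSON string. The shape below is an assumption, reconstructed from the fields the repository reads (FileGuid, Index, TotalCount, FileName); the actual CodePen client may name things differently.

// chunkMetadata as sent by the client (assumed shape)
const chunkMetadata = JSON.stringify({
  FileGuid: 'f3a1c0de-0000-0000-0000-000000000000', // one id shared by every chunk of the same file
  Index: 0,              // zero-based position of this chunk
  TotalCount: 25,        // how many chunks the file was split into
  FileName: 'video.mp4', // original name, used for the final copy on the server
})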
// MediaRepositories.ts
import Application from '@ioc:Adonis/Core/Application'
import FileChunked from 'file-chunked'
import * as fs from 'fs'

export default class MediaRepositories {
  static async createMedia(file, chunkMetaData) {
    const base = `media/transientmodels/${chunkMetaData.FileGuid}`

    // move the incoming chunk into a temporary folder for this upload
    await file?.move(Application.publicPath(`${base}/tmp_chunks`))

    // hand the chunk over to file-chunked, keyed by the upload id and chunk index
    await FileChunked.upload({
      chunkStorage: Application.publicPath(base), // directory where the uploaded chunks are stored
      uploadId: chunkMetaData.FileGuid,
      chunkIndex: chunkMetaData.Index,
      totalChunksCount: chunkMetaData.TotalCount,
      filePath: file?.filePath,
    })

    // once the last chunk has arrived, copy the resulting file to its original name
    if (chunkMetaData.Index === chunkMetaData.TotalCount - 1) {
      fs.copyFileSync(
        Application.publicPath(`${base}/tmp_chunks/${file.clientName}`),
        Application.publicPath(`${base}/tmp_chunks/${chunkMetaData.FileName}`)
      )
    }
  }
}
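The client I actually used is the one in the CodePen linked above. Purely as an illustration of how the pieces fit together, here is a minimal browser-side sketch that slices a file and posts each chunk with the metadata the controller expects; the endpoint path, chunk size, and field names are assumptions, not the CodePen code.

// client-side sketch (TypeScript, browser) - illustrative only
async function uploadInChunks(file: File, chunkSize = 5 * 1024 * 1024) {
  const totalCount = Math.ceil(file.size / chunkSize)
  const fileGuid = crypto.randomUUID() // shared by every chunk of this upload

  for (let index = 0; index < totalCount; index++) {
    const chunk = file.slice(index * chunkSize, (index + 1) * chunkSize)

    const form = new FormData()
    form.append('file', chunk, file.name)
    form.append('chunkMetadata', JSON.stringify({
      FileGuid: fileGuid,
      Index: index,
      TotalCount: totalCount,
      FileName: file.name,
    }))

    // one small request per chunk keeps each request far below the timeout
    await fetch('/api/v1/files/upload', { method: 'POST', body: form })
  }
}

Because every request only carries one chunk, no single request runs long enough to hit the timeout, and the server assembles the full file only after the last chunk (Index === TotalCount - 1) has arrived.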