【发布时间】:2014-10-06 02:45:37
【问题描述】:
所以我有我默认的 1Mb 块要上传,一次同时有 5 个块,但是在这些块完成后,我的客户端不会发送剩余的块。因此最大输出为 5Mb,有人知道为什么吗?我没有更改任何其他默认设置,testChunks 已启用。
我的 Node 端 POST 处理是根据 cleversprocket 的回答建模的：
reassemble binary after flow.js upload on node/express server
我的获取方面基于:
//'found', filename, original_filename, identifier
//'not_found', null, null, null
// flow.js testChunks GET handler: report whether one chunk is already on disk.
// Invokes callback('found', chunkFilename, filename, identifier) when the chunk
// file exists, and callback('not_found', null, null, null) otherwise.
$.get = function(req, callback) {
  var params = {
    chunkNumber: req.param('flowChunkNumber', 0),
    chunkSize: req.param('flowChunkSize', 0),
    totalSize: req.param('flowTotalSize', 0),
    identifier: req.param('flowIdentifier', ""),
    filename: req.param('flowFilename', "")
  };

  // Reject anything that does not look like a well-formed flow.js request.
  if (validateRequest(params.chunkNumber, params.chunkSize, params.totalSize,
                      params.identifier, params.filename) != 'valid') {
    callback('not_found', null, null, null);
    return;
  }

  var chunkFilename = getChunkFilename(params.chunkNumber, params.identifier);
  fs.exists(chunkFilename, function(exists) {
    if (!exists) {
      callback('not_found', null, null, null);
      return;
    }
    callback('found', chunkFilename, params.filename, params.identifier);
  });
};
我不明白为什么它没有再次触发,进程继续:尝试获取文件块,没有块,回复 404,POST CHUNK,POST PARTLY_DONE。 (每次同时上传 x5)。 然后它不再上传块,它只是在某些部分停止(大量的 console.logs 没有解决这个问题)。
我已经从 permanentErrors 选项中删除了 404，这样上传就不会被当作永久错误而中止，但它仍然会停止。
任何帮助都会很好,因为文件有时可能会大于 5Mb。
谢谢, 乔
编辑（完整的 POST 相关代码）：
客户端设置（仅更改了 ng-flow 的默认设置）：
chunkSize: 1024 * 1024,
forceChunkSize: false,
simultaneousUploads: 5,
singleFile: false,
fileParameterName: 'file',
progressCallbacksInterval: 0, //instant feedback
speedSmoothingFactor: 1,
query: {},
headers: {},
withCredentials: false,
preprocess: null,
method: 'multipart',
prioritizeFirstAndLastChunk: false,
target: '/',
testChunks: true,
generateUniqueIdentifier: null,
maxChunkRetries: undefined,
chunkRetryInterval: undefined,
permanentErrors: [415, 500, 501],
onDropStopPropagation: false
target 选项通过标签上的一个作用域变量 url 动态设置，因此文件先上传到 tmp 目录，收齐所有块后再移动到对应讲座的目录，这样组织和管理（例如删除）会更容易。
i.e. uploadFile/{{module.id}}/{{lecture.id}}
然后是服务器端,
var multipart = require('connect-multiparty');
var multipartMiddleware = multipart();
获取
//Handle status checks on chunks through Flow.js
app.get('/uploadFile/:mId/:id', lectureRoutes.fileGet);
exports.fileGet = function(req, res) {
flow.get(req, function(status, filename, original_filename, identifier) {
console.log('GET', status);
res.send(200, (status == 'found' ? 200 : 404));
});
}
//'found', filename, original_filename, identifier
//'not_found', null, null, null
// flow.js GET handler: reports whether a given chunk has already been stored.
// callback signatures:
//   'found'     -> (status, chunkFilename, filename, identifier)
//   'not_found' -> (status, null, null, null)
$.get = function(req, callback) {
  var chunkNumber = req.param('flowChunkNumber', 0);
  var chunkSize = req.param('flowChunkSize', 0);
  var totalSize = req.param('flowTotalSize', 0);
  var identifier = req.param('flowIdentifier', "");
  var filename = req.param('flowFilename', "");

  // Shared "chunk is not here" reply.
  var miss = function() { callback('not_found', null, null, null); };

  if (validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename) != 'valid') {
    miss();
    return;
  }

  var chunkFilename = getChunkFilename(chunkNumber, identifier);
  fs.exists(chunkFilename, function(exists) {
    if (exists) {
      callback('found', chunkFilename, filename, identifier);
    } else {
      miss();
    }
  });
};
发布
//To Save File
app.post('/uploadFile/:mId/:id', multipartMiddleware, lectureRoutes.fileAddPost);
//FIX THIS
exports.fileAddPost = function(req,res){
flow.post(req, function(status, filename, original_filename, identifier, currentTestChunk, numberOfChunks) {
console.log('POST', status, original_filename, identifier);
if (status === 'done' && currentTestChunk > numberOfChunks) {
var stream = fs.createWriteStream('./tmp/' + filename);
//EDIT: I removed options {end: true} because it isn't needed
//and added {onDone: flow.clean} to remove the chunks after writing
//the file.
flow.write(identifier, stream, {onDone: flow.clean});
//Once written move the file.
mv('./tmp/'+filename,'./public/files/'+req.params.mId+'/'+req.params.id+'/'+filename, {mkdirp: true,clobber: false}, function(err){
if(err == 'EEXIST') {
//FILE ALREADY EXIST STOP IT FROM BEING RE-ADDED TO DB, although allow file to be moved incase its newer
}
else if(!err){
var set = {};
Modules.findById(req.params.mId, function(err,module){
module.Lectures.id(req.params.id).Files.push({"fileName" : filename});
module.save(function(err){
if(err) console.log(err)
//return id of file back
else{
var link = module.Lectures.id(req.params.id).Files;
res.send(200, link[link.length-1]._id)
}
})
})
}
});
}
});
}
//'partly_done', filename, original_filename, identifier
//'done', filename, original_filename, identifier
//'invalid_flow_request', null, null, null
//'non_flow_request', null, null, null
//flow.js POST handler: stores one uploaded chunk, then scans chunk files to
//report overall progress. callback statuses:
//  'partly_done'          - chunk saved, more chunks still missing
//  'done'                 - every chunk is on disk
//  'invalid_flow_request' - malformed request or failed chunk save
//  (validateRequest may also yield 'non_flow_request')
$.post = function(req, callback) {
  var fields = req.body;
  var files = req.files;
  var chunkNumber = fields['flowChunkNumber'];
  var chunkSize = fields['flowChunkSize'];
  var totalSize = fields['flowTotalSize'];
  var identifier = cleanIdentifier(fields['flowIdentifier']);
  var filename = fields['flowFilename'];
  // NOTE(review): this reads flowIdentifier, not flowFilename — looks like a
  // slip inherited from the sample code; confirm intent before changing.
  var original_filename = fields['flowIdentifier'];
  if (!files[$.fileParameterName] || !files[$.fileParameterName].size) {
    callback('invalid_flow_request', null, null, null);
    return;
  }
  var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename, files[$.fileParameterName].size);
  if (validation != 'valid') {
    callback(validation, filename, original_filename, identifier);
    return;
  }
  var chunkFilename = getChunkFilename(chunkNumber, identifier);
  // Move the multipart temp file into place as chunk N.
  fs.rename(files[$.fileParameterName].path, chunkFilename, function(err) {
    // BUG FIX: the original ignored the rename error; if the move fails
    // (e.g. EXDEV when the multipart tmp dir is on another filesystem) the
    // chunk was lost silently and the upload stalled. Report the failure so
    // the client can retry the chunk.
    if (err) {
      console.log(err);
      callback('invalid_flow_request', null, null, null);
      return;
    }
    // flow.js merges the trailing partial chunk into the last full chunk,
    // hence floor() rather than ceil(), with a minimum of one chunk.
    var numberOfChunks = Math.max(Math.floor(totalSize / (chunkSize * 1.0)), 1);
    // Walk chunk files 1..numberOfChunks; stop at the first missing one.
    var currentTestChunk = 1;
    var testChunkExists = function() {
      fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists) {
        currentTestChunk++;
        if (!exists) {
          callback('partly_done', filename, original_filename, identifier, currentTestChunk, numberOfChunks);
        } else if (currentTestChunk > numberOfChunks) {
          callback('done', filename, original_filename, identifier, currentTestChunk, numberOfChunks);
        } else {
          // Async recursion over the remaining chunks.
          testChunkExists();
        }
      });
    };
    testChunkExists();
  });
}
【问题讨论】:
-
你的帖子是什么样的?
标签: javascript node.js angularjs file-upload xmlhttprequest