forked from FoundKeyGang/FoundKey

Bug fix

parent 54804f4a64
commit 47f98fbab7

2 changed files with 46 additions and 70 deletions

@@ -214,7 +214,7 @@ export default (
 
 		const readable = fs.createReadStream(path);
 
-		return addToGridFS(name, readable, mime, {
+		return addToGridFS(detectedName, readable, mime, {
 			user_id: user._id,
 			folder_id: folder !== null ? folder._id : null,
 			comment: comment,
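
The only change in this hunk is the first argument to `addToGridFS`: the stored file now gets `detectedName` (computed earlier in this function, outside the hunk) rather than the raw `name` value. For orientation only, here is a hypothetical sketch of how a helper with this shape could be written against MongoDB's GridFSBucket; the repository's actual helper is defined elsewhere and is not shown in this diff, so treat every detail below as an assumption.

import { Db, GridFSBucket, ObjectId } from 'mongodb';
import { Readable } from 'stream';

// Hypothetical sketch of a helper shaped like addToGridFS(name, readable, mime, metadata).
// The real helper presumably closes over the database connection; here it is passed in.
function addToGridFS(db: Db, name: string, readable: Readable, mime: string, metadata: any): Promise<ObjectId> {
	const bucket = new GridFSBucket(db);
	return new Promise((resolve, reject) => {
		const upload = bucket.openUploadStream(name, { contentType: mime, metadata });
		readable.on('error', reject);
		readable
			.pipe(upload)
			.on('error', reject)
			.on('finish', () => resolve(upload.id));
	});
}

The metadata object in the diff (user_id, folder_id, comment) rides along with the stored file in the same way the `metadata` option does here.
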
@@ -224,25 +224,26 @@ export default (
 		.then(file => {
 			log(`drive file has been created ${file._id}`);
 			resolve(file);
-			return serialize(file);
-		})
-		.then(serializedFile => {
-			// Publish drive_file_created event
-			event(user._id, 'drive_file_created', fileObj);
-
-			// Register to search database
-			if (config.elasticsearch.enable) {
-				const es = require('../../db/elasticsearch');
-				es.index({
-					index: 'misskey',
-					type: 'drive_file',
-					id: file._id.toString(),
-					body: {
-						name: file.name,
-						user_id: user._id.toString()
-					}
-				});
-			}
-		})
+
+			serialize(file)
+			.then(serializedFile => {
+				// Publish drive_file_created event
+				event(user._id, 'drive_file_created', serializedFile);
+
+				// Register to search database
+				if (config.elasticsearch.enable) {
+					const es = require('../../db/elasticsearch');
+					es.index({
+						index: 'misskey',
+						type: 'drive_file',
+						id: file._id.toString(),
+						body: {
+							name: file.name,
+							user_id: user._id.toString()
+						}
+					});
+				}
+			});
 		})
 		.catch(reject);
 });

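This is the actual bug: the old chain published the `drive_file_created` event with `fileObj`, a variable that is never defined in this function (the serialized value arrives as `serializedFile`), so the event payload was wrong. The fix resolves with the raw document first and runs serialization, event publishing and Elasticsearch indexing as detached follow-up work. One design note: that detached chain has no catch, so a failure there becomes an unhandled rejection. A sketch of a hardened variant (not part of the commit), reusing this file's existing `serialize`, `event` and `log` helpers:

// Sketch only: same detached side work as the new code, but with a catch so
// a serialization or indexing failure is logged instead of going unhandled.
serialize(file)
	.then(serializedFile => {
		event(user._id, 'drive_file_created', serializedFile);
	})
	.catch(err => {
		log(`post-create work failed: ${err}`);
	});

The remaining hunks below are from the second changed file, the upload_from_url endpoint (as its debug namespace indicates).
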
@@ -10,7 +10,6 @@ import * as debug from 'debug';
 import * as tmp from 'tmp';
 import * as fs from 'fs';
 import * as request from 'request';
-import * as crypto from 'crypto';
 
 const log = debug('misskey:endpoint:upload_from_url');
 
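
The `crypto` import is dropped because it was only needed for the MD5 hashing stage removed in the last hunk below. For reference, a standalone sketch of the kind of work that stage did (not code from this commit):

import * as crypto from 'crypto';
import * as fs from 'fs';

// Streams a file through an MD5 hash and resolves with the hex digest.
function md5OfFile(path: string): Promise<string> {
	return new Promise((resolve, reject) => {
		const hash = crypto.createHash('md5');
		fs.createReadStream(path)
			.on('error', reject)
			.on('data', chunk => hash.update(chunk))
			.on('end', () => resolve(hash.digest('hex')));
	});
}
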
@@ -21,11 +20,11 @@ const log = debug('misskey:endpoint:upload_from_url');
  * @param {any} user
  * @return {Promise<any>}
  */
-module.exports = (params, user) => new Promise((res, rej) => {
+module.exports = async (params, user): Promise<any> => {
 	// Get 'url' parameter
 	// TODO: Validate this url
 	const [url, urlErr] = $(params.url).string().$;
-	if (urlErr) return rej('invalid url param');
+	if (urlErr) throw 'invalid url param';
 
 	let name = URL.parse(url).pathname.split('/').pop();
 	if (!validateFileName(name)) {
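
This hunk swaps the explicit Promise-constructor wrapper for an async function: parameter-validation failures now `throw` instead of calling `rej`, which is equivalent from the caller's point of view. A generic sketch of the pattern (not project code, names are illustrative):

// Equivalent behaviour, two styles: in an async function, `throw` takes the
// role that `rej(...)` played inside the explicit Promise constructor --
// either way the caller sees a rejected promise.
const oldStyle = (params: { url?: string }) => new Promise((res, rej) => {
	if (params.url == null) return rej('invalid url param');
	res(params.url);
});

const newStyle = async (params: { url?: string }): Promise<string> => {
	if (params.url == null) throw 'invalid url param';
	return params.url;
};
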
@@ -34,59 +33,35 @@ module.exports = (params, user) => new Promise((res, rej) => {
 
 	// Get 'folder_id' parameter
 	const [folderId = null, folderIdErr] = $(params.folder_id).optional.nullable.id().$;
-	if (folderIdErr) return rej('invalid folder_id param');
+	if (folderIdErr) throw 'invalid folder_id param';
 
 	// Create temp file
-	new Promise((res, rej) => {
+	const path = await new Promise((res: (string) => void, rej) => {
 		tmp.file((e, path) => {
 			if (e) return rej(e);
 			res(path);
 		});
-	})
-	// Download file
-	.then((path: string) => new Promise((res, rej) => {
-		const writable = fs.createWriteStream(path);
-		request(url)
-		.on('error', rej)
-		.on('end', () => {
-			writable.close();
-			res(path);
-		})
-		.pipe(writable)
-		.on('error', rej);
-	}))
-	// Calculate hash & content-type
-	.then((path: string) => new Promise((res, rej) => {
-		const readable = fs.createReadStream(path);
-		const hash = crypto.createHash('md5');
-		readable
-		.on('error', rej)
-		.on('end', () => {
-			hash.end();
-			res([path, hash.digest('hex')]);
-		})
-		.pipe(hash)
-		.on('error', rej);
-	}))
-	// Create file
-	.then((rv: string[]) => new Promise((res, rej) => {
-		const [path, hash] = rv;
-		create(user, {
-			stream: fs.createReadStream(path),
-			name,
-			hash
-		}, null, folderId)
-		.then(driveFile => {
-			res(driveFile);
-			// crean-up
-			fs.unlink(path, (e) => {
-				if (e) log(e.stack);
-			});
-		})
-		.catch(rej);
-	}))
-	// Serialize
-	.then(serialize)
-	.then(res)
-	.catch(rej);
-});
+	});
+
+	// write content at URL to temp file
+	await new Promise((res, rej) => {
+		const writable = fs.createWriteStream(path);
+		request(url)
+		.on('error', rej)
+		.on('end', () => {
+			writable.close();
+			res(path);
+		})
+		.pipe(writable)
+		.on('error', rej);
+	});
+
+	const driveFile = await create(user, path, name, null, folderId);
+
+	// clean-up
+	fs.unlink(path, (e) => {
+		if (e) log(e.stack);
+	});
+
+	return serialize(driveFile);
+};

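After the rewrite the endpoint reads top to bottom: await a temp file path, await the download into it, hand the path to `create`, unlink the temp file, and return the serialized drive file. The MD5 step is gone (hence the dropped `crypto` import), and `create` is now called with the path instead of a `{ stream, name, hash }` object. One design note: `fs.unlink` runs only after `create` resolves, so a failure inside `create` leaves the temp file behind. A sketch of a try/finally variant under the same imports; `withTempFile` is a hypothetical helper name, not part of the commit:

import * as tmp from 'tmp';
import * as fs from 'fs';

// Sketch only: guarantees the temp file is removed even when the work inside
// it throws. `log` stands for this file's debug logger.
async function withTempFile<T>(job: (path: string) => Promise<T>): Promise<T> {
	const path = await new Promise<string>((res, rej) => {
		tmp.file((e, path) => e ? rej(e) : res(path));
	});
	try {
		return await job(path);
	} finally {
		fs.unlink(path, (e) => {
			if (e) log(e.stack);
		});
	}
}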