forked from FoundKeyGang/FoundKey
server: unify drive object types in database
Minor adjustment: the 'name' columns now have the same maximum length. Major adjustment: rename both columns to "parentId" and update all references to this name in the backend. API parameters are not changed, since that would be an unnecessary breaking change.
parent 701054b86e
commit 94d1cf75aa
14 changed files with 48 additions and 26 deletions
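For orientation, here is a minimal sketch (drawn from the endpoint hunks below, not additional code in this commit) of how the unchanged API parameter `folderId` is mapped onto the renamed `parentId` column:

	// The public API still accepts `folderId`; handlers translate it to the new column name.
	if (ps.folderId) {
		query.andWhere('file.parentId = :parentId', { parentId: ps.folderId });
	} else {
		query.andWhere('file.parentId IS NULL');
	}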
@@ -0,0 +1,22 @@
+export class unifyDriveObjects1679767920029 {
+	name = 'unifyDriveObjects1679767920029';
+
+	async up(queryRunner) {
+		await queryRunner.query(`ALTER TABLE "drive_file" RENAME COLUMN "folderId" TO "parentId"`);
+		await queryRunner.query(`ALTER TABLE "drive_folder" ALTER COLUMN "name" TYPE character varying(256)`);
+		// The column name changed so the name that typeorm generates for indices and foreign keys changes too.
+		// To avoid reindexing, just rename them.
+		await queryRunner.query(`ALTER TABLE "drive_file" RENAME CONSTRAINT "FK_bb90d1956dafc4068c28aa7560a" TO "FK_84b4e3038e7e64a68764dd7ea3e"`);
+		await queryRunner.query(`ALTER INDEX "IDX_bb90d1956dafc4068c28aa7560" RENAME TO "IDX_84b4e3038e7e64a68764dd7ea3"`);
+		await queryRunner.query(`ALTER INDEX "IDX_55720b33a61a7c806a8215b825" RENAME TO "IDX_7c607687cd487292d16617b23e"`);
+	}
+
+	async down(queryRunner) {
+		await queryRunner.query(`ALTER TABLE "drive_file" RENAME CONSTRAINT "FK_84b4e3038e7e64a68764dd7ea3e" TO "FK_bb90d1956dafc4068c28aa7560a"`);
+		await queryRunner.query(`ALTER INDEX "IDX_84b4e3038e7e64a68764dd7ea3" RENAME TO "IDX_bb90d1956dafc4068c28aa7560"`);
+		await queryRunner.query(`ALTER INDEX "IDX_7c607687cd487292d16617b23e" RENAME TO "IDX_55720b33a61a7c806a8215b825"`);
+
+		await queryRunner.query(`ALTER TABLE "drive_folder" ALTER COLUMN "name" TYPE character varying(128) USING substr("name", 1, 128)`);
+		await queryRunner.query(`ALTER TABLE "drive_file" RENAME COLUMN "parentId" TO "folderId"`);
+	}
+}
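The constraint and index names in this migration are the identifiers typeorm had generated for the old column name; renaming them keeps the existing indices instead of dropping and rebuilding them. A hedged sketch (an assumption for illustration, not part of this commit) of how the renames could be checked against the PostgreSQL catalogs after migrating:

	// List index and foreign-key names on drive_file; after up() has run they should
	// carry the new "_84b4..." / "_7c60..." identifiers.
	const indices = await queryRunner.query(
		`SELECT indexname FROM pg_indexes WHERE tablename = 'drive_file'`,
	);
	const foreignKeys = await queryRunner.query(
		`SELECT conname FROM pg_constraint WHERE conrelid = 'drive_file'::regclass AND contype = 'f'`,
	);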
@@ -4,7 +4,7 @@ import { User } from './user.js';
 import { DriveFolder } from './drive-folder.js';
 
 @Entity()
-@Index(['userId', 'folderId', 'id'])
+@Index(['userId', 'parentId', 'id'])
 export class DriveFile {
 	@PrimaryColumn(id())
 	public id: string;
@@ -142,13 +142,13 @@ export class DriveFile {
 		nullable: true,
 		comment: 'The parent folder ID. If null, it means the DriveFile is located in root.',
 	})
-	public folderId: DriveFolder['id'] | null;
+	public parentId: DriveFolder['id'] | null;
 
 	@ManyToOne(() => DriveFolder, {
 		onDelete: 'SET NULL',
 	})
 	@JoinColumn()
-	public folder: DriveFolder | null;
+	public parent: DriveFolder | null;
 
 	@Index()
 	@Column('boolean', {
@@ -14,7 +14,7 @@ export class DriveFolder {
 	public createdAt: Date;
 
 	@Column('varchar', {
-		length: 128,
+		length: 256,
 		comment: 'The name of the DriveFolder.',
 	})
 	public name: string;
@@ -105,8 +105,8 @@ export const DriveFileRepository = db.getRepository(DriveFile).extend({
 			url: opts.self ? file.url : this.getPublicUrl(file, false),
 			thumbnailUrl: this.getPublicUrl(file, true),
 			comment: file.comment,
-			folderId: file.folderId,
-			folder: opts.detail && file.folderId ? DriveFolders.pack(file.folderId, {
+			folderId: file.parentId,
+			folder: opts.detail && file.parentId ? DriveFolders.pack(file.parentId, {
 				detail: true,
 			}) : undefined,
 			userId: file.userId,
@@ -28,7 +28,7 @@ export const DriveFolderRepository = db.getRepository(DriveFolder).extend({
 				parentId: folder.id,
 			}),
 			filesCount: DriveFiles.countBy({
-				folderId: folder.id,
+				parentId: folder.id,
 			}),
 
 			...(folder.parentId ? {
@@ -38,9 +38,9 @@ export default define(meta, paramDef, async (ps, user) => {
 		.andWhere('file.userId = :userId', { userId: user.id });
 
 	if (ps.folderId) {
-		query.andWhere('file.folderId = :folderId', { folderId: ps.folderId });
+		query.andWhere('file.parentId = :parentId', { parentId: ps.folderId });
 	} else {
-		query.andWhere('file.folderId IS NULL');
+		query.andWhere('file.parentId IS NULL');
 	}
 
 	if (ps.type) {
@@ -62,7 +62,7 @@ export default define(meta, paramDef, async (ps, user, _, file, cleanup) => {
 
 	try {
 		// Create file
-		const driveFile = await addFile({ user, path: file.path, name, comment: ps.comment, folderId: ps.folderId, force: ps.force, sensitive: ps.isSensitive });
+		const driveFile = await addFile({ user, path: file.path, name, comment: ps.comment, parentId: ps.folderId, force: ps.force, sensitive: ps.isSensitive });
 		return await DriveFiles.pack(driveFile, { self: true });
 	} catch (e) {
 		if (e instanceof Error || typeof e === 'string') {
@@ -36,7 +36,7 @@ export default define(meta, paramDef, async (ps, user) => {
 	const files = await DriveFiles.findBy({
 		name: ps.name,
 		userId: user.id,
-		folderId: ps.folderId ?? IsNull(),
+		parentId: ps.folderId ?? IsNull(),
 	});
 
 	return await Promise.all(files.map(file => DriveFiles.pack(file, { self: true })));
@@ -54,7 +54,7 @@ export default define(meta, paramDef, async (ps, user) => {
 
 	if (ps.folderId !== undefined) {
 		if (ps.folderId === null) {
-			file.folderId = null;
+			file.parentId = null;
 		} else {
 			const folder = await DriveFolders.findOneBy({
 				id: ps.folderId,
@@ -63,14 +63,14 @@ export default define(meta, paramDef, async (ps, user) => {
 
 			if (folder == null) throw new ApiError('NO_SUCH_FOLDER');
 
-			file.folderId = folder.id;
+			file.parentId = folder.id;
 		}
 	}
 
 	await DriveFiles.update(file.id, {
 		name: file.name,
 		comment: file.comment,
-		folderId: file.folderId,
+		parentId: file.parentId,
 		isSensitive: file.isSensitive,
 	});
 
@@ -34,7 +34,7 @@ export const paramDef = {
 
 // eslint-disable-next-line import/no-default-export
 export default define(meta, paramDef, async (ps, user) => {
-	uploadFromUrl({ url: ps.url, user, folderId: ps.folderId, sensitive: ps.isSensitive, force: ps.force, comment: ps.comment }).then(file => {
+	uploadFromUrl({ url: ps.url, user, parentId: ps.folderId, sensitive: ps.isSensitive, force: ps.force, comment: ps.comment }).then(file => {
 		DriveFiles.pack(file, { self: true }).then(packedFile => {
 			publishMainStream(user.id, 'urlUploadFinished', {
 				marker: ps.marker,
@@ -33,7 +33,7 @@ export default define(meta, paramDef, async (ps, user) => {
 
 	const [childFoldersCount, childFilesCount] = await Promise.all([
 		DriveFolders.countBy({ parentId: folder.id }),
-		DriveFiles.countBy({ folderId: folder.id }),
+		DriveFiles.countBy({ parentId: folder.id }),
 	]);
 
 	if (childFoldersCount !== 0 || childFilesCount !== 0) {
@@ -48,10 +48,10 @@ export default define(meta, paramDef, async (ps, user) => {
 
 	if (ps.folderId) {
 		foldersQuery.andWhere('folder.parentId = :parentId', { parentId: ps.folderId });
-		filesQuery.andWhere('file.folderId = :folderId', { folderId: ps.folderId });
+		filesQuery.andWhere('file.parentId = :parentId', { parentId: ps.folderId });
 	} else {
 		foldersQuery.andWhere('folder.parentId IS NULL');
-		filesQuery.andWhere('file.folderId IS NULL');
+		filesQuery.andWhere('file.parentId IS NULL');
 	}
 
 	const folders = await foldersQuery.take(ps.limit).getMany();
@@ -322,7 +322,7 @@ type AddFileArgs = {
 	/** Comment */
 	comment?: string | null;
 	/** Folder ID */
-	folderId?: any;
+	parentId?: any;
 	/** If set to true, forcibly upload the file even if there is a file with the same hash. */
 	force?: boolean;
 	/** Do not save file to local */
@@ -344,7 +344,7 @@ export async function addFile({
 	path,
 	name = null,
 	comment = null,
-	folderId = null,
+	parentId = null,
 	force = false,
 	isLink = false,
 	url = null,
@@ -392,12 +392,12 @@ export async function addFile({
 	//#endregion
 
 	const fetchFolder = async (): Promise<DriveFolder | null> => {
-		if (!folderId) {
+		if (!parentId) {
 			return null;
 		}
 
 		const driveFolder = await DriveFolders.findOneBy({
-			id: folderId,
+			id: parentId,
 			userId: user ? user.id : IsNull(),
 		});
 
@@ -429,7 +429,7 @@ export async function addFile({
 	file.createdAt = new Date();
 	file.userId = user ? user.id : null;
 	file.userHost = user ? user.host : null;
-	file.folderId = folder?.id ?? null;
+	file.parentId = folder?.id ?? null;
 	file.comment = comment;
 	file.properties = properties;
 	file.blurhash = info.blurhash || null;
@@ -13,7 +13,7 @@ const logger = driveLogger.createSubLogger('downloader');
 type Args = {
 	url: string;
 	user: { id: User['id']; host: User['host'] } | null;
-	folderId?: DriveFolder['id'] | null;
+	parentId?: DriveFolder['id'] | null;
 	uri?: string | null;
 	sensitive?: boolean;
 	force?: boolean;
@@ -24,7 +24,7 @@ type Args = {
 export async function uploadFromUrl({
 	url,
 	user,
-	folderId = null,
+	parentId = null,
 	uri = null,
 	sensitive = false,
 	force = false,
@@ -50,7 +50,7 @@ export async function uploadFromUrl({
 		// If the comment is same as the name, skip comment
 		// (image.name is passed in when receiving attachment)
 		comment: name === comment ? null : comment,
-		folderId,
+		parentId,
 		force,
 		isLink,
 		url,