nelle 2024-06-17 10:15:54 -06:00
parent cc4a0d3e58
commit ba0e5eec93
113 changed files with 6611 additions and 9045 deletions

View file

@@ -1,195 +0,0 @@
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# Firefish configuration
#━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# ┌─────┐
#───┘ URL └─────────────────────────────────────────────────────
# Final accessible URL seen by a user.
url: https://example.tld/
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE
# THE URL SETTINGS!
# ┌───────────────────────┐
#───┘ Port and TLS settings └───────────────────────────────────
#
# Firefish requires a reverse proxy to support HTTPS connections.
#
# +----- https://example.tld/ ------------+
# +------+ |+-------------+ +----------------+|
# | User | ---> || Proxy (443) | ---> | Firefish (3000)||
# +------+ |+-------------+ +----------------+|
# +---------------------------------------+
#
# You need to set up a reverse proxy. (e.g. nginx)
# An encrypted connection with HTTPS is highly recommended
# because tokens may be transferred in GET requests.
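#
# For reference, a reverse proxy server block could look roughly like
# the following (an illustrative sketch, not part of this file's
# settings; server_name and the certificate paths are placeholders):
#
#   server {
#     listen 443 ssl http2;
#     server_name example.tld;
#     ssl_certificate     /etc/ssl/example.tld/fullchain.pem;
#     ssl_certificate_key /etc/ssl/example.tld/privkey.pem;
#     location / {
#       proxy_pass http://127.0.0.1:3000;
#       proxy_set_header Host $host;
#       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
#       proxy_set_header X-Forwarded-Proto https;
#       # WebSocket upgrades are needed for the streaming API
#       proxy_http_version 1.1;
#       proxy_set_header Upgrade $http_upgrade;
#       proxy_set_header Connection "upgrade";
#     }
#   }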
# The port that your Firefish server should listen on.
port: 3000
# ┌──────────────────────────┐
#───┘ PostgreSQL configuration └────────────────────────────────
db:
host: database
port: 5432
# Database name
db: postgres
# Auth
user: postgres
pass: test
# Whether to disable query caching
#disableCache: true
# Extra connection options
#extra:
# ssl: true
# ┌─────────────────────┐
#───┘ Redis configuration └─────────────────────────────────────
redis:
host: redis
port: 6379
#family: 0 # 0=Both, 4=IPv4, 6=IPv6
#pass: example-pass
#prefix: example-prefix
#db: 1
# ┌─────────────────────────────┐
#───┘ Elasticsearch configuration └─────────────────────────────
#elasticsearch:
# host: localhost
# port: 9200
# ssl: false
# user:
# pass:
# ┌───────────────┐
#───┘ ID generation └───────────────────────────────────────────
# You can select the ID generation method.
# You don't usually need to change this setting, but you can
# change it according to your preferences.
# Available methods:
# aid ... Short, Millisecond accuracy
# meid ... Similar to ObjectID, Millisecond accuracy
# ulid ... Millisecond accuracy
# objectid ... This is left for backward compatibility
# ONCE YOU HAVE STARTED THE INSTANCE, DO NOT CHANGE
# THE ID SETTINGS!
id: 'aid'
# ┌─────────────────────┐
#───┘ Other configuration └─────────────────────────────────────
# Max note length, should be < 8000.
#maxNoteLength: 3000
# Whether to disable HSTS
#disableHsts: true
# Number of worker processes
#clusterLimit: 1
# Job concurrency per worker
# deliverJobConcurrency: 128
# inboxJobConcurrency: 16
# Job rate limiter
# deliverJobPerSec: 128
# inboxJobPerSec: 16
# Job attempts
# deliverJobMaxAttempts: 12
# inboxJobMaxAttempts: 8
# IP address family used for outgoing requests (ipv4, ipv6 or dual)
#outgoingAddressFamily: ipv4
# Syslog option
#syslog:
# host: localhost
# port: 514
# Proxy for HTTP/HTTPS
#proxy: http://127.0.0.1:3128
#proxyBypassHosts: [
# 'example.com',
# '192.0.2.8'
#]
# Proxy for SMTP/SMTPS
#proxySmtp: http://127.0.0.1:3128 # use HTTP/1.1 CONNECT
#proxySmtp: socks4://127.0.0.1:1080 # use SOCKS4
#proxySmtp: socks5://127.0.0.1:1080 # use SOCKS5
# Media Proxy
#mediaProxy: https://example.com/proxy
# Proxy remote files (default: false)
#proxyRemoteFiles: true
#allowedPrivateNetworks: [
# '127.0.0.1/32'
#]
# Upload or download file size limits (bytes)
#maxFileSize: 262144000
# Managed hosting settings
# !!!!!!!!!!
# >>>>>> NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS! <<<<<<
# !!!!!!!!!!
# Each category is optional, but if a category is enabled, each item in it is mandatory!
# If you mess this up, that's on you, you've been warned...
#maxUserSignups: 100
#isManagedHosting: true
#deepl:
# managed: true
# authKey: ''
# isPro: false
#
#email:
# managed: true
# address: 'example@email.com'
# host: 'email.com'
# port: 587
# user: 'example@email.com'
# pass: ''
# useImplicitSslTls: false
#
#objectStorage:
# managed: true
# baseUrl: ''
# bucket: ''
# prefix: ''
# endpoint: ''
# region: ''
# accessKey: ''
# secretKey: ''
# useSsl: true
# connnectOverProxy: false
# setPublicReadOnUpload: true
# s3ForcePathStyle: true
# !!!!!!!!!!
# >>>>>> AGAIN, NORMAL SELF-HOSTERS, STAY AWAY! <<<<<<
# >>>>>> YOU DON'T NEED THIS, ABOVE SETTINGS ARE FOR MANAGED HOSTING ONLY! <<<<<<
# !!!!!!!!!!
# Seriously. Do NOT fill out the above settings if you're self-hosting.
# They're much better off being set from the control panel.

View file

@@ -1,11 +0,0 @@
{
"recommendations": [
"editorconfig.editorconfig",
"eg2.vscode-npm-script",
"vue.volar",
"vue.vscode-typescript-vue-plugin",
"arcanis.vscode-zipfs",
"orta.vscode-twoslash-queries",
"biomejs.biome"
]
}

BIN
.yarn/corepack.tgz (Stored with Git LFS)

Binary file not shown.

@@ -1 +0,0 @@
Subproject commit 1c4e96bcfe61c981a1e8f23142082ac8ce7fc575

View file

@@ -1,13 +0,0 @@
Copyright 2023 The Iceshrimp contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@@ -1,103 +0,0 @@
import type Bull from "bull";
import { queueLogger } from "../../logger.js";
import { DriveFiles, Notes, UserProfiles, Users } from "@/models/index.js";
import type { DbUserDeleteJobData } from "@/queue/types.js";
import type { Note } from "@/models/entities/note.js";
import type { DriveFile } from "@/models/entities/drive-file.js";
import { MoreThan } from "typeorm";
import { deleteFileSync } from "@/services/drive/delete-file.js";
import { sendEmail } from "@/services/send-email.js";
import { publishInternalEvent } from "@/services/stream.js";
const logger = queueLogger.createSubLogger("delete-account");
export async function deleteAccount(
job: Bull.Job<DbUserDeleteJobData>,
): Promise<string | void> {
logger.info(`Deleting account of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (!user) return;
const isLocal = Users.isLocalUser(user);
{
// Delete notes
let cursor: Note["id"] | null = null;
while (true) {
const notes = (await Notes.find({
where: {
userId: user.id,
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: 10,
order: {
id: 1,
},
})) as Note[];
if (notes.length === 0) {
break;
}
cursor = notes[notes.length - 1].id;
await Notes.delete(notes.map((note) => note.id));
}
logger.succ("All of notes deleted");
}
{
// Delete files
let cursor: DriveFile["id"] | null = null;
while (true) {
const files = (await DriveFiles.find({
where: {
userId: user.id,
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: 10,
order: {
id: 1,
},
})) as DriveFile[];
if (files.length === 0) {
break;
}
cursor = files[files.length - 1].id;
for (const file of files) {
await deleteFileSync(file);
}
}
logger.succ("All of files deleted");
}
{
// Send email notification
const profile = await UserProfiles.findOneByOrFail({ userId: user.id });
if (profile.email && profile.emailVerified) {
sendEmail(
profile.email,
"Account deleted",
"Your account has been deleted.",
"Your account has been deleted.",
);
}
}
// If a soft deletion was requested, do not physically delete the account
if (job.data.soft) {
// nop
} else {
await Users.delete(job.data.user.id);
publishInternalEvent(isLocal ? "localUserDeleted" : "remoteUserDeleted", { id: user.id });
}
return "Account deleted";
}

View file

@@ -1,61 +0,0 @@
import type Bull from "bull";
import { queueLogger } from "../../logger.js";
import { deleteFileSync } from "@/services/drive/delete-file.js";
import { Users, DriveFiles } from "@/models/index.js";
import { MoreThan } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
const logger = queueLogger.createSubLogger("delete-drive-files");
export async function deleteDriveFiles(
job: Bull.Job<DbUserJobData>,
done: () => void,
): Promise<void> {
logger.info(`Deleting drive files of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
let deletedCount = 0;
let cursor: any = null;
while (true) {
const files = await DriveFiles.find({
where: {
userId: user.id,
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: 100,
order: {
id: 1,
},
});
if (files.length === 0) {
job.progress(100);
break;
}
cursor = files[files.length - 1].id;
for (const file of files) {
await deleteFileSync(file);
deletedCount++;
}
const total = await DriveFiles.countBy({
userId: user.id,
});
job.progress(deletedCount / total);
}
logger.succ(
`All drive files (${deletedCount}) of ${user.id} have been deleted.`,
);
done();
}

View file

@@ -1,105 +0,0 @@
import type Bull from "bull";
import * as fs from "node:fs";
import { queueLogger } from "../../logger.js";
import { addFile } from "@/services/drive/add-file.js";
import { format as dateFormat } from "date-fns";
import { getFullApAccount } from "@/misc/convert-host.js";
import { createTemp } from "@/misc/create-temp.js";
import { Users, Blockings } from "@/models/index.js";
import { MoreThan } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
const logger = queueLogger.createSubLogger("export-blocking");
export async function exportBlocking(
job: Bull.Job<DbUserJobData>,
done: () => void,
): Promise<void> {
logger.info(`Exporting blocking of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
let exportedCount = 0;
let cursor: any = null;
while (true) {
const blockings = await Blockings.find({
where: {
blockerId: user.id,
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: 100,
order: {
id: 1,
},
});
if (blockings.length === 0) {
job.progress(100);
break;
}
cursor = blockings[blockings.length - 1].id;
for (const block of blockings) {
const u = await Users.findOneBy({ id: block.blockeeId });
if (u == null) {
exportedCount++;
continue;
}
const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
if (err) {
logger.error(err);
rej(err);
} else {
res();
}
});
});
exportedCount++;
}
const total = await Blockings.countBy({
blockerId: user.id,
});
job.progress(exportedCount / total);
}
stream.end();
logger.succ(`Exported to: ${path}`);
const fileName = `blocking-${dateFormat(
new Date(),
"yyyy-MM-dd-HH-mm-ss",
)}.csv`;
const driveFile = await addFile({
user,
path,
name: fileName,
force: true,
});
logger.succ(`Exported to: ${driveFile.id}`);
} finally {
cleanup();
}
done();
}

View file

@@ -1,132 +0,0 @@
import type Bull from "bull";
import * as fs from "node:fs";
import { ulid } from "ulid";
import mime from "mime-types";
import archiver from "archiver";
import { queueLogger } from "../../logger.js";
import { addFile } from "@/services/drive/add-file.js";
import { format as dateFormat } from "date-fns";
import { Users, Emojis } from "@/models/index.js";
import { createTemp, createTempDir } from "@/misc/create-temp.js";
import { downloadUrl } from "@/misc/download-url.js";
import config from "@/config/index.js";
import { IsNull } from "typeorm";
const logger = queueLogger.createSubLogger("export-custom-emojis");
export async function exportCustomEmojis(
job: Bull.Job,
done: () => void,
): Promise<void> {
logger.info("Exporting custom emojis ...");
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const [path, cleanup] = await createTempDir();
logger.info(`Temp dir is ${path}`);
const metaPath = `${path}/meta.json`;
fs.writeFileSync(metaPath, "", "utf-8");
const metaStream = fs.createWriteStream(metaPath, { flags: "a" });
const writeMeta = (text: string): Promise<void> => {
return new Promise<void>((res, rej) => {
metaStream.write(text, (err) => {
if (err) {
logger.error(err);
rej(err);
} else {
res();
}
});
});
};
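// The metadata is streamed to disk as it is produced: the JSON array is
// opened here, emoji records are appended one by one below, and the
// closing bracket is written at the end, so the whole export never has
// to be held in memory at once.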
await writeMeta(
`{"metaVersion":2,"host":"${
config.host
}","exportedAt":"${new Date().toString()}","emojis":[`,
);
const customEmojis = await Emojis.find({
where: {
host: IsNull(),
},
order: {
id: "ASC",
},
});
for (const emoji of customEmojis) {
const ext = mime.extension(emoji.type);
// there are some restrictions on file names, so to be safe the files are
// named after their database id instead of the actual emoji name
const fileName = emoji.id + (ext ? '.' + ext : '');
const emojiPath = `${path}/${fileName}`;
fs.writeFileSync(emojiPath, "", "binary");
let downloaded = false;
try {
await downloadUrl(emoji.originalUrl, emojiPath);
downloaded = true;
} catch (e) {
// TODO: retry the download a few times
logger.error(e instanceof Error ? e : new Error(e as string));
}
if (!downloaded) {
fs.unlinkSync(emojiPath);
}
const content = JSON.stringify({
fileName: fileName,
downloaded: downloaded,
emoji: emoji,
});
const isFirst = customEmojis.indexOf(emoji) === 0;
await writeMeta(isFirst ? content : ",\n" + content);
}
await writeMeta("]}");
metaStream.end();
// Create archive
const [archivePath, archiveCleanup] = await createTemp();
const archiveStream = fs.createWriteStream(archivePath);
const archive = archiver("zip", {
zlib: { level: 0 },
});
archiveStream.on("close", async () => {
logger.succ(`Exported to: ${archivePath}`);
const fileName = `custom-emojis-${dateFormat(
new Date(),
"yyyy-MM-dd-HH-mm-ss",
)}.zip`;
const driveFile = await addFile({
user,
path: archivePath,
name: fileName,
force: true,
});
logger.succ(`Exported to: ${driveFile.id}`);
cleanup();
archiveCleanup();
done();
});
archive.pipe(archiveStream);
archive.directory(path, false);
archive.finalize();
}

View file

@@ -1,113 +0,0 @@
import type Bull from "bull";
import * as fs from "node:fs";
import { queueLogger } from "../../logger.js";
import { addFile } from "@/services/drive/add-file.js";
import { format as dateFormat } from "date-fns";
import { getFullApAccount } from "@/misc/convert-host.js";
import { createTemp } from "@/misc/create-temp.js";
import { Users, Followings, Mutings } from "@/models/index.js";
import { In, MoreThan, Not } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
import type { Following } from "@/models/entities/following.js";
const logger = queueLogger.createSubLogger("export-following");
export async function exportFollowing(
job: Bull.Job<DbUserJobData>,
done: () => void,
): Promise<void> {
logger.info(`Exporting following of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
let cursor: Following["id"] | null = null;
const mutings = job.data.excludeMuting
? await Mutings.findBy({
muterId: user.id,
})
: [];
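// When excludeMuting is set, muted followees are filtered out of the
// query below via Not(In(...)) on followeeId.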
while (true) {
const followings = (await Followings.find({
where: {
followerId: user.id,
...(mutings.length > 0
? { followeeId: Not(In(mutings.map((x) => x.muteeId))) }
: {}),
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: 100,
order: {
id: 1,
},
})) as Following[];
if (followings.length === 0) {
break;
}
cursor = followings[followings.length - 1].id;
for (const following of followings) {
const u = await Users.findOneBy({ id: following.followeeId });
if (u == null) {
continue;
}
if (
job.data.excludeInactive &&
u.updatedAt &&
Date.now() - u.updatedAt.getTime() > 1000 * 60 * 60 * 24 * 90
) {
continue;
}
const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
if (err) {
logger.error(err);
rej(err);
} else {
res();
}
});
});
}
}
stream.end();
logger.succ(`Exported to: ${path}`);
const fileName = `following-${dateFormat(
new Date(),
"yyyy-MM-dd-HH-mm-ss",
)}.csv`;
const driveFile = await addFile({
user,
path,
name: fileName,
force: true,
});
logger.succ(`Exported to: ${driveFile.id}`);
} finally {
cleanup();
}
done();
}

View file

@@ -1,106 +0,0 @@
import type Bull from "bull";
import * as fs from "node:fs";
import { queueLogger } from "../../logger.js";
import { addFile } from "@/services/drive/add-file.js";
import { format as dateFormat } from "date-fns";
import { getFullApAccount } from "@/misc/convert-host.js";
import { createTemp } from "@/misc/create-temp.js";
import { Users, Mutings } from "@/models/index.js";
import { IsNull, MoreThan } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
const logger = queueLogger.createSubLogger("export-mute");
export async function exportMute(
job: Bull.Job<DbUserJobData>,
done: () => void,
): Promise<void> {
logger.info(`Exporting mute of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
let exportedCount = 0;
let cursor: any = null;
while (true) {
const mutes = await Mutings.find({
where: {
muterId: user.id,
expiresAt: IsNull(),
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: 100,
order: {
id: 1,
},
});
if (mutes.length === 0) {
job.progress(100);
break;
}
cursor = mutes[mutes.length - 1].id;
for (const mute of mutes) {
const u = await Users.findOneBy({ id: mute.muteeId });
if (u == null) {
exportedCount++;
continue;
}
const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
if (err) {
logger.error(err);
rej(err);
} else {
res();
}
});
});
exportedCount++;
}
const total = await Mutings.countBy({
muterId: user.id,
});
job.progress(exportedCount / total);
}
stream.end();
logger.succ(`Exported to: ${path}`);
const fileName = `mute-${dateFormat(
new Date(),
"yyyy-MM-dd-HH-mm-ss",
)}.csv`;
const driveFile = await addFile({
user,
path,
name: fileName,
force: true,
});
logger.succ(`Exported to: ${driveFile.id}`);
} finally {
cleanup();
}
done();
}

View file

@@ -1,133 +0,0 @@
import type Bull from "bull";
import * as fs from "node:fs";
import { queueLogger } from "../../logger.js";
import { addFile } from "@/services/drive/add-file.js";
import { format as dateFormat } from "date-fns";
import { Users, Notes, Polls, DriveFiles } from "@/models/index.js";
import { MoreThan } from "typeorm";
import type { Note } from "@/models/entities/note.js";
import type { Poll } from "@/models/entities/poll.js";
import type { DbUserJobData } from "@/queue/types.js";
import { createTemp } from "@/misc/create-temp.js";
const logger = queueLogger.createSubLogger("export-notes");
export async function exportNotes(
job: Bull.Job<DbUserJobData>,
done: () => void,
): Promise<void> {
logger.info(`Exporting notes of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
const write = (text: string): Promise<void> => {
return new Promise<void>((res, rej) => {
stream.write(text, (err) => {
if (err) {
logger.error(err);
rej(err);
} else {
res();
}
});
});
};
await write("[");
let exportedNotesCount = 0;
let cursor: Note["id"] | null = null;
while (true) {
const notes = (await Notes.find({
where: {
userId: user.id,
...(cursor ? { id: MoreThan(cursor) } : {}),
},
take: 100,
order: {
id: 1,
},
})) as Note[];
if (notes.length === 0) {
job.progress(100);
break;
}
cursor = notes[notes.length - 1].id;
for (const note of notes) {
let poll: Poll | undefined;
if (note.hasPoll) {
poll = await Polls.findOneByOrFail({ noteId: note.id });
}
const content = JSON.stringify(await serialize(note, poll));
const isFirst = exportedNotesCount === 0;
await write(isFirst ? content : ",\n" + content);
exportedNotesCount++;
}
const total = await Notes.countBy({
userId: user.id,
});
job.progress(exportedNotesCount / total);
}
await write("]");
stream.end();
logger.succ(`Exported to: ${path}`);
const fileName = `notes-${dateFormat(
new Date(),
"yyyy-MM-dd-HH-mm-ss",
)}.json`;
const driveFile = await addFile({
user,
path,
name: fileName,
force: true,
});
logger.succ(`Exported to: ${driveFile.id}`);
} finally {
cleanup();
}
done();
}
async function serialize(
note: Note,
poll: Poll | null = null,
): Promise<Record<string, unknown>> {
return {
id: note.id,
text: note.text,
createdAt: note.createdAt,
fileIds: note.fileIds,
files: await DriveFiles.packMany(note.fileIds),
replyId: note.replyId,
renoteId: note.renoteId,
poll: poll,
cw: note.cw,
visibility: note.visibility,
visibleUserIds: note.visibleUserIds,
localOnly: note.localOnly,
};
}

View file

@@ -1,81 +0,0 @@
import type Bull from "bull";
import * as fs from "node:fs";
import { queueLogger } from "../../logger.js";
import { addFile } from "@/services/drive/add-file.js";
import { format as dateFormat } from "date-fns";
import { getFullApAccount } from "@/misc/convert-host.js";
import { createTemp } from "@/misc/create-temp.js";
import { Users, UserLists, UserListJoinings } from "@/models/index.js";
import { In } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
const logger = queueLogger.createSubLogger("export-user-lists");
export async function exportUserLists(
job: Bull.Job<DbUserJobData>,
done: () => void,
): Promise<void> {
logger.info(`Exporting user lists of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const lists = await UserLists.findBy({
userId: user.id,
});
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
for (const list of lists) {
const joinings = await UserListJoinings.findBy({ userListId: list.id });
const users = await Users.findBy({
id: In(joinings.map((j) => j.userId)),
});
for (const u of users) {
const acct = getFullApAccount(u.username, u.host);
const content = `${list.name},${acct}`;
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
if (err) {
logger.error(err);
rej(err);
} else {
res();
}
});
});
}
}
stream.end();
logger.succ(`Exported to: ${path}`);
const fileName = `user-lists-${dateFormat(
new Date(),
"yyyy-MM-dd-HH-mm-ss",
)}.csv`;
const driveFile = await addFile({
user,
path,
name: fileName,
force: true,
});
logger.succ(`Exported to: ${driveFile.id}`);
} finally {
cleanup();
}
done();
}

View file

@@ -1,79 +0,0 @@
import type Bull from "bull";
import { queueLogger } from "../../logger.js";
import * as Acct from "@/misc/acct.js";
import { resolveUser } from "@/remote/resolve-user.js";
import { downloadTextFile } from "@/misc/download-text-file.js";
import { isSelfHost, toPuny } from "@/misc/convert-host.js";
import { Users, DriveFiles, Blockings } from "@/models/index.js";
import type { DbUserImportJobData } from "@/queue/types.js";
import block from "@/services/blocking/create.js";
import { IsNull } from "typeorm";
const logger = queueLogger.createSubLogger("import-blocking");
export async function importBlocking(
job: Bull.Job<DbUserImportJobData>,
done: () => void,
): Promise<void> {
logger.info(`Importing blocking of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const file = await DriveFiles.findOneBy({
id: job.data.fileId,
});
if (file == null) {
done();
return;
}
const csv = await downloadTextFile(file.url);
let linenum = 0;
for (const line of csv.trim().split("\n")) {
linenum++;
try {
const acct = line.split(",")[0].trim();
const { username, host } = Acct.parse(acct);
let target = isSelfHost(host!)
? await Users.findOneBy({
host: IsNull(),
usernameLower: username.toLowerCase(),
})
: await Users.findOneBy({
host: toPuny(host!),
usernameLower: username.toLowerCase(),
});
if (host == null && target == null) continue;
if (target == null) {
target = await resolveUser(username, host);
}
if (target == null) {
throw new Error(`cannot resolve user: @${username}@${host}`);
}
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Block[${linenum}] ${target.id} ...`);
await block(user, target);
} catch (e) {
logger.warn(`Error in line:${linenum} ${e}`);
}
}
logger.succ("Imported");
done();
}

View file

@@ -1,150 +0,0 @@
import type Bull from "bull";
import * as fs from "node:fs";
import AdmZip from "adm-zip";
import { queueLogger } from "../../logger.js";
import { createTempDir } from "@/misc/create-temp.js";
import { downloadUrl } from "@/misc/download-url.js";
import { DriveFiles, Emojis } from "@/models/index.js";
import type { DbUserImportJobData } from "@/queue/types.js";
import { addFile } from "@/services/drive/add-file.js";
import { genId } from "@/misc/gen-id.js";
import { db } from "@/db/postgre.js";
import probeImageSize from "probe-image-size";
import * as path from "path";
const logger = queueLogger.createSubLogger("import-custom-emojis");
// TODO: make the behavior on name collisions selectable
export async function importCustomEmojis(
job: Bull.Job<DbUserImportJobData>,
done: () => void,
): Promise<void> {
logger.info("Importing custom emojis ...");
const file = await DriveFiles.findOneBy({
id: job.data.fileId,
});
if (file == null) {
done();
return;
}
const [tempPath, cleanup] = await createTempDir();
logger.info(`Temp dir is ${tempPath}`);
const destPath = `${tempPath}/emojis.zip`;
try {
fs.writeFileSync(destPath, "", "binary");
await downloadUrl(file.url, destPath);
} catch (e) {
// TODO: retry the download a few times
if (e instanceof Error || typeof e === "string") {
logger.error(e);
}
throw e;
}
const outputPath = `${tempPath}/emojis`;
const zip = new AdmZip(destPath);
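// extractAllToAsync unpacks the archive in the background and invokes
// the callback once extraction has finished (or failed); all of the
// import work below therefore runs inside that callback.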
zip.extractAllToAsync(outputPath, true, false, async (error) => {
if (error) throw error;
if (fs.existsSync(`${outputPath}/meta.json`)) {
logger.info("starting emoji import with metadata");
const metaRaw = fs.readFileSync(`${outputPath}/meta.json`, "utf-8");
const meta = JSON.parse(metaRaw);
for (const record of meta.emojis) {
if (!record.downloaded) continue;
const emojiInfo = record.emoji;
const emojiPath = `${outputPath}/${record.fileName}`;
await Emojis.delete({
name: emojiInfo.name,
});
const driveFile = await addFile({
user: null,
path: emojiPath,
name: record.fileName,
force: true,
});
const file = fs.createReadStream(emojiPath);
const size = await probeImageSize(file);
file.destroy();
await Emojis.insert({
id: genId(),
updatedAt: new Date(),
name: emojiInfo.name,
category: emojiInfo.category,
host: null,
aliases: emojiInfo.aliases,
originalUrl: driveFile.url,
publicUrl: driveFile.webpublicUrl ?? driveFile.url,
type: driveFile.webpublicType ?? driveFile.type,
license: emojiInfo.license,
width: size.width || null,
height: size.height || null,
}).then((x) => Emojis.findOneByOrFail(x.identifiers[0]));
}
} else {
logger.info("starting emoji import without metadata");
// Since we lack metadata, we import into a randomized category name instead
let categoryName = genId();
let containedEmojis = fs.readdirSync(outputPath);
// Filter out accidental JSON files
containedEmojis = containedEmojis.filter(
(emoji) => !emoji.match(/\.(json)$/i),
);
for (const emojiFilename of containedEmojis) {
// strip extension and get filename to use as name
const name = path.basename(emojiFilename, path.extname(emojiFilename));
const emojiPath = `${outputPath}/${emojiFilename}`;
logger.info(`importing ${name}`);
await Emojis.delete({
name: name,
});
const driveFile = await addFile({
user: null,
path: emojiPath,
name: path.basename(emojiFilename),
force: true,
});
const file = fs.createReadStream(emojiPath);
const size = await probeImageSize(file);
file.destroy();
logger.info(`emoji size: ${size.width}x${size.height}`);
await Emojis.insert({
id: genId(),
updatedAt: new Date(),
name: name,
category: categoryName,
host: null,
aliases: [],
originalUrl: driveFile.url,
publicUrl: driveFile.webpublicUrl ?? driveFile.url,
type: driveFile.webpublicType ?? driveFile.type,
license: null,
width: size.width || null,
height: size.height || null,
}).then((x) => Emojis.findOneByOrFail(x.identifiers[0]));
}
}
await db.queryResultCache!.remove(["meta_emojis"]);
cleanup();
logger.succ("Imported");
done();
});
logger.info(`Unzipping to ${outputPath}`);
}

View file

@@ -1,15 +0,0 @@
import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js";
import type Bull from "bull";
const logger = queueLogger.createSubLogger("import-firefish-post");
export async function importCkPost(
job: Bull.Job<DbUserImportMastoPostJobData>,
done: () => void,
): Promise<void> {
// Stub: Firefish post import is currently a no-op; the job completes immediately.
done();
}

View file

@@ -1,116 +0,0 @@
import { IsNull } from "typeorm";
import follow from "@/services/following/create.js";
import * as Acct from "@/misc/acct.js";
import { resolveUser } from "@/remote/resolve-user.js";
import { downloadTextFile } from "@/misc/download-text-file.js";
import { isSelfHost, toPuny } from "@/misc/convert-host.js";
import { Users, DriveFiles } from "@/models/index.js";
import type { DbUserImportJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js";
import type Bull from "bull";
const logger = queueLogger.createSubLogger("import-following");
export async function importFollowing(
job: Bull.Job<DbUserImportJobData>,
done: () => void,
): Promise<void> {
logger.info(`Importing following of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const file = await DriveFiles.findOneBy({
id: job.data.fileId,
});
if (file == null) {
done();
return;
}
const csv = await downloadTextFile(file.url);
let linenum = 0;
if (file.type.endsWith("json")) {
for (const acct of JSON.parse(csv)) {
linenum++;
try {
const { username, host } = Acct.parse(acct);
let target = isSelfHost(host!)
? await Users.findOneBy({
host: IsNull(),
usernameLower: username.toLowerCase(),
})
: await Users.findOneBy({
host: toPuny(host!),
usernameLower: username.toLowerCase(),
});
if (host == null && target == null) continue;
if (target == null) {
target = await resolveUser(username, host);
}
if (target == null) {
throw new Error(`cannot resolve user: @${username}@${host}`);
}
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Follow[${linenum}] ${target.id} ...`);
follow(user, target);
} catch (e) {
logger.warn(`Error in line:${linenum} ${e}`);
}
}
} else {
for (const line of csv.trim().split("\n")) {
linenum++;
try {
const acct = line.split(",")[0].trim();
const { username, host } = Acct.parse(acct);
let target = isSelfHost(host!)
? await Users.findOneBy({
host: IsNull(),
usernameLower: username.toLowerCase(),
})
: await Users.findOneBy({
host: toPuny(host!),
usernameLower: username.toLowerCase(),
});
if (host == null && target == null) continue;
if (target == null) {
target = await resolveUser(username, host);
}
if (target == null) {
throw new Error(`cannot resolve user: @${username}@${host}`);
}
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Follow[${linenum}] ${target.id} ...`);
follow(user, target);
} catch (e) {
logger.warn(`Error in line:${linenum} ${e}`);
}
}
}
logger.succ("Imported");
done();
}

View file

@@ -1,19 +0,0 @@
import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js";
import type Bull from "bull";
const logger = queueLogger.createSubLogger("import-masto-post");
export async function importMastoPost(
job: Bull.Job<DbUserImportMastoPostJobData>,
done: () => void,
): Promise<void> {
// Stub: Mastodon post import is currently a no-op; the job completes immediately.
done();
}

View file

@@ -1,89 +0,0 @@
import type Bull from "bull";
import { queueLogger } from "../../logger.js";
import * as Acct from "@/misc/acct.js";
import { resolveUser } from "@/remote/resolve-user.js";
import { downloadTextFile } from "@/misc/download-text-file.js";
import { isSelfHost, toPuny } from "@/misc/convert-host.js";
import { Users, DriveFiles, Mutings } from "@/models/index.js";
import type { DbUserImportJobData } from "@/queue/types.js";
import type { User } from "@/models/entities/user.js";
import { genId } from "@/misc/gen-id.js";
import { IsNull } from "typeorm";
const logger = queueLogger.createSubLogger("import-muting");
export async function importMuting(
job: Bull.Job<DbUserImportJobData>,
done: () => void,
): Promise<void> {
logger.info(`Importing muting of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const file = await DriveFiles.findOneBy({
id: job.data.fileId,
});
if (file == null) {
done();
return;
}
const csv = await downloadTextFile(file.url);
let linenum = 0;
for (const line of csv.trim().split("\n")) {
linenum++;
try {
const acct = line.split(",")[0].trim();
const { username, host } = Acct.parse(acct);
let target = isSelfHost(host!)
? await Users.findOneBy({
host: IsNull(),
usernameLower: username.toLowerCase(),
})
: await Users.findOneBy({
host: toPuny(host!),
usernameLower: username.toLowerCase(),
});
if (host == null && target == null) continue;
if (target == null) {
target = await resolveUser(username, host);
}
if (target == null) {
throw new Error(`cannot resolve user: @${username}@${host}`);
}
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Mute[${linenum}] ${target.id} ...`);
await mute(user, target);
} catch (e) {
logger.warn(`Error in line:${linenum} ${e}`);
}
}
logger.succ("Imported");
done();
}
async function mute(user: User, target: User) {
await Mutings.insert({
id: genId(),
createdAt: new Date(),
muterId: user.id,
muteeId: target.id,
});
}

View file

@@ -1,76 +0,0 @@
import { downloadTextFile } from "@/misc/download-text-file.js";
import { processMastoNotes } from "@/misc/process-masto-notes.js";
import { Users, DriveFiles } from "@/models/index.js";
import type { DbUserImportPostsJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js";
import type Bull from "bull";
import {
createImportCkPostJob,
createImportMastoPostJob,
} from "@/queue/index.js";
const logger = queueLogger.createSubLogger("import-posts");
export async function importPosts(
job: Bull.Job<DbUserImportPostsJobData>,
done: () => void,
): Promise<void> {
logger.info(`Importing posts of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const file = await DriveFiles.findOneBy({
id: job.data.fileId,
});
if (file == null) {
done();
return;
}
if (file.name.endsWith("tar.gz") || file.name.endsWith("zip")) {
try {
logger.info("Reading Mastodon archive");
const outbox = await processMastoNotes(
file.name,
file.url,
job.data.user.id,
);
for (const post of outbox.orderedItems) {
createImportMastoPostJob(job.data.user, post, job.data.signatureCheck);
}
} catch (e) {
// handle error
logger.warn(`Failed reading Mastodon archive: ${e}`);
}
logger.succ("Mastodon archive imported");
done();
return;
}
const json = await downloadTextFile(file.url);
try {
const parsed = JSON.parse(json);
if (Array.isArray(parsed)) {
logger.info("Parsing key style posts");
for (const post of parsed) {
createImportCkPostJob(job.data.user, post, job.data.signatureCheck);
}
} else if (parsed instanceof Object) {
logger.info("Parsing animal style posts");
for (const post of parsed.orderedItems) {
createImportMastoPostJob(job.data.user, post, job.data.signatureCheck);
}
}
} catch (e) {
// handle error
logger.warn(`Error reading: ${e}`);
}
logger.succ("Imported");
done();
}

View file

@@ -1,111 +0,0 @@
import type Bull from "bull";
import { queueLogger } from "../../logger.js";
import * as Acct from "@/misc/acct.js";
import { resolveUser } from "@/remote/resolve-user.js";
import { pushUserToUserList } from "@/services/user-list/push.js";
import { downloadTextFile } from "@/misc/download-text-file.js";
import { isSelfHost, toPuny } from "@/misc/convert-host.js";
import {
DriveFiles,
Users,
UserLists,
UserListJoinings, Blockings, Followings,
} from "@/models/index.js";
import { genId } from "@/misc/gen-id.js";
import type { DbUserImportJobData } from "@/queue/types.js";
import { IsNull } from "typeorm";
const logger = queueLogger.createSubLogger("import-user-lists");
export async function importUserLists(
job: Bull.Job<DbUserImportJobData>,
done: () => void,
): Promise<void> {
logger.info(`Importing user lists of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
done();
return;
}
const file = await DriveFiles.findOneBy({
id: job.data.fileId,
});
if (file == null) {
done();
return;
}
const csv = await downloadTextFile(file.url);
let linenum = 0;
for (const line of csv.trim().split("\n")) {
linenum++;
try {
const listName = line.split(",")[0].trim();
const { username, host } = Acct.parse(line.split(",")[1].trim());
let list = await UserLists.findOneBy({
userId: user.id,
name: listName,
});
if (list == null) {
list = await UserLists.insert({
id: genId(),
createdAt: new Date(),
userId: user.id,
name: listName,
}).then((x) => UserLists.findOneByOrFail(x.identifiers[0]));
}
let target = isSelfHost(host!)
? await Users.findOneBy({
host: IsNull(),
usernameLower: username.toLowerCase(),
})
: await Users.findOneBy({
host: toPuny(host!),
usernameLower: username.toLowerCase(),
});
if (target == null) {
target = await resolveUser(username, host);
}
if (target == null) {
throw new Error(`cannot resolve user: @${username}@${host}`);
}
const isBlocked = await Blockings.exist({
where: {
blockerId: target.id,
blockeeId: user.id,
},
});
const isFollowed = await Followings.exist({
where: {
followerId: user.id,
followeeId: target.id,
},
});
if (isBlocked || !isFollowed) continue;
if (
(await UserListJoinings.findOneBy({
userListId: list!.id,
userId: target.id,
})) != null
)
continue;
pushUserToUserList(target, list!);
} catch (e) {
logger.warn(`Error in line:${linenum} ${e}`);
}
}
logger.succ("Imported");
done();
}

View file

@@ -1,47 +0,0 @@
import type Bull from "bull";
import type { DbJobData } from "@/queue/types.js";
import { deleteDriveFiles } from "./delete-drive-files.js";
import { exportCustomEmojis } from "./export-custom-emojis.js";
import { exportNotes } from "./export-notes.js";
import { exportFollowing } from "./export-following.js";
import { exportMute } from "./export-mute.js";
import { exportBlocking } from "./export-blocking.js";
import { exportUserLists } from "./export-user-lists.js";
import { importFollowing } from "./import-following.js";
import { importUserLists } from "./import-user-lists.js";
import { deleteAccount } from "./delete-account.js";
import { importMuting } from "./import-muting.js";
import { importPosts } from "./import-posts.js";
import { importMastoPost } from "./import-masto-post.js";
import { importCkPost } from "./import-firefish-post.js";
import { importBlocking } from "./import-blocking.js";
import { importCustomEmojis } from "./import-custom-emojis.js";
const jobs = {
deleteDriveFiles,
exportCustomEmojis,
exportNotes,
exportFollowing,
exportMute,
exportBlocking,
exportUserLists,
importFollowing,
importMuting,
importBlocking,
importUserLists,
importPosts,
importMastoPost,
importCkPost,
importCustomEmojis,
deleteAccount,
} as Record<
string,
| Bull.ProcessCallbackFunction<DbJobData>
| Bull.ProcessPromiseFunction<DbJobData>
>;
export default function (dbQueue: Bull.Queue<DbJobData>) {
for (const [k, v] of Object.entries(jobs)) {
dbQueue.process(k, v);
}
}
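For context, Bull dispatches each named job to the processor registered under the same key, so the producer side only needs to enqueue a job whose name matches one of the entries above. A minimal sketch of that pattern, assuming a queue configured like this instance's dbQueue (the payload shape, queue name, and options here are illustrative, not the exact DbJobData type):

import Bull from "bull";

// Illustrative payload type standing in for DbJobData.
type ExampleJobData = { user: { id: string } };

// Assumed queue name and Redis settings; the real values live in the
// queue initialization code, which is not shown in this diff.
const dbQueue = new Bull<ExampleJobData>("db", {
	redis: { host: "redis", port: 6379 },
});

// The job name ("exportNotes") selects the processor registered above.
dbQueue.add(
	"exportNotes",
	{ user: { id: "xxxxxxxxxx" } },
	{ attempts: 3, removeOnComplete: true },
);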

View file

@@ -1,61 +0,0 @@
import define from "../../../define.js";
import { ApiError } from "../../../error.js";
import { DriveFiles, Notes } from "@/models/index.js";
export const meta = {
tags: ["drive", "notes"],
requireCredential: true,
kind: "read:drive",
description: "Find the notes to which the given file is attached.",
res: {
type: "array",
optional: false,
nullable: false,
items: {
type: "object",
optional: false,
nullable: false,
ref: "Note",
},
},
errors: {
noSuchFile: {
message: "No such file.",
code: "NO_SUCH_FILE",
id: "c118ece3-2e4b-4296-99d1-51756e32d232",
},
},
} as const;
export const paramDef = {
type: "object",
properties: {
fileId: { type: "string", format: "misskey:id" },
},
required: ["fileId"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
// Fetch file
const file = await DriveFiles.findOneBy({
id: ps.fileId,
userId: user.id,
});
if (file == null) {
throw new ApiError(meta.errors.noSuchFile);
}
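// note.fileIds is a Postgres array column, so ANY(...) matches every
// note that references this file.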
const notes = await Notes.createQueryBuilder("note")
.where(":file = ANY(note.fileIds)", { file: file.id })
.getMany();
return await Notes.packMany(notes, user, {
detail: true,
});
});

View file

@@ -1,42 +0,0 @@
import define from "../../../define.js";
import { createWorker } from "tesseract.js";
export const meta = {
tags: ["drive"],
requireCredential: true,
kind: "read:drive",
description: "Return caption of image",
res: {
type: "string",
optional: false,
nullable: false,
},
} as const;
export const paramDef = {
type: "object",
properties: {
url: { type: "string" },
},
required: ["url"],
} as const;
export default define(meta, paramDef, async (ps) => {
const worker = createWorker({
logger: (m) => console.log(m),
});
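// tesseract.js v2 flow: load the core, then load and initialize the
// English language data before running recognition on the given URL.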
await worker.load();
await worker.loadLanguage("eng");
await worker.initialize("eng");
const {
data: { text },
} = await worker.recognize(ps.url);
await worker.terminate();
return text;
});

View file

@@ -1,37 +0,0 @@
import define from "../../../define.js";
import { DriveFiles } from "@/models/index.js";
export const meta = {
tags: ["drive"],
requireCredential: true,
kind: "read:drive",
description: "Check if a given file exists.",
res: {
type: "boolean",
optional: false,
nullable: false,
},
} as const;
export const paramDef = {
type: "object",
properties: {
md5: { type: "string" },
},
required: ["md5"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
const exist = await DriveFiles.exist({
where: {
md5: ps.md5,
userId: user.id,
},
});
return exist;
});

View file

@@ -1,129 +0,0 @@
import { addFile } from "@/services/drive/add-file.js";
import { DriveFiles } from "@/models/index.js";
import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
import { IdentifiableError } from "@/misc/identifiable-error.js";
import { fetchMeta } from "@/misc/fetch-meta.js";
import { HOUR } from "@/const.js";
import define from "../../../define.js";
import { apiLogger } from "../../../logger.js";
import { ApiError } from "../../../error.js";
export const meta = {
tags: ["drive"],
requireCredential: true,
limit: {
duration: HOUR,
max: 120,
},
requireFile: true,
kind: "write:drive",
description: "Upload a new drive file.",
res: {
type: "object",
optional: false,
nullable: false,
ref: "DriveFile",
},
errors: {
invalidFileName: {
message: "Invalid file name.",
code: "INVALID_FILE_NAME",
id: "f449b209-0c60-4e51-84d5-29486263bfd4",
},
inappropriate: {
message:
"Cannot upload the file because it has been determined that it possibly contains inappropriate content.",
code: "INAPPROPRIATE",
id: "bec5bd69-fba3-43c9-b4fb-2894b66ad5d2",
},
noFreeSpace: {
message:
"Cannot upload the file because you have no free space of drive.",
code: "NO_FREE_SPACE",
id: "d08dbc37-a6a9-463a-8c47-96c32ab5f064",
},
},
} as const;
export const paramDef = {
type: "object",
properties: {
folderId: {
type: "string",
format: "misskey:id",
nullable: true,
default: null,
},
name: { type: "string", nullable: true, default: null },
comment: {
type: "string",
nullable: true,
maxLength: DB_MAX_IMAGE_COMMENT_LENGTH,
default: null,
},
isSensitive: { type: "boolean", default: false },
force: { type: "boolean", default: false },
},
required: [],
} as const;
export default define(
meta,
paramDef,
async (ps, user, _, file, cleanup, ip, headers) => {
// Get 'name' parameter
let name = ps.name || file.originalname;
if (name !== undefined && name !== null) {
name = name.trim();
if (name.length === 0) {
name = null;
} else if (name === "blob") {
name = null;
} else if (!DriveFiles.validateFileName(name)) {
throw new ApiError(meta.errors.invalidFileName);
}
} else {
name = null;
}
const instance = await fetchMeta();
try {
// Create file
const driveFile = await addFile({
user,
path: file.path,
name,
comment: ps.comment,
folderId: ps.folderId,
force: ps.force,
sensitive: ps.isSensitive,
requestIp: instance.enableIpLogging ? ip : null,
requestHeaders: instance.enableIpLogging ? headers : null,
});
return await DriveFiles.pack(driveFile, { self: true });
} catch (e) {
if (e instanceof Error || typeof e === "string") {
apiLogger.error(e);
}
if (e instanceof IdentifiableError) {
if (e.id === "282f77bf-5816-4f72-9264-aa14d8261a21")
throw new ApiError(meta.errors.inappropriate);
if (e.id === "c6244ed2-a39a-4e1c-bf93-f0fbd7764fa6")
throw new ApiError(meta.errors.noFreeSpace);
}
throw new ApiError();
} finally {
cleanup!();
}
},
);

View file

@@ -1,55 +0,0 @@
import { deleteFile } from "@/services/drive/delete-file.js";
import { publishDriveStream } from "@/services/stream.js";
import define from "../../../define.js";
import { ApiError } from "../../../error.js";
import { DriveFiles, Users } from "@/models/index.js";
export const meta = {
tags: ["drive"],
requireCredential: true,
kind: "write:drive",
description: "Delete an existing drive file.",
errors: {
noSuchFile: {
message: "No such file.",
code: "NO_SUCH_FILE",
id: "908939ec-e52b-4458-b395-1025195cea58",
},
accessDenied: {
message: "Access denied.",
code: "ACCESS_DENIED",
id: "5eb8d909-2540-4970-90b8-dd6f86088121",
},
},
} as const;
export const paramDef = {
type: "object",
properties: {
fileId: { type: "string", format: "misskey:id" },
},
required: ["fileId"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
const file = await DriveFiles.findOneBy({ id: ps.fileId });
if (file == null) {
throw new ApiError(meta.errors.noSuchFile);
}
if (!(user.isAdmin || user.isModerator) && file.userId !== user.id) {
throw new ApiError(meta.errors.accessDenied);
}
// Delete
await deleteFile(file);
// Publish fileDeleted event
publishDriveStream(user.id, "fileDeleted", file.id);
});

View file

@@ -1,41 +0,0 @@
import { DriveFiles } from "@/models/index.js";
import define from "../../../define.js";
export const meta = {
tags: ["drive"],
requireCredential: true,
kind: "read:drive",
description: "Search for a drive file by a hash of the contents.",
res: {
type: "array",
optional: false,
nullable: false,
items: {
type: "object",
optional: false,
nullable: false,
ref: "DriveFile",
},
},
} as const;
export const paramDef = {
type: "object",
properties: {
md5: { type: "string" },
},
required: ["md5"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
const files = await DriveFiles.findBy({
md5: ps.md5,
userId: user.id,
});
return await DriveFiles.packMany(files, { self: true });
});

View file

@@ -1,51 +0,0 @@
import define from "../../../define.js";
import { DriveFiles } from "@/models/index.js";
import { IsNull } from "typeorm";
export const meta = {
requireCredential: true,
tags: ["drive"],
kind: "read:drive",
description: "Search for a drive file by the given parameters.",
res: {
type: "array",
optional: false,
nullable: false,
items: {
type: "object",
optional: false,
nullable: false,
ref: "DriveFile",
},
},
} as const;
export const paramDef = {
type: "object",
properties: {
name: { type: "string" },
folderId: {
type: "string",
format: "misskey:id",
nullable: true,
default: null,
},
},
required: ["name"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
const files = await DriveFiles.findBy({
name: ps.name,
userId: user.id,
folderId: ps.folderId ?? IsNull(),
});
return await Promise.all(
files.map((file) => DriveFiles.pack(file, { self: true })),
);
});

View file

@@ -1,89 +0,0 @@
import type { DriveFile } from "@/models/entities/drive-file.js";
import { DriveFiles, Users } from "@/models/index.js";
import define from "../../../define.js";
import { ApiError } from "../../../error.js";
export const meta = {
tags: ["drive"],
requireCredential: true,
kind: "read:drive",
description: "Show the properties of a drive file.",
res: {
type: "object",
optional: false,
nullable: false,
ref: "DriveFile",
},
errors: {
noSuchFile: {
message: "No such file.",
code: "NO_SUCH_FILE",
id: "067bc436-2718-4795-b0fb-ecbe43949e31",
},
accessDenied: {
message: "Access denied.",
code: "ACCESS_DENIED",
id: "25b73c73-68b1-41d0-bad1-381cfdf6579f",
},
},
} as const;
export const paramDef = {
type: "object",
anyOf: [
{
properties: {
fileId: { type: "string", format: "misskey:id" },
},
required: ["fileId"],
},
{
properties: {
url: { type: "string" },
},
required: ["url"],
},
],
} as const;
export default define(meta, paramDef, async (ps, user) => {
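// The file may be looked up either by id or by URL; for URL lookups any
// of the stored variants (original, web-public, or thumbnail) matches.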
let file: DriveFile | null = null;
if (ps.fileId) {
file = await DriveFiles.findOneBy({ id: ps.fileId });
} else if (ps.url) {
file = await DriveFiles.findOne({
where: [
{
url: ps.url,
},
{
webpublicUrl: ps.url,
},
{
thumbnailUrl: ps.url,
},
],
});
}
if (file == null) {
throw new ApiError(meta.errors.noSuchFile);
}
if (!(user.isAdmin || user.isModerator) && file.userId !== user.id) {
throw new ApiError(meta.errors.accessDenied);
}
return await DriveFiles.pack(file, {
detail: true,
withUser: true,
self: true,
});
});

View file

@@ -1,116 +0,0 @@
import { publishDriveStream } from "@/services/stream.js";
import { DriveFiles, DriveFolders, Users } from "@/models/index.js";
import { DB_MAX_IMAGE_COMMENT_LENGTH } from "@/misc/hard-limits.js";
import define from "../../../define.js";
import { ApiError } from "../../../error.js";
export const meta = {
tags: ["drive"],
requireCredential: true,
kind: "write:drive",
description: "Update the properties of a drive file.",
errors: {
invalidFileName: {
message: "Invalid file name.",
code: "INVALID_FILE_NAME",
id: "395e7156-f9f0-475e-af89-53c3c23080c2",
},
noSuchFile: {
message: "No such file.",
code: "NO_SUCH_FILE",
id: "e7778c7e-3af9-49cd-9690-6dbc3e6c972d",
},
accessDenied: {
message: "Access denied.",
code: "ACCESS_DENIED",
id: "01a53b27-82fc-445b-a0c1-b558465a8ed2",
},
noSuchFolder: {
message: "No such folder.",
code: "NO_SUCH_FOLDER",
id: "ea8fb7a5-af77-4a08-b608-c0218176cd73",
},
},
res: {
type: "object",
optional: false,
nullable: false,
ref: "DriveFile",
},
} as const;
export const paramDef = {
type: "object",
properties: {
fileId: { type: "string", format: "misskey:id" },
folderId: { type: "string", format: "misskey:id", nullable: true },
name: { type: "string" },
isSensitive: { type: "boolean" },
comment: {
type: "string",
nullable: true,
maxLength: DB_MAX_IMAGE_COMMENT_LENGTH,
},
},
required: ["fileId"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
const file = await DriveFiles.findOneBy({ id: ps.fileId });
if (file == null) {
throw new ApiError(meta.errors.noSuchFile);
}
if (!(user.isAdmin || user.isModerator) && file.userId !== user.id) {
throw new ApiError(meta.errors.accessDenied);
}
if (ps.name) file.name = ps.name;
if (!DriveFiles.validateFileName(file.name)) {
throw new ApiError(meta.errors.invalidFileName);
}
if (ps.comment !== undefined) file.comment = ps.comment;
if (ps.isSensitive !== undefined) file.isSensitive = ps.isSensitive;
if (ps.folderId !== undefined) {
if (ps.folderId === null) {
file.folderId = null;
} else {
const folder = await DriveFolders.findOneBy({
id: ps.folderId,
userId: user.id,
});
if (folder == null) {
throw new ApiError(meta.errors.noSuchFolder);
}
file.folderId = folder.id;
}
}
await DriveFiles.update(file.id, {
name: file.name,
comment: file.comment,
folderId: file.folderId,
isSensitive: file.isSensitive,
});
const fileObj = await DriveFiles.pack(file, { self: true });
// Publish fileUpdated event
publishDriveStream(user.id, "fileUpdated", fileObj);
return fileObj;
});

View file

@@ -1,57 +0,0 @@
import { uploadFromUrl } from "@/services/drive/upload-from-url.js";
import define from "../../../define.js";
import { DriveFiles } from "@/models/index.js";
import { publishMainStream } from "@/services/stream.js";
import { HOUR } from "@/const.js";
export const meta = {
tags: ["drive"],
limit: {
duration: HOUR,
max: 60,
},
description:
"Request the server to download a new drive file from the specified URL.",
requireCredential: true,
kind: "write:drive",
} as const;
export const paramDef = {
type: "object",
properties: {
url: { type: "string" },
folderId: {
type: "string",
format: "misskey:id",
nullable: true,
default: null,
},
isSensitive: { type: "boolean", default: false },
comment: { type: "string", nullable: true, maxLength: 512, default: null },
marker: { type: "string", nullable: true, default: null },
force: { type: "boolean", default: false },
},
required: ["url"],
} as const;
export default define(meta, paramDef, async (ps, user) => {
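// Fire-and-forget: the endpoint returns immediately, and the client is
// notified over the main stream ("urlUploadFinished", correlated by the
// optional marker) once the download and pack complete.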
uploadFromUrl({
url: ps.url,
user,
folderId: ps.folderId,
sensitive: ps.isSensitive,
force: ps.force,
comment: ps.comment,
}).then((file) => {
DriveFiles.pack(file, { self: true }).then((packedFile) => {
publishMainStream(user.id, "urlUploadFinished", {
marker: ps.marker,
file: packedFile,
});
});
});
});