initial commit

This commit is contained in:
Hampus Kraft
2026-01-01 20:42:59 +00:00
commit 2f557eda8c
9029 changed files with 1490197 additions and 0 deletions

View File

@@ -0,0 +1,162 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, GuildID, MessageID} from '~/BrandedTypes';
import {
BatchBuilder,
buildPatchFromData,
Db,
executeConditional,
executeVersionedUpdate,
fetchMany,
fetchManyInChunks,
fetchOne,
} from '~/database/Cassandra';
import {CHANNEL_COLUMNS, type ChannelRow} from '~/database/CassandraTypes';
import {Logger} from '~/Logger';
import {Channel} from '~/Models';
import {Channels, ChannelsByGuild} from '~/Tables';
import {IChannelDataRepository} from './IChannelDataRepository';
// Prepared select: one channel row by primary key (channel_id, soft_deleted).
const FETCH_CHANNEL_BY_ID = Channels.select({
	where: [Channels.where.eq('channel_id'), Channels.where.eq('soft_deleted')],
	limit: 1,
});
// Prepared select: batch lookup of channel rows by a list of ids (bound via
// the 'channel_ids' parameter), filtered to non-soft-deleted rows.
const FETCH_CHANNELS_BY_IDS = Channels.select({
	where: [Channels.where.in('channel_id', 'channel_ids'), Channels.where.eq('soft_deleted')],
});
// Prepared select: every channel id recorded for a guild in the by-guild index.
const FETCH_GUILD_CHANNELS_BY_GUILD_ID = ChannelsByGuild.select({
	where: ChannelsByGuild.where.eq('guild_id'),
});
// Maximum compare-and-set attempts before a conditional update gives up.
const DEFAULT_CAS_RETRIES = 8;
export class ChannelDataRepository extends IChannelDataRepository {
	/**
	 * Fetches a single channel by id, ignoring soft-deleted rows.
	 *
	 * @returns the channel model, or null when no live row exists.
	 */
	async findUnique(channelId: ChannelID): Promise<Channel | null> {
		const channel = await fetchOne<ChannelRow>(
			FETCH_CHANNEL_BY_ID.bind({
				channel_id: channelId,
				soft_deleted: false,
			}),
		);
		return channel ? new Channel(channel) : null;
	}
	/**
	 * Creates or updates a channel row through a versioned update, then keeps
	 * the guild -> channel index in sync for guild channels.
	 *
	 * @param data    full channel row to persist.
	 * @param oldData previously-fetched current row, when the caller has one:
	 *                a row (or null for "known absent") skips the
	 *                read-before-write; undefined forces a fresh read.
	 * @returns the persisted channel carrying the final version.
	 */
	async upsert(data: ChannelRow, oldData?: ChannelRow | null): Promise<Channel> {
		const channelId = data.channel_id;
		const result = await executeVersionedUpdate<ChannelRow, 'channel_id' | 'soft_deleted'>(
			async () => {
				// Only read the current row when the caller did not supply it.
				if (oldData !== undefined) return oldData;
				return await fetchOne<ChannelRow>(FETCH_CHANNEL_BY_ID.bind({channel_id: channelId, soft_deleted: false}));
			},
			(current) => ({
				pk: {channel_id: channelId, soft_deleted: false},
				patch: buildPatchFromData(data, current, CHANNEL_COLUMNS, ['channel_id', 'soft_deleted']),
			}),
			Channels,
			{onFailure: 'log'},
		);
		if (data.guild_id) {
			// NOTE(review): this is a write (upsertAll) executed through fetchOne;
			// its result is discarded. Consider upsertOne for clarity — confirm
			// fetchOne executes write statements identically before changing.
			await fetchOne(
				ChannelsByGuild.upsertAll({
					guild_id: data.guild_id,
					channel_id: channelId,
				}),
			);
		}
		return new Channel({...data, version: result.finalVersion ?? 0});
	}
	/**
	 * Monotonically advances Channels.last_message_id with a compare-and-set
	 * loop (at most DEFAULT_CAS_RETRIES attempts). Never moves the pointer
	 * backwards; a missing channel is a silent no-op.
	 */
	async updateLastMessageId(channelId: ChannelID, messageId: MessageID): Promise<void> {
		for (let i = 0; i < DEFAULT_CAS_RETRIES; i++) {
			const existing = await fetchOne<ChannelRow>(
				FETCH_CHANNEL_BY_ID.bind({
					channel_id: channelId,
					soft_deleted: false,
				}),
			);
			if (!existing) return;
			const prev = existing.last_message_id ?? null;
			// Monotonicity guard: an older or equal message id never wins.
			if (prev !== null && messageId <= prev) return;
			const q = Channels.patchByPkIf(
				{channel_id: channelId, soft_deleted: false},
				{last_message_id: Db.set(messageId)},
				{col: 'last_message_id', expectedParam: 'prev_last_message_id', expectedValue: prev},
			);
			const res = await executeConditional(q);
			if (res.applied) return;
			// CAS lost to a concurrent writer: re-read the row and retry.
		}
		Logger.warn(
			{channelId: channelId.toString(), messageId: messageId.toString()},
			'Failed to advance Channels.last_message_id after retries',
		);
	}
	/**
	 * Deletes the channel row and, when a guild id is given, its entry in the
	 * guild -> channel index, as one batch.
	 */
	async delete(channelId: ChannelID, guildId?: GuildID): Promise<void> {
		const batch = new BatchBuilder();
		batch.addPrepared(
			Channels.deleteByPk({
				channel_id: channelId,
				soft_deleted: false,
			}),
		);
		if (guildId) {
			batch.addPrepared(
				ChannelsByGuild.deleteByPk({
					guild_id: guildId,
					channel_id: channelId,
				}),
			);
		}
		await batch.execute();
	}
	/**
	 * Lists a guild's live channels: reads channel ids from the by-guild
	 * index, then loads the channel rows in id chunks.
	 */
	async listGuildChannels(guildId: GuildID): Promise<Array<Channel>> {
		const guildChannels = await fetchMany<{channel_id: bigint}>(
			FETCH_GUILD_CHANNELS_BY_GUILD_ID.bind({guild_id: guildId}),
		);
		if (guildChannels.length === 0) return [];
		const channelIds = guildChannels.map((c) => c.channel_id);
		const channels = await fetchManyInChunks<ChannelRow>(FETCH_CHANNELS_BY_IDS, channelIds, (chunk) => ({
			channel_ids: chunk,
			soft_deleted: false,
		}));
		return channels.map((channel) => new Channel(channel));
	}
	/**
	 * Counts a guild's channels from the by-guild index.
	 * NOTE(review): this counts index entries, not live channel rows — a
	 * soft-deleted channel still present in the index would be counted;
	 * confirm index hygiene matches this assumption.
	 */
	async countGuildChannels(guildId: GuildID): Promise<number> {
		const guildChannels = await fetchMany<{channel_id: bigint}>(
			FETCH_GUILD_CHANNELS_BY_GUILD_ID.bind({guild_id: guildId}),
		);
		return guildChannels.length;
	}
}

View File

@@ -0,0 +1,36 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {ChannelDataRepository} from './ChannelDataRepository';
import {IChannelRepositoryAggregate} from './IChannelRepositoryAggregate';
import {MessageInteractionRepository} from './MessageInteractionRepository';
import {MessageRepository} from './MessageRepository';

/**
 * Concrete aggregate wiring the channel-scoped repositories together.
 * Field initializers run in declaration order, so each sub-repository may
 * depend on the ones declared above it.
 */
export class ChannelRepository extends IChannelRepositoryAggregate {
	/** Channel-row persistence. */
	readonly channelData: ChannelDataRepository = new ChannelDataRepository();
	/** Message persistence, backed by the channel-data repository. */
	readonly messages: MessageRepository = new MessageRepository(this.channelData);
	/** Pin/reaction persistence, backed by the message repository. */
	readonly messageInteractions: MessageInteractionRepository = new MessageInteractionRepository(this.messages);
}

View File

@@ -0,0 +1,31 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, GuildID, MessageID} from '~/BrandedTypes';
import type {ChannelRow} from '~/database/CassandraTypes';
import type {Channel} from '~/Models';
/**
 * Contract for channel-row persistence.
 */
export abstract class IChannelDataRepository {
	/** Looks up a single channel by id; resolves null when not found. */
	abstract findUnique(channelId: ChannelID): Promise<Channel | null>;
	/**
	 * Creates or updates a channel row.
	 *
	 * @param oldData previously-fetched current row (null meaning "known
	 *   absent"); declared here so callers typed against this interface can
	 *   skip the implementation's read-before-write, matching the concrete
	 *   signature. Omitting it preserves the previous behavior.
	 */
	abstract upsert(data: ChannelRow, oldData?: ChannelRow | null): Promise<Channel>;
	/** Advances the channel's last_message_id pointer. */
	abstract updateLastMessageId(channelId: ChannelID, messageId: MessageID): Promise<void>;
	/** Deletes a channel and, when a guild id is given, its guild-index entry. */
	abstract delete(channelId: ChannelID, guildId?: GuildID): Promise<void>;
	/** Lists the channels recorded for a guild. */
	abstract listGuildChannels(guildId: GuildID): Promise<Array<Channel>>;
	/** Counts the channels recorded for a guild. */
	abstract countGuildChannels(guildId: GuildID): Promise<number>;
}

View File

@@ -0,0 +1,28 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {IChannelDataRepository} from './IChannelDataRepository';
import type {IMessageInteractionRepository} from './IMessageInteractionRepository';
import type {IMessageRepository} from './IMessageRepository';
/**
 * Aggregate of the channel-scoped repositories: channel rows, messages, and
 * message interactions (pins and reactions).
 */
export abstract class IChannelRepositoryAggregate {
	// Channel-row persistence.
	abstract readonly channelData: IChannelDataRepository;
	// Message persistence.
	abstract readonly messages: IMessageRepository;
	// Pin and reaction persistence.
	abstract readonly messageInteractions: IMessageInteractionRepository;
}

View File

@@ -0,0 +1,75 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, EmojiID, MessageID, UserID} from '~/BrandedTypes';
import type {Message, MessageReaction} from '~/Models';
/**
 * Contract for per-message interactions: channel pins and emoji reactions.
 * Emoji are identified by (emojiName, optional emojiId) throughout.
 */
export abstract class IMessageInteractionRepository {
	/** Lists pinned messages with pins before the given timestamp. */
	abstract listChannelPins(channelId: ChannelID, beforePinnedTimestamp: Date, limit?: number): Promise<Array<Message>>;
	/** Records a pin for a message at the given timestamp. */
	abstract addChannelPin(channelId: ChannelID, messageId: MessageID, pinnedTimestamp: Date): Promise<void>;
	/** Removes a message's pin, if any. */
	abstract removeChannelPin(channelId: ChannelID, messageId: MessageID): Promise<void>;
	/** Lists every reaction row on a message. */
	abstract listMessageReactions(channelId: ChannelID, messageId: MessageID): Promise<Array<MessageReaction>>;
	/** Lists reaction rows for one emoji, optionally resuming after a user id. */
	abstract listReactionUsers(
		channelId: ChannelID,
		messageId: MessageID,
		emojiName: string,
		limit?: number,
		after?: UserID,
		emojiId?: EmojiID,
	): Promise<Array<MessageReaction>>;
	/** Adds (or refreshes) a user's reaction and resolves the stored row. */
	abstract addReaction(
		channelId: ChannelID,
		messageId: MessageID,
		userId: UserID,
		emojiName: string,
		emojiId?: EmojiID,
		emojiAnimated?: boolean,
	): Promise<MessageReaction>;
	/** Removes one user's reaction for one emoji. */
	abstract removeReaction(
		channelId: ChannelID,
		messageId: MessageID,
		userId: UserID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<void>;
	/** Removes every reaction on a message. */
	abstract removeAllReactions(channelId: ChannelID, messageId: MessageID): Promise<void>;
	/** Removes every reaction for a single emoji on a message. */
	abstract removeAllReactionsForEmoji(
		channelId: ChannelID,
		messageId: MessageID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<void>;
	/** Counts users who reacted with a given emoji. */
	abstract countReactionUsers(
		channelId: ChannelID,
		messageId: MessageID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<number>;
	/** Counts distinct emoji present on a message. */
	abstract countUniqueReactions(channelId: ChannelID, messageId: MessageID): Promise<number>;
	/** Whether a specific user already reacted with the given emoji. */
	abstract checkUserReactionExists(
		channelId: ChannelID,
		messageId: MessageID,
		userId: UserID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<boolean>;
	/** Writes the message's has_reaction denormalization flag. */
	abstract setHasReaction(channelId: ChannelID, messageId: MessageID, hasReaction: boolean): Promise<void>;
}

View File

@@ -0,0 +1,66 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {AttachmentID, ChannelID, MessageID, UserID} from '~/BrandedTypes';
import type {AttachmentLookupRow, MessageRow} from '~/database/CassandraTypes';
import type {Message} from '~/Models';
/** Tuning flags for message listing. */
export interface ListMessagesOptions {
	// NOTE(review): semantics inferred from the name — presumably restricts the
	// scan to the 'before' cursor's bucket; confirm against the implementation.
	restrictToBeforeBucket?: boolean;
	// NOTE(review): semantics inferred from the name — presumably requests rows
	// immediately following the 'after' cursor; confirm against the implementation.
	immediateAfter?: boolean;
}
/**
 * Contract for message persistence within a channel, including author-scoped
 * bulk operations and attachment lookups.
 */
export abstract class IMessageRepository {
	/** Lists a page of messages around optional before/after message-id cursors. */
	abstract listMessages(
		channelId: ChannelID,
		beforeMessageId?: MessageID,
		limit?: number,
		afterMessageId?: MessageID,
		options?: ListMessagesOptions,
	): Promise<Array<Message>>;
	/** Fetches one message, or null when absent. */
	abstract getMessage(channelId: ChannelID, messageId: MessageID): Promise<Message | null>;
	/** Creates or updates a message row; oldData, when supplied, is the prior row. */
	abstract upsertMessage(data: MessageRow, oldData?: MessageRow | null): Promise<Message>;
	/** Deletes one message; author id and optional pin timestamp are forwarded to the implementation. */
	abstract deleteMessage(
		channelId: ChannelID,
		messageId: MessageID,
		authorId: UserID,
		pinnedTimestamp?: Date,
	): Promise<void>;
	/** Deletes a batch of messages from one channel. */
	abstract bulkDeleteMessages(channelId: ChannelID, messageIds: Array<MessageID>): Promise<void>;
	/** Deletes every message in a channel. */
	abstract deleteAllChannelMessages(channelId: ChannelID): Promise<void>;
	/** Pages through (channel, message) id pairs authored by a user. */
	abstract listMessagesByAuthor(
		authorId: UserID,
		limit?: number,
		lastChannelId?: ChannelID,
		lastMessageId?: MessageID,
	): Promise<Array<{channelId: ChannelID; messageId: MessageID}>>;
	/** Deletes messages authored by a user, optionally scoped by channel/message ids. */
	abstract deleteMessagesByAuthor(
		authorId: UserID,
		channelIds?: Array<ChannelID>,
		messageIds?: Array<MessageID>,
	): Promise<void>;
	/** Reassigns a message to a replacement author id. */
	abstract anonymizeMessage(channelId: ChannelID, messageId: MessageID, newAuthorId: UserID): Promise<void>;
	/** Whether the author owns the given message. */
	abstract authorHasMessage(authorId: UserID, channelId: ChannelID, messageId: MessageID): Promise<boolean>;
	/** Resolves the message id that owns an attachment, matched by filename. */
	abstract lookupAttachmentByChannelAndFilename(
		channelId: ChannelID,
		attachmentId: AttachmentID,
		filename: string,
	): Promise<MessageID | null>;
	/** Lists attachment lookup rows for a channel. */
	abstract listChannelAttachments(channelId: ChannelID): Promise<Array<AttachmentLookupRow>>;
}

View File

@@ -0,0 +1,362 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, EmojiID, MessageID, UserID} from '~/BrandedTypes';
import {createEmojiID} from '~/BrandedTypes';
import {MAX_USERS_PER_MESSAGE_REACTION} from '~/Constants';
import {Db, deleteOneOrMany, fetchMany, fetchOne, upsertOne} from '~/database/Cassandra';
import type {ChannelPinRow, MessageReactionRow} from '~/database/CassandraTypes';
import {type Message, MessageReaction} from '~/Models';
import {ChannelPins, MessageReactions, Messages} from '~/Tables';
import * as BucketUtils from '~/utils/BucketUtils';
import {IMessageInteractionRepository} from './IMessageInteractionRepository';
import type {MessageRepository} from './MessageRepository';
// Builds a select for a page of channel pins pinned strictly before a bound
// timestamp. The limit is baked into the statement, so a new statement is
// produced per page size.
const createFetchChannelPinsQuery = (limit: number) =>
	ChannelPins.selectCql({
		where: [ChannelPins.where.eq('channel_id'), ChannelPins.where.lt('pinned_timestamp', 'before_pinned_timestamp')],
		limit,
	});
// Every reaction row of one message (keyed by channel + bucket + message id).
const FETCH_MESSAGE_REACTIONS_BY_CHANNEL_AND_MESSAGE_QUERY = MessageReactions.selectCql({
	where: [
		MessageReactions.where.eq('channel_id'),
		MessageReactions.where.eq('bucket'),
		MessageReactions.where.eq('message_id'),
	],
});
// Existence probe: selects a single column from at most one reaction row to
// decide whether a message still has any reactions at all.
const CHECK_MESSAGE_HAS_REACTIONS_QUERY = MessageReactions.selectCql({
	columns: ['channel_id'],
	where: [
		MessageReactions.where.eq('channel_id'),
		MessageReactions.where.eq('bucket'),
		MessageReactions.where.eq('message_id'),
	],
	limit: 1,
});
// Builds a select for the users who reacted with one emoji; when hasAfter is
// true the statement additionally pages past a user-id cursor ('after_user_id').
const createFetchReactionUsersByEmojiQuery = (limit: number, hasAfter: boolean = false) =>
	MessageReactions.selectCql({
		where: hasAfter
			? [
					MessageReactions.where.eq('channel_id'),
					MessageReactions.where.eq('bucket'),
					MessageReactions.where.eq('message_id'),
					MessageReactions.where.eq('emoji_id'),
					MessageReactions.where.eq('emoji_name'),
					MessageReactions.where.gt('user_id', 'after_user_id'),
				]
			: [
					MessageReactions.where.eq('channel_id'),
					MessageReactions.where.eq('bucket'),
					MessageReactions.where.eq('message_id'),
					MessageReactions.where.eq('emoji_id'),
					MessageReactions.where.eq('emoji_name'),
				],
		limit,
	});
// Probe for a single user's reaction with a specific emoji (at most one row).
const CHECK_USER_REACTION_EXISTS_QUERY = MessageReactions.selectCql({
	columns: ['channel_id', 'bucket', 'message_id', 'user_id', 'emoji_id', 'emoji_name'],
	where: [
		MessageReactions.where.eq('channel_id'),
		MessageReactions.where.eq('bucket'),
		MessageReactions.where.eq('message_id'),
		MessageReactions.where.eq('user_id'),
		MessageReactions.where.eq('emoji_id'),
		MessageReactions.where.eq('emoji_name'),
	],
	limit: 1,
});
/**
 * Persistence for per-message interactions: channel pins and emoji reactions.
 * Reaction rows are partitioned by (channel, bucket), where the bucket is
 * derived from the message id via BucketUtils.makeBucket.
 */
export class MessageInteractionRepository extends IMessageInteractionRepository {
	private messageRepository: MessageRepository;

	constructor(messageRepository: MessageRepository) {
		super();
		this.messageRepository = messageRepository;
	}

	/**
	 * Lists pinned messages with a pin timestamp strictly before
	 * `beforePinnedTimestamp`. Pins whose message row no longer exists are
	 * skipped, so fewer than `limit` messages may be returned.
	 */
	async listChannelPins(
		channelId: ChannelID,
		beforePinnedTimestamp: Date,
		limit: number = 50,
	): Promise<Array<Message>> {
		const pins = await fetchMany<ChannelPinRow>(createFetchChannelPinsQuery(limit), {
			channel_id: channelId,
			before_pinned_timestamp: beforePinnedTimestamp,
		});
		// Resolve pinned messages in parallel instead of one await per pin;
		// Promise.all preserves pin order in the result.
		const resolved = await Promise.all(pins.map((pin) => this.messageRepository.getMessage(channelId, pin.message_id)));
		return resolved.filter((message): message is Message => message !== null);
	}

	/** Records (or refreshes) a pin entry for a message. */
	async addChannelPin(channelId: ChannelID, messageId: MessageID, pinnedTimestamp: Date): Promise<void> {
		await upsertOne(
			ChannelPins.upsertAll({
				channel_id: channelId,
				message_id: messageId,
				pinned_timestamp: pinnedTimestamp,
			}),
		);
	}

	/**
	 * Removes a pin entry. The pin row's key includes the pinned timestamp, so
	 * the message is loaded first to recover it; a message that is missing or
	 * not pinned makes this a no-op.
	 */
	async removeChannelPin(channelId: ChannelID, messageId: MessageID): Promise<void> {
		const message = await this.messageRepository.getMessage(channelId, messageId);
		if (!message || !message.pinnedTimestamp) {
			return;
		}
		await deleteOneOrMany(
			ChannelPins.deleteByPk({
				channel_id: channelId,
				pinned_timestamp: message.pinnedTimestamp,
				message_id: messageId,
			}),
		);
	}

	/** Lists every reaction row on a message. */
	async listMessageReactions(channelId: ChannelID, messageId: MessageID): Promise<Array<MessageReaction>> {
		const bucket = BucketUtils.makeBucket(messageId);
		const reactions = await fetchMany<MessageReactionRow>(FETCH_MESSAGE_REACTIONS_BY_CHANNEL_AND_MESSAGE_QUERY, {
			channel_id: channelId,
			bucket,
			message_id: messageId,
		});
		return reactions.map((reaction) => new MessageReaction(reaction));
	}

	/**
	 * Lists reaction rows for one emoji, optionally resuming after a user id.
	 * The page size is clamped to MAX_USERS_PER_MESSAGE_REACTION; an absent
	 * emoji id is normalized to the 0 sentinel.
	 */
	async listReactionUsers(
		channelId: ChannelID,
		messageId: MessageID,
		emojiName: string,
		limit: number = 25,
		after?: UserID,
		emojiId?: EmojiID,
	): Promise<Array<MessageReaction>> {
		const bucket = BucketUtils.makeBucket(messageId);
		const normalizedEmojiId = emojiId ?? createEmojiID(0n);
		const pageSize = Math.min(limit, MAX_USERS_PER_MESSAGE_REACTION);
		// Branch on the cursor directly (instead of a hasAfter flag plus a
		// non-null assertion) so the compiler proves `after` is defined.
		const reactions = after
			? await fetchMany<MessageReactionRow>(createFetchReactionUsersByEmojiQuery(pageSize, true), {
					channel_id: channelId,
					bucket,
					message_id: messageId,
					emoji_id: normalizedEmojiId,
					emoji_name: emojiName,
					after_user_id: after,
				})
			: await fetchMany<MessageReactionRow>(createFetchReactionUsersByEmojiQuery(pageSize, false), {
					channel_id: channelId,
					bucket,
					message_id: messageId,
					emoji_id: normalizedEmojiId,
					emoji_name: emojiName,
				});
		return reactions.map((reaction) => new MessageReaction(reaction));
	}

	/**
	 * Stores a user's reaction and flips the message's has_reaction flag on.
	 * @returns the stored reaction row wrapped as a model.
	 */
	async addReaction(
		channelId: ChannelID,
		messageId: MessageID,
		userId: UserID,
		emojiName: string,
		emojiId?: EmojiID,
		emojiAnimated: boolean = false,
	): Promise<MessageReaction> {
		const bucket = BucketUtils.makeBucket(messageId);
		// ?? for consistency with the sibling methods (behavior is identical:
		// the truthiness fallback also mapped 0n to the 0 sentinel).
		const normalizedEmojiId = emojiId ?? createEmojiID(0n);
		const reactionData: MessageReactionRow = {
			channel_id: channelId,
			bucket,
			message_id: messageId,
			user_id: userId,
			emoji_id: normalizedEmojiId,
			emoji_name: emojiName,
			emoji_animated: emojiAnimated,
		};
		await upsertOne(MessageReactions.upsertAll(reactionData));
		await this.setHasReaction(channelId, messageId, true);
		return new MessageReaction(reactionData);
	}

	/**
	 * Deletes one user's reaction row, then recomputes the message's
	 * has_reaction flag from the remaining rows.
	 */
	async removeReaction(
		channelId: ChannelID,
		messageId: MessageID,
		userId: UserID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<void> {
		const bucket = BucketUtils.makeBucket(messageId);
		const normalizedEmojiId = emojiId ?? createEmojiID(0n);
		await deleteOneOrMany(
			MessageReactions.deleteByPk({
				channel_id: channelId,
				bucket,
				message_id: messageId,
				user_id: userId,
				emoji_id: normalizedEmojiId,
				emoji_name: emojiName,
			}),
		);
		const hasReactions = await this.messageHasAnyReactions(channelId, messageId);
		await this.setHasReaction(channelId, messageId, hasReactions);
	}

	/**
	 * Deletes every reaction row of a message. has_reaction is then recomputed
	 * defensively from the table rather than hard-coded to false.
	 */
	async removeAllReactions(channelId: ChannelID, messageId: MessageID): Promise<void> {
		const bucket = BucketUtils.makeBucket(messageId);
		const deleteQuery = MessageReactions.deleteCql({
			where: [
				MessageReactions.where.eq('channel_id'),
				MessageReactions.where.eq('bucket'),
				MessageReactions.where.eq('message_id'),
			],
		});
		await deleteOneOrMany(deleteQuery, {
			channel_id: channelId,
			bucket,
			message_id: messageId,
		});
		const hasReactions = await this.messageHasAnyReactions(channelId, messageId);
		await this.setHasReaction(channelId, messageId, hasReactions);
	}

	/**
	 * Deletes every reaction row for a single emoji, then recomputes the
	 * message's has_reaction flag from the remaining rows.
	 */
	async removeAllReactionsForEmoji(
		channelId: ChannelID,
		messageId: MessageID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<void> {
		const bucket = BucketUtils.makeBucket(messageId);
		const normalizedEmojiId = emojiId ?? createEmojiID(0n);
		const deleteQuery = MessageReactions.deleteCql({
			where: [
				MessageReactions.where.eq('channel_id'),
				MessageReactions.where.eq('bucket'),
				MessageReactions.where.eq('message_id'),
				MessageReactions.where.eq('emoji_id'),
				MessageReactions.where.eq('emoji_name'),
			],
		});
		await deleteOneOrMany(deleteQuery, {
			channel_id: channelId,
			bucket,
			message_id: messageId,
			emoji_id: normalizedEmojiId,
			emoji_name: emojiName,
		});
		const hasReactions = await this.messageHasAnyReactions(channelId, messageId);
		await this.setHasReaction(channelId, messageId, hasReactions);
	}

	/**
	 * Counts users who reacted with the given emoji, bounded by
	 * MAX_USERS_PER_MESSAGE_REACTION.
	 */
	async countReactionUsers(
		channelId: ChannelID,
		messageId: MessageID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<number> {
		// Pass the cap explicitly: with `undefined` the listing's default limit
		// (25) would silently truncate the count well below the real total.
		const reactions = await this.listReactionUsers(
			channelId,
			messageId,
			emojiName,
			MAX_USERS_PER_MESSAGE_REACTION,
			undefined,
			emojiId,
		);
		return reactions.length;
	}

	/** Counts distinct (emoji id, emoji name) pairs present on a message. */
	async countUniqueReactions(channelId: ChannelID, messageId: MessageID): Promise<number> {
		const reactions = await this.listMessageReactions(channelId, messageId);
		const uniqueEmojis = new Set<string>();
		for (const reaction of reactions) {
			const emojiKey = `${reaction.emojiId}:${reaction.emojiName}`;
			uniqueEmojis.add(emojiKey);
		}
		return uniqueEmojis.size;
	}

	/** Whether a specific user already reacted with the given emoji. */
	async checkUserReactionExists(
		channelId: ChannelID,
		messageId: MessageID,
		userId: UserID,
		emojiName: string,
		emojiId?: EmojiID,
	): Promise<boolean> {
		const bucket = BucketUtils.makeBucket(messageId);
		const normalizedEmojiId = emojiId ?? createEmojiID(0n);
		const reaction = await fetchOne<MessageReactionRow>(CHECK_USER_REACTION_EXISTS_QUERY, {
			channel_id: channelId,
			bucket,
			message_id: messageId,
			user_id: userId,
			emoji_id: normalizedEmojiId,
			emoji_name: emojiName,
		});
		return !!reaction;
	}

	/** Writes the has_reaction denormalization flag onto the message row. */
	async setHasReaction(channelId: ChannelID, messageId: MessageID, hasReaction: boolean): Promise<void> {
		const bucket = BucketUtils.makeBucket(messageId);
		await upsertOne(
			Messages.patchByPk(
				{
					channel_id: channelId,
					bucket,
					message_id: messageId,
				},
				{
					has_reaction: Db.set(hasReaction),
				},
			),
		);
	}

	/** True when at least one reaction row still exists for the message. */
	private async messageHasAnyReactions(channelId: ChannelID, messageId: MessageID): Promise<boolean> {
		const bucket = BucketUtils.makeBucket(messageId);
		const row = await fetchOne<Pick<MessageReactionRow, 'channel_id'>>(CHECK_MESSAGE_HAS_REACTIONS_QUERY, {
			channel_id: channelId,
			bucket,
			message_id: messageId,
		});
		return Boolean(row);
	}
}

View File

@@ -0,0 +1,121 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {AttachmentID, ChannelID, MessageID, UserID} from '~/BrandedTypes';
import type {AttachmentLookupRow, MessageRow} from '~/database/CassandraTypes';
import type {Message} from '~/Models';
import type {ChannelDataRepository} from './ChannelDataRepository';
import {IMessageRepository, type ListMessagesOptions} from './IMessageRepository';
import {MessageAttachmentRepository} from './message/MessageAttachmentRepository';
import {MessageAuthorRepository} from './message/MessageAuthorRepository';
import {MessageDataRepository} from './message/MessageDataRepository';
import {MessageDeletionRepository} from './message/MessageDeletionRepository';
/**
 * Facade over the message sub-repositories (data, deletion, attachments,
 * author-scoped operations). All methods delegate; the only extra behavior is
 * advancing the channel's last_message_id pointer on new-message upserts.
 */
export class MessageRepository extends IMessageRepository {
	private dataRepo: MessageDataRepository;
	private deletionRepo: MessageDeletionRepository;
	private attachmentRepo: MessageAttachmentRepository;
	private authorRepo: MessageAuthorRepository;
	private channelDataRepo: ChannelDataRepository;

	constructor(channelDataRepo: ChannelDataRepository) {
		super();
		this.dataRepo = new MessageDataRepository();
		this.deletionRepo = new MessageDeletionRepository(this.dataRepo);
		this.attachmentRepo = new MessageAttachmentRepository();
		this.authorRepo = new MessageAuthorRepository(this.dataRepo, this.deletionRepo);
		this.channelDataRepo = channelDataRepo;
	}

	/** Lists a page of messages around optional before/after cursors. */
	async listMessages(
		channelId: ChannelID,
		beforeMessageId?: MessageID,
		limit?: number,
		afterMessageId?: MessageID,
		options?: ListMessagesOptions,
	): Promise<Array<Message>> {
		return this.dataRepo.listMessages(channelId, beforeMessageId, limit, afterMessageId, options);
	}

	/** Fetches one message, or null when absent. */
	async getMessage(channelId: ChannelID, messageId: MessageID): Promise<Message | null> {
		return this.dataRepo.getMessage(channelId, messageId);
	}

	/**
	 * Writes a message row. For a brand-new message (no oldData) the channel's
	 * last_message_id pointer is advanced asynchronously.
	 */
	async upsertMessage(data: MessageRow, oldData?: MessageRow | null): Promise<Message> {
		const message = await this.dataRepo.upsertMessage(data, oldData);
		if (!oldData) {
			// Fire-and-forget: advancing last_message_id is best-effort and must
			// not delay or fail the message write. The catch prevents a thrown
			// read error from surfacing as an unhandled promise rejection;
			// retry exhaustion is already logged inside updateLastMessageId.
			void this.channelDataRepo.updateLastMessageId(data.channel_id, data.message_id).catch(() => {});
		}
		return message;
	}

	/** Deletes one message; author id and optional pin timestamp are forwarded. */
	async deleteMessage(
		channelId: ChannelID,
		messageId: MessageID,
		authorId: UserID,
		pinnedTimestamp?: Date,
	): Promise<void> {
		return this.deletionRepo.deleteMessage(channelId, messageId, authorId, pinnedTimestamp);
	}

	/** Deletes a batch of messages from one channel. */
	async bulkDeleteMessages(channelId: ChannelID, messageIds: Array<MessageID>): Promise<void> {
		return this.deletionRepo.bulkDeleteMessages(channelId, messageIds);
	}

	/** Deletes every message in a channel. */
	async deleteAllChannelMessages(channelId: ChannelID): Promise<void> {
		return this.deletionRepo.deleteAllChannelMessages(channelId);
	}

	/** Pages through (channel, message) id pairs authored by a user. */
	async listMessagesByAuthor(
		authorId: UserID,
		limit?: number,
		lastChannelId?: ChannelID,
		lastMessageId?: MessageID,
	): Promise<Array<{channelId: ChannelID; messageId: MessageID}>> {
		return this.authorRepo.listMessagesByAuthor(authorId, limit, lastChannelId, lastMessageId);
	}

	/** Deletes messages authored by a user, optionally scoped by ids. */
	async deleteMessagesByAuthor(
		authorId: UserID,
		channelIds?: Array<ChannelID>,
		messageIds?: Array<MessageID>,
	): Promise<void> {
		return this.authorRepo.deleteMessagesByAuthor(authorId, channelIds, messageIds);
	}

	/** Reassigns a message to a replacement author id. */
	async anonymizeMessage(channelId: ChannelID, messageId: MessageID, newAuthorId: UserID): Promise<void> {
		return this.authorRepo.anonymizeMessage(channelId, messageId, newAuthorId);
	}

	/** Whether the author owns the given message. */
	async authorHasMessage(authorId: UserID, channelId: ChannelID, messageId: MessageID): Promise<boolean> {
		return this.authorRepo.hasMessageByAuthor(authorId, channelId, messageId);
	}

	/** Resolves the message id that owns an attachment, matched by filename. */
	async lookupAttachmentByChannelAndFilename(
		channelId: ChannelID,
		attachmentId: AttachmentID,
		filename: string,
	): Promise<MessageID | null> {
		return this.attachmentRepo.lookupAttachmentByChannelAndFilename(channelId, attachmentId, filename);
	}

	/** Lists attachment lookup rows for a channel. */
	async listChannelAttachments(channelId: ChannelID): Promise<Array<AttachmentLookupRow>> {
		return this.attachmentRepo.listChannelAttachments(channelId);
	}
}

View File

@@ -0,0 +1,215 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import {describe, expect, it, vi} from 'vitest';
import {
BucketScanDirection,
type BucketScanTraceEvent,
BucketScanTraceKind,
scanBucketsWithIndex,
} from './BucketScanEngine';
/** Minimal row shape used by the scan-engine tests: only a bigint id. */
interface FakeRow {
	id: bigint;
}
/**
 * Builds a fake index-lookup function over a fixed bucket list: keeps only
 * buckets inside [minBucket, maxBucket], orders them per the requested scan
 * direction, and caps the page at `limit` entries.
 */
function makeIndexBuckets(
	allBuckets: Array<number>,
	direction: BucketScanDirection,
): (query: {minBucket: number; maxBucket: number; limit: number}) => Promise<Array<number>> {
	return async ({minBucket, maxBucket, limit}) => {
		const inRange = allBuckets.filter((bucket) => bucket >= minBucket && bucket <= maxBucket);
		inRange.sort((a, b) => (direction === BucketScanDirection.Desc ? b - a : a - b));
		return inRange.slice(0, limit);
	};
}
describe('scanBucketsWithIndex', () => {
it('scans buckets in DESC order and stops when limit is satisfied', async () => {
const listBucketsFromIndex = vi.fn(makeIndexBuckets([5, 4, 3, 2, 1], BucketScanDirection.Desc));
const fetchRowsForBucket = vi.fn(async (bucket: number) => ({rows: [{id: BigInt(bucket)}], unbounded: true}));
const trace: Array<BucketScanTraceEvent> = [];
const result = await scanBucketsWithIndex<FakeRow>(
{
listBucketsFromIndex,
fetchRowsForBucket,
getRowId: (row) => row.id,
trace: (event) => trace.push(event),
},
{
minBucket: 1,
maxBucket: 5,
limit: 3,
direction: BucketScanDirection.Desc,
indexPageSize: 200,
},
);
expect(listBucketsFromIndex).toHaveBeenCalledTimes(1);
expect(fetchRowsForBucket.mock.calls.map((c) => c[0])).toEqual([5, 4, 3]);
expect(result.rows.map((r) => r.id)).toEqual([5n, 4n, 3n]);
const processed = trace.filter((e) => e.kind === BucketScanTraceKind.ProcessBucket).map((e) => e.bucket);
expect(processed).toEqual([5, 4, 3]);
});
it('falls back to the numeric scan when the index is missing buckets', async () => {
const listBucketsFromIndex = vi.fn(makeIndexBuckets([5, 3, 1], BucketScanDirection.Desc));
const fetchRowsForBucket = vi.fn(async (bucket: number) => ({rows: [{id: BigInt(bucket)}], unbounded: true}));
const trace: Array<BucketScanTraceEvent> = [];
const result = await scanBucketsWithIndex<FakeRow>(
{
listBucketsFromIndex,
fetchRowsForBucket,
getRowId: (row) => row.id,
trace: (event) => trace.push(event),
},
{
minBucket: 1,
maxBucket: 5,
limit: 5,
direction: BucketScanDirection.Desc,
indexPageSize: 200,
},
);
expect(result.rows.map((r) => r.id)).toEqual([5n, 3n, 1n, 4n, 2n]);
const processed = trace.filter((e) => e.kind === BucketScanTraceKind.ProcessBucket).map((e) => e.bucket);
expect(processed).toEqual([5, 3, 1, 4, 2]);
});
// stopAfterBucket halts the scan right after the named bucket is processed,
// even though the limit (100) is far from satisfied.
it('honors stopAfterBucket in DESC scans', async () => {
  const listBucketsFromIndex = vi.fn(makeIndexBuckets([5, 4, 3, 2, 1], BucketScanDirection.Desc));
  const fetchRowsForBucket = vi.fn(async (bucket: number) => ({rows: [{id: BigInt(bucket)}], unbounded: true}));
  const trace: Array<BucketScanTraceEvent> = [];
  const result = await scanBucketsWithIndex<FakeRow>(
    {
      listBucketsFromIndex,
      fetchRowsForBucket,
      getRowId: (row) => row.id,
      trace: (event) => trace.push(event),
    },
    {
      minBucket: 1,
      maxBucket: 5,
      limit: 100,
      direction: BucketScanDirection.Desc,
      indexPageSize: 200,
      stopAfterBucket: 4,
    },
  );
  // Buckets 3, 2 and 1 are never visited.
  expect(result.rows.map((r) => r.id)).toEqual([5n, 4n]);
  const processed = trace.filter((e) => e.kind === BucketScanTraceKind.ProcessBucket).map((e) => e.bucket);
  expect(processed).toEqual([5, 4]);
});
// ASC path: oldest bucket first; the per-bucket fixture honors the shrinking
// `limit`, so only one row from bucket 401 fits after bucket 400's three rows.
it('scans buckets in ASC order and returns closest rows first', async () => {
  const listBucketsFromIndex = vi.fn(makeIndexBuckets([400, 401], BucketScanDirection.Asc));
  const fetchRowsForBucket = vi.fn(async (bucket: number, limit: number) => {
    const rowsByBucket = new Map<number, Array<FakeRow>>([
      [400, [{id: 51n}, {id: 52n}, {id: 53n}]],
      [401, [{id: 100n}, {id: 101n}]],
    ]);
    return {rows: (rowsByBucket.get(bucket) ?? []).slice(0, limit), unbounded: true};
  });
  const result = await scanBucketsWithIndex<FakeRow>(
    {
      listBucketsFromIndex,
      fetchRowsForBucket,
      getRowId: (row) => row.id,
    },
    {
      minBucket: 400,
      maxBucket: 401,
      limit: 4,
      direction: BucketScanDirection.Asc,
      indexPageSize: 200,
    },
  );
  expect(result.rows.map((r) => r.id)).toEqual([51n, 52n, 53n, 100n]);
  expect(fetchRowsForBucket.mock.calls.map((c) => c[0])).toEqual([400, 401]);
});
// Row 2n appears in both buckets; getRowId must make the second occurrence a
// no-op so the combined result contains each id exactly once.
it('deduplicates rows using getRowId', async () => {
  const listBucketsFromIndex = vi.fn(makeIndexBuckets([5, 4], BucketScanDirection.Desc));
  const fetchRowsForBucket = vi.fn(async (bucket: number) => {
    const rowsByBucket = new Map<number, Array<FakeRow>>([
      [5, [{id: 1n}, {id: 2n}]],
      [4, [{id: 2n}, {id: 3n}]],
    ]);
    return {rows: rowsByBucket.get(bucket) ?? [], unbounded: true};
  });
  const result = await scanBucketsWithIndex<FakeRow>(
    {
      listBucketsFromIndex,
      fetchRowsForBucket,
      getRowId: (row) => row.id,
    },
    {
      minBucket: 4,
      maxBucket: 5,
      limit: 10,
      direction: BucketScanDirection.Desc,
      indexPageSize: 200,
    },
  );
  expect(result.rows.map((r) => r.id)).toEqual([1n, 2n, 3n]);
});
// Only an unbounded fetch (no cursor constraints) that returns nothing proves
// a bucket is empty; a constrained (bounded) miss must not trigger the callback.
it('marks empty buckets only when the query was unbounded', async () => {
  const listBucketsFromIndex = vi.fn(makeIndexBuckets([3, 2, 1], BucketScanDirection.Desc));
  const emptied: Array<number> = [];
  const fetchRowsForBucket = vi.fn(async (bucket: number) => {
    if (bucket === 3) return {rows: [], unbounded: false}; // bounded miss: not marked
    if (bucket === 2) return {rows: [], unbounded: true}; // unbounded miss: marked
    return {rows: [{id: 1n}], unbounded: true};
  });
  await scanBucketsWithIndex<FakeRow>(
    {
      listBucketsFromIndex,
      fetchRowsForBucket,
      getRowId: (row) => row.id,
      onEmptyUnboundedBucket: async (bucket) => {
        emptied.push(bucket);
      },
    },
    {
      minBucket: 1,
      maxBucket: 3,
      limit: 1,
      direction: BucketScanDirection.Desc,
      indexPageSize: 200,
    },
  );
  expect(emptied).toEqual([2]);
});
});

View File

@@ -0,0 +1,358 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
/** Direction in which buckets are visited during a scan. */
export enum BucketScanDirection {
  Asc = 'asc',
  Desc = 'desc',
}

/** Kinds of diagnostic events emitted via {@link BucketScanDeps.trace}. */
export enum BucketScanTraceKind {
  Start = 'start',
  ListBucketsFromIndex = 'listBucketsFromIndex',
  ProcessBucket = 'processBucket',
  FetchBucket = 'fetchBucket',
  MarkBucketEmpty = 'markBucketEmpty',
  TouchBucket = 'touchBucket',
  StopAfterBucketReached = 'stopAfterBucketReached',
  Complete = 'complete',
}

/**
 * One trace event. Every event carries the original scan parameters; the
 * optional fields are populated depending on `kind` (e.g. `indexResult` only
 * appears on the post-query ListBucketsFromIndex event and `fetchResult` only
 * on the post-fetch FetchBucket event).
 */
export interface BucketScanTraceEvent {
  kind: BucketScanTraceKind;
  minBucket: number;
  maxBucket: number;
  limit: number;
  direction: BucketScanDirection;
  bucket?: number;
  remaining?: number;
  indexQuery?: {minBucket: number; maxBucket: number; limit: number};
  indexResult?: Array<number>;
  fetchResult?: {rowCount: number; unbounded: boolean};
}

/** Inclusive bucket range plus page size for one bucket-index query. */
export interface BucketScanIndexQuery {
  minBucket: number;
  maxBucket: number;
  limit: number;
}

/**
 * Rows fetched from a single bucket. `unbounded` is true when the query had no
 * cursor constraints, i.e. an empty result proves the bucket is truly empty.
 */
export interface BucketScanBucketFetchResult<Row> {
  rows: Array<Row>;
  unbounded: boolean;
}

/** Injected data access and bookkeeping callbacks used by the scan. */
export interface BucketScanDeps<Row> {
  /** Lists known-populated buckets in the range, pre-ordered for the scan direction. */
  listBucketsFromIndex: (query: BucketScanIndexQuery) => Promise<Array<number>>;
  /** Fetches up to `limit` rows from one bucket. */
  fetchRowsForBucket: (bucket: number, limit: number) => Promise<BucketScanBucketFetchResult<Row>>;
  /** Stable id used to deduplicate rows seen across buckets. */
  getRowId: (row: Row) => bigint;
  /** Invoked when an unbounded fetch found nothing (bucket may be marked empty). */
  onEmptyUnboundedBucket?: (bucket: number) => Promise<void>;
  /** Invoked when a bucket produced at least one row. */
  onBucketHasRows?: (bucket: number) => Promise<void>;
  /** Optional diagnostics sink. */
  trace?: (event: BucketScanTraceEvent) => void;
}

/** Parameters for one scan. */
export interface BucketScanOptions {
  minBucket: number;
  maxBucket: number;
  limit: number;
  direction: BucketScanDirection;
  /** Page size used for bucket-index queries. */
  indexPageSize: number;
  /** When set, the scan halts immediately after this bucket is processed. */
  stopAfterBucket?: number;
}

/** Result of a scan: at most `limit` deduplicated rows in visit order. */
export interface BucketScanResult<Row> {
  rows: Array<Row>;
}

/**
 * Scans buckets in [minBucket, maxBucket] in the requested direction until
 * `limit` rows are collected, in two phases:
 *
 * 1. Index-driven: page through `listBucketsFromIndex` and fetch each bucket
 *    it reports (out-of-range buckets are skipped).
 * 2. Numeric fallback: sweep every bucket in the range that the index did not
 *    report, since the index may be incomplete.
 *
 * Rows are deduplicated via `getRowId` and each bucket is processed at most
 * once. (Refactored: the previous version duplicated the trace emission and
 * the whole DESC/ASC loop pair; behavior is unchanged.)
 */
export async function scanBucketsWithIndex<Row>(
  deps: BucketScanDeps<Row>,
  opts: BucketScanOptions,
): Promise<BucketScanResult<Row>> {
  const trace = deps.trace;
  // Every trace event repeats the scan parameters; per-event fields merge on top.
  const emit = (kind: BucketScanTraceKind, extra?: Partial<BucketScanTraceEvent>): void => {
    trace?.({
      kind,
      minBucket: opts.minBucket,
      maxBucket: opts.maxBucket,
      limit: opts.limit,
      direction: opts.direction,
      ...extra,
    });
  };
  emit(BucketScanTraceKind.Start);
  if (opts.limit <= 0) {
    emit(BucketScanTraceKind.Complete);
    return {rows: []};
  }
  let remaining = opts.limit;
  const out: Array<Row> = [];
  const seenRowIds = new Set<bigint>();
  const processedBuckets = new Set<number>();
  // Fetches one bucket and folds its (deduplicated) rows into `out`.
  const processBucket = async (bucket: number): Promise<void> => {
    emit(BucketScanTraceKind.ProcessBucket, {bucket, remaining});
    emit(BucketScanTraceKind.FetchBucket, {bucket, remaining});
    const {rows, unbounded} = await deps.fetchRowsForBucket(bucket, remaining);
    emit(BucketScanTraceKind.FetchBucket, {bucket, remaining, fetchResult: {rowCount: rows.length, unbounded}});
    if (rows.length === 0) {
      // Only an unbounded miss proves the bucket is empty.
      if (unbounded && deps.onEmptyUnboundedBucket) {
        emit(BucketScanTraceKind.MarkBucketEmpty, {bucket, remaining});
        await deps.onEmptyUnboundedBucket(bucket);
      }
      return;
    }
    if (deps.onBucketHasRows) {
      emit(BucketScanTraceKind.TouchBucket, {bucket, remaining});
      await deps.onBucketHasRows(bucket);
    }
    for (const row of rows) {
      if (remaining <= 0) break;
      const rowId = deps.getRowId(row);
      if (seenRowIds.has(rowId)) continue;
      seenRowIds.add(rowId);
      out.push(row);
      remaining--;
    }
  };
  const stopAfterBucket = typeof opts.stopAfterBucket === 'number' ? opts.stopAfterBucket : null;
  const desc = opts.direction === BucketScanDirection.Desc;
  // "Still inside the range" check, relative to the scan direction.
  const inRange = (bucket: number): boolean => (desc ? bucket >= opts.minBucket : bucket <= opts.maxBucket);
  // Processes a candidate bucket at most once; returns true when the scan must
  // halt because `stopAfterBucket` was just processed.
  const visit = async (bucket: number): Promise<boolean> => {
    if (processedBuckets.has(bucket)) return false;
    processedBuckets.add(bucket);
    await processBucket(bucket);
    if (stopAfterBucket !== null && bucket === stopAfterBucket) {
      emit(BucketScanTraceKind.StopAfterBucketReached, {bucket, remaining});
      return true;
    }
    return false;
  };
  // Phase 1: page through the bucket index in scan order.
  let cursor: number | null = desc ? opts.maxBucket : opts.minBucket;
  while (remaining > 0 && cursor !== null && inRange(cursor)) {
    const query: BucketScanIndexQuery = {
      minBucket: desc ? opts.minBucket : cursor,
      maxBucket: desc ? cursor : opts.maxBucket,
      limit: opts.indexPageSize,
    };
    emit(BucketScanTraceKind.ListBucketsFromIndex, {indexQuery: query});
    const buckets = await deps.listBucketsFromIndex(query);
    emit(BucketScanTraceKind.ListBucketsFromIndex, {indexQuery: query, indexResult: buckets});
    if (buckets.length === 0) break;
    for (const bucket of buckets) {
      if (remaining <= 0) break;
      if (bucket < opts.minBucket || bucket > opts.maxBucket) continue;
      if (await visit(bucket)) return {rows: out};
    }
    // Advance the cursor past the last bucket this page reported.
    const next = desc ? buckets[buckets.length - 1] - 1 : buckets[buckets.length - 1] + 1;
    cursor = inRange(next) ? next : null;
  }
  // Phase 2: numeric sweep over buckets the index did not report.
  if (remaining > 0) {
    const step = desc ? -1 : 1;
    for (let bucket = desc ? opts.maxBucket : opts.minBucket; remaining > 0 && inRange(bucket); bucket += step) {
      if (await visit(bucket)) return {rows: out};
    }
  }
  emit(BucketScanTraceKind.Complete);
  return {rows: out};
}

View File

@@ -0,0 +1,58 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {AttachmentID, ChannelID, MessageID} from '~/BrandedTypes';
import {fetchMany, fetchOne} from '~/database/Cassandra';
import type {AttachmentLookupRow} from '~/database/CassandraTypes';
import {AttachmentLookup} from '~/Tables';
// Point lookup: one attachment-lookup row by channel + attachment id + filename.
const LOOKUP_ATTACHMENT_BY_CHANNEL_AND_FILENAME_QUERY = AttachmentLookup.selectCql({
  where: [
    AttachmentLookup.where.eq('channel_id'),
    AttachmentLookup.where.eq('attachment_id'),
    AttachmentLookup.where.eq('filename'),
  ],
  limit: 1,
});
// Unbounded listing of every attachment-lookup row in a channel.
const LIST_CHANNEL_ATTACHMENTS_QUERY = AttachmentLookup.selectCql({
  where: AttachmentLookup.where.eq('channel_id'),
});
/** Read-only access to the per-channel attachment lookup table. */
export class MessageAttachmentRepository {
  /**
   * Resolves the message that owns an attachment, keyed by channel,
   * attachment id and filename. Returns null when no lookup row exists.
   */
  async lookupAttachmentByChannelAndFilename(
    channelId: ChannelID,
    attachmentId: AttachmentID,
    filename: string,
  ): Promise<MessageID | null> {
    const row = await fetchOne<AttachmentLookupRow>(LOOKUP_ATTACHMENT_BY_CHANNEL_AND_FILENAME_QUERY, {
      channel_id: channelId,
      attachment_id: attachmentId,
      filename,
    });
    if (!row) return null;
    return row.message_id;
  }

  /** Lists every attachment-lookup row recorded for the channel. */
  async listChannelAttachments(channelId: ChannelID): Promise<Array<AttachmentLookupRow>> {
    return fetchMany<AttachmentLookupRow>(LIST_CHANNEL_ATTACHMENTS_QUERY, {channel_id: channelId});
  }
}

View File

@@ -0,0 +1,165 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, MessageID, UserID} from '~/BrandedTypes';
import {createChannelID, createMessageID} from '~/BrandedTypes';
import {Db, deleteOneOrMany, fetchMany, fetchOne, upsertOne} from '~/database/Cassandra';
import {Messages, MessagesByAuthor} from '~/Tables';
import * as BucketUtils from '~/utils/BucketUtils';
import type {MessageDataRepository} from './MessageDataRepository';
import type {MessageDeletionRepository} from './MessageDeletionRepository';
// Existence probe against the author index: full primary key, single row.
const SELECT_MESSAGE_BY_AUTHOR = MessagesByAuthor.select({
  where: [
    MessagesByAuthor.where.eq('author_id'),
    MessagesByAuthor.where.eq('channel_id'),
    MessagesByAuthor.where.eq('message_id'),
  ],
  limit: 1,
});
// Builds the author-index page query. With pagination enabled, resumes
// strictly after the (channel_id, message_id) cursor tuple.
function listMessagesByAuthorQuery(limit: number, usePagination: boolean) {
  const where = [MessagesByAuthor.where.eq('author_id')];
  if (usePagination) {
    where.push(MessagesByAuthor.where.tupleGt(['channel_id', 'message_id'], ['last_channel_id', 'last_message_id']));
  }
  return MessagesByAuthor.select({
    columns: ['channel_id', 'message_id'],
    where,
    limit,
  });
}
/** Operations keyed by message author: listing, bulk delete, anonymization. */
export class MessageAuthorRepository {
  constructor(
    private messageDataRepo: MessageDataRepository,
    private messageDeletionRepo: MessageDeletionRepository,
  ) {}

  /**
   * Lists (channel, message) pairs authored by `authorId`. Pass the last pair
   * of the previous page as `lastChannelId`/`lastMessageId` to resume.
   */
  async listMessagesByAuthor(
    authorId: UserID,
    limit: number = 1000,
    lastChannelId?: ChannelID,
    lastMessageId?: MessageID,
  ): Promise<Array<{channelId: ChannelID; messageId: MessageID}>> {
    const usePagination = Boolean(lastChannelId && lastMessageId);
    const q = listMessagesByAuthorQuery(limit, usePagination);
    const results = await fetchMany<{channel_id: bigint; message_id: bigint}>(
      usePagination
        ? q.bind({
            author_id: authorId,
            last_channel_id: lastChannelId!,
            last_message_id: lastMessageId!,
          })
        : q.bind({
            author_id: authorId,
          }),
    );
    // Defensive client-side re-check of the cursor: the DB-side tuple
    // comparison should already exclude these rows, but keep the safety net.
    let filteredResults = results;
    if (lastChannelId && lastMessageId) {
      filteredResults = results.filter((r) => {
        const channelId = createChannelID(r.channel_id);
        const messageId = createMessageID(r.message_id);
        return channelId > lastChannelId || (channelId === lastChannelId && messageId > lastMessageId);
      });
    }
    return filteredResults.map((r) => ({
      channelId: createChannelID(r.channel_id),
      messageId: createMessageID(r.message_id),
    }));
  }

  /**
   * Deletes every message authored by `authorId`, optionally restricted to
   * `channelIds`/`messageIds`. Pages through the author index with a keyset
   * cursor so authors with more than one page of messages are fully processed
   * (the previous version silently stopped after the first 1000 index rows).
   */
  async deleteMessagesByAuthor(
    authorId: UserID,
    channelIds?: Array<ChannelID>,
    messageIds?: Array<MessageID>,
  ): Promise<void> {
    const pageSize = 1000;
    let cursorChannelId: ChannelID | undefined;
    let cursorMessageId: MessageID | undefined;
    for (;;) {
      const page = await this.listMessagesByAuthor(authorId, pageSize, cursorChannelId, cursorMessageId);
      if (page.length === 0) break;
      for (const {channelId, messageId} of page) {
        if (channelIds && !channelIds.includes(channelId)) continue;
        if (messageIds && !messageIds.includes(messageId)) continue;
        // Re-read the message and verify authorship before deleting.
        const message = await this.messageDataRepo.getMessage(channelId, messageId);
        if (message && message.authorId === authorId) {
          await this.messageDeletionRepo.deleteMessage(
            channelId,
            messageId,
            authorId,
            message.pinnedTimestamp || undefined,
          );
        }
      }
      // A short page means the index is exhausted; otherwise resume after it.
      if (page.length < pageSize) break;
      const tail = page[page.length - 1];
      cursorChannelId = tail.channelId;
      cursorMessageId = tail.messageId;
    }
  }

  /** Returns true when the author index contains the given message. */
  async hasMessageByAuthor(authorId: UserID, channelId: ChannelID, messageId: MessageID): Promise<boolean> {
    const result = await fetchOne<{channel_id: bigint; message_id: bigint}>(
      SELECT_MESSAGE_BY_AUTHOR.bind({
        author_id: authorId,
        channel_id: channelId,
        message_id: messageId,
      }),
    );
    return result !== null;
  }

  /**
   * Re-attributes a message to `newAuthorId`: removes the old author-index
   * row (if any), writes the new one, and patches the message row itself.
   */
  async anonymizeMessage(channelId: ChannelID, messageId: MessageID, newAuthorId: UserID): Promise<void> {
    const bucket = BucketUtils.makeBucket(messageId);
    const message = await this.messageDataRepo.getMessage(channelId, messageId);
    if (!message) return;
    if (message.authorId) {
      await deleteOneOrMany(
        MessagesByAuthor.deleteByPk({
          author_id: message.authorId,
          channel_id: channelId,
          message_id: messageId,
        }),
      );
    }
    await upsertOne(
      MessagesByAuthor.upsertAll({
        author_id: newAuthorId,
        channel_id: channelId,
        message_id: messageId,
      }),
    );
    await upsertOne(
      Messages.patchByPk(
        {
          channel_id: channelId,
          bucket,
          message_id: messageId,
        },
        {
          author_id: Db.set(newAuthorId),
        },
      ),
    );
  }
}

View File

@@ -0,0 +1,917 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, MessageID} from '~/BrandedTypes';
import {
BatchBuilder,
buildPatchFromData,
Db,
deleteOneOrMany,
executeConditional,
executeVersionedUpdate,
fetchMany,
fetchOne,
upsertOne,
} from '~/database/Cassandra';
import type {ChannelMessageBucketRow, ChannelStateRow, MessageRow} from '~/database/CassandraTypes';
import {MESSAGE_COLUMNS} from '~/database/CassandraTypes';
import {Logger} from '~/Logger';
import {Message} from '~/Models';
import {
AttachmentLookup,
ChannelEmptyBuckets,
ChannelMessageBuckets,
ChannelPins,
ChannelState,
Messages,
MessagesByAuthor,
} from '~/Tables';
import * as BucketUtils from '~/utils/BucketUtils';
import * as SnowflakeUtils from '~/utils/SnowflakeUtils';
import type {ListMessagesOptions} from '../IMessageRepository';
import {BucketScanDirection, scanBucketsWithIndex} from './BucketScanEngine';
// Module-scoped child logger so every line carries the repository context.
const logger = Logger.child({module: 'MessageDataRepository'});
// Page size used by listMessages when the caller gives no explicit limit.
const DEFAULT_MESSAGE_LIMIT = 50;
// How many bucket-index rows to pull per page while scanning.
const DEFAULT_BUCKET_INDEX_PAGE_SIZE = 200;
// Max attempts for the compare-and-set loop advancing ChannelState.
const DEFAULT_CAS_RETRIES = 8;
// NOTE(review): usage not visible in this chunk — presumably buckets that may
// hold rows written before bucketing existed; confirm against the rest of the file.
const LEGACY_BUCKETS_TO_CHECK = [0];
// Point lookup of a single message by its full primary key.
const FETCH_MESSAGE_BY_CHANNEL_BUCKET_AND_MESSAGE_ID = Messages.select({
  where: [Messages.where.eq('channel_id'), Messages.where.eq('bucket'), Messages.where.eq('message_id')],
  limit: 1,
});
// Single-row read of the per-channel bookkeeping state.
const FETCH_CHANNEL_STATE = ChannelState.select({
  where: ChannelState.where.eq('channel_id'),
  limit: 1,
});
export class MessageDataRepository {
/**
 * Lists up to `limit` messages in a channel, honoring the optional
 * before/after cursors, and dispatches to the matching range scan.
 * Returns [] for a non-positive limit.
 */
async listMessages(
  channelId: ChannelID,
  beforeMessageId?: MessageID,
  limit: number = DEFAULT_MESSAGE_LIMIT,
  afterMessageId?: MessageID,
  options?: ListMessagesOptions,
): Promise<Array<Message>> {
  if (limit <= 0) return [];
  logger.debug(
    {
      channelId: channelId.toString(),
      before: beforeMessageId?.toString() ?? null,
      after: afterMessageId?.toString() ?? null,
      limit,
    },
    'listMessages start',
  );
  // Both cursors present: bounded window between them.
  if (beforeMessageId && afterMessageId) {
    return this.listMessagesBetween(channelId, afterMessageId, beforeMessageId, limit, options);
  }
  if (beforeMessageId) {
    return this.listMessagesBefore(channelId, beforeMessageId, limit, options);
  }
  if (afterMessageId) {
    return this.listMessagesAfter(channelId, afterMessageId, limit, options);
  }
  // No cursor: latest messages.
  return this.listMessagesLatest(channelId, limit);
}
// Per-bucket query: rows strictly older than `before_message_id`, newest first.
private makeFetchMessagesBefore(limit: number) {
  const conditions = [
    Messages.where.eq('channel_id'),
    Messages.where.eq('bucket'),
    Messages.where.lt('message_id', 'before_message_id'),
  ];
  return Messages.select({
    where: conditions,
    orderBy: {col: 'message_id', direction: 'DESC'},
    limit,
  });
}
// Per-bucket query: rows strictly newer than `after_message_id`, newest first.
private makeFetchMessagesAfterDesc(limit: number) {
  const conditions = [
    Messages.where.eq('channel_id'),
    Messages.where.eq('bucket'),
    Messages.where.gt('message_id', 'after_message_id'),
  ];
  return Messages.select({
    where: conditions,
    orderBy: {col: 'message_id', direction: 'DESC'},
    limit,
  });
}
// Per-bucket query: rows in the open interval (after_message_id, before_message_id), newest first.
private makeFetchMessagesBetween(limit: number) {
  const conditions = [
    Messages.where.eq('channel_id'),
    Messages.where.eq('bucket'),
    Messages.where.gt('message_id', 'after_message_id'),
    Messages.where.lt('message_id', 'before_message_id'),
  ];
  return Messages.select({
    where: conditions,
    orderBy: {col: 'message_id', direction: 'DESC'},
    limit,
  });
}
// Per-bucket query: all rows in the bucket, newest first.
private makeFetchMessagesLatestDesc(limit: number) {
  const conditions = [Messages.where.eq('channel_id'), Messages.where.eq('bucket')];
  return Messages.select({
    where: conditions,
    orderBy: {col: 'message_id', direction: 'DESC'},
    limit,
  });
}
// Per-bucket query: rows strictly newer than `after_message_id`, oldest first.
private makeFetchMessagesAfterAsc(limit: number) {
  const conditions = [
    Messages.where.eq('channel_id'),
    Messages.where.eq('bucket'),
    Messages.where.gt('message_id', 'after_message_id'),
  ];
  return Messages.select({
    where: conditions,
    orderBy: {col: 'message_id', direction: 'ASC'},
    limit,
  });
}
// Per-bucket query: all rows in the bucket, oldest first.
private makeFetchMessagesOldestAsc(limit: number) {
  const conditions = [Messages.where.eq('channel_id'), Messages.where.eq('bucket')];
  return Messages.select({
    where: conditions,
    orderBy: {col: 'message_id', direction: 'ASC'},
    limit,
  });
}
// Newest-first scan from the current time bucket back to the channel's
// earliest known bucket (channel state, falling back to the channel id's bucket).
private async listMessagesLatest(channelId: ChannelID, limit: number): Promise<Array<Message>> {
  const channelState = await this.getChannelState(channelId);
  const newestBucket = BucketUtils.makeBucket(SnowflakeUtils.getSnowflake());
  const oldestBucket = channelState?.created_bucket ?? BucketUtils.makeBucket(channelId);
  return this.scanBucketsDescForMessages(channelId, {
    limit,
    minBucket: oldestBucket,
    maxBucket: newestBucket,
  });
}
/**
 * Messages strictly older than `before`, newest first. Scans from the
 * cursor's bucket down to the channel's earliest bucket.
 */
private async listMessagesBefore(
  channelId: ChannelID,
  before: MessageID,
  limit: number,
  options?: ListMessagesOptions,
): Promise<Array<Message>> {
  const state = await this.getChannelState(channelId);
  const maxBucket = BucketUtils.makeBucket(before);
  // Fall back to the bucket derived from the channel id when no state row exists.
  const minBucket = state?.created_bucket ?? BucketUtils.makeBucket(channelId);
  logger.debug(
    {
      channelId: channelId.toString(),
      before: before.toString(),
      limit,
      maxBucket,
      minBucket,
      stateCreatedBucket: state?.created_bucket ?? null,
      restrictToBeforeBucket: options?.restrictToBeforeBucket ?? null,
    },
    'listMessagesBefore: computed bucket range',
  );
  return this.scanBucketsDescForMessages(channelId, {
    limit,
    minBucket,
    maxBucket,
    before,
    restrictToBeforeBucket: options?.restrictToBeforeBucket,
  });
}
/**
 * Messages strictly newer than `after`. By default scans newest-first; with
 * `immediateAfter` it scans oldest-first from the cursor (so the rows closest
 * after the cursor win) and then reverses to keep the newest-first output order.
 */
private async listMessagesAfter(
  channelId: ChannelID,
  after: MessageID,
  limit: number,
  options?: ListMessagesOptions,
): Promise<Array<Message>> {
  const state = await this.getChannelState(channelId);
  const afterBucket = BucketUtils.makeBucket(after);
  const createdMin = state?.created_bucket ?? BucketUtils.makeBucket(channelId);
  // Never scan below the cursor's bucket or before the channel existed.
  const minBucket = Math.max(afterBucket, createdMin);
  const nowBucket = BucketUtils.makeBucket(SnowflakeUtils.getSnowflake());
  // Guard against clock/bucket skew: maxBucket never drops below minBucket.
  const maxBucket = Math.max(nowBucket, minBucket);
  logger.debug(
    {
      channelId: channelId.toString(),
      action: 'listMessagesAfter',
      after: after.toString(),
      minBucket,
      maxBucket,
      limit,
      immediateAfter: options?.immediateAfter ?? false,
    },
    'listMessagesAfter parameters',
  );
  if (options?.immediateAfter) {
    const asc = await this.scanBucketsAscForMessages(channelId, {
      limit,
      minBucket,
      maxBucket,
      after,
    });
    return asc.reverse();
  }
  return this.scanBucketsDescForMessages(channelId, {
    limit,
    minBucket,
    maxBucket,
    after,
  });
}
/**
 * Messages in the open interval (after, before), newest first. The bucket
 * range is normalized so the cursors may be given in either order.
 */
private async listMessagesBetween(
  channelId: ChannelID,
  after: MessageID,
  before: MessageID,
  limit: number,
  options?: ListMessagesOptions,
): Promise<Array<Message>> {
  const state = await this.getChannelState(channelId);
  const afterBucket = BucketUtils.makeBucket(after);
  const beforeBucket = BucketUtils.makeBucket(before);
  const high = Math.max(afterBucket, beforeBucket);
  const low = Math.min(afterBucket, beforeBucket);
  // Never scan earlier than the channel's first known bucket.
  const createdMin = state?.created_bucket ?? BucketUtils.makeBucket(channelId);
  const minBucket = Math.max(low, createdMin);
  const maxBucket = high;
  logger.debug(
    {
      channelId: channelId.toString(),
      action: 'listMessagesBetween',
      after: after.toString(),
      before: before.toString(),
      minBucket,
      maxBucket,
      limit,
    },
    'listMessagesBetween parameters',
  );
  return this.scanBucketsDescForMessages(channelId, {
    limit,
    minBucket,
    maxBucket,
    after,
    before,
    restrictToBeforeBucket: options?.restrictToBeforeBucket,
  });
}
/**
 * Newest-first bucket scan for messages, wiring this repository's bucket
 * index and bookkeeping callbacks into the generic scan engine. Also updates
 * the channel's has_messages flag and last-message pointer as a side effect.
 */
private async scanBucketsDescForMessages(
  channelId: ChannelID,
  opts: {
    limit: number;
    minBucket: number;
    maxBucket: number;
    before?: MessageID;
    after?: MessageID;
    restrictToBeforeBucket?: boolean;
  },
): Promise<Array<Message>> {
  const beforeBucket = opts.before ? BucketUtils.makeBucket(opts.before) : null;
  const afterBucket = opts.after ? BucketUtils.makeBucket(opts.after) : null;
  // Only restrict the scan to the cursor's bucket when there is a `before`
  // cursor and no `after` cursor (otherwise the lower bound already applies).
  const stopAfterBucket =
    opts.restrictToBeforeBucket === true && opts.before && !opts.after && beforeBucket !== null
      ? beforeBucket
      : undefined;
  logger.debug(
    {
      channelId: channelId.toString(),
      minBucket: opts.minBucket,
      maxBucket: opts.maxBucket,
      beforeBucket,
      afterBucket,
      restrictToBeforeBucket: opts.restrictToBeforeBucket ?? null,
      stopAfterBucket: stopAfterBucket ?? null,
    },
    'scanBucketsDescForMessages: starting scan',
  );
  const {rows: out} = await scanBucketsWithIndex<MessageRow>(
    {
      listBucketsFromIndex: async (query) =>
        this.listBucketsDescFromIndex(channelId, {
          minBucket: query.minBucket,
          maxBucket: query.maxBucket,
          limit: query.limit,
        }),
      fetchRowsForBucket: async (bucket, limit) =>
        this.fetchRowsForBucket(channelId, bucket, limit, {
          before: opts.before,
          after: opts.after,
          beforeBucket,
          afterBucket,
        }),
      getRowId: (row) => row.message_id,
      // Keep the bucket index in sync with what the scan observed.
      onEmptyUnboundedBucket: async (bucket) => this.markBucketEmpty(channelId, bucket),
      onBucketHasRows: async (bucket) => this.touchBucketWithMessages(channelId, bucket),
    },
    {
      limit: opts.limit,
      minBucket: opts.minBucket,
      maxBucket: opts.maxBucket,
      direction: BucketScanDirection.Desc,
      indexPageSize: DEFAULT_BUCKET_INDEX_PAGE_SIZE,
      stopAfterBucket,
    },
  );
  if (out.length === 0) return [];
  // Track the newest message id seen (and its bucket) to advance channel state.
  let maxId: MessageID = out[0].message_id;
  let maxBucketForId = out[0].bucket;
  for (const row of out) {
    if (row.message_id > maxId) {
      maxId = row.message_id;
      maxBucketForId = row.bucket;
    }
  }
  await this.touchChannelHasMessages(channelId);
  await this.advanceChannelStateLastMessageIfNewer(channelId, maxId, maxBucketForId);
  return this.repairAndMapMessages(channelId, out);
}
/**
 * Oldest-first bucket scan for messages newer than `after`. Shares the same
 * bookkeeping side effects as the DESC variant (has_messages flag, bucket
 * index maintenance, last-message pointer advancement).
 */
private async scanBucketsAscForMessages(
  channelId: ChannelID,
  opts: {
    limit: number;
    minBucket: number;
    maxBucket: number;
    after: MessageID;
  },
): Promise<Array<Message>> {
  const afterBucket = BucketUtils.makeBucket(opts.after);
  const {rows: out} = await scanBucketsWithIndex<MessageRow>(
    {
      listBucketsFromIndex: async (query) =>
        this.listBucketsAscFromIndex(channelId, {
          minBucket: query.minBucket,
          maxBucket: query.maxBucket,
          limit: query.limit,
        }),
      fetchRowsForBucket: async (bucket, limit) =>
        this.fetchRowsForBucketAsc(channelId, bucket, limit, {
          after: opts.after,
          afterBucket,
        }),
      getRowId: (row) => row.message_id,
      // Keep the bucket index in sync with what the scan observed.
      onEmptyUnboundedBucket: async (bucket) => this.markBucketEmpty(channelId, bucket),
      onBucketHasRows: async (bucket) => this.touchBucketWithMessages(channelId, bucket),
    },
    {
      limit: opts.limit,
      minBucket: opts.minBucket,
      maxBucket: opts.maxBucket,
      direction: BucketScanDirection.Asc,
      indexPageSize: DEFAULT_BUCKET_INDEX_PAGE_SIZE,
    },
  );
  if (out.length === 0) return [];
  // Track the newest message id seen (and its bucket) to advance channel state.
  let maxId: MessageID = out[0].message_id;
  let maxBucketForId = out[0].bucket;
  for (const row of out) {
    if (row.message_id > maxId) {
      maxId = row.message_id;
      maxBucketForId = row.bucket;
    }
  }
  await this.touchChannelHasMessages(channelId);
  await this.advanceChannelStateLastMessageIfNewer(channelId, maxId, maxBucketForId);
  return this.repairAndMapMessages(channelId, out);
}
/**
 * Fetches rows from one bucket, oldest first. In the cursor's own bucket the
 * query is bounded by `after` (so an empty result does NOT prove the bucket is
 * empty → unbounded=false); any other bucket is read unconstrained.
 */
private async fetchRowsForBucketAsc(
  channelId: ChannelID,
  bucket: number,
  limit: number,
  meta: {
    after: MessageID;
    afterBucket: number;
  },
): Promise<{rows: Array<MessageRow>; unbounded: boolean}> {
  logger.debug(
    {
      channelId: channelId.toString(),
      bucket,
      limit,
      meta: {after: meta.after.toString(), afterBucket: meta.afterBucket},
    },
    'fetchRowsForBucketAsc parameters',
  );
  if (bucket === meta.afterBucket) {
    const q = this.makeFetchMessagesAfterAsc(limit);
    const rows = await fetchMany<MessageRow>(
      q.bind({
        channel_id: channelId,
        bucket,
        after_message_id: meta.after,
      }),
    );
    return {rows, unbounded: false};
  }
  const q = this.makeFetchMessagesOldestAsc(limit);
  const rows = await fetchMany<MessageRow>(q.bind({channel_id: channelId, bucket}));
  return {rows, unbounded: true};
}
/**
 * Fetches rows from one bucket, newest first, applying whichever cursor
 * bounds fall inside this bucket. `unbounded` is true only for the fully
 * unconstrained read, since only then does an empty result prove the bucket
 * is empty.
 */
private async fetchRowsForBucket(
  channelId: ChannelID,
  bucket: number,
  limit: number,
  meta: {
    before?: MessageID;
    after?: MessageID;
    beforeBucket: number | null;
    afterBucket: number | null;
  },
): Promise<{rows: Array<MessageRow>; unbounded: boolean}> {
  logger.debug(
    {
      channelId: channelId.toString(),
      bucket,
      limit,
      meta: {
        before: meta.before?.toString() ?? null,
        after: meta.after?.toString() ?? null,
        beforeBucket: meta.beforeBucket,
        afterBucket: meta.afterBucket,
      },
    },
    'fetchRowsForBucket parameters',
  );
  // Both cursors land in this bucket: bounded on both sides.
  if (meta.before && meta.after && meta.beforeBucket === bucket && meta.afterBucket === bucket) {
    const q = this.makeFetchMessagesBetween(limit);
    const rows = await fetchMany<MessageRow>(
      q.bind({
        channel_id: channelId,
        bucket,
        after_message_id: meta.after,
        before_message_id: meta.before,
      }),
    );
    return {rows, unbounded: false};
  }
  // Upper bound only applies in the `before` cursor's own bucket.
  if (meta.before && meta.beforeBucket === bucket) {
    const q = this.makeFetchMessagesBefore(limit);
    const rows = await fetchMany<MessageRow>(
      q.bind({
        channel_id: channelId,
        bucket,
        before_message_id: meta.before,
      }),
    );
    return {rows, unbounded: false};
  }
  // Lower bound only applies in the `after` cursor's own bucket.
  if (meta.after && meta.afterBucket === bucket) {
    const q = this.makeFetchMessagesAfterDesc(limit);
    const rows = await fetchMany<MessageRow>(
      q.bind({
        channel_id: channelId,
        bucket,
        after_message_id: meta.after,
      }),
    );
    return {rows, unbounded: false};
  }
  // No cursor touches this bucket: unconstrained read.
  const q = this.makeFetchMessagesLatestDesc(limit);
  const rows = await fetchMany<MessageRow>(q.bind({channel_id: channelId, bucket}));
  return {rows, unbounded: true};
}
// Records the bucket as populated and clears any stale "empty" marker, as a
// single batch so the two index tables stay consistent.
private async touchBucketWithMessages(channelId: ChannelID, bucket: number): Promise<void> {
  const batch = new BatchBuilder();
  batch.addPrepared(
    ChannelMessageBuckets.upsertAll({
      channel_id: channelId,
      bucket,
      updated_at: new Date(),
    }),
  );
  batch.addPrepared(ChannelEmptyBuckets.deleteByPk({channel_id: channelId, bucket}));
  await batch.execute(true);
}
// Records the bucket as empty and removes it from the populated-bucket index,
// as a single batch so the two index tables stay consistent.
private async markBucketEmpty(channelId: ChannelID, bucket: number): Promise<void> {
  const batch = new BatchBuilder();
  batch.addPrepared(ChannelMessageBuckets.deleteByPk({channel_id: channelId, bucket}));
  batch.addPrepared(
    ChannelEmptyBuckets.upsertAll({
      channel_id: channelId,
      bucket,
      updated_at: new Date(),
    }),
  );
  await batch.execute(true);
}
private async touchChannelHasMessages(channelId: ChannelID): Promise<void> {
await upsertOne(
ChannelState.patchByPk(
{channel_id: channelId},
{
has_messages: Db.set(true),
updated_at: Db.set(new Date()),
},
),
);
}
/**
 * Advances ChannelState.last_message_id to `newLastMessageId` only if it is
 * strictly newer, using a conditional-update (CAS) loop so concurrent writers
 * cannot move the pointer backwards. Gives up (with a warning) after
 * DEFAULT_CAS_RETRIES lost races.
 */
private async advanceChannelStateLastMessageIfNewer(
  channelId: ChannelID,
  newLastMessageId: MessageID,
  newLastMessageBucket: number,
): Promise<void> {
  for (let i = 0; i < DEFAULT_CAS_RETRIES; i++) {
    const state = await this.getChannelState(channelId);
    const prev = state?.last_message_id ?? null;
    // Already at or ahead of the candidate: nothing to do.
    if (prev !== null && newLastMessageId <= prev) return;
    const q = ChannelState.patchByPkIf(
      {channel_id: channelId},
      {
        has_messages: Db.set(true),
        last_message_id: Db.set(newLastMessageId),
        last_message_bucket: Db.set(newLastMessageBucket),
        updated_at: Db.set(new Date()),
      },
      {col: 'last_message_id', expectedParam: 'prev_last_message_id', expectedValue: prev},
    );
    const res = await executeConditional(q);
    if (res.applied) return;
    // Lost the race: re-read the state and retry.
  }
  // Fix: use the module-scoped child logger (was `Logger.warn` on the raw
  // import), consistent with the rest of this file and keeping the `module`
  // context on the log line.
  logger.warn(
    {channelId: channelId.toString(), messageId: newLastMessageId.toString()},
    'Failed to advance ChannelState.last_message_id after retries',
  );
}
/** Fetches the ChannelState row for the channel, or null when absent. */
private async getChannelState(channelId: ChannelID): Promise<ChannelStateRow | null> {
  const bound = FETCH_CHANNEL_STATE.bind({channel_id: channelId});
  return await fetchOne<ChannelStateRow>(bound);
}
/**
 * Lists buckets known (via the ChannelMessageBuckets index) to contain
 * messages, newest bucket first, optionally bounded by an inclusive
 * [minBucket, maxBucket] range and capped at `opts.limit` rows.
 */
private async listBucketsDescFromIndex(
  channelId: ChannelID,
  opts: {minBucket?: number; maxBucket?: number; limit: number},
): Promise<Array<number>> {
  const where = [ChannelMessageBuckets.where.eq('channel_id')];
  if (typeof opts.minBucket === 'number') where.push(ChannelMessageBuckets.where.gte('bucket', 'min_bucket'));
  if (typeof opts.maxBucket === 'number') where.push(ChannelMessageBuckets.where.lte('bucket', 'max_bucket'));
  const q = ChannelMessageBuckets.select({
    columns: ['bucket'],
    where,
    orderBy: {col: 'bucket', direction: 'DESC'},
    limit: opts.limit,
  });
  const params = {
    channel_id: channelId,
    ...(typeof opts.minBucket === 'number' ? {min_bucket: opts.minBucket} : {}),
    ...(typeof opts.maxBucket === 'number' ? {max_bucket: opts.maxBucket} : {}),
  };
  const rows = await fetchMany<Pick<ChannelMessageBucketRow, 'bucket'>>(q.bind(params));
  const buckets = rows.map((r) => r.bucket);
  // Fix: previously called an undefined lowercase `logger`; this file's
  // logging facility is the imported `Logger` (see Logger.warn usages nearby).
  Logger.debug(
    {
      channelId: channelId.toString(),
      minBucket: opts.minBucket ?? null,
      maxBucket: opts.maxBucket ?? null,
      limit: opts.limit,
      bucketsFound: buckets,
    },
    'listBucketsDescFromIndex: query result',
  );
  return buckets;
}
/**
 * Lists buckets known (via the ChannelMessageBuckets index) to contain
 * messages, oldest bucket first, optionally bounded by an inclusive
 * [minBucket, maxBucket] range and capped at `opts.limit` rows.
 */
private async listBucketsAscFromIndex(
  channelId: ChannelID,
  opts: {minBucket?: number; maxBucket?: number; limit: number},
): Promise<Array<number>> {
  const hasMin = typeof opts.minBucket === 'number';
  const hasMax = typeof opts.maxBucket === 'number';
  const conditions = [ChannelMessageBuckets.where.eq('channel_id')];
  if (hasMin) {
    conditions.push(ChannelMessageBuckets.where.gte('bucket', 'min_bucket'));
  }
  if (hasMax) {
    conditions.push(ChannelMessageBuckets.where.lte('bucket', 'max_bucket'));
  }
  const query = ChannelMessageBuckets.select({
    columns: ['bucket'],
    where: conditions,
    orderBy: {col: 'bucket', direction: 'ASC'},
    limit: opts.limit,
  });
  const bound = query.bind({
    channel_id: channelId,
    ...(hasMin ? {min_bucket: opts.minBucket} : {}),
    ...(hasMax ? {max_bucket: opts.maxBucket} : {}),
  });
  const rows = await fetchMany<Pick<ChannelMessageBucketRow, 'bucket'>>(bound);
  return rows.map((row) => row.bucket);
}
/**
 * Fetches a single message by ID from its snowflake-derived bucket; when not
 * found there, falls back to a legacy-bucket read repair. Returns null when
 * the message does not exist anywhere.
 */
async getMessage(channelId: ChannelID, messageId: MessageID): Promise<Message | null> {
  const bucket = BucketUtils.makeBucket(messageId);
  const row = await fetchOne<MessageRow>(
    FETCH_MESSAGE_BY_CHANNEL_BUCKET_AND_MESSAGE_ID.bind({
      channel_id: channelId,
      bucket,
      message_id: messageId,
    }),
  );
  if (row) {
    return new Message(row);
  }
  return this.attemptBucketReadRepair(channelId, messageId, bucket);
}
/**
 * Creates or updates a message row together with all of its secondary-index
 * rows (author index, pins, attachment lookup, bucket index, ChannelState).
 *
 * The message row itself is written through a versioned LWT update; the
 * index rows are then written in one batch, and ChannelState.last_message_id
 * is advanced afterwards via CAS.
 *
 * @param data    Full message row to persist; `data.bucket` must equal the
 *                bucket derived from `data.message_id`.
 * @param oldData Previously stored row when the caller already has it (pass
 *                `null` for "known not to exist"); when `undefined`, the
 *                current row is fetched to compute the patch.
 * @returns The persisted message, carrying the version from the LWT update.
 * @throws Error on a bucket/ID mismatch, or when the LWT update fails after
 *         retries.
 */
async upsertMessage(data: MessageRow, oldData?: MessageRow | null): Promise<Message> {
  // Refuse to write under a bucket inconsistent with the message's snowflake;
  // such a row would be invisible to getMessage's bucket-derived lookup.
  const expectedBucket = BucketUtils.makeBucket(data.message_id);
  if (data.bucket !== expectedBucket) {
    throw new Error(
      `Invalid message bucket for ${data.message_id.toString()}: expected ${expectedBucket}, received ${data.bucket}`,
    );
  }
  const batch = new BatchBuilder();
  // The bucket is about to contain a message, so any "empty bucket" marker
  // for it is now stale.
  batch.addPrepared(
    ChannelEmptyBuckets.deleteByPk({
      channel_id: data.channel_id,
      bucket: data.bucket,
    }),
  );
  // Versioned (LWT) write of the message row itself. The first callback
  // supplies the current row (caller-provided or fetched); the second builds
  // the patch against it, excluding the primary-key columns.
  const result = await executeVersionedUpdate<MessageRow, 'channel_id' | 'bucket' | 'message_id'>(
    async () => {
      if (oldData !== undefined) return oldData;
      const pk = {
        channel_id: data.channel_id,
        bucket: data.bucket,
        message_id: data.message_id,
      };
      const existingMessage = await fetchOne<MessageRow>(FETCH_MESSAGE_BY_CHANNEL_BUCKET_AND_MESSAGE_ID.bind(pk));
      return existingMessage ?? null;
    },
    (current) => ({
      pk: {
        channel_id: data.channel_id,
        bucket: data.bucket,
        message_id: data.message_id,
      },
      patch: buildPatchFromData(data, current, MESSAGE_COLUMNS, ['channel_id', 'bucket', 'message_id']),
    }),
    Messages,
    {onFailure: 'log'},
  );
  if (!result.applied) {
    throw new Error(`Failed to upsert message ${data.message_id} after LWT retries`);
  }
  const finalVersion = result.finalVersion ?? 1;
  // Secondary indexes: author lookup.
  if (data.author_id) {
    batch.addPrepared(
      MessagesByAuthor.upsertAll({
        author_id: data.author_id,
        channel_id: data.channel_id,
        message_id: data.message_id,
      }),
    );
  }
  // Pin index: add the current pin row when pinned...
  if (data.pinned_timestamp) {
    batch.addPrepared(
      ChannelPins.upsertAll({
        channel_id: data.channel_id,
        message_id: data.message_id,
        pinned_timestamp: data.pinned_timestamp,
      }),
    );
  }
  // ...and remove the old pin row on unpin. NOTE(review): if the pin
  // timestamp *changed* (old and new both set but different), the old pin
  // row is not removed here — confirm whether that can occur upstream.
  if (oldData?.pinned_timestamp && !data.pinned_timestamp) {
    batch.addPrepared(
      ChannelPins.deleteByPk({
        channel_id: data.channel_id,
        message_id: data.message_id,
        pinned_timestamp: oldData.pinned_timestamp,
      }),
    );
  }
  // Attachment lookup: drop rows for the previous attachment set, then
  // (re-)insert rows for the current set; surviving attachments are simply
  // rewritten.
  if (oldData?.attachments) {
    for (const attachment of oldData.attachments) {
      batch.addPrepared(
        AttachmentLookup.deleteByPk({
          channel_id: data.channel_id,
          attachment_id: attachment.attachment_id,
          filename: attachment.filename,
        }),
      );
    }
  }
  if (data.attachments) {
    for (const attachment of data.attachments) {
      batch.addPrepared(
        AttachmentLookup.upsertAll({
          channel_id: data.channel_id,
          attachment_id: attachment.attachment_id,
          filename: attachment.filename,
          message_id: data.message_id,
        }),
      );
    }
  }
  // Mark the bucket as containing messages.
  batch.addPrepared(
    ChannelMessageBuckets.upsertAll({
      channel_id: data.channel_id,
      bucket: data.bucket,
      updated_at: new Date(),
    }),
  );
  // Presumably the bucket of the channel's creation time, derived from the
  // snowflake channel_id — TODO confirm intent of bucketing the channel ID.
  const createdBucket = BucketUtils.makeBucket(data.channel_id);
  batch.addPrepared(
    ChannelState.patchByPk(
      {channel_id: data.channel_id},
      {
        created_bucket: Db.set(createdBucket),
        has_messages: Db.set(true),
        updated_at: Db.set(new Date()),
      },
    ),
  );
  await batch.execute();
  // last_message_id is advanced separately under CAS, after the batch lands.
  await this.advanceChannelStateLastMessageIfNewer(data.channel_id, data.message_id, data.bucket);
  return new Message({...data, version: finalVersion});
}
/**
 * Searches the known legacy buckets for a copy of the message and, when one
 * is found, rewrites it under the bucket derived from its snowflake ID.
 * The legacy row is deleted only after the repaired copy has been persisted.
 * Returns the repaired message, or null when no legacy copy exists.
 */
private async attemptBucketReadRepair(
  channelId: ChannelID,
  messageId: MessageID,
  expectedBucket: number,
): Promise<Message | null> {
  for (const candidateBucket of LEGACY_BUCKETS_TO_CHECK) {
    if (candidateBucket === expectedBucket) {
      continue;
    }
    const staleRow = await fetchOne<MessageRow>(
      FETCH_MESSAGE_BY_CHANNEL_BUCKET_AND_MESSAGE_ID.bind({
        channel_id: channelId,
        bucket: candidateBucket,
        message_id: messageId,
      }),
    );
    if (!staleRow) {
      continue;
    }
    Logger.warn(
      {
        channelId: channelId.toString(),
        messageId: messageId.toString(),
        legacyBucket: candidateBucket,
        expectedBucket,
      },
      'Repairing message bucket mismatch',
    );
    // Write the corrected copy first so the message is never missing from
    // both buckets, then remove the legacy row.
    const repaired = await this.upsertMessage({...staleRow, bucket: expectedBucket}, staleRow);
    await deleteOneOrMany(
      Messages.deleteByPk({
        channel_id: channelId,
        bucket: candidateBucket,
        message_id: messageId,
      }),
    );
    return repaired;
  }
  return null;
}
private async repairAndMapMessages(channelId: ChannelID, messages: Array<MessageRow>): Promise<Array<Message>> {
if (messages.length === 0) return [];
const repaired: Array<Message> = [];
for (const message of messages) {
const expectedBucket = BucketUtils.makeBucket(message.message_id);
if (message.bucket === expectedBucket) {
repaired.push(new Message(message));
continue;
}
const repairedMessage = await this.attemptBucketReadRepair(channelId, message.message_id, expectedBucket);
if (repairedMessage) {
repaired.push(repairedMessage);
continue;
}
Logger.warn(
{
channelId: channelId.toString(),
messageId: message.message_id.toString(),
legacyBucket: message.bucket,
expectedBucket,
},
'Failed to repair message bucket mismatch during listMessages; returning legacy row',
);
repaired.push(new Message(message));
}
return repaired;
}
}

View File

@@ -0,0 +1,334 @@
/*
* Copyright (C) 2026 Fluxer Contributors
*
* This file is part of Fluxer.
*
* Fluxer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fluxer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Fluxer. If not, see <https://www.gnu.org/licenses/>.
*/
import type {ChannelID, MessageID, UserID} from '~/BrandedTypes';
import {BatchBuilder, Db, deleteOneOrMany, fetchMany, fetchOne, upsertOne} from '~/database/Cassandra';
import type {ChannelMessageBucketRow, ChannelStateRow} from '~/database/CassandraTypes';
import type {Message} from '~/Models';
import {
AttachmentLookup,
ChannelEmptyBuckets,
ChannelMessageBuckets,
ChannelPins,
ChannelState,
MessageReactions,
Messages,
MessagesByAuthor,
} from '~/Tables';
import * as BucketUtils from '~/utils/BucketUtils';
import type {MessageDataRepository} from './MessageDataRepository';
// Number of message IDs processed per chunk in bulkDeleteMessages.
const BULK_DELETE_BATCH_SIZE = 100;
// Max buckets scanned when re-deriving the channel's latest message after deletes.
const POST_DELETE_BUCKET_CHECK_LIMIT = 25;
// Probe query: yields one message_id when the (channel, bucket) partition is non-empty.
const HAS_ANY_MESSAGE_IN_BUCKET = Messages.select({
  columns: ['message_id'],
  where: [Messages.where.eq('channel_id'), Messages.where.eq('bucket')],
  limit: 1,
});
// Fetches the per-channel state row (last message, has_messages flag, ...).
const FETCH_CHANNEL_STATE = ChannelState.select({
  where: ChannelState.where.eq('channel_id'),
  limit: 1,
});
// Newest-first page of buckets indexed as containing messages.
const LIST_BUCKETS_DESC = ChannelMessageBuckets.select({
  columns: ['bucket'],
  where: ChannelMessageBuckets.where.eq('channel_id'),
  orderBy: {col: 'bucket', direction: 'DESC'},
  limit: POST_DELETE_BUCKET_CHECK_LIMIT,
});
// Latest message_id within one (channel, bucket) partition; relies on the
// table's clustering order returning the newest row first — TODO confirm.
const FETCH_LATEST_MESSAGE_ID_IN_BUCKET = Messages.select({
  columns: ['message_id'],
  where: [Messages.where.eq('channel_id'), Messages.where.eq('bucket')],
  limit: 1,
});
/**
 * Handles message deletion flows — single delete, bulk delete, and full
 * channel purges — including cleanup of the secondary-index tables
 * (author index, pins, reactions, attachment lookup) and post-delete
 * reconciliation of the bucket indexes and the ChannelState row.
 */
export class MessageDeletionRepository {
  constructor(private messageDataRepo: MessageDataRepository) {}

  /**
   * Queues into `batch` every delete needed to remove one message: the
   * message row, its author-index row, its pin row, its reactions partition,
   * and its attachment-lookup rows. Explicit `authorId`/`pinnedTimestamp`
   * arguments take precedence over the values carried on `message`.
   */
  private addMessageDeletionBatchQueries(
    batch: BatchBuilder,
    channelId: ChannelID,
    messageId: MessageID,
    bucket: number,
    message: Message | null,
    authorId?: UserID,
    pinnedTimestamp?: Date,
  ): void {
    batch.addPrepared(
      Messages.deleteByPk({
        channel_id: channelId,
        bucket,
        message_id: messageId,
      }),
    );
    const effectiveAuthorId = authorId ?? message?.authorId ?? null;
    if (effectiveAuthorId) {
      batch.addPrepared(
        MessagesByAuthor.deleteByPk({
          author_id: effectiveAuthorId,
          channel_id: channelId,
          message_id: messageId,
        }),
      );
    }
    const effectivePinned = pinnedTimestamp ?? message?.pinnedTimestamp ?? null;
    if (effectivePinned) {
      batch.addPrepared(
        ChannelPins.deleteByPk({
          channel_id: channelId,
          message_id: messageId,
          pinned_timestamp: effectivePinned,
        }),
      );
    }
    // Reactions live in their own partition keyed by the message; drop it whole.
    batch.addPrepared(
      MessageReactions.deletePartition({
        channel_id: channelId,
        bucket,
        message_id: messageId,
      }),
    );
    if (message?.attachments) {
      for (const attachment of message.attachments) {
        batch.addPrepared(
          AttachmentLookup.deleteByPk({
            channel_id: channelId,
            attachment_id: attachment.id,
            filename: attachment.filename,
          }),
        );
      }
    }
  }

  /**
   * Flags `bucket` as empty: removes it from the has-messages bucket index
   * and records it in the empty-bucket index instead.
   */
  private async markBucketEmpty(channelId: ChannelID, bucket: number): Promise<void> {
    const batch = new BatchBuilder();
    batch.addPrepared(
      ChannelMessageBuckets.deleteByPk({
        channel_id: channelId,
        bucket,
      }),
    );
    batch.addPrepared(
      ChannelEmptyBuckets.upsertAll({
        channel_id: channelId,
        bucket,
        updated_at: new Date(),
      }),
    );
    await batch.execute(true);
  }

  /** Returns true when no message row remains in the (channel, bucket) partition. */
  private async isBucketEmpty(channelId: ChannelID, bucket: number): Promise<boolean> {
    const row = await fetchOne<{message_id: bigint}>(
      HAS_ANY_MESSAGE_IN_BUCKET.bind({
        channel_id: channelId,
        bucket,
      }),
    );
    return row == null;
  }

  /**
   * Re-derives ChannelState's last-message pointer after deletes, but only
   * when the deletes could have touched it (its bucket was emptied or its ID
   * is among the deleted). Scans indexed buckets newest-first for the first
   * one that still holds a message; empties encountered along the way are
   * re-marked. Falls back to clearing the pointer and has_messages when no
   * message remains in the scanned buckets.
   */
  private async reconcileChannelStateIfNeeded(
    channelId: ChannelID,
    deletedMessageIds: Array<MessageID>,
    emptiedBuckets: Set<number>,
  ): Promise<void> {
    const state = await fetchOne<ChannelStateRow>(FETCH_CHANNEL_STATE.bind({channel_id: channelId}));
    if (!state) return;
    const lastBucket = state.last_message_bucket as number | null | undefined;
    const lastId = state.last_message_id as MessageID | null | undefined;
    const touchedLast =
      (lastBucket != null && emptiedBuckets.has(lastBucket)) || (lastId != null && deletedMessageIds.includes(lastId));
    if (!touchedLast) return;
    // Scan is capped at POST_DELETE_BUCKET_CHECK_LIMIT buckets (query limit).
    const bucketRows = await fetchMany<Pick<ChannelMessageBucketRow, 'bucket'>>(
      LIST_BUCKETS_DESC.bind({channel_id: channelId}),
    );
    for (const {bucket} of bucketRows) {
      const latest = await fetchOne<{message_id: bigint}>(
        FETCH_LATEST_MESSAGE_ID_IN_BUCKET.bind({channel_id: channelId, bucket}),
      );
      if (!latest) {
        // Index said this bucket had messages but none remain: repair it.
        await this.markBucketEmpty(channelId, bucket);
        continue;
      }
      await upsertOne(
        ChannelState.patchByPk(
          {channel_id: channelId},
          {
            has_messages: Db.set(true),
            last_message_bucket: Db.set(bucket),
            last_message_id: Db.set(latest.message_id as MessageID),
            updated_at: Db.set(new Date()),
          },
        ),
      );
      return;
    }
    // No message found in any scanned bucket: the channel is (as far as the
    // index shows) empty.
    await upsertOne(
      ChannelState.patchByPk(
        {channel_id: channelId},
        {
          has_messages: Db.set(false),
          last_message_bucket: Db.clear(),
          last_message_id: Db.clear(),
          updated_at: Db.set(new Date()),
        },
      ),
    );
  }

  /**
   * Post-delete bookkeeping: marks fully-emptied buckets and, when anything
   * was deleted or emptied, reconciles the ChannelState row.
   */
  private async postDeleteMaintenance(
    channelId: ChannelID,
    affectedBuckets: Set<number>,
    deletedMessageIds: Array<MessageID>,
  ): Promise<void> {
    const emptiedBuckets = new Set<number>();
    for (const bucket of affectedBuckets) {
      const empty = await this.isBucketEmpty(channelId, bucket);
      if (!empty) continue;
      emptiedBuckets.add(bucket);
      await this.markBucketEmpty(channelId, bucket);
    }
    if (emptiedBuckets.size > 0 || deletedMessageIds.length > 0) {
      await this.reconcileChannelStateIfNeeded(channelId, deletedMessageIds, emptiedBuckets);
    }
  }

  /**
   * Deletes one message and all of its index rows, then runs post-delete
   * maintenance on its bucket.
   */
  async deleteMessage(
    channelId: ChannelID,
    messageId: MessageID,
    authorId: UserID,
    pinnedTimestamp?: Date,
  ): Promise<void> {
    const bucket = BucketUtils.makeBucket(messageId);
    // Fetch the full message so attachment/pin index rows can be cleaned up.
    const message = await this.messageDataRepo.getMessage(channelId, messageId);
    const batch = new BatchBuilder();
    this.addMessageDeletionBatchQueries(batch, channelId, messageId, bucket, message, authorId, pinnedTimestamp);
    await batch.execute();
    await this.postDeleteMaintenance(channelId, new Set([bucket]), [messageId]);
  }

  /**
   * Deletes many messages in chunks of BULK_DELETE_BATCH_SIZE, running
   * post-delete maintenance after each chunk.
   */
  async bulkDeleteMessages(channelId: ChannelID, messageIds: Array<MessageID>): Promise<void> {
    if (messageIds.length === 0) return;
    for (let i = 0; i < messageIds.length; i += BULK_DELETE_BATCH_SIZE) {
      const chunk = messageIds.slice(i, i + BULK_DELETE_BATCH_SIZE);
      const messages = await Promise.all(chunk.map((id) => this.messageDataRepo.getMessage(channelId, id)));
      const affectedBuckets = new Set<number>();
      const batch = new BatchBuilder();
      for (let j = 0; j < chunk.length; j++) {
        const messageId = chunk[j];
        const message = messages[j];
        const bucket = BucketUtils.makeBucket(messageId);
        affectedBuckets.add(bucket);
        this.addMessageDeletionBatchQueries(batch, channelId, messageId, bucket, message);
      }
      await batch.execute();
      await this.postDeleteMaintenance(channelId, affectedBuckets, chunk);
    }
  }

  /**
   * Purges every message in the channel by paging through listMessages and
   * deleting in sub-batches, then drops the channel's bucket-index
   * partitions outright.
   */
  async deleteAllChannelMessages(channelId: ChannelID): Promise<void> {
    const BATCH_SIZE = 50;
    let hasMore = true;
    let beforeMessageId: MessageID | undefined;
    const allDeleted: Array<MessageID> = [];
    const affectedBuckets = new Set<number>();
    while (hasMore) {
      // Page size 100; a short page signals the last page.
      const messages = await this.messageDataRepo.listMessages(channelId, beforeMessageId, 100);
      if (messages.length === 0) {
        hasMore = false;
        break;
      }
      for (let i = 0; i < messages.length; i += BATCH_SIZE) {
        const batch = new BatchBuilder();
        const messageBatch = messages.slice(i, i + BATCH_SIZE);
        for (const message of messageBatch) {
          const bucket = BucketUtils.makeBucket(message.id);
          affectedBuckets.add(bucket);
          allDeleted.push(message.id);
          this.addMessageDeletionBatchQueries(
            batch,
            channelId,
            message.id,
            bucket,
            message,
            message.authorId ?? undefined,
            message.pinnedTimestamp || undefined,
          );
        }
        await batch.execute();
      }
      if (messages.length < 100) {
        hasMore = false;
      } else {
        beforeMessageId = messages[messages.length - 1].id;
      }
    }
    await this.postDeleteMaintenance(channelId, affectedBuckets, allDeleted);
    // The channel is now empty: drop both bucket-index partitions wholesale.
    await deleteOneOrMany(
      ChannelMessageBuckets.deletePartition({
        channel_id: channelId,
      }),
    );
    await deleteOneOrMany(
      ChannelEmptyBuckets.deletePartition({
        channel_id: channelId,
      }),
    );
  }
}