feat: chunk long announcements for Discord, Reddit, and Discourse

 This commit was made with love from Hikari~ 🌸
This commit is contained in:
2026-03-03 16:56:15 -08:00
committed by Naomi Carrigan
parent 4437047543
commit f25163096b
4 changed files with 204 additions and 5 deletions
+43 -3
View File
@@ -4,9 +4,14 @@
* @author Naomi Carrigan
*/
/* eslint-disable @typescript-eslint/naming-convention -- we are making raw API calls. */
/* eslint-disable max-lines-per-function -- Chunked sending requires more logic. */
/* eslint-disable no-await-in-loop -- Sequential chunk posting requires awaiting each request. */
import { chunkContent } from "../utils/chunkContent.js";
import type { AnnouncementType } from "../interfaces/announcementType.js";
const discordLimit = 2000;
const channelIds: Record<AnnouncementType, string> = {
community: "1386105484313886820",
company: "1422472775695728661",
@@ -25,6 +30,7 @@ const getAnnouncementPing = (type: AnnouncementType): string => {
/**
* Forwards an announcement to our Discord server.
* Sends content in sequential messages if it exceeds the 2000 character limit.
* @param title - The title of the announcement.
* @param content - The main body of the announcement.
* @param type - Whether the announcement is for a product or community.
@@ -35,12 +41,21 @@ export const announceOnDiscord = async(
content: string,
type: AnnouncementType,
): Promise<string> => {
const channelId = channelIds[type];
const ping = getAnnouncementPing(type);
const firstMessagePrefix = `# ${title}\n\n`;
const firstMessageSuffix = `\n-# ${ping}`;
const firstChunkLimit =
discordLimit - firstMessagePrefix.length - firstMessageSuffix.length;
const chunks = chunkContent(content, firstChunkLimit);
const messageRequest = await fetch(
`https://discord.com/api/v10/channels/${channelIds[type]}/messages`,
`https://discord.com/api/v10/channels/${channelId}/messages`,
{
body: JSON.stringify({
allowed_mentions: { parse: [ "users", "roles" ] },
content: `# ${title}\n\n${content}\n-# ${getAnnouncementPing(type)}`,
content: `${firstMessagePrefix}${chunks[0]}${firstMessageSuffix}`,
}),
headers: {
"Authorization": `Bot ${process.env.DISCORD_TOKEN ?? ""}`,
@@ -49,16 +64,19 @@ export const announceOnDiscord = async(
method: "POST",
},
);
if (messageRequest.status !== 200) {
return `Failed to send message to Discord. Status: ${messageRequest.status.toString()} ${messageRequest.statusText}`;
}
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- fetch does not accept generics.
const message = await messageRequest.json() as { id?: string };
if (message.id === undefined) {
return `Failed to parse message ID, cannot crosspost. ${JSON.stringify(message)}`;
}
const crosspostRequest = await fetch(
`https://discord.com/api/v10/channels/${channelIds[type]}/messages/${message.id}/crosspost`,
`https://discord.com/api/v10/channels/${channelId}/messages/${message.id}/crosspost`,
{
headers: {
"Authorization": `Bot ${process.env.DISCORD_TOKEN ?? ""}`,
@@ -67,8 +85,30 @@ export const announceOnDiscord = async(
method: "POST",
},
);
if (!crosspostRequest.ok) {
return `Failed to crosspost message to Discord. Status: ${crosspostRequest.status.toString()} ${crosspostRequest.statusText}`;
}
for (const chunk of chunks.slice(1)) {
const chunkRequest = await fetch(
`https://discord.com/api/v10/channels/${channelId}/messages`,
{
body: JSON.stringify({
content: chunk,
}),
headers: {
"Authorization": `Bot ${process.env.DISCORD_TOKEN ?? ""}`,
"Content-Type": "application/json",
},
method: "POST",
},
);
if (!chunkRequest.ok) {
return `Failed to send continuation chunk to Discord. Status: ${chunkRequest.status.toString()} ${chunkRequest.statusText}`;
}
}
return "Successfully sent and published message to Discord.";
};
+36 -1
View File
@@ -4,10 +4,13 @@
* @author Naomi Carrigan
*/
/* eslint-disable @typescript-eslint/naming-convention -- we are making raw API calls. */
/* eslint-disable no-await-in-loop -- Sequential chunk posting requires awaiting each request. */
import { chunkContent } from "../utils/chunkContent.js";
import type { AnnouncementType } from "../interfaces/announcementType.js";
const announcementCategoryId = 16;
const discourseLimit = 32_000;
const tags: Record<AnnouncementType, string> = {
community: "Community",
@@ -17,6 +20,7 @@ const tags: Record<AnnouncementType, string> = {
/**
* Posts an announcement to the NHCarrigan Discourse support forum.
* Sends overflow content as sequential replies if it exceeds the 32,000 character limit.
* @param title - The title of the announcement.
* @param content - The main body of the announcement in markdown.
* @param type - Whether the announcement is for a product, community, or company.
@@ -31,10 +35,12 @@ export const announceOnDiscourse = async(
return "Discourse API key is not set.";
}
const chunks = chunkContent(content, discourseLimit);
const response = await fetch("https://support.nhcarrigan.com/posts.json", {
body: JSON.stringify({
category: announcementCategoryId,
raw: content,
raw: chunks[0],
tags: [ tags[type] ],
title: title,
}),
@@ -50,5 +56,34 @@ export const announceOnDiscourse = async(
return `Failed to post to Discourse. Status: ${response.status.toString()} ${response.statusText}`;
}
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- Fetch does not accept generic.
const data = (await response.json()) as { topic_id?: number };
for (const chunk of chunks.slice(1)) {
if (data.topic_id === undefined) {
return "Failed to retrieve Discourse topic ID for continuation posts.";
}
const replyResponse = await fetch(
"https://support.nhcarrigan.com/posts.json",
{
body: JSON.stringify({
raw: chunk,
topic_id: data.topic_id,
}),
headers: {
"Api-Key": process.env.FORUM_API_KEY,
"Api-Username": "hikari",
"Content-Type": "application/json",
},
method: "POST",
},
);
if (!replyResponse.ok) {
return `Failed to post continuation chunk to Discourse. Status: ${replyResponse.status.toString()} ${replyResponse.statusText}`;
}
}
return "Successfully posted announcement to Discourse~! ✨";
};
+49 -1
View File
@@ -5,9 +5,13 @@
*/
/* eslint-disable @typescript-eslint/naming-convention -- we are making raw API calls. */
/* eslint-disable max-lines-per-function -- Big logic here. */
/* eslint-disable no-await-in-loop -- Sequential chunk posting requires awaiting each request. */
import { chunkContent } from "../utils/chunkContent.js";
import type { AnnouncementType } from "../interfaces/announcementType.js";
const redditLimit = 40_000;
const flairIds: Record<AnnouncementType, string> = {
community: "7a01a5a6-0f29-11ef-a0c4-c6fb085f7c8f",
company: "dd8057c0-9e30-11f0-b321-d683551dcb2b",
@@ -16,6 +20,7 @@ const flairIds: Record<AnnouncementType, string> = {
/**
* Posts an announcement to a specific subreddit as a self-post.
* Sends overflow content as nested replies if it exceeds the 40,000 character limit.
* @param title - The title of the announcement.
* @param content - The main body of the announcement.
* @param type - Whether the announcement is for a product or community.
@@ -34,6 +39,9 @@ export const announceOnReddit = async(
) {
return "Reddit credentials are not set.";
}
const chunks = chunkContent(content, redditLimit);
const tokenResponse = await fetch(
"https://www.reddit.com/api/v1/access_token",
{
@@ -71,7 +79,7 @@ export const announceOnReddit = async(
flair_text: type,
kind: "self",
sr: "nhcarrigan",
text: content,
text: chunks[0],
title: title,
}),
headers: {
@@ -85,6 +93,7 @@ export const announceOnReddit = async(
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- Fetch does not accept generic.
const redditData = (await redditPost.json()) as {
json: {
data?: { name?: string };
errors: Array<unknown>;
};
};
@@ -95,5 +104,44 @@ export const announceOnReddit = async(
)}`;
}
let parentName = redditData.json.data?.name;
for (const chunk of chunks.slice(1)) {
if (parentName === undefined) {
return "Failed to get Reddit post fullname for chaining replies.";
}
const commentResponse = await fetch(
"https://oauth.reddit.com/api/comment",
{
body: new URLSearchParams({
api_type: "json",
text: chunk,
thing_id: parentName,
}),
headers: {
"Authorization": `bearer ${tokenData.access_token}`,
"Content-Type": "application/x-www-form-urlencoded",
"User-Agent": "HikariBot/1.0 by nhcarrigan",
},
method: "POST",
},
);
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions -- Fetch does not accept generic.
const commentData = (await commentResponse.json()) as {
json: {
data?: { things?: Array<{ data?: { name?: string } }> };
errors: Array<unknown>;
};
};
if (commentData.json.errors.length > 0) {
return `Failed to post reply chunk to Reddit: ${JSON.stringify(commentData.json.errors)}`;
}
parentName = commentData.json.data?.things?.[0]?.data?.name;
}
return "Successfully posted announcement to Reddit~! ✨";
};
+76
View File
@@ -0,0 +1,76 @@
/**
* @copyright nhcarrigan
* @license Naomi's Public License
* @author Naomi Carrigan
*/
/**
 * Splits content into chunks that do not exceed the given character limit.
 * Splits preferably at paragraph boundaries, then line boundaries,
 * then hard-cuts at the limit as a last resort.
 * @param content - The content to chunk.
 * @param limit - The maximum character count per chunk. Must be positive.
 * @returns An array of content chunks, each at most `limit` characters.
 * @throws {RangeError} When `limit` is zero or negative, which would
 * otherwise send the hard-cut fallback into an infinite loop.
 */
export const chunkContent = (content: string, limit: number): string[] => {
  // Guard: a non-positive limit would loop forever in the hard-cut step
  // below (the index would never advance). This is reachable upstream when
  // an extremely long title shrinks the first-chunk budget past zero.
  if (limit <= 0) {
    throw new RangeError(
      `Chunk limit must be positive, received ${limit.toString()}.`,
    );
  }
  if (content.length <= limit) {
    return [ content ];
  }
  const chunks: string[] = [];
  const paragraphs = content.split("\n\n");
  // Accumulates text for the chunk currently being built.
  let current = "";
  for (const paragraph of paragraphs) {
    const separator = current.length > 0 ? "\n\n" : "";
    const combined = `${current}${separator}${paragraph}`;
    if (combined.length <= limit) {
      current = combined;
      continue;
    }
    // Current chunk is full — flush it before placing this paragraph.
    if (current.length > 0) {
      chunks.push(current);
      current = "";
    }
    if (paragraph.length <= limit) {
      current = paragraph;
      continue;
    }
    // Paragraph itself exceeds the limit — split by lines
    const lines = paragraph.split("\n");
    for (const line of lines) {
      const lineSeparator = current.length > 0 ? "\n" : "";
      const combinedLine = `${current}${lineSeparator}${line}`;
      if (combinedLine.length <= limit) {
        current = combinedLine;
        continue;
      }
      if (current.length > 0) {
        chunks.push(current);
        current = "";
      }
      if (line.length <= limit) {
        current = line;
        continue;
      }
      // Single line exceeds limit — hard-cut into limit-sized slices.
      // The final (short) slice becomes the new accumulator so following
      // lines can pack into the same chunk instead of forcing a tiny one.
      let index = 0;
      while (line.length - index > limit) {
        chunks.push(line.slice(index, index + limit));
        index += limit;
      }
      current = line.slice(index);
    }
  }
  if (current.length > 0) {
    chunks.push(current);
  }
  return chunks;
};