Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

threadcap

Package Overview
Dependencies
Maintainers
1
Versions
13
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

threadcap - npm Package Compare versions

Comparing version 0.1.5 to 0.1.6

745

cjs/main.js

@@ -0,1 +1,4 @@

// Returns true when the string contains at least one non-whitespace character.
function isNonEmpty(value) {
    const trimmed = value.trim();
    return trimmed.length > 0;
}
function isStringRecord(obj) {

@@ -10,160 +13,7 @@ return typeof obj === 'object' && obj !== null && !Array.isArray(obj) && obj.constructor === Object;

}
const MAX_LEVELS = 1000;
async function makeThreadcap(url, opts) {
const { cache, userAgent } = opts;
const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
const object = await findOrFetchActivityPubObject(url, new Date().toISOString(), fetcher, cache);
const { id, type } = object;
if (typeof type !== 'string')
throw new Error(`Unexpected type for object: ${JSON.stringify(object)}`);
if (!/^(Note|Article|Video|PodcastEpisode)$/.test(type))
throw new Error(`Unexpected type: ${type}`);
if (typeof id !== 'string')
throw new Error(`Unexpected id for object: ${JSON.stringify(object)}`);
return {
root: id,
nodes: {},
commenters: {}
};
// Returns true for integers >= 0; rejects floats, NaN, Infinity and negatives.
function isNonNegativeInteger(value) {
    if (!Number.isInteger(value)) return false;
    return value >= 0;
}
// Incrementally (re)fetches comments and replies for the thread, walking the
// reply tree breadth-first from startNode (or the root). Traversal stops early
// when maxNodes is exhausted or the keepGoing() predicate returns false.
async function updateThreadcap(threadcap, opts) {
const { userAgent, cache, updateTime, callbacks, maxLevels, maxNodes: maxNodesInput, startNode, keepGoing } = opts;
const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
// Clamp the requested depth to [0, 1000]; default is the 1000-level maximum.
const maxLevel = Math.min(Math.max(maxLevels === undefined ? 1000 : Math.round(maxLevels), 0), 1000);
const maxNodes = maxNodesInput === undefined ? undefined : Math.max(Math.round(maxNodesInput), 0);
if (startNode && !threadcap.nodes[startNode])
throw new Error(`Invalid start node: ${startNode}`);
// Nothing to do when either budget is zero.
if (maxLevel === 0)
return;
if (maxNodes === 0)
return;
// idsBylevel[level] holds the node ids queued for processing at that depth.
const idsBylevel = [
[
startNode || threadcap.root
]
];
let remaining = 1;
let processed = 0;
// Processes one level, queueing discovered replies into the next, then recurses.
const processLevel = async (level) => {
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'process-level',
phase: 'before',
level: level + 1
});
const nextLevel = level + 1;
for (const id of idsBylevel[level] || []) {
// Only fetch replies when the next level is still within the depth budget.
const processReplies = nextLevel < maxLevel;
const node = await processNode(id, processReplies, threadcap, updateTime, fetcher, cache, callbacks);
remaining--;
processed++;
if (maxNodes && processed >= maxNodes)
return;
if (keepGoing && !keepGoing())
return;
if (node.replies && nextLevel < maxLevel) {
if (!idsBylevel[nextLevel])
idsBylevel[nextLevel] = [];
idsBylevel[nextLevel].push(...node.replies);
remaining += node.replies.length;
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'nodes-remaining',
remaining
});
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'process-level',
phase: 'after',
level: level + 1
});
if (idsBylevel[nextLevel])
await processLevel(nextLevel);
};
await processLevel(0);
}
// Simple Map-backed cache of fetched responses, keyed by id (url).
// A cached entry is only returned when it was fetched strictly after `after`.
class InMemoryCache {
    constructor() {
        // id -> { response, fetched }
        this.map = new Map();
        // Optional hook invoked on every cache hit.
        this.onReturningCachedResponse = undefined;
    }
    get(id, after) {
        const entry = this.map.get(id);
        if (entry !== undefined) {
            const { response, fetched } = entry;
            if (response && fetched && fetched > after) {
                if (this.onReturningCachedResponse) {
                    this.onReturningCachedResponse(id, after, fetched, response);
                }
                return Promise.resolve(response);
            }
        }
        return Promise.resolve(undefined);
    }
    put(id, fetched, response) {
        this.map.set(id, { response, fetched });
        return Promise.resolve();
    }
}
// Default pacing policy: no wait while >= 100 calls remain; otherwise spread
// the remaining calls evenly across the time left until the limit resets.
function computeDefaultMillisToWait(input) {
    const { remaining, millisTillReset } = input;
    if (remaining >= 100)
        return 0;
    if (remaining > 0)
        return Math.round(millisTillReset / remaining);
    return millisTillReset;
}
// Wraps a fetcher so requests honor x-ratelimit-* response headers per host:
// when few calls remain, waits (via computeMillisToWait) before requesting.
function makeRateLimitedFetcher(fetcher, opts1 = {}) {
const { callbacks } = opts1;
const computeMillisToWait = opts1.computeMillisToWait || computeDefaultMillisToWait;
// Last observed rate-limit info, keyed by hostname.
const hostLimits = new Map();
return async (url, opts) => {
const hostname = new URL(url).hostname;
const limits = hostLimits.get(hostname);
if (limits) {
const { limit, remaining, reset } = limits;
const millisTillReset = new Date(reset).getTime() - Date.now();
const millisToWait = computeMillisToWait({
hostname,
limit,
remaining,
reset,
millisTillReset
});
if (millisToWait > 0) {
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'waiting-for-rate-limit',
hostname,
millisToWait,
millisTillReset,
limit,
remaining,
reset
});
await sleep(millisToWait);
}
}
const res = await fetcher(url, opts);
// Record the latest limits only when all three headers parse successfully.
const limit = tryParseInt(res.headers.get('x-ratelimit-limit') || '');
const remaining = tryParseInt(res.headers.get('x-ratelimit-remaining') || '');
const reset = tryParseIso8601(res.headers.get('x-ratelimit-reset') || '');
if (limit !== undefined && remaining !== undefined && reset !== undefined) {
hostLimits.set(hostname, {
limit,
remaining,
reset
});
}
return res;
};
}
const APPLICATION_ACTIVITY_JSON = 'application/activity+json';
async function findOrFetchActivityPubObject(url, after, fetcher, cache) {
const response = await findOrFetchActivityPubResponse(url, after, fetcher, cache);
async function findOrFetchJson(url, after, fetcher, cache, opts) {
const response = await findOrFetchTextResponse(url, after, fetcher, cache, opts);
const { status, headers, bodyText } = response;

@@ -177,10 +27,14 @@ if (status !== 200)

}
async function findOrFetchActivityPubResponse(url, after, fetcher, cache) {
async function findOrFetchTextResponse(url, after, fetcher, cache, opts) {
const existing = await cache.get(url, after);
if (existing)
return existing;
const { accept, authorization } = opts;
const headers = {
accept
};
if (authorization)
headers.authorization = authorization;
const res = await fetcher(url, {
headers: {
accept: APPLICATION_ACTIVITY_JSON
}
headers
});

@@ -197,62 +51,44 @@ const response = {

}
async function processNode(id, processReplies, threadcap, updateTime, fetcher, cache, callbacks) {
let node = threadcap.nodes[id];
if (!node) {
node = {};
threadcap.nodes[id] = node;
}
const updateComment = !node.commentAsof || node.commentAsof < updateTime;
if (updateComment) {
try {
node.comment = await fetchComment(id, updateTime, fetcher, cache, callbacks);
const { attributedTo } = node.comment;
const existingCommenter = threadcap.commenters[attributedTo];
if (!existingCommenter || existingCommenter.asof < updateTime) {
threadcap.commenters[attributedTo] = await fetchCommenter(attributedTo, updateTime, fetcher, cache);
}
node.commentError = undefined;
}
catch (e) {
node.comment = undefined;
node.commentError = `${e.stack || e}`;
}
node.commentAsof = updateTime;
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'node-processed',
nodeId: id,
part: 'comment',
updated: updateComment
// Strategy object wiring the generic threadcap engine to ActivityPub.
const ActivityPubProtocolImplementation = {
initThreadcap: initActivityPubThreadcap,
fetchComment: fetchActivityPubComment,
fetchCommenter: fetchActivityPubCommenter,
fetchReplies: fetchActivityPubReplies
};
async function findOrFetchActivityPubObject(url, after, fetcher, cache) {
return await findOrFetchJson(url, after, fetcher, cache, {
accept: 'application/activity+json'
});
if (processReplies) {
const updateReplies = !node.repliesAsof || node.repliesAsof < updateTime;
if (updateReplies) {
try {
node.replies = await fetchReplies(id, updateTime, fetcher, cache, callbacks);
node.repliesError = undefined;
}
catch (e) {
node.replies = undefined;
node.repliesError = `${e.stack || e}`;
}
node.repliesAsof = updateTime;
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'node-processed',
nodeId: id,
part: 'replies',
updated: updateReplies
});
}
return node;
}
async function fetchComment(id, updateTime, fetcher, cache, callbacks) {
// Fetches the root ActivityPub object at url, validates that it is a
// supported commentable type with a string id, and seeds an empty threadcap.
async function initActivityPubThreadcap(url, opts) {
    const { fetcher, cache } = opts;
    const object = await findOrFetchActivityPubObject(url, new Date().toISOString(), fetcher, cache);
    const { id, type } = object;
    if (typeof type !== 'string') {
        throw new Error(`Unexpected type for object: ${JSON.stringify(object)}`);
    }
    if (!/^(Note|Article|Video|PodcastEpisode)$/.test(type)) {
        throw new Error(`Unexpected type: ${type}`);
    }
    if (typeof id !== 'string') {
        throw new Error(`Unexpected id for object: ${JSON.stringify(object)}`);
    }
    return { protocol: 'activitypub', roots: [id], nodes: {}, commenters: {} };
}
// Fetches the ActivityPub object for id and converts it into a Comment.
async function fetchActivityPubComment(id, opts) {
    const { fetcher, cache, updateTime, callbacks } = opts;
    const fetched = await findOrFetchActivityPubObject(id, updateTime, fetcher, cache);
    return computeComment(fetched, id, callbacks);
}
async function fetchCommenter(attributedTo, updateTime, fetcher, cache) {
// Fetches the ActivityPub actor referenced by attributedTo and converts it
// into a Commenter stamped with updateTime.
async function fetchActivityPubCommenter(attributedTo, opts) {
    const { fetcher, cache, updateTime } = opts;
    const actor = await findOrFetchActivityPubObject(attributedTo, updateTime, fetcher, cache);
    return computeCommenter(actor, updateTime);
}
async function fetchReplies(id, updateTime, fetcher, cache, callbacks) {
async function fetchActivityPubReplies(id, opts) {
const { fetcher, cache, updateTime, callbacks } = opts;
const fetchedObject = await findOrFetchActivityPubObject(id, updateTime, fetcher, cache);

@@ -362,16 +198,2 @@ const object = unwrapActivityIfNecessary(fetchedObject, id, callbacks);

}
// Wraps a fetcher so every request carries the given user-agent header.
// Throws immediately when userAgent is blank after trimming.
function makeFetcherWithUserAgent(fetcher, userAgent) {
    const ua = userAgent.trim();
    if (ua.length === 0)
        throw new Error(`Expected non-blank user-agent`);
    return async (url, opts) => {
        const baseHeaders = ((opts === null || opts === void 0 ? void 0 : opts.headers)) || {};
        const headers = { ...baseHeaders, 'user-agent': ua };
        return await fetcher(url, { headers });
    };
}
function unwrapActivityIfNecessary(object, id, callbacks) {

@@ -554,2 +376,465 @@ if (object.type === 'Create' && isStringRecord(object.object)) {

}
// Protocol implementation for "lightning comments": one JSON document holds
// every comment; node and commenter ids are synthesized as url fragments
// (#comment-<id>, #commenter-<id>) pointing back into that document.
const LightningCommentsProtocolImplementation = {
async initThreadcap(url, opts) {
const { fetcher, cache } = opts;
const time = new Date().toISOString();
const comments = await findOrFetchLightningComments(url, time, fetcher, cache);
// Top-level comments (depth 0) become the thread roots.
const roots = comments.filter((v) => v.depth === 0).map((v) => computeUrlWithHash(url, `comment-${v.id}`));
return {
protocol: 'lightningcomments',
roots,
nodes: {},
commenters: {}
};
},
async fetchComment(id, opts) {
const { fetcher, cache, updateTime } = opts;
// Extract the comment id from the '#comment-...' fragment of the node id.
const m = /^#comment-(.*?)$/.exec(new URL(id).hash);
if (m) {
const [_, commentId] = m;
// Refetch the whole document (cached) and locate the one comment.
const comments = await findOrFetchLightningComments(computeUrlWithHash(id, ''), updateTime, fetcher, cache);
const comment = comments.find((v) => v.id === commentId);
if (!comment)
throw new Error(`Comment not found: ${commentId}`);
return {
attachments: [],
attributedTo: computeUrlWithHash(id, `commenter-${computeCommenterId(comment.sender)}`),
content: {
und: comment.message
},
published: comment.created
};
}
throw new Error(`fetchComment: unexpected id=${id}`);
},
async fetchCommenter(attributedTo, opts) {
const { fetcher, cache, updateTime } = opts;
const m = /^#commenter-(.*?)$/.exec(new URL(attributedTo).hash);
if (m) {
const [_, commenterId] = m;
const comments = await findOrFetchLightningComments(computeUrlWithHash(attributedTo, ''), updateTime, fetcher, cache);
const commenter = comments.map((v) => v.sender).find((v) => computeCommenterId(v) === commenterId);
if (!commenter)
throw new Error(`Commenter not found: ${commenterId}`);
return {
asof: updateTime,
name: `${commenter.name} from ${commenter.app}`
};
}
throw new Error(`fetchCommenter: unexpected attributedTo=${attributedTo}`);
},
async fetchReplies(id, opts) {
const { fetcher, cache, updateTime } = opts;
const m = /^#comment-(.*?)$/.exec(new URL(id).hash);
if (m) {
const [_, commentId] = m;
const url = computeUrlWithHash(id, '');
const comments = await findOrFetchLightningComments(url, updateTime, fetcher, cache);
const comment = comments.find((v) => v.id === commentId);
if (!comment)
throw new Error(`Comment not found: ${commentId}`);
// Children are raw comment ids; turn them back into fragment urls.
return comment.children.map((v) => computeUrlWithHash(url, `comment-${v}`));
}
throw new Error(`fetchReplies: unexpected id=${id}`);
}
};
// Fetches the lightning comments JSON document (cached) and validates its
// shape, returning the obj.data.comments array. Throws on any structural
// mismatch, including any individual malformed comment.
async function findOrFetchLightningComments(url, after, fetcher, cache) {
const obj = await findOrFetchJson(url, after, fetcher, cache, {
accept: 'application/json'
});
if (!isStringRecord(obj) || !isStringRecord(obj.data) || !Array.isArray(obj.data.comments))
throw new Error(`Unable to find obj.data.comments array: ${JSON.stringify(obj)}`);
return obj.data.comments.map((v, i) => {
if (!isValidLightningComment(v))
throw new Error(`Unexpected lightning comment at index ${i}: ${JSON.stringify(v)}`);
return v;
});
}
// Returns url with its fragment replaced by the given hash value
// (an empty hash removes the fragment entirely).
function computeUrlWithHash(url, hash) {
    const result = new URL(url);
    result.hash = hash;
    return result.toString();
}
// Stable commenter id: the sender's id, or a name-derived fallback when null.
function computeCommenterId(sender) {
    if (sender.id === null) {
        return `null-${sender.name}`;
    }
    return sender.id;
}
// Structural validation of one lightning comment payload: checks id, message,
// parent (string or null), children, depth, created timestamp and sender.
function isValidLightningComment(obj) {
    if (!isStringRecord(obj)) return false;
    if (typeof obj.id !== 'string' || !isNonEmpty(obj.id)) return false;
    if (typeof obj.message !== 'string' || !isNonEmpty(obj.message)) return false;
    const parentOk = (typeof obj.parent === 'string' && isNonEmpty(obj.parent)) || obj.parent === null;
    if (!parentOk) return false;
    if (!Array.isArray(obj.children)) return false;
    if (!obj.children.every((v) => typeof v === 'string' && isNonEmpty(v))) return false;
    if (typeof obj.depth !== 'number' || !isNonNegativeInteger(obj.depth)) return false;
    if (typeof obj.created !== 'string' || !isValidIso8601(obj.created)) return false;
    return isValidLightningSender(obj.sender);
}
// Structural validation of a lightning comment sender: non-empty app and
// name, with an id that is either null or a non-empty string.
function isValidLightningSender(obj) {
    if (!isStringRecord(obj)) return false;
    if (typeof obj.app !== 'string' || !isNonEmpty(obj.app)) return false;
    const idOk = obj.id === null || (typeof obj.id === 'string' && isNonEmpty(obj.id));
    return idOk && typeof obj.name === 'string' && isNonEmpty(obj.name);
}
// Protocol implementation for Twitter threads via the v2 API.
// Node ids are api.twitter.com tweet-lookup urls; commenter ids are
// api.twitter.com user-lookup urls.
const TwitterProtocolImplementation = {
    async initThreadcap(url, opts) {
        // Only canonical twitter.com status urls are accepted as roots.
        const { hostname, pathname } = new URL(url);
        const m = /^\/.*?\/status\/(\d+)$/.exec(pathname);
        if (hostname !== 'twitter.com' || !m)
            throw new Error(`Unexpected tweet url: ${url}`);
        const [_, id] = m;
        const tweetApiUrl = `https://api.twitter.com/2/tweets/${id}`;
        const obj = await findOrFetchTwitter(tweetApiUrl, new Date().toISOString(), opts);
        if (DEBUG)
            console.log(JSON.stringify(obj, undefined, 2));
        return {
            protocol: 'twitter',
            roots: [
                tweetApiUrl
            ],
            nodes: {},
            commenters: {}
        };
    },
    async fetchComment(id, opts) {
        const { updateTime } = opts;
        const url = new URL(id);
        url.searchParams.set('tweet.fields', 'author_id,lang,created_at');
        const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
        if (DEBUG)
            console.log(JSON.stringify(obj, undefined, 2));
        const tweetId = obj.data.id;
        const text = obj.data.text;
        const authorId = obj.data.author_id;
        const lang = obj.data.lang;
        const createdAt = obj.data.created_at;
        // Content is keyed by the language tag reported by the API.
        const content = {};
        content[lang] = text;
        const tweetUrl = `https://twitter.com/i/web/status/${tweetId}`;
        return {
            attachments: [],
            attributedTo: `https://api.twitter.com/2/users/${authorId}`,
            content,
            published: createdAt,
            url: tweetUrl
        };
    },
    async fetchCommenter(attributedTo, opts) {
        const { updateTime } = opts;
        const url = new URL(attributedTo);
        url.searchParams.set('user.fields', 'url,profile_image_url');
        const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
        if (DEBUG)
            console.log('fetchCommenter', JSON.stringify(obj, undefined, 2));
        const name = obj.data.name;
        const fqUsername = '@' + obj.data.username;
        const userUrl = `https://twitter.com/${obj.data.username}`;
        const iconUrl = obj.data.profile_image_url;
        // Guess the media type from the url extension; undefined when unknown.
        const iconUrlLower = (iconUrl || '').toLowerCase();
        const iconMediaType = iconUrlLower.endsWith('.jpg') ? 'image/jpeg' : iconUrlLower.endsWith('.png') ? 'image/png' : undefined;
        const icon = iconUrl ? {
            url: iconUrl,
            mediaType: iconMediaType
        } : undefined;
        return {
            asof: updateTime,
            name,
            fqUsername,
            url: userUrl,
            icon
        };
    },
    async fetchReplies(id, opts) {
        const m = /^https:\/\/api\.twitter\.com\/2\/tweets\/(.*?)$/.exec(id);
        if (!m)
            throw new Error(`Unexpected tweet id: ${id}`);
        const [_, tweetId] = m;
        const convo = await findOrFetchConversation(tweetId, opts);
        // Fix: the Twitter v2 API omits `referenced_tweets` entirely for tweets
        // that reference nothing (e.g. the conversation root), so guard before
        // calling .some() to avoid a TypeError.
        return Object.values(convo.tweets)
            .filter((v) => (v.referenced_tweets ?? []).some((w) => w.type === 'replied_to' && w.id === tweetId))
            .map((v) => `https://api.twitter.com/2/tweets/${v.id}`);
    }
};
const DEBUG = false;
// Cached JSON fetch against the Twitter API, authenticated with the caller's
// bearer token.
async function findOrFetchTwitter(url, after, opts) {
    const { fetcher, cache, bearerToken } = opts;
    return await findOrFetchJson(url, after, fetcher, cache, {
        accept: 'application/json',
        authorization: `Bearer ${bearerToken}`
    });
}
// Loads the full recent-search conversation containing tweetId, keyed by
// tweet id. Memoized in opts.state so one update fetches it only once.
async function findOrFetchConversation(tweetId, opts) {
const { updateTime, state } = opts;
let { conversation } = state;
if (!conversation) {
const conversationId = await findOrFetchConversationId(tweetId, opts);
const url = new URL('https://api.twitter.com/2/tweets/search/recent');
url.searchParams.set('query', `conversation_id:${conversationId}`);
url.searchParams.set('expansions', `referenced_tweets.id`);
url.searchParams.set('tweet.fields', `author_id,lang,created_at`);
const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
// Index every returned tweet by id for fast reply lookups.
const tweets = {};
for (const tweetObj of obj.data) {
const tweet = tweetObj;
tweets[tweet.id] = tweet;
}
conversation = {
tweets
};
// Cache on state so subsequent nodes in this update reuse the payload.
state.conversation = conversation;
}
return conversation;
}
// Resolves the conversation_id for a tweet via a tweet lookup, memoized in
// opts.state for the duration of the update. Throws if the payload lacks it.
async function findOrFetchConversationId(tweetId, opts) {
const { updateTime, state } = opts;
let { conversationId } = state;
if (typeof conversationId === 'string')
return conversationId;
const url = new URL(`https://api.twitter.com/2/tweets/${tweetId}`);
url.searchParams.set('tweet.fields', 'conversation_id');
const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
conversationId = obj.data.conversation_id;
if (typeof conversationId !== 'string')
throw new Error(`Unexpected conversationId in payload: ${JSON.stringify(obj, undefined, 2)}`);
state.conversationId = conversationId;
return conversationId;
}
// Returns true for the protocol identifiers threadcap understands.
function isValidProtocol(protocol) {
    return ['activitypub', 'lightningcomments', 'twitter'].includes(protocol);
}
const MAX_LEVELS = 1000;
// Creates a new threadcap for url: wraps the fetcher with the required
// user-agent, resolves the protocol implementation (default: activitypub),
// and asks it to initialize the root structure.
async function makeThreadcap(url, opts) {
const { cache, userAgent, protocol, bearerToken } = opts;
const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
const implementation = computeProtocolImplementation(protocol);
return await implementation.initThreadcap(url, {
fetcher,
cache,
bearerToken
});
}
// Incrementally (re)fetches comments and replies for the thread, walking the
// reply tree breadth-first from startNode (or all roots). Traversal stops
// early when maxNodes is exhausted or keepGoing() returns false. Per-protocol
// behavior is delegated to the implementation resolved from threadcap.protocol.
async function updateThreadcap(threadcap, opts) {
const { userAgent, cache, updateTime, callbacks, maxLevels, maxNodes: maxNodesInput, startNode, keepGoing, bearerToken } = opts;
const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
// Clamp the requested depth to [0, 1000]; default is the 1000-level maximum.
const maxLevel = Math.min(Math.max(maxLevels === undefined ? 1000 : Math.round(maxLevels), 0), 1000);
const maxNodes = maxNodesInput === undefined ? undefined : Math.max(Math.round(maxNodesInput), 0);
if (startNode && !threadcap.nodes[startNode])
throw new Error(`Invalid start node: ${startNode}`);
if (maxLevel === 0)
return;
if (maxNodes === 0)
return;
const implementation = computeProtocolImplementation(threadcap.protocol);
// Shared scratch space for the implementation (e.g. memoized conversations).
const state = {};
// idsBylevel[level] holds the node ids queued for processing at that depth.
const idsBylevel = [
startNode ? [
startNode
] : [
...threadcap.roots
]
];
let remaining = 1;
let processed = 0;
// Processes one level, queueing discovered replies into the next, then recurses.
const processLevel = async (level) => {
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'process-level',
phase: 'before',
level: level + 1
});
const nextLevel = level + 1;
for (const id of idsBylevel[level] || []) {
// Only fetch replies when the next level is still within the depth budget.
const processReplies = nextLevel < maxLevel;
const node = await processNode(id, processReplies, threadcap, implementation, {
updateTime,
callbacks,
state,
fetcher,
cache,
bearerToken
});
remaining--;
processed++;
if (maxNodes && processed >= maxNodes)
return;
if (keepGoing && !keepGoing())
return;
if (node.replies && nextLevel < maxLevel) {
if (!idsBylevel[nextLevel])
idsBylevel[nextLevel] = [];
idsBylevel[nextLevel].push(...node.replies);
remaining += node.replies.length;
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'nodes-remaining',
remaining
});
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'process-level',
phase: 'after',
level: level + 1
});
if (idsBylevel[nextLevel])
await processLevel(nextLevel);
};
await processLevel(0);
}
// Map-backed in-memory implementation of the response cache contract.
// Entries are only served when fetched strictly after the `after` timestamp.
class InMemoryCache {
    constructor() {
        // Backing store: id -> { response, fetched }
        this.map = new Map();
        // Optional observer called on every cache hit.
        this.onReturningCachedResponse = undefined;
    }
    get(id, after) {
        const entry = this.map.get(id) || {};
        const { response, fetched } = entry;
        if (!response || !fetched || fetched <= after) {
            return Promise.resolve(undefined);
        }
        if (this.onReturningCachedResponse) {
            this.onReturningCachedResponse(id, after, fetched, response);
        }
        return Promise.resolve(response);
    }
    put(id, fetched, response) {
        this.map.set(id, { response, fetched });
        return Promise.resolve();
    }
}
// Default pacing policy for makeRateLimitedFetcher: wait only when fewer than
// 100 calls remain, spreading the remainder evenly over the reset window.
function computeDefaultMillisToWait(input) {
    const { remaining, millisTillReset } = input;
    const plentyLeft = remaining >= 100;
    if (plentyLeft) {
        return 0;
    }
    return remaining > 0 ? Math.round(millisTillReset / remaining) : millisTillReset;
}
// Wraps a fetcher so requests honor rate-limit response headers. Limits are
// tracked per "endpoint": for api.twitter.com the path with ids collapsed
// (Twitter limits per endpoint, x-rate-limit-* headers, epoch-seconds reset);
// for all other hosts, the hostname (x-ratelimit-* headers, ISO-8601 reset).
function makeRateLimitedFetcher(fetcher, opts1 = {}) {
const { callbacks } = opts1;
const computeMillisToWait = opts1.computeMillisToWait || computeDefaultMillisToWait;
// Last observed rate-limit info, keyed by endpoint.
const endpointLimits = new Map();
return async (url, opts) => {
const { hostname, pathname } = new URL(url);
const twitterEndpoint = computeTwitterEndpoint(hostname, pathname);
const endpoint = twitterEndpoint || hostname;
const limits = endpointLimits.get(endpoint);
if (limits) {
const { limit, remaining, reset } = limits;
const millisTillReset = new Date(reset).getTime() - Date.now();
const millisToWait = computeMillisToWait({
endpoint,
limit,
remaining,
reset,
millisTillReset
});
if (millisToWait > 0) {
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'waiting-for-rate-limit',
endpoint,
millisToWait,
millisTillReset,
limit,
remaining,
reset
});
await sleep(millisToWait);
}
}
const res = await fetcher(url, opts);
// Twitter uses differently-named headers and epoch-seconds reset values.
const limitHeader = twitterEndpoint ? 'x-rate-limit-limit' : 'x-ratelimit-limit';
const remainingHeader = twitterEndpoint ? 'x-rate-limit-remaining' : 'x-ratelimit-remaining';
const resetHeader = twitterEndpoint ? 'x-rate-limit-reset' : 'x-ratelimit-reset';
const limit = tryParseInt(res.headers.get(limitHeader) || '');
const remaining = tryParseInt(res.headers.get(remainingHeader) || '');
const resetStr = res.headers.get(resetHeader) || '';
const reset = twitterEndpoint ? tryParseEpochSecondsAsIso8601(resetStr) : tryParseIso8601(resetStr);
// Record the latest limits only when all three headers parse successfully.
if (limit !== undefined && remaining !== undefined && reset !== undefined) {
endpointLimits.set(endpoint, {
limit,
remaining,
reset
});
}
return res;
};
}
// Collapses long numeric ids in Twitter API paths to ':id' so rate limits are
// tracked per endpoint; returns undefined for non-Twitter hosts.
function computeTwitterEndpoint(hostname, pathname) {
    if (hostname !== 'api.twitter.com') {
        return undefined;
    }
    return pathname.replaceAll(/\d{4,}/g, ':id');
}
// Returns a fetcher that always sends the given user-agent header, merged
// over any caller-supplied headers. Throws if userAgent is blank.
function makeFetcherWithUserAgent(fetcher, userAgent) {
    const agent = userAgent.trim();
    if (agent.length === 0)
        throw new Error(`Expected non-blank user-agent`);
    return async (url, opts) => {
        const existing = ((opts === null || opts === void 0 ? void 0 : opts.headers)) || {};
        const headers = Object.assign({}, existing, { 'user-agent': agent });
        return await fetcher(url, { headers });
    };
}
// Maps a protocol id to its implementation. An undefined protocol defaults to
// ActivityPub; any other unknown value (including null) throws.
function computeProtocolImplementation(protocol) {
    switch (protocol) {
        case undefined:
        case 'activitypub':
            return ActivityPubProtocolImplementation;
        case 'lightningcomments':
            return LightningCommentsProtocolImplementation;
        case 'twitter':
            return TwitterProtocolImplementation;
        default:
            throw new Error(`Unsupported protocol: ${protocol}`);
    }
}
// Fetches/refreshes a single node's comment (and its commenter) and, when
// processReplies is true, its reply ids — delegating fetches to the protocol
// implementation. Errors are captured per-part on the node (commentError /
// repliesError) rather than aborting the whole update.
async function processNode(id, processReplies, threadcap, implementation, opts) {
const { updateTime, callbacks } = opts;
let node = threadcap.nodes[id];
if (!node) {
node = {};
threadcap.nodes[id] = node;
}
// Refetch the comment only if it was last updated before this updateTime.
const updateComment = !node.commentAsof || node.commentAsof < updateTime;
if (updateComment) {
try {
node.comment = await implementation.fetchComment(id, opts);
const { attributedTo } = node.comment;
// Refresh the commenter record at most once per updateTime.
const existingCommenter = threadcap.commenters[attributedTo];
if (!existingCommenter || existingCommenter.asof < updateTime) {
threadcap.commenters[attributedTo] = await implementation.fetchCommenter(attributedTo, opts);
}
node.commentError = undefined;
}
catch (e) {
// Record the failure on the node and keep going.
node.comment = undefined;
node.commentError = `${e.stack || e}`;
}
node.commentAsof = updateTime;
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'node-processed',
nodeId: id,
part: 'comment',
updated: updateComment
});
if (processReplies) {
// Same staleness check for the reply list.
const updateReplies = !node.repliesAsof || node.repliesAsof < updateTime;
if (updateReplies) {
try {
node.replies = await implementation.fetchReplies(id, opts);
node.repliesError = undefined;
}
catch (e) {
node.replies = undefined;
node.repliesError = `${e.stack || e}`;
}
node.repliesAsof = updateTime;
}
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'node-processed',
nodeId: id,
part: 'replies',
updated: updateReplies
});
}
return node;
}
// Promise-based delay helper: resolves (with undefined) after ms milliseconds.
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
function tryParseInt(value) {

@@ -566,5 +851,7 @@ try {

}
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
// Parses an epoch-seconds string (e.g. Twitter's x-rate-limit-reset header)
// into an ISO-8601 timestamp; returns undefined unless it is a positive integer.
function tryParseEpochSecondsAsIso8601(value) {
const seconds = tryParseInt(value);
return seconds && seconds > 0 ? new Date(seconds * 1000).toISOString() : undefined;
}
exports.isValidProtocol = isValidProtocol;
exports.MAX_LEVELS = MAX_LEVELS;

@@ -571,0 +858,0 @@ exports.makeThreadcap = makeThreadcap;

@@ -1,5 +0,4 @@

// deno-fmt-ignore-file
// deno-lint-ignore-file
// This code was bundled using `deno bundle` and it's not recommended to edit it manually
// True when the string has any non-whitespace content.
function isNonEmpty(value) {
    return value.trim() !== '';
}
function isStringRecord(obj) {

@@ -14,149 +13,27 @@ return typeof obj === 'object' && obj !== null && !Array.isArray(obj) && obj.constructor === Object;

}
const MAX_LEVELS = 1000;
async function makeThreadcap(url, opts) {
const { cache , userAgent } = opts;
const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
const object = await findOrFetchActivityPubObject(url, new Date().toISOString(), fetcher, cache);
const { id , type } = object;
if (typeof type !== 'string') throw new Error(`Unexpected type for object: ${JSON.stringify(object)}`);
if (!/^(Note|Article|Video|PodcastEpisode)$/.test(type)) throw new Error(`Unexpected type: ${type}`);
if (typeof id !== 'string') throw new Error(`Unexpected id for object: ${JSON.stringify(object)}`);
return {
root: id,
nodes: {},
commenters: {}
};
// True only for whole numbers at or above zero.
function isNonNegativeInteger(value) {
    return Number.isInteger(value) && value > -1;
}
// Incrementally (re)fetches comments and replies for the thread, walking the
// reply tree breadth-first from startNode (or the root). Traversal stops
// early when maxNodes is exhausted or keepGoing() returns false.
async function updateThreadcap(threadcap, opts) {
const { userAgent , cache , updateTime , callbacks , maxLevels , maxNodes: maxNodesInput , startNode , keepGoing } = opts;
const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
// Clamp the requested depth to [0, 1000]; default is the 1000-level maximum.
const maxLevel = Math.min(Math.max(maxLevels === undefined ? 1000 : Math.round(maxLevels), 0), 1000);
const maxNodes = maxNodesInput === undefined ? undefined : Math.max(Math.round(maxNodesInput), 0);
if (startNode && !threadcap.nodes[startNode]) throw new Error(`Invalid start node: ${startNode}`);
// Nothing to do when either budget is zero.
if (maxLevel === 0) return;
if (maxNodes === 0) return;
// idsBylevel[level] holds the node ids queued for processing at that depth.
const idsBylevel = [
[
startNode || threadcap.root
]
];
let remaining = 1;
let processed = 0;
// Processes one level, queueing discovered replies into the next, then recurses.
const processLevel = async (level)=>{
callbacks?.onEvent({
kind: 'process-level',
phase: 'before',
level: level + 1
});
const nextLevel = level + 1;
for (const id of idsBylevel[level] || []){
// Only fetch replies when the next level is still within the depth budget.
const processReplies = nextLevel < maxLevel;
const node = await processNode(id, processReplies, threadcap, updateTime, fetcher, cache, callbacks);
remaining--;
processed++;
if (maxNodes && processed >= maxNodes) return;
if (keepGoing && !keepGoing()) return;
if (node.replies && nextLevel < maxLevel) {
if (!idsBylevel[nextLevel]) idsBylevel[nextLevel] = [];
idsBylevel[nextLevel].push(...node.replies);
remaining += node.replies.length;
}
callbacks?.onEvent({
kind: 'nodes-remaining',
remaining
});
}
callbacks?.onEvent({
kind: 'process-level',
phase: 'after',
level: level + 1
});
if (idsBylevel[nextLevel]) await processLevel(nextLevel);
};
await processLevel(0);
}
// In-memory implementation of the response cache contract, backed by a Map.
// Entries are only served when fetched strictly after the `after` timestamp.
class InMemoryCache {
    constructor() {
        // Backing store: id -> { response, fetched }
        this.map = new Map();
        // Optional hook invoked on every cache hit.
        this.onReturningCachedResponse = undefined;
    }
    get(id, after) {
        const entry = this.map.get(id);
        const response = entry === undefined ? undefined : entry.response;
        const fetched = entry === undefined ? undefined : entry.fetched;
        if (response && fetched && fetched > after) {
            if (this.onReturningCachedResponse) {
                this.onReturningCachedResponse(id, after, fetched, response);
            }
            return Promise.resolve(response);
        }
        return Promise.resolve(undefined);
    }
    put(id, fetched, response) {
        this.map.set(id, { response, fetched });
        return Promise.resolve();
    }
}
// Default pacing: skip waiting while >= 100 calls remain; otherwise divide
// the time until reset across the remaining calls.
function computeDefaultMillisToWait(input) {
    const { remaining, millisTillReset } = input;
    if (remaining >= 100) {
        return 0;
    }
    if (remaining > 0) {
        return Math.round(millisTillReset / remaining);
    }
    return millisTillReset;
}
// Wraps a fetcher so requests honor x-ratelimit-* response headers per host:
// when few calls remain, waits (via computeMillisToWait) before requesting.
function makeRateLimitedFetcher(fetcher, opts1 = {}) {
const { callbacks } = opts1;
const computeMillisToWait = opts1.computeMillisToWait || computeDefaultMillisToWait;
// Last observed rate-limit info, keyed by hostname.
const hostLimits = new Map();
return async (url, opts)=>{
const hostname = new URL(url).hostname;
const limits = hostLimits.get(hostname);
if (limits) {
const { limit , remaining , reset } = limits;
const millisTillReset = new Date(reset).getTime() - Date.now();
const millisToWait = computeMillisToWait({
hostname,
limit,
remaining,
reset,
millisTillReset
});
if (millisToWait > 0) {
callbacks?.onEvent({
kind: 'waiting-for-rate-limit',
hostname,
millisToWait,
millisTillReset,
limit,
remaining,
reset
});
await sleep(millisToWait);
}
}
const res = await fetcher(url, opts);
// Record the latest limits only when all three headers parse successfully.
const limit = tryParseInt(res.headers.get('x-ratelimit-limit') || '');
const remaining = tryParseInt(res.headers.get('x-ratelimit-remaining') || '');
const reset = tryParseIso8601(res.headers.get('x-ratelimit-reset') || '');
if (limit !== undefined && remaining !== undefined && reset !== undefined) {
hostLimits.set(hostname, {
limit,
remaining,
reset
});
}
return res;
};
}
const APPLICATION_ACTIVITY_JSON = 'application/activity+json';
async function findOrFetchActivityPubObject(url, after, fetcher, cache) {
const response = await findOrFetchActivityPubResponse(url, after, fetcher, cache);
const { status , headers , bodyText } = response;
if (status !== 200) throw new Error(`Expected 200 response for ${url}, found ${status} body=${bodyText}`);
async function findOrFetchJson(url, after, fetcher, cache, opts) {
const response = await findOrFetchTextResponse(url, after, fetcher, cache, opts);
const { status, headers, bodyText } = response;
if (status !== 200)
throw new Error(`Expected 200 response for ${url}, found ${status} body=${bodyText}`);
const contentType = headers['content-type'] || '<none>';
if (!contentType.toLowerCase().includes('json')) throw new Error(`Expected json response for ${url}, found ${contentType} body=${bodyText}`);
if (!contentType.toLowerCase().includes('json'))
throw new Error(`Expected json response for ${url}, found ${contentType} body=${bodyText}`);
return JSON.parse(bodyText);
}
async function findOrFetchActivityPubResponse(url, after, fetcher, cache) {
async function findOrFetchTextResponse(url, after, fetcher, cache, opts) {
const existing = await cache.get(url, after);
if (existing) return existing;
if (existing)
return existing;
const { accept, authorization } = opts;
const headers = {
accept
};
if (authorization)
headers.authorization = authorization;
const res = await fetcher(url, {
headers: {
accept: APPLICATION_ACTIVITY_JSON
}
headers
});

@@ -173,60 +50,44 @@ const response = {

}
async function processNode(id, processReplies, threadcap, updateTime, fetcher, cache, callbacks) {
let node = threadcap.nodes[id];
if (!node) {
node = {};
threadcap.nodes[id] = node;
}
const updateComment = !node.commentAsof || node.commentAsof < updateTime;
if (updateComment) {
try {
node.comment = await fetchComment(id, updateTime, fetcher, cache, callbacks);
const { attributedTo } = node.comment;
const existingCommenter = threadcap.commenters[attributedTo];
if (!existingCommenter || existingCommenter.asof < updateTime) {
threadcap.commenters[attributedTo] = await fetchCommenter(attributedTo, updateTime, fetcher, cache);
}
node.commentError = undefined;
} catch (e) {
node.comment = undefined;
node.commentError = `${e.stack || e}`;
}
node.commentAsof = updateTime;
}
callbacks?.onEvent({
kind: 'node-processed',
nodeId: id,
part: 'comment',
updated: updateComment
// Wires the ActivityPub free functions below into the common protocol-implementation interface.
const ActivityPubProtocolImplementation = {
    initThreadcap: initActivityPubThreadcap,
    fetchComment: fetchActivityPubComment,
    fetchCommenter: fetchActivityPubCommenter,
    fetchReplies: fetchActivityPubReplies
};
async function findOrFetchActivityPubObject(url, after, fetcher, cache) {
return await findOrFetchJson(url, after, fetcher, cache, {
accept: 'application/activity+json'
});
if (processReplies) {
const updateReplies = !node.repliesAsof || node.repliesAsof < updateTime;
if (updateReplies) {
try {
node.replies = await fetchReplies(id, updateTime, fetcher, cache, callbacks);
node.repliesError = undefined;
} catch (e) {
node.replies = undefined;
node.repliesError = `${e.stack || e}`;
}
node.repliesAsof = updateTime;
}
callbacks?.onEvent({
kind: 'node-processed',
nodeId: id,
part: 'replies',
updated: updateReplies
});
}
return node;
}
async function fetchComment(id, updateTime, fetcher, cache, callbacks) {
// Creates a new, empty threadcap for an ActivityPub object url.
// Only Note/Article/Video/PodcastEpisode objects with a string id are accepted as roots.
async function initActivityPubThreadcap(url, opts) {
    const { fetcher, cache } = opts;
    const object = await findOrFetchActivityPubObject(url, new Date().toISOString(), fetcher, cache);
    const { id, type } = object;
    if (typeof type !== 'string')
        throw new Error(`Unexpected type for object: ${JSON.stringify(object)}`);
    if (!/^(Note|Article|Video|PodcastEpisode)$/.test(type))
        throw new Error(`Unexpected type: ${type}`);
    if (typeof id !== 'string')
        throw new Error(`Unexpected id for object: ${JSON.stringify(object)}`);
    return {
        protocol: 'activitypub',
        roots: [
            id
        ],
        nodes: {},
        commenters: {}
    };
}
// Fetches the ActivityPub object for a node id and converts it to the common Comment shape.
async function fetchActivityPubComment(id, opts) {
    const { fetcher, cache, updateTime, callbacks } = opts;
    const object = await findOrFetchActivityPubObject(id, updateTime, fetcher, cache);
    return computeComment(object, id, callbacks);
}
async function fetchCommenter(attributedTo, updateTime, fetcher, cache) {
// Fetches the ActivityPub actor object for an attribution and converts it to the common Commenter shape.
async function fetchActivityPubCommenter(attributedTo, opts) {
    const { fetcher, cache, updateTime } = opts;
    const object = await findOrFetchActivityPubObject(attributedTo, updateTime, fetcher, cache);
    return computeCommenter(object, updateTime);
}
async function fetchReplies(id, updateTime, fetcher, cache, callbacks) {
async function fetchActivityPubReplies(id, opts) {
const { fetcher, cache, updateTime, callbacks } = opts;
const fetchedObject = await findOrFetchActivityPubObject(id, updateTime, fetcher, cache);

@@ -237,3 +98,3 @@ const object = unwrapActivityIfNecessary(fetchedObject, id, callbacks);

const message = object.type === 'PodcastEpisode' ? `No 'comments' found on PodcastEpisode object` : `No 'replies' found on object`;
callbacks?.onEvent({
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'warning',

@@ -253,8 +114,11 @@ url: id,

return await collectRepliesFromOrderedCollection(obj, updateTime, id, fetcher, cache, callbacks, fetched);
} else {
}
else {
throw new Error(`Expected 'replies' to point to an OrderedCollection, found ${JSON.stringify(obj)}`);
}
} else if (replies.first) {
}
else if (replies.first) {
if (typeof replies.first === 'object' && replies.first.type === 'CollectionPage') {
if (!replies.first.items && !replies.first.next) throw new Error(`Expected 'replies.first.items' or 'replies.first.next' to be present, found ${JSON.stringify(replies.first)}`);
if (!replies.first.items && !replies.first.next)
throw new Error(`Expected 'replies.first.items' or 'replies.first.next' to be present, found ${JSON.stringify(replies.first)}`);
if (Array.isArray(replies.first.items) && replies.first.items.length > 0) {

@@ -266,3 +130,4 @@ collectRepliesFromItems(replies.first.items, rt, id, id, callbacks);

rt.push(...await collectRepliesFromPages(replies.first.next, updateTime, id, fetcher, cache, callbacks, fetched));
} else {
}
else {
throw new Error(`Expected 'replies.first.next' to be a string, found ${JSON.stringify(replies.first.next)}`);

@@ -272,12 +137,17 @@ }

return rt;
} else {
}
else {
throw new Error(`Expected 'replies.first.items' array, or 'replies.first.next' string, found ${JSON.stringify(replies.first)}`);
}
} else if (Array.isArray(replies)) {
if (replies.length > 0) throw new Error(`Expected 'replies' array to be empty, found ${JSON.stringify(replies)}`);
}
else if (Array.isArray(replies)) {
if (replies.length > 0)
throw new Error(`Expected 'replies' array to be empty, found ${JSON.stringify(replies)}`);
return [];
} else if (Array.isArray(replies.items)) {
}
else if (Array.isArray(replies.items)) {
collectRepliesFromItems(replies.items, rt, id, id, callbacks);
return rt;
} else {
}
else {
throw new Error(`Expected 'replies' to be a string, array or object with 'first' or 'items', found ${JSON.stringify(replies)}`);

@@ -287,3 +157,4 @@ }

async function collectRepliesFromOrderedCollection(orderedCollection, after, nodeId, fetcher, cache, callbacks, fetched) {
if ((orderedCollection.items?.length || 0) > 0 || (orderedCollection.orderedItems?.length || 0) > 0) {
var _a, _b;
if ((((_a = orderedCollection.items) === null || _a === void 0 ? void 0 : _a.length) || 0) > 0 || (((_b = orderedCollection.orderedItems) === null || _b === void 0 ? void 0 : _b.length) || 0) > 0) {
throw new Error(`Expected OrderedCollection 'items'/'orderedItems' to be empty, found ${JSON.stringify(orderedCollection)}`);

@@ -293,5 +164,7 @@ }

return [];
} else if (typeof orderedCollection.first === 'string') {
}
else if (typeof orderedCollection.first === 'string') {
return await collectRepliesFromPages(orderedCollection.first, after, nodeId, fetcher, cache, callbacks, fetched);
} else {
}
else {
throw new Error(`Expected OrderedCollection 'first' to be a string, found ${JSON.stringify(orderedCollection)}`);

@@ -303,3 +176,3 @@ }

let page = await findOrFetchActivityPubObject(url, after, fetcher, cache);
while(true){
while (true) {
if (page.type !== 'CollectionPage' && page.type !== 'OrderedCollectionPage') {

@@ -309,15 +182,20 @@ throw new Error(`Expected page 'type' of CollectionPage or OrderedCollectionPage, found ${JSON.stringify(page)}`);

if (page.items) {
if (!Array.isArray(page.items)) throw new Error(`Expected page 'items' to be an array, found ${JSON.stringify(page)}`);
if (!Array.isArray(page.items))
throw new Error(`Expected page 'items' to be an array, found ${JSON.stringify(page)}`);
collectRepliesFromItems(page.items, replies, nodeId, url, callbacks);
}
if (page.type === 'OrderedCollectionPage' && page.orderedItems) {
if (!Array.isArray(page.orderedItems)) throw new Error(`Expected page 'orderedItems' to be an array, found ${JSON.stringify(page)}`);
if (!Array.isArray(page.orderedItems))
throw new Error(`Expected page 'orderedItems' to be an array, found ${JSON.stringify(page)}`);
collectRepliesFromItems(page.orderedItems, replies, nodeId, url, callbacks);
}
if (page.next) {
if (typeof page.next !== 'string') throw new Error(`Expected page 'next' to be a string, found ${JSON.stringify(page)}`);
if (fetched.has(page.next)) return replies;
if (typeof page.next !== 'string')
throw new Error(`Expected page 'next' to be a string, found ${JSON.stringify(page)}`);
if (fetched.has(page.next))
return replies;
page = await findOrFetchActivityPubObject(page.next, after, fetcher, cache);
fetched.add(page.next);
} else {
}
else {
return replies;

@@ -327,18 +205,5 @@ }

}
function makeFetcherWithUserAgent(fetcher, userAgent) {
userAgent = userAgent.trim();
if (userAgent.length === 0) throw new Error(`Expected non-blank user-agent`);
return async (url, opts)=>{
const headers = {
...opts?.headers || {},
'user-agent': userAgent
};
return await fetcher(url, {
headers
});
};
}
function unwrapActivityIfNecessary(object, id, callbacks) {
if (object.type === 'Create' && isStringRecord(object.object)) {
callbacks?.onEvent({
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'warning',

@@ -355,12 +220,14 @@ url: id,

function collectRepliesFromItems(items, outReplies, nodeId, url, callbacks) {
for (const item of items){
for (const item of items) {
if (typeof item === 'string' && !item.startsWith('{')) {
outReplies.push(item);
} else {
}
else {
const itemObj = typeof item === 'string' ? JSON.parse(item) : item;
const { id } = itemObj;
if (typeof id !== 'string') throw new Error(`Expected item 'id' to be a string, found ${JSON.stringify(itemObj)}`);
const { id } = itemObj;
if (typeof id !== 'string')
throw new Error(`Expected item 'id' to be a string, found ${JSON.stringify(itemObj)}`);
outReplies.push(id);
if (typeof item === 'string') {
callbacks?.onEvent({
callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
kind: 'warning',

@@ -381,5 +248,6 @@ nodeId,

const url = computeUrl(object.url) || id;
const { published } = object;
const { published } = object;
const attributedTo = computeAttributedTo(object.attributedTo);
if (typeof published !== 'string') throw new Error(`Expected 'published' to be a string, found ${JSON.stringify(published)}`);
if (typeof published !== 'string')
throw new Error(`Expected 'published' to be a string, found ${JSON.stringify(published)}`);
return {

@@ -394,8 +262,10 @@ url,

function computeUrl(url) {
if (url === undefined || url === null) return undefined;
if (typeof url === 'string') return url;
if (url === undefined || url === null)
return undefined;
if (typeof url === 'string')
return url;
if (Array.isArray(url)) {
const v1 = url.find((v)=>v.type === 'Link' && v.mediaType === 'text/html' && typeof v.href === 'string'
);
if (v1) return v1.href;
const v1 = url.find((v) => v.type === 'Link' && v.mediaType === 'text/html' && typeof v.href === 'string');
if (v1)
return v1.href;
}

@@ -405,9 +275,9 @@ throw new Error(`Expected 'url' to be a string, found ${JSON.stringify(url)}`);

function computeAttributedTo(attributedTo) {
if (typeof attributedTo === 'string') return attributedTo;
if (typeof attributedTo === 'string')
return attributedTo;
if (Array.isArray(attributedTo) && attributedTo.length > 0) {
if (attributedTo.every((v)=>typeof v === 'string'
)) return attributedTo[0];
if (attributedTo.every((v)=>isStringRecord(v)
)) {
for (const item of attributedTo){
if (attributedTo.every((v) => typeof v === 'string'))
return attributedTo[0];
if (attributedTo.every((v) => isStringRecord(v))) {
for (const item of attributedTo) {
if (item.type === 'Person' && typeof item.id === 'string') {

@@ -423,10 +293,15 @@ return item.id;

function computeContent(obj) {
if (obj.type === 'PodcastEpisode' && isStringRecord(obj.description) && obj.description.type === 'Note') obj = obj.description;
const { content , contentMap } = obj;
if (content !== undefined && typeof content !== 'string') throw new Error(`Expected 'content' to be a string, found ${JSON.stringify(content)}`);
if (contentMap !== undefined && !isStringRecord(contentMap)) throw new Error(`Expected 'contentMap' to be a string record, found ${JSON.stringify(contentMap)}`);
if (contentMap !== undefined) return contentMap;
if (content !== undefined) return {
und: content
};
if (obj.type === 'PodcastEpisode' && isStringRecord(obj.description) && obj.description.type === 'Note')
obj = obj.description;
const { content, contentMap } = obj;
if (content !== undefined && typeof content !== 'string')
throw new Error(`Expected 'content' to be a string, found ${JSON.stringify(content)}`);
if (contentMap !== undefined && !isStringRecord(contentMap))
throw new Error(`Expected 'contentMap' to be a string record, found ${JSON.stringify(contentMap)}`);
if (contentMap !== undefined)
return contentMap;
if (content !== undefined)
return {
und: content
};
throw new Error(`Expected either 'contentMap' or 'content' to be present ${JSON.stringify(obj)}`);

@@ -436,7 +311,8 @@ }

const rt = [];
if (!object.attachment) return rt;
if (!object.attachment)
return rt;
const attachments = isReadonlyArray(object.attachment) ? object.attachment : [
object.attachment
];
for (const attachment of attachments){
for (const attachment of attachments) {
rt.push(computeAttachment(attachment));

@@ -447,8 +323,13 @@ }

function computeAttachment(object) {
if (typeof object !== 'object' || object.type !== 'Document' && object.type !== 'Image') throw new Error(`Expected attachment 'type' of Document or Image, found ${JSON.stringify(object.type)}`);
const { mediaType , width , height , url } = object;
if (typeof mediaType !== 'string') throw new Error(`Expected attachment 'mediaType' to be a string, found ${JSON.stringify(mediaType)}`);
if (width !== undefined && typeof width !== 'number') throw new Error(`Expected attachment 'width' to be a number, found ${JSON.stringify(width)}`);
if (height !== undefined && typeof height !== 'number') throw new Error(`Expected attachment 'height' to be a number, found ${JSON.stringify(height)}`);
if (typeof url !== 'string') throw new Error(`Expected attachment 'url' to be a string, found ${JSON.stringify(url)}`);
if (typeof object !== 'object' || object.type !== 'Document' && object.type !== 'Image')
throw new Error(`Expected attachment 'type' of Document or Image, found ${JSON.stringify(object.type)}`);
const { mediaType, width, height, url } = object;
if (typeof mediaType !== 'string')
throw new Error(`Expected attachment 'mediaType' to be a string, found ${JSON.stringify(mediaType)}`);
if (width !== undefined && typeof width !== 'number')
throw new Error(`Expected attachment 'width' to be a number, found ${JSON.stringify(width)}`);
if (height !== undefined && typeof height !== 'number')
throw new Error(`Expected attachment 'height' to be a number, found ${JSON.stringify(height)}`);
if (typeof url !== 'string')
throw new Error(`Expected attachment 'url' to be a string, found ${JSON.stringify(url)}`);
return {

@@ -464,13 +345,19 @@ mediaType,

if (person.icon) {
if (typeof person.icon !== 'object' || isReadonlyArray(person.icon) || person.icon.type !== 'Image') throw new Error(`Expected person 'icon' to be an object, found: ${JSON.stringify(person.icon)}`);
if (typeof person.icon !== 'object' || isReadonlyArray(person.icon) || person.icon.type !== 'Image')
throw new Error(`Expected person 'icon' to be an object, found: ${JSON.stringify(person.icon)}`);
icon = computeIcon(person.icon);
}
const { name , preferredUsername , url: apUrl , id } = person;
if (name !== undefined && typeof name !== 'string') throw new Error(`Expected person 'name' to be a string, found: ${JSON.stringify(person)}`);
if (preferredUsername !== undefined && typeof preferredUsername !== 'string') throw new Error(`Expected person 'preferredUsername' to be a string, found: ${JSON.stringify(person)}`);
const { name, preferredUsername, url: apUrl, id } = person;
if (name !== undefined && typeof name !== 'string')
throw new Error(`Expected person 'name' to be a string, found: ${JSON.stringify(person)}`);
if (preferredUsername !== undefined && typeof preferredUsername !== 'string')
throw new Error(`Expected person 'preferredUsername' to be a string, found: ${JSON.stringify(person)}`);
const nameOrPreferredUsername = name || preferredUsername;
if (!nameOrPreferredUsername) throw new Error(`Expected person 'name' or 'preferredUsername', found: ${JSON.stringify(person)}`);
if (apUrl !== undefined && typeof apUrl !== 'string') throw new Error(`Expected person 'url' to be a string, found: ${JSON.stringify(apUrl)}`);
if (!nameOrPreferredUsername)
throw new Error(`Expected person 'name' or 'preferredUsername', found: ${JSON.stringify(person)}`);
if (apUrl !== undefined && typeof apUrl !== 'string')
throw new Error(`Expected person 'url' to be a string, found: ${JSON.stringify(apUrl)}`);
const url = apUrl || id;
if (typeof url !== 'string') throw new Error(`Expected person 'url' or 'id' to be a string, found: ${JSON.stringify(url)}`);
if (typeof url !== 'string')
throw new Error(`Expected person 'url' or 'id' to be a string, found: ${JSON.stringify(url)}`);
const fqUsername = computeFqUsername(url, person.preferredUsername);

@@ -486,5 +373,7 @@ return {

function computeIcon(image) {
const { url , mediaType } = image;
if (typeof url !== 'string') throw new Error(`Expected icon 'url' to be a string, found: ${JSON.stringify(url)}`);
if (mediaType !== undefined && typeof mediaType !== 'string') throw new Error(`Expected icon 'mediaType' to be a string, found: ${JSON.stringify(mediaType)}`);
const { url, mediaType } = image;
if (typeof url !== 'string')
throw new Error(`Expected icon 'url' to be a string, found: ${JSON.stringify(url)}`);
if (mediaType !== undefined && typeof mediaType !== 'string')
throw new Error(`Expected icon 'mediaType' to be a string, found: ${JSON.stringify(mediaType)}`);
return {

@@ -499,9 +388,474 @@ url,

const username = m ? m[1] : preferredUsername;
if (!username) throw new Error(`Unable to compute username from url: ${url}`);
if (!username)
throw new Error(`Unable to compute username from url: ${url}`);
return `${username}@${u.hostname}`;
}
// Implementation of the 'lightningcomments' protocol.
// Node ids are synthetic urls: the comments-feed url plus a '#comment-<id>' or '#commenter-<id>' hash,
// so the same feed payload (usually served from cache) backs every lookup.
const LightningCommentsProtocolImplementation = {
    // Fetch the comments feed once and seed the threadcap with every depth-0 comment as a root.
    async initThreadcap(url, opts) {
        const { fetcher, cache } = opts;
        const time = new Date().toISOString();
        const comments = await findOrFetchLightningComments(url, time, fetcher, cache);
        const roots = comments.filter((v) => v.depth === 0).map((v) => computeUrlWithHash(url, `comment-${v.id}`));
        return {
            protocol: 'lightningcomments',
            roots,
            nodes: {},
            commenters: {}
        };
    },
    // Resolve a '#comment-<id>' node id back to the underlying comment in the feed.
    async fetchComment(id, opts) {
        const { fetcher, cache, updateTime } = opts;
        const m = /^#comment-(.*?)$/.exec(new URL(id).hash);
        if (m) {
            const [_, commentId] = m;
            // strip the hash to recover the feed url, then re-fetch (typically a cache hit)
            const comments = await findOrFetchLightningComments(computeUrlWithHash(id, ''), updateTime, fetcher, cache);
            const comment = comments.find((v) => v.id === commentId);
            if (!comment)
                throw new Error(`Comment not found: ${commentId}`);
            return {
                attachments: [],
                attributedTo: computeUrlWithHash(id, `commenter-${computeCommenterId(comment.sender)}`),
                content: {
                    und: comment.message
                },
                published: comment.created
            };
        }
        throw new Error(`fetchComment: unexpected id=${id}`);
    },
    // Resolve a '#commenter-<id>' attribution back to the sender info in the feed.
    async fetchCommenter(attributedTo, opts) {
        const { fetcher, cache, updateTime } = opts;
        const m = /^#commenter-(.*?)$/.exec(new URL(attributedTo).hash);
        if (m) {
            const [_, commenterId] = m;
            const comments = await findOrFetchLightningComments(computeUrlWithHash(attributedTo, ''), updateTime, fetcher, cache);
            const commenter = comments.map((v) => v.sender).find((v) => computeCommenterId(v) === commenterId);
            if (!commenter)
                throw new Error(`Commenter not found: ${commenterId}`);
            return {
                asof: updateTime,
                name: `${commenter.name} from ${commenter.app}`
            };
        }
        throw new Error(`fetchCommenter: unexpected attributedTo=${attributedTo}`);
    },
    // Replies are precomputed in the feed: each comment lists its children by id.
    async fetchReplies(id, opts) {
        const { fetcher, cache, updateTime } = opts;
        const m = /^#comment-(.*?)$/.exec(new URL(id).hash);
        if (m) {
            const [_, commentId] = m;
            const url = computeUrlWithHash(id, '');
            const comments = await findOrFetchLightningComments(url, updateTime, fetcher, cache);
            const comment = comments.find((v) => v.id === commentId);
            if (!comment)
                throw new Error(`Comment not found: ${commentId}`);
            return comment.children.map((v) => computeUrlWithHash(url, `comment-${v}`));
        }
        throw new Error(`fetchReplies: unexpected id=${id}`);
    }
};
// Fetches (or reads from cache) the lightning comments feed as json and validates its shape.
// Throws if the payload lacks a data.comments array or any comment is malformed.
async function findOrFetchLightningComments(url, after, fetcher, cache) {
    const obj = await findOrFetchJson(url, after, fetcher, cache, {
        accept: 'application/json'
    });
    if (!isStringRecord(obj) || !isStringRecord(obj.data) || !Array.isArray(obj.data.comments))
        throw new Error(`Unable to find obj.data.comments array: ${JSON.stringify(obj)}`);
    return obj.data.comments.map((v, i) => {
        if (!isValidLightningComment(v))
            throw new Error(`Unexpected lightning comment at index ${i}: ${JSON.stringify(v)}`);
        return v;
    });
}
// Returns `url` with its fragment replaced by `hash` (pass '' to strip the fragment).
function computeUrlWithHash(url, hash) {
    const rt = new URL(url);
    rt.hash = hash;
    return rt.toString();
}
// Computes a stable id for a sender; anonymous senders (null id) fall back to a name-based key.
function computeCommenterId(sender) {
    if (sender.id === null) return `null-${sender.name}`;
    return sender.id;
}
// Structural validation for one lightning comment: non-empty string id/message, parent is a
// non-empty string or null, children is an array of non-empty string ids, depth is a
// non-negative integer, created is ISO-8601, and the sender record is well-formed.
function isValidLightningComment(obj) {
    return isStringRecord(obj) && typeof obj.id === 'string' && isNonEmpty(obj.id) && typeof obj.message === 'string' && isNonEmpty(obj.message) && (typeof obj.parent === 'string' && isNonEmpty(obj.parent) || obj.parent === null) && Array.isArray(obj.children) && obj.children.every((v) => typeof v === 'string' && isNonEmpty(v)) && typeof obj.depth === 'number' && isNonNegativeInteger(obj.depth) && typeof obj.created === 'string' && isValidIso8601(obj.created) && isValidLightningSender(obj.sender);
}
// Structural validation for a lightning sender: non-empty string app and name; id is a
// non-empty string or null (null marks an anonymous sender — see computeCommenterId).
function isValidLightningSender(obj) {
    return isStringRecord(obj) && typeof obj.app === 'string' && isNonEmpty(obj.app) && (obj.id === null || typeof obj.id === 'string' && isNonEmpty(obj.id)) && typeof obj.name === 'string' && isNonEmpty(obj.name);
}
// Implementation of the 'twitter' protocol, backed by the Twitter API v2.
// Node ids are tweet-lookup api urls (https://api.twitter.com/2/tweets/<id>).
const TwitterProtocolImplementation = {
    // Only supports urls of the form https://twitter.com/<user>/status/<tweetId>
    async initThreadcap(url, opts) {
        const { hostname, pathname } = new URL(url);
        const m = /^\/.*?\/status\/(\d+)$/.exec(pathname);
        if (hostname !== 'twitter.com' || !m)
            throw new Error(`Unexpected tweet url: ${url}`);
        const [_, id] = m;
        const tweetApiUrl = `https://api.twitter.com/2/tweets/${id}`;
        // initial fetch primes the cache and verifies the tweet is reachable
        const obj = await findOrFetchTwitter(tweetApiUrl, new Date().toISOString(), opts);
        if (DEBUG)
            console.log(JSON.stringify(obj, undefined, 2));
        return {
            protocol: 'twitter',
            roots: [
                tweetApiUrl
            ],
            nodes: {},
            commenters: {}
        };
    },
    // Fetches one tweet and converts it to the common Comment shape.
    async fetchComment(id, opts) {
        const { updateTime } = opts;
        const url = new URL(id);
        url.searchParams.set('tweet.fields', 'author_id,lang,created_at');
        const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
        if (DEBUG)
            console.log(JSON.stringify(obj, undefined, 2));
        const tweetId = obj.data.id;
        const text = obj.data.text;
        const authorId = obj.data.author_id;
        const lang = obj.data.lang;
        const createdAt = obj.data.created_at;
        // key the content map by the tweet's language code
        const content = {};
        content[lang] = text;
        const tweetUrl = `https://twitter.com/i/web/status/${tweetId}`;
        return {
            attachments: [],
            attributedTo: `https://api.twitter.com/2/users/${authorId}`,
            content,
            published: createdAt,
            url: tweetUrl
        };
    },
    // Fetches a user record and converts it to the common Commenter shape.
    async fetchCommenter(attributedTo, opts) {
        const { updateTime } = opts;
        const url = new URL(attributedTo);
        url.searchParams.set('user.fields', 'url,profile_image_url');
        const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
        if (DEBUG)
            console.log('fetchCommenter', JSON.stringify(obj, undefined, 2));
        const name = obj.data.name;
        const fqUsername = '@' + obj.data.username;
        const userUrl = `https://twitter.com/${obj.data.username}`;
        const iconUrl = obj.data.profile_image_url;
        const iconUrlLower = (iconUrl || '').toLowerCase();
        // guess the media type from the url extension; leave undefined when unknown
        const iconMediaType = iconUrlLower.endsWith('.jpg') ? 'image/jpeg' : iconUrlLower.endsWith('.png') ? 'image/png' : undefined;
        const icon = iconUrl ? {
            url: iconUrl,
            mediaType: iconMediaType
        } : undefined;
        return {
            asof: updateTime,
            name,
            fqUsername,
            url: userUrl,
            icon
        };
    },
    // Computes replies by fetching the whole conversation, then keeping direct replies to this tweet.
    async fetchReplies(id, opts) {
        const m = /^https:\/\/api\.twitter\.com\/2\/tweets\/(.*?)$/.exec(id);
        if (!m)
            throw new Error(`Unexpected tweet id: ${id}`);
        const [_, tweetId] = m;
        const convo = await findOrFetchConversation(tweetId, opts);
        // NOTE(review): assumes every tweet in the conversation payload carries 'referenced_tweets';
        // verify against Twitter API v2 responses (fields can be absent when a tweet references nothing)
        return Object.values(convo.tweets).filter((v) => v.referenced_tweets.some((w) => w.type === 'replied_to' && w.id === tweetId)).map((v) => `https://api.twitter.com/2/tweets/${v.id}`);
    }
};
// When true, dumps raw Twitter API payloads to the console (development aid).
const DEBUG = false;
// Fetches (or reads from cache) a Twitter API v2 endpoint as json, authenticating
// with the bearer token supplied in opts.
async function findOrFetchTwitter(url, after, opts) {
    const { fetcher, cache, bearerToken } = opts;
    const obj = await findOrFetchJson(url, after, fetcher, cache, {
        accept: 'application/json',
        authorization: `Bearer ${bearerToken}`
    });
    return obj;
}
// Fetches the full conversation for a tweet via the recent-search api, memoizing the result
// on opts.state so repeated fetchReplies calls during one update reuse a single search.
// NOTE(review): the /search/recent endpoint appears to cover only a limited time window —
// older replies may be missing; confirm against the Twitter API v2 docs.
async function findOrFetchConversation(tweetId, opts) {
    const { updateTime, state } = opts;
    let { conversation } = state;
    if (!conversation) {
        const conversationId = await findOrFetchConversationId(tweetId, opts);
        const url = new URL('https://api.twitter.com/2/tweets/search/recent');
        url.searchParams.set('query', `conversation_id:${conversationId}`);
        url.searchParams.set('expansions', `referenced_tweets.id`);
        url.searchParams.set('tweet.fields', `author_id,lang,created_at`);
        const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
        // index the returned tweets by id
        const tweets = {};
        for (const tweetObj of obj.data) {
            const tweet = tweetObj;
            tweets[tweet.id] = tweet;
        }
        conversation = {
            tweets
        };
        state.conversation = conversation;
    }
    return conversation;
}
// Looks up the conversation_id for a tweet, memoizing it on opts.state for the duration
// of the update so subsequent calls skip the api round-trip.
async function findOrFetchConversationId(tweetId, opts) {
    const { updateTime, state } = opts;
    let { conversationId } = state;
    if (typeof conversationId === 'string')
        return conversationId;
    const url = new URL(`https://api.twitter.com/2/tweets/${tweetId}`);
    url.searchParams.set('tweet.fields', 'conversation_id');
    const obj = await findOrFetchTwitter(url.toString(), updateTime, opts);
    conversationId = obj.data.conversation_id;
    if (typeof conversationId !== 'string')
        throw new Error(`Unexpected conversationId in payload: ${JSON.stringify(obj, undefined, 2)}`);
    state.conversationId = conversationId;
    return conversationId;
}
// Returns whether the given string names one of the supported capture protocols.
function isValidProtocol(protocol) {
    return ['activitypub', 'lightningcomments', 'twitter'].includes(protocol);
}
/** Maximum number of reply levels that updateThreadcap will recurse into. */
const MAX_LEVELS = 1000;
// Creates a new (empty) threadcap for the given root url, delegating to the protocol
// implementation selected by opts.protocol (defaults to activitypub).
// Throws if opts.userAgent is blank (enforced by makeFetcherWithUserAgent).
async function makeThreadcap(url, opts) {
    const { cache, userAgent, protocol, bearerToken } = opts;
    const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
    const implementation = computeProtocolImplementation(protocol);
    return await implementation.initThreadcap(url, {
        fetcher,
        cache,
        bearerToken
    });
}
/**
 * Updates a threadcap in place, walking reply levels breadth-first from the root(s)
 * (or from opts.startNode), until maxLevels/maxNodes/keepGoing stop the traversal.
 *
 * Fix: 'remaining' now starts at the number of level-0 ids instead of a hard-coded 1,
 * so 'nodes-remaining' events are accurate for threadcaps with multiple roots.
 */
async function updateThreadcap(threadcap, opts) {
    const { userAgent, cache, updateTime, callbacks, maxLevels, maxNodes: maxNodesInput, startNode, keepGoing, bearerToken } = opts;
    const fetcher = makeFetcherWithUserAgent(opts.fetcher, userAgent);
    // clamp inputs: maxLevel in [0, 1000] (default 1000), maxNodes >= 0 when provided
    const maxLevel = Math.min(Math.max(maxLevels === undefined ? 1000 : Math.round(maxLevels), 0), 1000);
    const maxNodes = maxNodesInput === undefined ? undefined : Math.max(Math.round(maxNodesInput), 0);
    if (startNode && !threadcap.nodes[startNode])
        throw new Error(`Invalid start node: ${startNode}`);
    if (maxLevel === 0)
        return;
    if (maxNodes === 0)
        return;
    const implementation = computeProtocolImplementation(threadcap.protocol);
    // per-update scratch space shared across protocol calls (e.g. twitter conversation lookups)
    const state = {};
    const idsBylevel = [
        startNode ? [
            startNode
        ] : [
            ...threadcap.roots
        ]
    ];
    // was hard-coded 1, which undercounts when a threadcap has multiple roots
    let remaining = idsBylevel[0].length;
    let processed = 0;
    const processLevel = async (level) => {
        callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
            kind: 'process-level',
            phase: 'before',
            level: level + 1
        });
        const nextLevel = level + 1;
        for (const id of idsBylevel[level] || []) {
            // only descend into replies if the next level is still within bounds
            const processReplies = nextLevel < maxLevel;
            const node = await processNode(id, processReplies, threadcap, implementation, {
                updateTime,
                callbacks,
                state,
                fetcher,
                cache,
                bearerToken
            });
            remaining--;
            processed++;
            if (maxNodes && processed >= maxNodes)
                return;
            if (keepGoing && !keepGoing())
                return;
            if (node.replies && nextLevel < maxLevel) {
                if (!idsBylevel[nextLevel])
                    idsBylevel[nextLevel] = [];
                idsBylevel[nextLevel].push(...node.replies);
                remaining += node.replies.length;
            }
            callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
                kind: 'nodes-remaining',
                remaining
            });
        }
        callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
            kind: 'process-level',
            phase: 'after',
            level: level + 1
        });
        if (idsBylevel[nextLevel])
            await processLevel(nextLevel);
    };
    await processLevel(0);
}
/**
 * In-memory response cache: entries never expire, and a cached response is only
 * returned when it was fetched strictly after the caller's 'after' instant
 * (ISO-8601 strings compare correctly as plain strings).
 */
class InMemoryCache {
    // id -> { response, fetched }
    map = new Map();
    // optional hook, invoked whenever a cached response is about to be returned
    onReturningCachedResponse = undefined;
    get(id, after) {
        const entry = this.map.get(id);
        if (entry !== undefined) {
            const { response, fetched } = entry;
            if (response && fetched && fetched > after) {
                if (this.onReturningCachedResponse)
                    this.onReturningCachedResponse(id, after, fetched, response);
                return Promise.resolve(response);
            }
        }
        return Promise.resolve(undefined);
    }
    put(id, fetched, response) {
        this.map.set(id, { response, fetched });
        return Promise.resolve();
    }
}
// Default rate-limit backoff: no wait while at least 100 calls remain, otherwise spread
// the remaining calls evenly across the reset window (or wait out the whole window at 0).
function computeDefaultMillisToWait(input) {
    const { remaining, millisTillReset } = input;
    if (remaining >= 100) return 0;
    if (remaining > 0) return Math.round(millisTillReset / remaining);
    return millisTillReset;
}
// Wraps a fetcher so calls respect server-provided rate-limit headers, tracked per endpoint.
// Twitter endpoints use x-rate-limit-* headers (epoch-second reset); everything else uses
// x-ratelimit-* (ISO-8601 reset). When the recorded budget is low, waits before the request.
function makeRateLimitedFetcher(fetcher, opts1 = {}) {
    const { callbacks } = opts1;
    const computeMillisToWait = opts1.computeMillisToWait || computeDefaultMillisToWait;
    // endpoint -> { limit, remaining, reset } from the most recent response
    const endpointLimits = new Map();
    return async (url, opts) => {
        const { hostname, pathname } = new URL(url);
        // group twitter urls by normalized endpoint path; other hosts by hostname only
        const twitterEndpoint = computeTwitterEndpoint(hostname, pathname);
        const endpoint = twitterEndpoint || hostname;
        const limits = endpointLimits.get(endpoint);
        if (limits) {
            const { limit, remaining, reset } = limits;
            const millisTillReset = new Date(reset).getTime() - Date.now();
            const millisToWait = computeMillisToWait({
                endpoint,
                limit,
                remaining,
                reset,
                millisTillReset
            });
            if (millisToWait > 0) {
                callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
                    kind: 'waiting-for-rate-limit',
                    endpoint,
                    millisToWait,
                    millisTillReset,
                    limit,
                    remaining,
                    reset
                });
                await sleep(millisToWait);
            }
        }
        const res = await fetcher(url, opts);
        // record the latest limits from the response headers for the next call
        const limitHeader = twitterEndpoint ? 'x-rate-limit-limit' : 'x-ratelimit-limit';
        const remainingHeader = twitterEndpoint ? 'x-rate-limit-remaining' : 'x-ratelimit-remaining';
        const resetHeader = twitterEndpoint ? 'x-rate-limit-reset' : 'x-ratelimit-reset';
        const limit = tryParseInt(res.headers.get(limitHeader) || '');
        const remaining = tryParseInt(res.headers.get(remainingHeader) || '');
        const resetStr = res.headers.get(resetHeader) || '';
        const reset = twitterEndpoint ? tryParseEpochSecondsAsIso8601(resetStr) : tryParseIso8601(resetStr);
        if (limit !== undefined && remaining !== undefined && reset !== undefined) {
            endpointLimits.set(endpoint, {
                limit,
                remaining,
                reset
            });
        }
        return res;
    };
}
// Normalizes a Twitter API path into an endpoint key by collapsing long numeric ids
// (4+ digits) to ':id'; returns undefined for non-Twitter hosts.
function computeTwitterEndpoint(hostname, pathname) {
    if (hostname !== 'api.twitter.com') return undefined;
    return pathname.replaceAll(/\d{4,}/g, ':id');
}
// Wraps a fetcher so every request carries the given (trimmed) user-agent header.
// Throws up front if the user-agent is blank after trimming.
function makeFetcherWithUserAgent(fetcher, userAgent) {
    const trimmed = userAgent.trim();
    if (trimmed === '')
        throw new Error(`Expected non-blank user-agent`);
    return async (url, opts) => {
        // caller-supplied headers are kept, but 'user-agent' always wins
        const headers = {
            ...(opts?.headers || {}),
            'user-agent': trimmed
        };
        return await fetcher(url, { headers });
    };
}
// Maps a protocol name (undefined defaults to activitypub) to its implementation,
// throwing for anything unrecognized.
function computeProtocolImplementation(protocol) {
    switch (protocol) {
        case undefined:
        case 'activitypub':
            return ActivityPubProtocolImplementation;
        case 'lightningcomments':
            return LightningCommentsProtocolImplementation;
        case 'twitter':
            return TwitterProtocolImplementation;
        default:
            throw new Error(`Unsupported protocol: ${protocol}`);
    }
}
// Processes a single node: refreshes its comment (and the comment's commenter) and,
// when processReplies is set, its reply list. Each part is only refetched when its
// 'asof' timestamp is older than updateTime; fetch failures are recorded on the node
// (commentError/repliesError) instead of aborting the whole update.
async function processNode(id, processReplies, threadcap, implementation, opts) {
    const { updateTime, callbacks } = opts;
    // create the node record on first sight
    let node = threadcap.nodes[id];
    if (!node) {
        node = {};
        threadcap.nodes[id] = node;
    }
    const updateComment = !node.commentAsof || node.commentAsof < updateTime;
    if (updateComment) {
        try {
            node.comment = await implementation.fetchComment(id, opts);
            const { attributedTo } = node.comment;
            // refresh the commenter record only if it is missing or stale
            const existingCommenter = threadcap.commenters[attributedTo];
            if (!existingCommenter || existingCommenter.asof < updateTime) {
                threadcap.commenters[attributedTo] = await implementation.fetchCommenter(attributedTo, opts);
            }
            node.commentError = undefined;
        }
        catch (e) {
            // record the failure and clear any stale comment
            node.comment = undefined;
            node.commentError = `${e.stack || e}`;
        }
        node.commentAsof = updateTime;
    }
    callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
        kind: 'node-processed',
        nodeId: id,
        part: 'comment',
        updated: updateComment
    });
    if (processReplies) {
        const updateReplies = !node.repliesAsof || node.repliesAsof < updateTime;
        if (updateReplies) {
            try {
                node.replies = await implementation.fetchReplies(id, opts);
                node.repliesError = undefined;
            }
            catch (e) {
                // record the failure and clear any stale reply list
                node.replies = undefined;
                node.repliesError = `${e.stack || e}`;
            }
            node.repliesAsof = updateTime;
        }
        callbacks === null || callbacks === void 0 ? void 0 : callbacks.onEvent({
            kind: 'node-processed',
            nodeId: id,
            part: 'replies',
            updated: updateReplies
        });
    }
    return node;
}
function sleep(ms) {
    // Resolve after roughly `ms` milliseconds (subject to setTimeout precision).
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
function tryParseInt(value) {
    // Parse a base-10 integer from a string, returning undefined on failure.
    // The published source was garbled diff residue here (two `catch` clauses and a
    // stray hunk marker), which is a syntax error; this restores the intended shape.
    // Note: parseInt never throws — it yields NaN for unparseable input. Returning
    // NaN would slip past callers' `!== undefined` guards (e.g. the rate-limit
    // header checks), so normalize NaN to undefined here.
    const n = parseInt(value, 10);
    return Number.isNaN(n) ? undefined : n;
}
function sleep(ms) {
    // Resolve after `ms` milliseconds.
    // The published source dropped the closing brace of this (duplicated) helper
    // during the diff render, leaving a syntax error; restored here.
    return new Promise((resolve) => setTimeout(resolve, ms));
}
function tryParseEpochSecondsAsIso8601(value) {
    // Interpret `value` as a Unix epoch-seconds count and convert it to an
    // ISO-8601 GMT instant; yields undefined when the value is unparseable
    // or not strictly positive.
    const seconds = tryParseInt(value);
    if (!seconds || seconds <= 0)
        return undefined;
    return new Date(seconds * 1000).toISOString();
}
export { isValidProtocol as isValidProtocol };
export { MAX_LEVELS as MAX_LEVELS };

@@ -519,0 +874,0 @@ export { makeThreadcap as makeThreadcap };

@@ -12,7 +12,7 @@ /**

/**
* ActivityPub id of the root object url.
* One or more id urls of the root-level nodes.
*
* Use this to lookup the corresponding root {@link Node} when starting to recurse down a reply tree.
* Use these to lookup the corresponding root {@link Node} when starting to recurse down a reply tree.
*/
readonly root: string;
readonly roots: readonly string[];
/**

@@ -32,5 +32,14 @@ * Comment data nodes captured so far, keyed by ActivityPub id.

readonly commenters: Record<string, Commenter>;
/**
* Underlying protocol used to capture the thread.
*
* Supported protocols: activitypub (default), lightningcomments, twitter
*/
readonly protocol?: Protocol;
}
/** An ISO-8601 date at GMT, including optional milliseconds, e.g. `1970-01-01T00:00:00Z` or `1970-01-01T00:00:00.123Z` */
export declare type Instant = string;
/** Supported protocols for capturing comment threads: activitypub, lightningcomments, twitter */
export declare type Protocol = 'activitypub' | 'lightningcomments' | 'twitter';
export declare function isValidProtocol(protocol: string): protocol is Protocol;
/**

@@ -116,5 +125,5 @@ * Snapshot of a single comment inside of a larger {@link Threadcap}.

/** Web link to the commenter profile. */
readonly url: string;
readonly url?: string;
/** Fully-qualified fediverse username, e.g. `@user@example.com` */
readonly fqUsername: string;
readonly fqUsername?: string;
/** Time this information was last fetched */

@@ -171,3 +180,3 @@ readonly asof: Instant;

export declare type RateLimiterInput = {
hostname: string;
endpoint: string;
limit: number;

@@ -222,3 +231,3 @@ remaining: number;

readonly kind: 'waiting-for-rate-limit';
readonly hostname: string;
readonly endpoint: string;
readonly millisToWait: number;

@@ -244,2 +253,4 @@ readonly millisTillReset: number;

cache: Cache;
protocol?: Protocol;
bearerToken?: string;
}): Promise<Threadcap>;

@@ -271,2 +282,3 @@ /**

callbacks?: Callbacks;
bearerToken?: string;
}): Promise<void>;

@@ -273,0 +285,0 @@ /** Simple implementation of {@link Cache} that keeps everything around in memory. */

{
"name": "threadcap",
"version": "0.1.5",
"version": "0.1.6",
"description": "Threadcap helps you take and update snapshots of a public ActivityPub comment thread, given a root post url.",

@@ -5,0 +5,0 @@ "repository": {

@@ -109,3 +109,3 @@ Threadcap helps you take and update snapshots of a public ActivityPub comment thread, given a root post url.

```ts
import { makeThreadcap, InMemoryCache, updateThreadcap, makeRateLimitedFetcher, Callbacks } from 'https://raw.githubusercontent.com/skymethod/minipub/v0.1.5/src/threadcap/threadcap.ts';
import { makeThreadcap, InMemoryCache, updateThreadcap, makeRateLimitedFetcher, Callbacks } from 'https://raw.githubusercontent.com/skymethod/minipub/v0.1.6/src/threadcap/threadcap.ts';

@@ -133,1 +133,253 @@ const userAgent = 'my-podcast-app/1.0';

```
## Example threadcap JSON structure
Typescript type is fully documented in [threadcap.ts](https://github.com/skymethod/minipub/blob/master/src/threadcap/threadcap.ts#L4) to appear in IDEs, but here is a realistic example output JSON
with the same documentation:
```jsonc
// threadcap: Snapshot of an ActivityPub thread tree, starting at a given root object url.
// Serializable json object that can be saved, then reloaded to resume or update.
// Create a new threadcap using the 'makeThreadcap' function.
// Update an existing threadcap using the 'updateThreadcap' function.
{
// One or more id urls of the root-level nodes.
// Use these to lookup the corresponding root node when starting to recurse down a reply tree.
"roots": [ "https://example.social/users/alice/statuses/107939417586098696" ],
// Comment data nodes captured so far, keyed by ActivityPub id.
// Each Node has information on any comment content or error found, and pointers to its direct replies or error found.
"nodes": {
"https://example.social/users/alice/statuses/107939417586098696": {
// Inline comment info, enough to render the comment itself (no replies).
"comment": {
// Public web link to this comment, if available.
"url": "https://example.social/@alice/107939417586098696",
// Time this comment was published.
// Value comes directly from the ActivityPub payload, which is usually ISO-8601.
"published": "2022-03-11T18:53:24Z",
// Media attachments included in this comment, if any.
"attachments": [
{
// Mime type of the attachment.
"mediaType": "image/jpeg",
// Width of the attachment image or video, if applicable.
"width": 1024,
// Height of the attachment image or video, if applicable.
"height": 1024,
// Source url to the attachment image or video.
"url": "https://example.social/content/media_attachments/files/107/939/417/463/353/610/original/efbc7e05930e670a.jpeg"
}
],
// Content (which may include html) for this comment, broken out by language code.
// ActivityPub technically supports multiple translations of a single post, though most servers will capture only one in their user interface.
// A language code of `und` indicates the server did not specify a language.
"content": {
"en": "<p>Comment! 🎉🍻🎙</p>"
},
      // ActivityPub id of the Person (or Service) actor that is responsible for this comment.
// Look up the full Commenter info using 'commenters', with this value as the index.
"attributedTo": "https://example.social/users/alice"
},
// Time when the comment info or error was updated.
"commentAsof": "2022-03-12T16:52:03.948Z",
// ActivityPub ids of the direct child replies, once found completely.
// Use these to lookup the corresponding nodes when recursing down a reply tree.
// An empty array indicates no child replies were found, `undefined` means they have yet to be fetched, or failed to fetch.
"replies": [
"https://example.social/users/Bob/statuses/107939427682302143",
"https://example.social/users/Carlos/statuses/107939930600043817",
"https://example.social/users/Dan/statuses/107939988318438193"
],
// Time when the replies info or error was updated.
"repliesAsof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/Bob/statuses/107939427682302143": {
"comment": {
"url": "https://example.social/@Bob/107939427682302143",
"published": "2022-03-11T18:55:58Z",
"attachments": [],
"content": {
"en": "<p>Comment!</p>"
},
"attributedTo": "https://example.social/users/Bob"
},
"commentAsof": "2022-03-12T16:52:03.948Z",
// Error encountered when trying to fetch and parse the direct child replies.
// Either 'replies' or 'repliesError' will be defined, never both.
"repliesError": "Failed to enumerate replies!",
"repliesAsof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/Carlos/statuses/107939930600043817": {
"comment": {
"url": "https://example.social/@Carlos/107939930600043817",
"published": "2022-03-11T21:03:52Z",
"attachments": [],
"content": {
"en": "<p>Comment!</p>"
},
"attributedTo": "https://example.social/users/Carlos"
},
"commentAsof": "2022-03-12T16:52:03.948Z",
"replies": [
"https://example.social/users/alice/statuses/107940172190413796"
],
"repliesAsof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/Dan/statuses/107939988318438193": {
"comment": {
"url": "https://example.social/@Dan/107939988318438193",
"published": "2022-03-11T21:18:33Z",
"attachments": [],
"content": {
"en": "<p>Comment!</p>"
},
"attributedTo": "https://example.social/users/Dan"
},
"commentAsof": "2022-03-12T16:52:03.948Z",
"replies": [
"https://example.social/users/alice/statuses/107940180378482688"
],
"repliesAsof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/alice/statuses/107940172190413796": {
"comment": {
"url": "https://example.social/@alice/107940172190413796",
"published": "2022-03-11T22:05:18Z",
"attachments": [],
"content": {
"en": "<p>Comment!</p>"
},
"attributedTo": "https://example.social/users/alice"
},
"commentAsof": "2022-03-12T16:52:03.948Z",
"replies": [
"https://example.social/users/Carlos/statuses/107940214277865378"
],
"repliesAsof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/alice/statuses/107940180378482688": {
// Error encountered when trying to fetch and parse this comment info.
// Either 'comment' or 'commentError' will be defined, never both.
"commentError": "Failed to fetch!",
"commentAsof": "2022-03-12T16:52:03.948Z",
"replies": [],
"repliesAsof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/Carlos/statuses/107940214277865378": {
"comment": {
"url": "https://example.social/@Carlos/107940214277865378",
"published": "2022-03-11T22:16:01Z",
"attachments": [],
"content": {
"en": "<p>Comment!</p>"
},
"attributedTo": "https://example.social/users/Carlos"
},
"commentAsof": "2022-03-12T16:52:03.948Z",
"replies": [
"https://anotherexample.space/objects/87b74fb9-913d-4a9b-9444-72e2a87ec540"
],
"repliesAsof": "2022-03-12T16:52:03.948Z"
},
"https://anotherexample.space/objects/87b74fb9-913d-4a9b-9444-72e2a87ec540": {
"comment": {
"url": "https://anotherexample.space/objects/87b74fb9-913d-4a9b-9444-72e2a87ec540",
"published": "2022-03-11T23:51:36.011246Z",
"attachments": [],
"content": {
"und": "<p>Comment!</p>"
},
"attributedTo": "https://anotherexample.space/users/eve"
},
"commentAsof": "2022-03-12T16:52:03.948Z",
"replies": [],
"repliesAsof": "2022-03-12T16:52:03.948Z"
}
},
  // Information about each Commenter captured so far, keyed by ActivityPub id (e.g. the Comment 'attributedTo')
// Kept here, outside of 'nodes', to minimize data duplication if a reply tree has multiple comments from the same commenter.
// In general, you can assume that all Comment 'attributedTo' references inside 'nodes' have corresponding referents here.
"commenters": {
"https://example.social/users/alice": {
// Profile icon for the commenter, if any
"icon": {
// Source url to the icon image.
"url": "https://example.social/content/accounts/avatars/000/000/269/original/4870123c3ae92a44.jpg",
// Mime type of the icon image, if known.
"mediaType": "image/jpeg"
},
// Display name of the commenter.
"name": "Alice Doe",
// Web link to the commenter profile.
"url": "https://example.social/@alice",
// Fully-qualified fediverse username, e.g. @user@example.com
"fqUsername": "@alice@example.social",
// Time this information was last fetched
"asof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/Bob": {
"icon": {
"url": "https://example.social/content/accounts/avatars/106/557/219/416/316/803/original/c65012321a9d4807.png",
"mediaType": "image/png"
},
"name": "Bob Doe",
"url": "https://example.social/@Bob",
"fqUsername": "@Bob@example.social",
"asof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/Carlos": {
"icon": {
"url": "https://example.social/content/accounts/avatars/106/533/207/812/918/186/original/fa83123dbc94380b.png",
"mediaType": "image/png"
},
"name": "Carlos Doe",
"url": "https://example.social/@Carlos",
"fqUsername": "@Carlos@example.social",
"asof": "2022-03-12T16:52:03.948Z"
},
"https://example.social/users/Dan": {
"icon": {
"url": "https://example.social/content/accounts/avatars/000/015/466/original/1dcbcd12319f90a7.png",
"mediaType": "image/png"
},
"name": "Dan Doe",
"url": "https://example.social/@Dan",
"fqUsername": "@Dan@example.social",
"asof": "2022-03-12T16:52:03.948Z"
},
"https://anotherexample.space/users/eve": {
"icon": {
"url": "https://anotherexample.space/media/ef0e3ca3a78c9cb2912338d3c476344b90358f497b3543ca1fe9c785b4ccdf62.jpg?name=blob.jpg"
},
"name": "Eve Doe",
"url": "https://anotherexample.space/users/eve",
"fqUsername": "eve@anotherexample.space",
"asof": "2022-03-12T16:52:03.948Z"
}
}
}
```
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc