Mirror of https://gitlab.com/soapbox-pub/ditto.git (synced 2025-12-06 11:29:46 +00:00)
Merge branch 'negative-search' into 'main'

Upgrade Nostrify to support negative search queries, remove getIdsBySearch function

See merge request soapbox-pub/ditto!650

Commit 7e459ac7d4

6 changed files with 21 additions and 185 deletions
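For context, NIP-50 search extensions are plain key:value tokens inside a filter's search string, and the negated form prefixes the key with '-'. Below is a minimal sketch of the kind of query this merge enables, assuming the NStore query(filters, { signal }) interface exported by @nostrify/nostrify; the function name and values are illustrative, not part of the change.

import type { NostrFilter, NStore } from '@nostrify/nostrify';

// Sketch: a search filter using a negative NIP-50 extension. The store can be
// any NStore (for example the upgraded @nostrify/db store), which after this
// upgrade is expected to resolve the negated extension itself.
async function searchNonEnglishNotes(store: NStore, signal?: AbortSignal) {
  const filter: NostrFilter = {
    kinds: [1],
    search: 'music -language:en', // exclude events tagged as English
    limit: 20,
  };

  return store.query([filter], { signal });
}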
@@ -48,7 +48,7 @@
     "@isaacs/ttlcache": "npm:@isaacs/ttlcache@^1.4.1",
     "@negrel/webpush": "jsr:@negrel/webpush@^0.3.0",
     "@noble/secp256k1": "npm:@noble/secp256k1@^2.0.0",
-    "@nostrify/db": "jsr:@nostrify/db@^0.38.0",
+    "@nostrify/db": "jsr:@nostrify/db@^0.39.0",
     "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.38.1",
     "@nostrify/policies": "jsr:@nostrify/policies@^0.36.1",
     "@nostrify/types": "jsr:@nostrify/types@^0.36.0",
deno.lock (generated, 12 lines changed)
@@ -31,7 +31,6 @@
     "jsr:@hono/hono@^4.4.6": "4.6.15",
     "jsr:@negrel/http-ece@0.6.0": "0.6.0",
     "jsr:@negrel/webpush@0.3": "0.3.0",
-    "jsr:@nostrify/db@0.38": "0.38.0",
     "jsr:@nostrify/nostrify@0.31": "0.31.0",
     "jsr:@nostrify/nostrify@0.32": "0.32.0",
     "jsr:@nostrify/nostrify@0.36": "0.36.2",
@@ -357,15 +356,6 @@
         "jsr:@std/path@0.224.0"
       ]
     },
-    "@nostrify/db@0.38.0": {
-      "integrity": "44118756b95f747779839f0e578a5e1dbca164ec44edb8885bd1c99840775e8a",
-      "dependencies": [
-        "jsr:@nostrify/nostrify@~0.38.1",
-        "jsr:@nostrify/types@0.36",
-        "npm:kysely@~0.27.3",
-        "npm:nostr-tools@^2.10.4"
-      ]
-    },
     "@nostrify/nostrify@0.22.4": {
       "integrity": "1c8a7847e5773213044b491e85fd7cafae2ad194ce59da4d957d2b27c776b42d",
       "dependencies": [
@@ -2372,7 +2362,7 @@
       "jsr:@gfx/canvas-wasm@~0.4.2",
       "jsr:@hono/hono@^4.4.6",
       "jsr:@negrel/webpush@0.3",
-      "jsr:@nostrify/db@0.38",
+      "jsr:@nostrify/db@0.39",
       "jsr:@nostrify/nostrify@~0.38.1",
       "jsr:@nostrify/policies@~0.36.1",
       "jsr:@nostrify/types@0.36",
@@ -11,7 +11,8 @@ import { nip05Cache } from '@/utils/nip05.ts';
 import { accountFromPubkey, renderAccount } from '@/views/mastodon/accounts.ts';
 import { renderStatus } from '@/views/mastodon/statuses.ts';
 import { getFollowedPubkeys } from '@/queries.ts';
-import { getIdsBySearch, getPubkeysBySearch } from '@/utils/search.ts';
+import { getPubkeysBySearch } from '@/utils/search.ts';
+import { paginated } from '@/utils/api.ts';
 
 const searchQuerySchema = z.object({
   q: z.string().transform(decodeURIComponent),
@@ -19,14 +20,14 @@ const searchQuerySchema = z.object({
   resolve: booleanParamSchema.optional().transform(Boolean),
   following: z.boolean().default(false),
   account_id: n.id().optional(),
   limit: z.coerce.number().catch(20).transform((value) => Math.min(Math.max(value, 0), 40)),
   offset: z.coerce.number().nonnegative().catch(0),
 });
 
-type SearchQuery = z.infer<typeof searchQuerySchema>;
+type SearchQuery = z.infer<typeof searchQuerySchema> & { since?: number; until?: number; limit: number };
 
 const searchController: AppController = async (c) => {
   const result = searchQuerySchema.safeParse(c.req.query());
+  const params = c.get('pagination');
   const { signal } = c.req.raw;
   const viewerPubkey = await c.get('signer')?.getPublicKey();
@@ -34,14 +35,14 @@ const searchController: AppController = async (c) => {
     return c.json({ error: 'Bad request', schema: result.error }, 422);
   }
 
-  const event = await lookupEvent(result.data, signal);
+  const event = await lookupEvent({ ...result.data, ...params }, signal);
   const lookup = extractIdentifier(result.data.q);
 
   // Render account from pubkey.
   if (!event && lookup) {
     const pubkey = await lookupPubkey(lookup);
     return c.json({
-      accounts: pubkey ? [await accountFromPubkey(pubkey)] : [],
+      accounts: pubkey ? [accountFromPubkey(pubkey)] : [],
       statuses: [],
       hashtags: [],
     });
@@ -52,7 +53,8 @@ const searchController: AppController = async (c) => {
   if (event) {
     events = [event];
   }
-  events.push(...(await searchEvents({ ...result.data, viewerPubkey }, signal)));
+
+  events.push(...(await searchEvents({ ...result.data, ...params, viewerPubkey }, signal)));
 
   const [accounts, statuses] = await Promise.all([
     Promise.all(
@@ -69,16 +71,18 @@ const searchController: AppController = async (c) => {
     ),
   ]);
 
-  return c.json({
+  const body = {
     accounts,
     statuses,
     hashtags: [],
-  });
+  };
+
+  return paginated(c, events, body);
 };
 
 /** Get events for the search params. */
 async function searchEvents(
-  { q, type, limit, offset, account_id, viewerPubkey }: SearchQuery & { viewerPubkey?: string },
+  { q, type, since, until, limit, offset, account_id, viewerPubkey }: SearchQuery & { viewerPubkey?: string },
   signal: AbortSignal,
 ): Promise<NostrEvent[]> {
   // Hashtag search is not supported.
@@ -91,6 +95,8 @@ async function searchEvents(
   const filter: NostrFilter = {
     kinds: typeToKinds(type),
     search: q,
+    since,
+    until,
     limit,
   };
@@ -105,13 +111,6 @@ async function searchEvents(
     filter.search = undefined;
   }
 
-  // For status search, use a specific query so it supports offset and is open to customizations.
-  if (type === 'statuses') {
-    const ids = await getIdsBySearch(kysely, { q, limit, offset });
-    filter.ids = [...ids];
-    filter.search = undefined;
-  }
-
   // Results should only be shown from one author.
   if (account_id) {
     filter.authors = [account_id];
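For orientation, the controller changes above replace the SQL offset lookup with a time window taken from the pagination middleware and passed straight into the search filter. A rough sketch of the resulting status-search path, assuming the pagination value has the { since?, until?, limit } shape implied by the new SearchQuery type; the function and parameter names here are illustrative.

import type { NostrEvent, NostrFilter, NStore } from '@nostrify/nostrify';

interface PaginationParams {
  since?: number;
  until?: number;
  limit: number;
}

// Illustrative version of the status-search path after this change: the
// pagination window and the raw NIP-50 query go into one filter handed to
// the store, with no getIdsBySearch round-trip through Postgres.
async function searchStatuses(
  store: NStore,
  q: string,
  params: PaginationParams,
  signal?: AbortSignal,
): Promise<NostrEvent[]> {
  const filter: NostrFilter = {
    kinds: [1],
    search: q, // may include extensions, e.g. 'cats -language:en'
    since: params.since,
    until: params.until,
    limit: params.limit,
  };

  return store.query([filter], { signal });
}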
@@ -207,12 +207,10 @@ function buildLinkHeader(url: string, events: NostrEvent[]): string | undefined
   return `<${next}>; rel="next", <${prev}>; rel="prev"`;
 }
 
-// deno-lint-ignore ban-types
-type Entity = {};
 type HeaderRecord = Record<string, string | string[]>;
 
 /** Return results with pagination headers. Assumes chronological sorting of events. */
-function paginated(c: AppContext, events: NostrEvent[], entities: (Entity | undefined)[], headers: HeaderRecord = {}) {
+function paginated(c: AppContext, events: NostrEvent[], body: object | unknown[], headers: HeaderRecord = {}) {
   const link = buildLinkHeader(c.req.url, events);
 
   if (link) {
@@ -220,7 +218,7 @@ function paginated(c: AppContext, events: NostrEvent[], entities: (Entity | unde
   }
 
   // Filter out undefined entities.
-  const results = entities.filter((entity): entity is Entity => Boolean(entity));
+  const results = Array.isArray(body) ? body.filter(Boolean) : body;
   return c.json(results, 200, headers);
 }
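The widened body parameter lets callers pass either an entity array (compacted for undefined values, as before) or a plain object such as the search response (passed through as-is, gaining only the Link pagination headers). A minimal illustration of that behavior, not the actual Ditto helper:

// Minimal illustration of the new body handling: arrays are compacted
// before serialization, objects pass through untouched.
function normalizeBody(body: object | unknown[]): object | unknown[] {
  return Array.isArray(body) ? body.filter(Boolean) : body;
}

console.log(normalizeBody([{ id: '1' }, undefined, { id: '2' }])); // [{ id: "1" }, { id: "2" }]
console.log(normalizeBody({ accounts: [], statuses: [], hashtags: [] })); // unchanged object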
@@ -1,7 +1,7 @@
 import { assertEquals } from '@std/assert';
 
-import { createTestDB, genEvent } from '@/test.ts';
-import { getIdsBySearch, getPubkeysBySearch } from '@/utils/search.ts';
+import { createTestDB } from '@/test.ts';
+import { getPubkeysBySearch } from '@/utils/search.ts';
 
 Deno.test('fuzzy search works', async () => {
   await using db = await createTestDB();
@@ -48,47 +48,3 @@ Deno.test('fuzzy search works with offset', async () => {
     new Set(),
   );
 });
-
-Deno.test('Searching for posts work', async () => {
-  await using db = await createTestDB();
-
-  const event = genEvent({ content: "I'm not an orphan. Death is my importance", kind: 1 });
-  await db.store.event(event);
-  await db.kysely.updateTable('nostr_events').set('search_ext', { language: 'en' }).where('id', '=', event.id)
-    .execute();
-
-  const event2 = genEvent({ content: 'The more I explore is the more I fall in love with the music I make.', kind: 1 });
-  await db.store.event(event2);
-  await db.kysely.updateTable('nostr_events').set('search_ext', { language: 'en' }).where('id', '=', event2.id)
-    .execute();
-
-  assertEquals(
-    await getIdsBySearch(db.kysely, { q: 'Death is my importance', limit: 1, offset: 0 }), // ordered words
-    new Set([event.id]),
-  );
-
-  assertEquals(
-    await getIdsBySearch(db.kysely, { q: 'make I music', limit: 1, offset: 0 }), // reversed words
-    new Set([event2.id]),
-  );
-
-  assertEquals(
-    await getIdsBySearch(db.kysely, { q: 'language:en make I music', limit: 10, offset: 0 }), // reversed words, english
-    new Set([event2.id]),
-  );
-
-  assertEquals(
-    await getIdsBySearch(db.kysely, { q: 'language:en an orphan', limit: 10, offset: 0 }), // all posts in english plus search
-    new Set([event.id]),
-  );
-
-  assertEquals(
-    await getIdsBySearch(db.kysely, { q: 'language:en', limit: 10, offset: 0 }), // all posts in english
-    new Set([event.id, event2.id]),
-  );
-
-  assertEquals(
-    await getIdsBySearch(db.kysely, { q: '', limit: 10, offset: 0 }),
-    new Set(),
-  );
-});
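The deleted cases exercised getIdsBySearch directly; after this merge the equivalent behavior belongs to the store. A hedged sketch of how similar coverage might look against the store's NIP-50 search, reusing the repo's createTestDB and genEvent helpers and assuming the test store matches plain words against event content:

import { assertEquals } from '@std/assert';

import { createTestDB, genEvent } from '@/test.ts';

// Sketch only: exercises search through the store instead of getIdsBySearch.
// Assumes db.store implements NStore.query with NIP-50 search support.
Deno.test('store-level search finds matching posts', async () => {
  await using db = await createTestDB();

  const match = genEvent({ content: 'The more I explore is the more I fall in love with the music I make.', kind: 1 });
  const other = genEvent({ content: "I'm not an orphan. Death is my importance", kind: 1 });
  await db.store.event(match);
  await db.store.event(other);

  const results = await db.store.query([{ kinds: [1], search: 'music', limit: 10 }]);
  assertEquals(results.map((e) => e.id), [match.id]);
});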
@@ -1,7 +1,6 @@
 import { Kysely, sql } from 'kysely';
 
 import { DittoTables } from '@/db/DittoTables.ts';
-import { NIP50 } from '@nostrify/nostrify';
 
 /** Get pubkeys whose name and NIP-05 is similar to 'q' */
 export async function getPubkeysBySearch(
@@ -33,109 +32,3 @@ export async function getPubkeysBySearch(
 
   return new Set(Array.from(followingPubkeys.union(pubkeys)));
 }
-
-/**
- * Get kind 1 ids whose content matches `q`.
- * It supports NIP-50 extensions.
- */
-export async function getIdsBySearch(
-  kysely: Kysely<DittoTables>,
-  opts: { q: string; limit: number; offset: number },
-): Promise<Set<string>> {
-  const { q, limit, offset } = opts;
-
-  const [lexemes] = (await sql<{ phraseto_tsquery: 'string' }>`SELECT phraseto_tsquery(${q})`.execute(kysely)).rows;
-
-  // if it's just stop words, don't bother making a request to the database
-  if (!lexemes.phraseto_tsquery) {
-    return new Set();
-  }
-
-  const tokens = NIP50.parseInput(q);
-
-  const ext: Record<string, string[]> = {};
-  const txt = tokens.filter((token) => typeof token === 'string').join(' ');
-
-  let query = kysely
-    .selectFrom('nostr_events')
-    .select('id')
-    .where('kind', '=', 1)
-    .orderBy(['created_at desc'])
-    .limit(limit)
-    .offset(offset);
-
-  const domains = new Set<string>();
-
-  for (const token of tokens) {
-    if (typeof token === 'object' && token.key === 'domain') {
-      domains.add(token.value);
-    }
-  }
-
-  for (const token of tokens) {
-    if (typeof token === 'object') {
-      ext[token.key] ??= [];
-      ext[token.key].push(token.value);
-    }
-  }
-
-  for (let [key, values] of Object.entries(ext)) {
-    if (key === 'domain' || key === '-domain') continue;
-
-    let negated = false;
-
-    if (key.startsWith('-')) {
-      key = key.slice(1);
-      negated = true;
-    }
-
-    query = query.where((eb) => {
-      if (negated) {
-        return eb.and(
-          values.map((value) => eb.not(eb('nostr_events.search_ext', '@>', { [key]: value }))),
-        );
-      } else {
-        return eb.or(
-          values.map((value) => eb('nostr_events.search_ext', '@>', { [key]: value })),
-        );
-      }
-    });
-  }
-
-  if (domains.size) {
-    const pubkeys = (await kysely
-      .selectFrom('pubkey_domains')
-      .select('pubkey')
-      .where('domain', 'in', [...domains])
-      .execute()).map(({ pubkey }) => pubkey);
-
-    query = query.where('pubkey', 'in', pubkeys);
-  }
-
-  // If there is not a specific content to search, return the query already
-  // This is useful if the person only makes a query search such as `domain:patrickdosreis.com`
-  if (!txt.length) {
-    const ids = new Set((await query.execute()).map(({ id }) => id));
-    return ids;
-  }
-
-  let fallbackQuery = query;
-  if (txt) {
-    query = query.where('search', '@@', sql`phraseto_tsquery(${txt})`);
-  }
-
-  const ids = new Set((await query.execute()).map(({ id }) => id));
-
-  // If there is no ids, fallback to `plainto_tsquery`
-  if (!ids.size) {
-    fallbackQuery = fallbackQuery.where(
-      'search',
-      '@@',
-      sql`plainto_tsquery(${txt})`,
-    );
-    const ids = new Set((await fallbackQuery.execute()).map(({ id }) => id));
-    return ids;
-  }
-
-  return ids;
-}
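For reference, the deleted helper relied on NIP50.parseInput to split a query into plain words and key:value extensions, treating a leading '-' on a key as negation; that responsibility now sits in the upgraded library. A small sketch of that tokenization, with the output shape shown as the deleted code assumed it (the example query and domain are illustrative):

import { NIP50 } from '@nostrify/nostrify';

// The deleted getIdsBySearch treated '-key:value' tokens as negated
// extensions; the upgraded @nostrify/db store now applies this itself.
const tokens = NIP50.parseInput('make music -language:en domain:example.com');
// tokens ≈ ['make', 'music', { key: '-language', value: 'en' }, { key: 'domain', value: 'example.com' }]

const words = tokens.filter((token) => typeof token === 'string').join(' ');
console.log(words); // "make music"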