Remove hashes table, add Weasyl lookups (#7)

Syfaro 2021-04-19 17:22:40 -04:00 committed by GitHub
parent bc278441b6
commit bd1238b34a
9 changed files with 340 additions and 199 deletions

View File

@@ -26,8 +26,6 @@ impl std::str::FromStr for Rating {
 /// A general type for every result in a search.
 #[derive(Clone, Debug, Default, Deserialize, Serialize)]
 pub struct SearchResult {
-    pub id: i32,
     pub site_id: i64,
     pub site_id_str: String,
@@ -60,4 +58,5 @@ pub enum SiteInfo {
         sources: Option<Vec<String>>,
     },
     Twitter,
+    Weasyl,
 }
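The new `Weasyl` variant is a unit variant like `Twitter`, so consumers of `SiteInfo` only need one extra match arm. A minimal sketch; the `site_name` helper is hypothetical and not part of this commit:

```rust
// Hypothetical helper, not in this commit: map each SiteInfo variant to a
// display name. The variant shapes match the enum as changed above.
fn site_name(info: &SiteInfo) -> &'static str {
    match info {
        SiteInfo::FurAffinity { .. } => "FurAffinity",
        SiteInfo::E621 { .. } => "e621",
        SiteInfo::Twitter => "Twitter",
        SiteInfo::Weasyl => "Weasyl",
    }
}
```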

View File

@@ -23,81 +23,25 @@
         ]
       }
     },
-  "1bd0057782de5a3b41f90081a31d24d14bb70299391050c3404742a6d2915d9e": {
-    "query": "SELECT\n hashes.id,\n hashes.hash,\n hashes.furaffinity_id,\n hashes.e621_id,\n hashes.twitter_id,\n CASE\n WHEN furaffinity_id IS NOT NULL THEN (f.url)\n WHEN e621_id IS NOT NULL THEN (e.data->'file'->>'url')\n WHEN twitter_id IS NOT NULL THEN (tm.url)\n END url,\n CASE\n WHEN furaffinity_id IS NOT NULL THEN (f.filename)\n WHEN e621_id IS NOT NULL THEN ((e.data->'file'->>'md5') || '.' || (e.data->'file'->>'ext'))\n WHEN twitter_id IS NOT NULL THEN (SELECT split_part(split_part(tm.url, '/', 5), ':', 1))\n END filename,\n CASE\n WHEN furaffinity_id IS NOT NULL THEN (ARRAY(SELECT f.name))\n WHEN e621_id IS NOT NULL THEN ARRAY(SELECT jsonb_array_elements_text(e.data->'tags'->'artist'))\n WHEN twitter_id IS NOT NULL THEN ARRAY(SELECT tw.data->'user'->>'screen_name')\n END artists,\n CASE\n WHEN furaffinity_id IS NOT NULL THEN (f.file_id)\n END file_id,\n CASE\n WHEN e621_id IS NOT NULL THEN ARRAY(SELECT jsonb_array_elements_text(e.data->'sources'))\n END sources,\n CASE\n WHEN furaffinity_id IS NOT NULL THEN (f.rating)\n WHEN e621_id IS NOT NULL THEN (e.data->>'rating')\n WHEN twitter_id IS NOT NULL THEN\n CASE\n WHEN (tw.data->'possibly_sensitive')::boolean IS true THEN 'adult'\n WHEN (tw.data->'possibly_sensitive')::boolean IS false THEN 'general'\n END\n END rating\n FROM\n hashes\n LEFT JOIN LATERAL (\n SELECT *\n FROM submission\n JOIN artist ON submission.artist_id = artist.id\n WHERE submission.id = hashes.furaffinity_id\n ) f ON hashes.furaffinity_id IS NOT NULL\n LEFT JOIN LATERAL (\n SELECT *\n FROM e621\n WHERE e621.id = hashes.e621_id\n ) e ON hashes.e621_id IS NOT NULL\n LEFT JOIN LATERAL (\n SELECT *\n FROM tweet\n WHERE tweet.id = hashes.twitter_id\n ) tw ON hashes.twitter_id IS NOT NULL\n LEFT JOIN LATERAL (\n SELECT *\n FROM tweet_media\n WHERE\n tweet_media.tweet_id = hashes.twitter_id AND\n tweet_media.hash <@ (hashes.hash, 0)\n LIMIT 1\n ) tm ON hashes.twitter_id IS NOT NULL\n WHERE hashes.id = $1",
+  "1fc936eeccf8260ca00d7758efdbbb74bd58900107d9dc05f881555f068238c6": {
+    "query": "SELECT id, hash_int hash FROM submission WHERE hash_int IS NOT NULL\n UNION ALL\n SELECT id, hash FROM e621 WHERE hash IS NOT NULL\n UNION ALL\n SELECT tweet_id, hash FROM tweet_media WHERE hash IS NOT NULL\n UNION ALL\n SELECT id, hash FROM weasyl WHERE hash IS NOT NULL",
     "describe": {
       "columns": [
         {
           "ordinal": 0,
           "name": "id",
-          "type_info": "Int4"
+          "type_info": "Int8"
         },
         {
           "ordinal": 1,
           "name": "hash",
           "type_info": "Int8"
-        },
-        {
-          "ordinal": 2,
-          "name": "furaffinity_id",
-          "type_info": "Int4"
-        },
-        {
-          "ordinal": 3,
-          "name": "e621_id",
-          "type_info": "Int4"
-        },
-        {
-          "ordinal": 4,
-          "name": "twitter_id",
-          "type_info": "Int8"
-        },
-        {
-          "ordinal": 5,
-          "name": "url",
-          "type_info": "Text"
-        },
-        {
-          "ordinal": 6,
-          "name": "filename",
-          "type_info": "Text"
-        },
-        {
-          "ordinal": 7,
-          "name": "artists",
-          "type_info": "TextArray"
-        },
-        {
-          "ordinal": 8,
-          "name": "file_id",
-          "type_info": "Int4"
-        },
-        {
-          "ordinal": 9,
-          "name": "sources",
-          "type_info": "TextArray"
-        },
-        {
-          "ordinal": 10,
-          "name": "rating",
-          "type_info": "Bpchar"
         }
       ],
       "parameters": {
-        "Left": [
-          "Int4"
-        ]
+        "Left": []
       },
       "nullable": [
-        false,
-        false,
-        true,
-        true,
-        true,
-        null,
-        null,
-        null,
-        null,
         null,
         null
       ]
@@ -173,27 +117,71 @@
       ]
     }
   },
-  "fe60be66b2d8a8f02b3bfe06d1f0e57e4bb07e80cba1b379a5f17f6cbd8b075c": {
-    "query": "SELECT id, hash FROM hashes",
+  "f798404b69897f47a732fb6a8e08e843f233803aea5d5a7f04087619dbb55626": {
+    "query": "SELECT\n 'FurAffinity' site,\n submission.id,\n submission.hash_int hash,\n submission.url,\n submission.filename,\n ARRAY(SELECT artist.name) artists,\n submission.file_id,\n null sources,\n submission.rating\n FROM submission\n JOIN artist ON submission.artist_id = artist.id\n WHERE hash_int <@ ($1, 0)\n UNION\n SELECT\n 'e621' site,\n e621.id,\n e621.hash,\n e621.data->'file'->>'url' url,\n (e621.data->'file'->>'md5') || '.' || (e621.data->'file'->>'ext') filename,\n ARRAY(SELECT jsonb_array_elements_text(e621.data->'tags'->'artist')) artists,\n null file_id,\n ARRAY(SELECT jsonb_array_elements_text(e621.data->'sources')) sources,\n e621.data->>'rating' rating\n FROM e621\n WHERE hash <@ ($1, 0)\n UNION\n SELECT\n 'Weasyl' site,\n weasyl.id,\n weasyl.hash,\n weasyl.data->>'link' url,\n null filename,\n ARRAY(SELECT weasyl.data->>'owner_login') artists,\n null file_id,\n null sources,\n weasyl.data->>'rating' rating\n FROM weasyl\n WHERE hash <@ ($1, 0)\n UNION\n SELECT\n 'Twitter' site,\n tweet.id,\n tweet_media.hash,\n tweet_media.url,\n null filename,\n ARRAY(SELECT tweet.data->'user'->>'screen_name') artists,\n null file_id,\n null sources,\n CASE\n WHEN (tweet.data->'possibly_sensitive')::boolean IS true THEN 'adult'\n WHEN (tweet.data->'possibly_sensitive')::boolean IS false THEN 'general'\n END rating\n FROM tweet_media\n JOIN tweet ON tweet_media.tweet_id = tweet.id\n WHERE hash <@ ($1, 0)",
     "describe": {
       "columns": [
         {
           "ordinal": 0,
-          "name": "id",
-          "type_info": "Int4"
+          "name": "site",
+          "type_info": "Text"
         },
         {
           "ordinal": 1,
+          "name": "id",
+          "type_info": "Int8"
+        },
+        {
+          "ordinal": 2,
           "name": "hash",
           "type_info": "Int8"
+        },
+        {
+          "ordinal": 3,
+          "name": "url",
+          "type_info": "Text"
+        },
+        {
+          "ordinal": 4,
+          "name": "filename",
+          "type_info": "Text"
+        },
+        {
+          "ordinal": 5,
+          "name": "artists",
+          "type_info": "TextArray"
+        },
+        {
+          "ordinal": 6,
+          "name": "file_id",
+          "type_info": "Int4"
+        },
+        {
+          "ordinal": 7,
+          "name": "sources",
+          "type_info": "TextArray"
+        },
+        {
+          "ordinal": 8,
+          "name": "rating",
+          "type_info": "Bpchar"
         }
       ],
       "parameters": {
-        "Left": []
+        "Left": [
+          "Int8"
+        ]
       },
       "nullable": [
-        false,
-        false
+        null,
+        null,
+        null,
+        null,
+        null,
+        null,
+        null,
+        null,
+        null
       ]
     }
   }
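Aside: `sqlx-data.json` is the offline query cache produced by sqlx's `cargo sqlx prepare`; each top-level key is a hash of the query text, so a changed query appears as a removed entry plus an added entry rather than an edit in place. The two new entries correspond to the queries introduced below: the UNION ALL scan that seeds the BK-tree, and the per-site UNION lookup keyed on `hash <@ ($1, 0)`.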

View File

@@ -418,7 +418,6 @@ pub async fn search_file(
     let matches: Result<Vec<SearchResult>, _> = query
         .map(|row| SearchResult {
-            id: row.get("hash_id"),
             site_id: row.get::<i32, _>("id") as i64,
             site_id_str: row.get::<i32, _>("id").to_string(),
             url: row.get("url"),

View File

@@ -14,14 +14,19 @@ type Tree = Arc<RwLock<bk_tree::BKTree<Node, Hamming>>>;
 type Pool = sqlx::PgPool;
 
 #[derive(Debug)]
-pub struct Node {
-    id: i32,
-    hash: [u8; 8],
-}
+pub struct Node(pub [u8; 8]);
 
 impl Node {
+    pub fn new(hash: i64) -> Self {
+        Self(hash.to_be_bytes())
+    }
+
     pub fn query(hash: [u8; 8]) -> Self {
-        Self { id: -1, hash }
+        Self(hash)
+    }
+
+    pub fn num(&self) -> i64 {
+        i64::from_be_bytes(self.0)
     }
 }
@@ -29,7 +34,7 @@ pub struct Hamming;
 impl bk_tree::Metric<Node> for Hamming {
     fn distance(&self, a: &Node, b: &Node) -> u64 {
-        hamming::distance_fast(&a.hash, &b.hash).unwrap()
+        hamming::distance_fast(&a.0, &b.0).unwrap()
     }
 }
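Taken together, `Node` is now just the big-endian bytes of a signed 64-bit perceptual hash, and `Hamming` counts the bits that differ. A small usage sketch mirroring how `create_tree` and the query path use these types; it assumes the `Node` and `Hamming` definitions above are in scope, and the literal hash values are illustrative:

```rust
fn demo() {
    let mut tree = bk_tree::BKTree::new(Hamming);

    // Hashes are stored as their big-endian byte representation.
    tree.add(Node::new(0x00FF00FF00FF00FF));

    // Deduplicate before inserting, as create_tree does below.
    if tree.find_exact(&Node::new(0x1234)).is_none() {
        tree.add(Node::new(0x1234));
    }

    // Find every stored hash within Hamming distance 3 of the query.
    for (dist, node) in tree.find(&Node::query(0x00FF00FF00FF00FEi64.to_be_bytes()), 3) {
        println!("distance {}: hash {}", dist, node.num());
    }
}
```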
@@ -167,7 +172,6 @@ async fn serve_metrics() {
 #[derive(serde::Deserialize)]
 struct HashRow {
-    id: i32,
     hash: i64,
 }
@@ -176,13 +180,25 @@ async fn create_tree(conn: &Pool) -> bk_tree::BKTree<Node, Hamming> {
     let mut tree = bk_tree::BKTree::new(Hamming);
 
-    let mut rows = sqlx::query_as!(HashRow, "SELECT id, hash FROM hashes").fetch(conn);
+    let mut rows = sqlx::query!(
+        "SELECT id, hash_int hash FROM submission WHERE hash_int IS NOT NULL
+        UNION ALL
+        SELECT id, hash FROM e621 WHERE hash IS NOT NULL
+        UNION ALL
+        SELECT tweet_id, hash FROM tweet_media WHERE hash IS NOT NULL
+        UNION ALL
+        SELECT id, hash FROM weasyl WHERE hash IS NOT NULL"
+    )
+    .fetch(conn);
 
     while let Some(row) = rows.try_next().await.expect("Unable to get row") {
-        tree.add(Node {
-            id: row.id,
-            hash: row.hash.to_be_bytes(),
-        })
+        if let Some(hash) = row.hash {
+            if tree.find_exact(&Node::new(hash)).is_some() {
+                continue;
+            }
+
+            tree.add(Node::new(hash));
+        }
     }
 
     tree
@@ -207,13 +223,16 @@ async fn load_updates(conn: Pool, tree: Tree) {
         .expect("Unable to recv notification")
     {
         let payload: HashRow = serde_json::from_str(notification.payload()).unwrap();
-        tracing::debug!(id = payload.id, "Adding new hash to tree");
+        tracing::debug!(hash = payload.hash, "Adding new hash to tree");
+
+        let lock = tree.read().await;
+        if lock.find_exact(&Node::new(payload.hash)).is_some() {
+            continue;
+        }
+        drop(lock);
 
         let mut lock = tree.write().await;
-        lock.add(Node {
-            id: payload.id,
-            hash: payload.hash.to_be_bytes(),
-        });
+        lock.add(Node(payload.hash.to_be_bytes()));
         drop(lock);
     }
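The notification payload now carries only the hash, matching the slimmed `HashRow`. A sketch of the listening side, assuming sqlx's `PgListener`; the listener setup is outside this diff, and only the channel name and payload shape come from it:

```rust
use sqlx::postgres::PgListener;

// Sketch: consume hashes broadcast by the update_notify_* triggers added in
// the migrations below.
async fn listen_for_hashes(pool: &sqlx::PgPool) -> Result<(), sqlx::Error> {
    let mut listener = PgListener::connect_with(pool).await?;
    listener.listen("fuzzysearch_hash_added").await?;

    loop {
        let notification = listener.recv().await?;
        // Payloads look like {"hash": 1234567890}.
        let payload: HashRow = serde_json::from_str(notification.payload()).unwrap();
        tracing::debug!(hash = payload.hash, "Received hash notification");
    }
}
```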

View File

@@ -1,9 +1,12 @@
+use std::collections::HashSet;
+
 use lazy_static::lazy_static;
 use prometheus::{register_histogram, Histogram};
 use tracing_futures::Instrument;
 
 use crate::types::*;
 use crate::{Pool, Tree};
+use futures::TryStreamExt;
 use fuzzysearch_common::types::{SearchResult, SiteInfo};
 
 lazy_static! {
@@ -73,11 +76,14 @@ pub fn image_query_sync(
 ) -> tokio::sync::mpsc::Receiver<Result<Vec<SearchResult>, sqlx::Error>> {
     let (tx, rx) = tokio::sync::mpsc::channel(50);
 
-    tokio::spawn(async move {
+    tokio::spawn(
+        async move {
         let db = pool;
 
         for query_hash in hashes {
-            let mut seen = std::collections::HashSet::new();
+            tracing::trace!(query_hash, "Evaluating hash");
+
+            let mut seen: HashSet<[u8; 8]> = HashSet::new();
 
             let _timer = IMAGE_LOOKUP_DURATION.start_timer();
@@ -86,104 +92,91 @@
             let items = lock.find(&node, distance as u64);
 
             for (dist, item) in items {
-                if seen.contains(&item.id) {
+                if seen.contains(&item.0) {
+                    tracing::trace!("Already searched for hash");
                     continue;
                 }
 
-                seen.insert(item.id);
+                seen.insert(item.0);
 
                 let _timer = IMAGE_QUERY_DURATION.start_timer();
 
-                let row = sqlx::query!("SELECT
-                    hashes.id,
-                    hashes.hash,
-                    hashes.furaffinity_id,
-                    hashes.e621_id,
-                    hashes.twitter_id,
-                    CASE
-                        WHEN furaffinity_id IS NOT NULL THEN (f.url)
-                        WHEN e621_id IS NOT NULL THEN (e.data->'file'->>'url')
-                        WHEN twitter_id IS NOT NULL THEN (tm.url)
-                    END url,
-                    CASE
-                        WHEN furaffinity_id IS NOT NULL THEN (f.filename)
-                        WHEN e621_id IS NOT NULL THEN ((e.data->'file'->>'md5') || '.' || (e.data->'file'->>'ext'))
-                        WHEN twitter_id IS NOT NULL THEN (SELECT split_part(split_part(tm.url, '/', 5), ':', 1))
-                    END filename,
-                    CASE
-                        WHEN furaffinity_id IS NOT NULL THEN (ARRAY(SELECT f.name))
-                        WHEN e621_id IS NOT NULL THEN ARRAY(SELECT jsonb_array_elements_text(e.data->'tags'->'artist'))
-                        WHEN twitter_id IS NOT NULL THEN ARRAY(SELECT tw.data->'user'->>'screen_name')
-                    END artists,
-                    CASE
-                        WHEN furaffinity_id IS NOT NULL THEN (f.file_id)
-                    END file_id,
-                    CASE
-                        WHEN e621_id IS NOT NULL THEN ARRAY(SELECT jsonb_array_elements_text(e.data->'sources'))
-                    END sources,
-                    CASE
-                        WHEN furaffinity_id IS NOT NULL THEN (f.rating)
-                        WHEN e621_id IS NOT NULL THEN (e.data->>'rating')
-                        WHEN twitter_id IS NOT NULL THEN
-                            CASE
-                                WHEN (tw.data->'possibly_sensitive')::boolean IS true THEN 'adult'
-                                WHEN (tw.data->'possibly_sensitive')::boolean IS false THEN 'general'
-                            END
-                    END rating
-                FROM
-                    hashes
-                LEFT JOIN LATERAL (
-                    SELECT *
-                    FROM submission
-                    JOIN artist ON submission.artist_id = artist.id
-                    WHERE submission.id = hashes.furaffinity_id
-                ) f ON hashes.furaffinity_id IS NOT NULL
-                LEFT JOIN LATERAL (
-                    SELECT *
-                    FROM e621
-                    WHERE e621.id = hashes.e621_id
-                ) e ON hashes.e621_id IS NOT NULL
-                LEFT JOIN LATERAL (
-                    SELECT *
-                    FROM tweet
-                    WHERE tweet.id = hashes.twitter_id
-                ) tw ON hashes.twitter_id IS NOT NULL
-                LEFT JOIN LATERAL (
-                    SELECT *
-                    FROM tweet_media
-                    WHERE
-                        tweet_media.tweet_id = hashes.twitter_id AND
-                        tweet_media.hash <@ (hashes.hash, 0)
-                    LIMIT 1
-                ) tm ON hashes.twitter_id IS NOT NULL
-                WHERE hashes.id = $1", item.id).map(|row| {
-                    let (site_id, site_info) = if let Some(fa_id) = row.furaffinity_id {
-                        (
-                            fa_id as i64,
-                            Some(SiteInfo::FurAffinity {
-                                file_id: row.file_id.unwrap(),
-                            })
-                        )
-                    } else if let Some(e621_id) = row.e621_id {
-                        (
-                            e621_id as i64,
-                            Some(SiteInfo::E621 {
-                                sources: row.sources,
-                            })
-                        )
-                    } else if let Some(twitter_id) = row.twitter_id {
-                        (twitter_id, Some(SiteInfo::Twitter))
-                    } else {
-                        (-1, None)
+                tracing::debug!(num = item.num(), "Searching database for hash in tree");
+
+                let mut row = sqlx::query!(
+                    "SELECT
+                        'FurAffinity' site,
+                        submission.id,
+                        submission.hash_int hash,
+                        submission.url,
+                        submission.filename,
+                        ARRAY(SELECT artist.name) artists,
+                        submission.file_id,
+                        null sources,
+                        submission.rating
+                    FROM submission
+                    JOIN artist ON submission.artist_id = artist.id
+                    WHERE hash_int <@ ($1, 0)
+                    UNION
+                    SELECT
+                        'e621' site,
+                        e621.id,
+                        e621.hash,
+                        e621.data->'file'->>'url' url,
+                        (e621.data->'file'->>'md5') || '.' || (e621.data->'file'->>'ext') filename,
+                        ARRAY(SELECT jsonb_array_elements_text(e621.data->'tags'->'artist')) artists,
+                        null file_id,
+                        ARRAY(SELECT jsonb_array_elements_text(e621.data->'sources')) sources,
+                        e621.data->>'rating' rating
+                    FROM e621
+                    WHERE hash <@ ($1, 0)
+                    UNION
+                    SELECT
+                        'Weasyl' site,
+                        weasyl.id,
+                        weasyl.hash,
+                        weasyl.data->>'link' url,
+                        null filename,
+                        ARRAY(SELECT weasyl.data->>'owner_login') artists,
+                        null file_id,
+                        null sources,
+                        weasyl.data->>'rating' rating
+                    FROM weasyl
+                    WHERE hash <@ ($1, 0)
+                    UNION
+                    SELECT
+                        'Twitter' site,
+                        tweet.id,
+                        tweet_media.hash,
+                        tweet_media.url,
+                        null filename,
+                        ARRAY(SELECT tweet.data->'user'->>'screen_name') artists,
+                        null file_id,
+                        null sources,
+                        CASE
+                            WHEN (tweet.data->'possibly_sensitive')::boolean IS true THEN 'adult'
+                            WHEN (tweet.data->'possibly_sensitive')::boolean IS false THEN 'general'
+                        END rating
+                    FROM tweet_media
+                    JOIN tweet ON tweet_media.tweet_id = tweet.id
+                    WHERE hash <@ ($1, 0)",
+                    &item.num()
+                )
+                .map(|row| {
+                    let site_info = match row.site.as_deref() {
+                        Some("FurAffinity") => SiteInfo::FurAffinity { file_id: row.file_id.unwrap_or(-1) },
+                        Some("e621") => SiteInfo::E621 { sources: row.sources },
+                        Some("Twitter") => SiteInfo::Twitter,
+                        Some("Weasyl") => SiteInfo::Weasyl,
+                        _ => panic!("Got unknown site"),
                     };
 
                     let file = SearchResult {
-                        id: row.id,
-                        site_id,
-                        site_info,
+                        site_id: row.id.unwrap_or_default(),
+                        site_info: Some(site_info),
                         rating: row.rating.and_then(|rating| rating.parse().ok()),
-                        site_id_str: site_id.to_string(),
+                        site_id_str: row.id.unwrap_or_default().to_string(),
                         url: row.url.unwrap_or_default(),
-                        hash: Some(row.hash),
+                        hash: row.hash,
                         distance: Some(dist),
                         artists: row.artists,
                         filename: row.filename.unwrap_or_default(),
@@ -191,12 +184,17 @@ pub fn image_query_sync(
                     };
 
                     vec![file]
-                }).fetch_one(&db).await;
+                })
+                .fetch(&db);
 
-                tx.send(row).await.unwrap();
+                while let Some(row) = row.try_next().await.ok().flatten() {
+                    tx.send(Ok(row)).await.unwrap();
+                }
             }
         }
-    }.in_current_span());
+        }
+        .in_current_span(),
+    );
 
     rx
 }
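The functional change in this hunk: the lookup used to `fetch_one` a single row per tree hit, while the new UNION query can legitimately return several rows for the same hash (one per site), so results are now streamed and forwarded individually, which is what the `futures::TryStreamExt` import at the top of the file is for. The streaming pattern in isolation, as a sketch with an illustrative query:

```rust
use futures::TryStreamExt;

// Sketch: stream query results and forward each row down a channel instead
// of awaiting a single fetch_one().
async fn forward_ids(
    pool: &sqlx::PgPool,
    tx: tokio::sync::mpsc::Sender<i64>,
) -> Result<(), sqlx::Error> {
    let mut rows = sqlx::query_scalar::<_, i64>("SELECT id FROM weasyl").fetch(pool);

    while let Some(id) = rows.try_next().await? {
        tx.send(id).await.expect("receiver dropped");
    }

    Ok(())
}
```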

View File

@@ -0,0 +1,4 @@
+DROP INDEX bk_furaffinity_hash;
+DROP INDEX bk_e621_hash;
+DROP INDEX bk_twitter_hash;
+DROP INDEX bk_weasyl_hash;

View File

@@ -0,0 +1,4 @@
+CREATE INDEX bk_furaffinity_hash ON submission USING spgist (hash_int bktree_ops);
+CREATE INDEX bk_e621_hash ON e621 USING spgist (hash bktree_ops);
+CREATE INDEX bk_twitter_hash ON tweet_media USING spgist (hash bktree_ops);
+CREATE INDEX bk_weasyl_hash ON weasyl USING spgist (hash bktree_ops);
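These indexes assume a PostgreSQL bk-tree extension providing the `bktree_ops` SP-GiST operator class, whose `<@` operator matches rows whose hash lies within a given Hamming distance of the query; the queries above always use `hash <@ ($1, 0)`, i.e. exact matches, and leave the fuzzy search to the in-memory tree. A sketch of a direct fuzzy lookup against one of these indexes, under that assumption:

```rust
// Sketch: ask Postgres for Weasyl rows within Hamming distance 3, letting
// the SP-GiST bktree index do the pruning.
async fn nearby_weasyl_ids(pool: &sqlx::PgPool, hash: i64) -> Result<Vec<i64>, sqlx::Error> {
    sqlx::query_scalar("SELECT id FROM weasyl WHERE hash <@ ($1, 3)")
        .bind(hash)
        .fetch_all(pool)
        .await
}
```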

View File

@@ -0,0 +1,86 @@
+DROP FUNCTION update_notify_furaffinity CASCADE;
+DROP FUNCTION update_notify_others CASCADE;
+
+CREATE TABLE hashes (
+    id SERIAL PRIMARY KEY,
+    hash BIGINT NOT NULL,
+    furaffinity_id INTEGER UNIQUE REFERENCES submission (id),
+    e621_id INTEGER UNIQUE REFERENCES e621 (id),
+    twitter_id BIGINT REFERENCES tweet (id)
+);
+
+CREATE FUNCTION hashes_insert_furaffinity()
+    RETURNS trigger
+    LANGUAGE plpgsql
+AS $$
+BEGIN
+    IF NEW.hash_int IS NOT NULL THEN
+        INSERT INTO hashes (furaffinity_id, hash) VALUES (NEW.id, NEW.hash_int);
+    END IF;
+
+    RETURN NEW;
+END;
+$$;
+
+CREATE FUNCTION hashes_insert_e621()
+    RETURNS trigger
+    LANGUAGE plpgsql
+AS $$
+BEGIN
+    IF NEW.hash IS NOT NULL THEN
+        IF exists(SELECT 1 FROM hashes WHERE hashes.e621_id = NEW.id) THEN
+            UPDATE hashes SET hash = NEW.hash WHERE e621_id = NEW.id;
+        ELSE
+            INSERT INTO hashes (e621_id, hash) VALUES (NEW.id, NEW.hash);
+        END IF;
+    END IF;
+
+    RETURN NEW;
+END;
+$$;
+
+CREATE FUNCTION hashes_insert_twitter()
+    RETURNS trigger
+    LANGUAGE plpgsql
+AS $$
+BEGIN
+    IF NEW.hash IS NOT NULL THEN
+        INSERT INTO hashes (twitter_id, hash) VALUES (NEW.tweet_id, NEW.hash);
+    END IF;
+
+    RETURN NEW;
+END;
+$$;
+
+CREATE TRIGGER hashes_insert_furaffinity AFTER INSERT ON submission
+    FOR EACH ROW EXECUTE PROCEDURE hashes_insert_furaffinity();
+CREATE TRIGGER hashes_insert_e621 AFTER INSERT ON e621
+    FOR EACH ROW EXECUTE PROCEDURE hashes_insert_e621();
+CREATE TRIGGER hashes_insert_twitter AFTER INSERT ON tweet_media
+    FOR EACH ROW EXECUTE PROCEDURE hashes_insert_twitter();
+
+INSERT INTO hashes (furaffinity_id, hash)
+    SELECT id, hash_int FROM submission WHERE hash_int IS NOT NULL
+    ON CONFLICT DO NOTHING;
+INSERT INTO hashes (e621_id, hash)
+    SELECT id, hash FROM e621 WHERE hash IS NOT NULL
+    ON CONFLICT DO NOTHING;
+INSERT INTO hashes (twitter_id, hash)
+    SELECT tweet_id, hash FROM tweet_media WHERE hash IS NOT NULL
+    ON CONFLICT DO NOTHING;
+
+CREATE INDEX ON hashes USING spgist (hash bktree_ops);
+
+CREATE FUNCTION hashes_notify_inserted()
+    RETURNS trigger
+    LANGUAGE plpgsql
+AS $$
+BEGIN
+    PERFORM pg_notify('fuzzysearch_hash_added'::text,
+        json_build_object('id', NEW.id, 'hash', NEW.hash)::text);
+
+    RETURN NEW;
+END;
+$$;
+
+CREATE TRIGGER hashes_notify_inserted AFTER INSERT ON hashes
+    FOR EACH ROW EXECUTE PROCEDURE hashes_notify_inserted();

View File

@@ -0,0 +1,44 @@
+DROP TABLE hashes;
+DROP FUNCTION hashes_notify_inserted CASCADE;
+DROP FUNCTION hashes_insert_furaffinity CASCADE;
+DROP FUNCTION hashes_insert_e621 CASCADE;
+DROP FUNCTION hashes_insert_twitter CASCADE;
+
+CREATE FUNCTION update_notify_furaffinity()
+    RETURNS trigger
+    LANGUAGE plpgsql
+AS $$
+BEGIN
+    IF NEW.hash_int IS NOT NULL THEN
+        PERFORM pg_notify('fuzzysearch_hash_added'::text,
+            json_build_object('hash', NEW.hash_int)::text);
+
+        RETURN NEW;
+    END IF;
+
+    RETURN NEW;
+END;
+$$;
+
+CREATE FUNCTION update_notify_others()
+    RETURNS trigger
+    LANGUAGE plpgsql
+AS $$
+BEGIN
+    IF NEW.hash IS NOT NULL THEN
+        PERFORM pg_notify('fuzzysearch_hash_added'::text,
+            json_build_object('hash', NEW.hash)::text);
+
+        RETURN NEW;
+    END IF;
+
+    RETURN NEW;
+END;
+$$;
+
+CREATE TRIGGER update_notify_furaffinity AFTER INSERT OR UPDATE ON submission
+    FOR EACH ROW EXECUTE PROCEDURE update_notify_furaffinity();
+CREATE TRIGGER update_notify_e621 AFTER INSERT OR UPDATE ON e621
+    FOR EACH ROW EXECUTE PROCEDURE update_notify_others();
+CREATE TRIGGER update_notify_twitter AFTER INSERT OR UPDATE ON tweet_media
+    FOR EACH ROW EXECUTE PROCEDURE update_notify_others();
+CREATE TRIGGER update_notify_weasyl AFTER INSERT OR UPDATE ON weasyl
+    FOR EACH ROW EXECUTE PROCEDURE update_notify_others();
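Worth noting: these triggers fire on INSERT OR UPDATE across four tables and broadcast only the hash, so the same hash can be announced more than once; that is why `load_updates` above checks `find_exact` before adding to the tree. The payload round-trips into the slimmed `HashRow` struct; an illustrative parse, with a made-up hash value:

```rust
#[derive(serde::Deserialize)]
struct HashRow {
    hash: i64,
}

fn parse_payload() {
    // Shape produced by json_build_object('hash', NEW.hash) above.
    let payload = r#"{"hash": 54933086221893639}"#;
    let row: HashRow = serde_json::from_str(payload).unwrap();
    assert_eq!(row.hash, 54933086221893639);
}
```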