Add complete database migrations.

Syfaro 2021-02-20 22:48:03 -05:00
parent 4ad577cee5
commit ce3793f9a3
17 changed files with 216 additions and 15 deletions

View File

@ -16,18 +16,6 @@
"nullable": [] "nullable": []
} }
}, },
"18a59439be1a5b6f03326ad14960fe1ada5cee94638711df99b471d86235be24": {
"query": "INSERT INTO WEASYL (id) VALUES ($1)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": []
}
},
"364c5c10ad748d1822c3e909aca601993f0ddb7690368a82ae467b3b0950478e": { "364c5c10ad748d1822c3e909aca601993f0ddb7690368a82ae467b3b0950478e": {
"query": "INSERT INTO WEASYL (id, data) VALUES ($1, $2)", "query": "INSERT INTO WEASYL (id, data) VALUES ($1, $2)",
"describe": { "describe": {

View File

@ -130,12 +130,12 @@
        {
          "ordinal": 4,
          "name": "name",
-         "type_info": "Varchar"
+         "type_info": "Text"
        },
        {
          "ordinal": 5,
          "name": "owner_email",
-         "type_info": "Varchar"
+         "type_info": "Text"
        }
      ],
      "parameters": {

View File

@ -1 +0,0 @@
CREATE TABLE weasyl (id SERIAL PRIMARY KEY, hash BIGINT, sha256 BYTEA, file_size INT, data JSONB);

View File

@ -0,0 +1 @@
DROP EXTENSION bktree;

View File

@ -0,0 +1 @@
CREATE EXTENSION bktree;

View File

@ -0,0 +1,4 @@
DROP TABLE tag_to_post;
DROP TABLE tag;
DROP TABLE submission;
DROP TABLE artist;

View File

@ -0,0 +1,42 @@
CREATE TABLE artist (
    id SERIAL PRIMARY KEY,
    name TEXT UNIQUE NOT NULL
);

CREATE TABLE submission (
    id SERIAL PRIMARY KEY,
    artist_id INTEGER REFERENCES artist (id),
    hash BYTEA,
    hash_int BIGINT,
    url TEXT,
    filename TEXT,
    rating CHAR(1),
    posted_at TIMESTAMP WITH TIME ZONE,
    description TEXT,
    file_id INTEGER,
    file_size INTEGER,
    file_sha256 BYTEA,
    imported BOOLEAN DEFAULT false,
    removed BOOLEAN,
    updated_at TIMESTAMP WITH TIME ZONE
);

CREATE INDEX ON submission (file_id);
CREATE INDEX ON submission (imported);
CREATE INDEX ON submission (posted_at);
CREATE INDEX ON submission (artist_id);
CREATE INDEX ON submission (file_sha256) WHERE file_sha256 IS NOT NULL;
CREATE INDEX ON submission (lower(url));
CREATE INDEX ON submission (lower(filename));

CREATE TABLE tag (
    id SERIAL PRIMARY KEY,
    name TEXT UNIQUE NOT NULL
);

CREATE TABLE tag_to_post (
    tag_id INTEGER NOT NULL REFERENCES tag (id),
    post_id INTEGER NOT NULL REFERENCES submission (id),
    PRIMARY KEY (tag_id, post_id)
);
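
For context only (not part of the commit): lower(url) and lower(filename) are expression indexes, so they serve a query only when it repeats the same lower(...) wrapping, and tag_to_post is the join table between submission and tag. A rough sketch of the lookups this schema appears aimed at, with placeholder filename and tag values:

-- Hypothetical case-insensitive filename lookup that can use the expression index.
SELECT id, url FROM submission WHERE lower(filename) = lower('Example_File.png');

-- Hypothetical tag search walking the tag_to_post join table.
SELECT s.id, s.posted_at
FROM submission s
JOIN tag_to_post ttp ON ttp.post_id = s.id
JOIN tag t ON t.id = ttp.tag_id
WHERE t.name = 'example_tag';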

View File

@ -0,0 +1 @@
DROP TABLE e621;

View File

@ -0,0 +1,9 @@
CREATE TABLE e621 (
    id INTEGER PRIMARY KEY,
    hash BIGINT,
    data JSONB,
    sha256 BYTEA,
    hash_error TEXT
);

CREATE INDEX ON e621 (sha256);
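
As an aside (not in the commit): the sha256 index exists so a file checksum can be mapped back to an e621 post. A minimal sketch of that lookup, with the digest passed as a bind parameter:

-- Hypothetical reverse lookup by file checksum; $1 would be the 32-byte digest.
SELECT id, hash FROM e621 WHERE sha256 = $1;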

View File

@ -0,0 +1 @@
DROP TABLE weasyl;

View File

@ -0,0 +1,9 @@
CREATE TABLE weasyl (
    id INTEGER PRIMARY KEY,
    hash BIGINT,
    data JSONB,
    sha256 BYTEA,
    file_size INTEGER
);

CREATE INDEX ON weasyl (sha256);

View File

@ -0,0 +1,3 @@
DROP TABLE tweet_media;
DROP TABLE tweet;
DROP TABLE twitter_user;

View File

@ -0,0 +1,30 @@
CREATE TABLE twitter_user (
    twitter_id BIGINT PRIMARY KEY,
    approved BOOLEAN NOT NULL DEFAULT false,
    data JSONB,
    last_update TIMESTAMP WITHOUT TIME ZONE,
    max_id BIGINT,
    completed_back BOOLEAN NOT NULL DEFAULT false,
    min_id BIGINT
);

CREATE INDEX ON twitter_user (last_update);
CREATE INDEX ON twitter_user (lower(data->>'screen_name'));
CREATE INDEX ON twitter_user (min_id);
CREATE INDEX ON twitter_user (twitter_id, approved);
CREATE INDEX ON twitter_user (((data->'protected')::boolean));

CREATE TABLE tweet (
    id BIGINT PRIMARY KEY,
    twitter_user_id BIGINT NOT NULL REFERENCES twitter_user (twitter_id),
    data JSONB
);

CREATE TABLE tweet_media (
    media_id BIGINT NOT NULL,
    tweet_id BIGINT NOT NULL REFERENCES tweet (id),
    hash BIGINT,
    url TEXT,
    PRIMARY KEY (media_id, tweet_id)
);
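
For context (not part of the commit): twitter_user keeps the raw API object in data, and the lower(data->>'screen_name') expression index makes case-insensitive handle lookups cheap as long as a query uses the exact same expression. A sketch with a placeholder handle:

-- Hypothetical lookup by Twitter handle that matches the expression index above.
SELECT twitter_id, approved
FROM twitter_user
WHERE lower(data->>'screen_name') = lower('SomeArtist');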

View File

@ -0,0 +1 @@
DROP TABLE hashes;

View File

@ -0,0 +1,83 @@
CREATE TABLE hashes (
    id SERIAL PRIMARY KEY,
    hash BIGINT NOT NULL,
    furaffinity_id INTEGER UNIQUE REFERENCES submission (id),
    e621_id INTEGER UNIQUE REFERENCES e621 (id),
    twitter_id BIGINT REFERENCES tweet (id)
);

CREATE FUNCTION hashes_insert_furaffinity()
RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
    IF NEW.hash_int IS NOT NULL THEN
        INSERT INTO hashes (furaffinity_id, hash) VALUES (NEW.id, NEW.hash_int);
    END IF;

    RETURN NEW;
END;
$$;
CREATE FUNCTION hashes_insert_e621()
RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
    IF NEW.hash IS NOT NULL THEN
        IF EXISTS(SELECT 1 FROM hashes WHERE hashes.e621_id = NEW.id) THEN
            UPDATE hashes SET hash = NEW.hash WHERE e621_id = NEW.id;
        ELSE
            INSERT INTO hashes (e621_id, hash) VALUES (NEW.id, NEW.hash);
        END IF;
    END IF;

    RETURN NEW;
END;
$$;
CREATE FUNCTION hashes_insert_twitter()
RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
    IF NEW.hash IS NOT NULL THEN
        INSERT INTO hashes (twitter_id, hash) VALUES (NEW.tweet_id, NEW.hash);
    END IF;

    RETURN NEW;
END;
$$;

CREATE TRIGGER hashes_insert_furaffinity AFTER INSERT ON submission
    FOR EACH ROW EXECUTE PROCEDURE hashes_insert_furaffinity();
CREATE TRIGGER hashes_insert_e621 AFTER INSERT ON e621
    FOR EACH ROW EXECUTE PROCEDURE hashes_insert_e621();
CREATE TRIGGER hashes_insert_twitter AFTER INSERT ON tweet_media
    FOR EACH ROW EXECUTE PROCEDURE hashes_insert_twitter();

INSERT INTO hashes (furaffinity_id, hash)
    SELECT id, hash_int FROM submission WHERE hash_int IS NOT NULL
    ON CONFLICT DO NOTHING;
INSERT INTO hashes (e621_id, hash)
    SELECT id, hash FROM e621 WHERE hash IS NOT NULL
    ON CONFLICT DO NOTHING;
INSERT INTO hashes (twitter_id, hash)
    SELECT tweet_id, hash FROM tweet_media WHERE hash IS NOT NULL
    ON CONFLICT DO NOTHING;
CREATE INDEX ON hashes USING spgist (hash bktree_ops);
CREATE FUNCTION hashes_notify_inserted()
RETURNS trigger
LANGUAGE plpgsql
AS $$
BEGIN
    PERFORM pg_notify('fuzzysearch_hash_added'::text,
        json_build_object('id', NEW.id, 'hash', NEW.hash)::text);

    RETURN NEW;
END;
$$;

CREATE TRIGGER hashes_notify_inserted AFTER INSERT ON hashes
    FOR EACH ROW EXECUTE PROCEDURE hashes_notify_inserted();
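
Two usage notes, neither taken from this commit: the SP-GiST index with bktree_ops is what makes hamming-distance searches over these 64-bit hashes indexable, and hashes_notify_inserted pushes each new row onto the fuzzysearch_hash_added channel. A sketch of how a consumer might use both, assuming the bktree extension's <@ (value, max_distance) operator (check the extension's documentation for the exact operator it installs):

-- Hypothetical search for hashes within hamming distance 3 of a query hash ($1).
SELECT id, hash, furaffinity_id, e621_id, twitter_id
FROM hashes
WHERE hash <@ ($1, 3);

-- Hypothetical subscriber; payloads look like {"id": 1, "hash": 1234}.
LISTEN fuzzysearch_hash_added;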

View File

@ -0,0 +1,3 @@
DROP TABLE rate_limit;
DROP TABLE api_key;
DROP TABLE account;

View File

@ -0,0 +1,26 @@
CREATE TABLE account (
    id SERIAL PRIMARY KEY,
    email TEXT UNIQUE NOT NULL,
    password TEXT NOT NULL,
    email_verifier TEXT
);

CREATE TABLE api_key (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES account (id),
    name TEXT,
    key TEXT UNIQUE NOT NULL,
    name_limit SMALLINT NOT NULL,
    image_limit SMALLINT NOT NULL,
    hash_limit SMALLINT NOT NULL
);

CREATE TABLE rate_limit (
    api_key_id INTEGER NOT NULL REFERENCES api_key (id),
    time_window BIGINT NOT NULL,
    group_name TEXT NOT NULL,
    count SMALLINT NOT NULL DEFAULT 0,

    CONSTRAINT unique_window
        PRIMARY KEY (api_key_id, time_window, group_name)
);
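
rate_limit keys its counters by (api_key_id, time_window, group_name), so every API key gets an independent count per action group per window. One way an application might consume a unit atomically, sketched here with an assumed one-minute window and a placeholder 'image' group (neither is specified by this commit):

-- Hypothetical rate-limit bump: insert the window's first hit or increment it.
INSERT INTO rate_limit (api_key_id, time_window, group_name, count)
VALUES ($1, extract(epoch FROM date_trunc('minute', now()))::bigint, 'image', 1)
ON CONFLICT ON CONSTRAINT unique_window
    DO UPDATE SET count = rate_limit.count + 1
RETURNING count;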