Fix Duplicates (#199)
* Fix Duplicates

* Version
iPromKnight authored Apr 2, 2024
1 parent 79e0a0f commit 3c8ffd5
Showing 4 changed files with 61 additions and 16 deletions.
14 changes: 7 additions & 7 deletions deployment/docker/docker-compose.yaml
@@ -94,7 +94,7 @@ services:
condition: service_healthy
env_file: stack.env
hostname: knightcrawler-addon
image: gabisonfire/knightcrawler-addon:2.0.20
image: gabisonfire/knightcrawler-addon:2.0.21
labels:
logging: promtail
networks:
@@ -117,7 +117,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-consumer:2.0.20
image: gabisonfire/knightcrawler-consumer:2.0.21
labels:
logging: promtail
networks:
@@ -138,7 +138,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-debrid-collector:2.0.20
image: gabisonfire/knightcrawler-debrid-collector:2.0.21
labels:
logging: promtail
networks:
@@ -152,7 +152,7 @@ services:
migrator:
condition: service_completed_successfully
env_file: stack.env
image: gabisonfire/knightcrawler-metadata:2.0.20
image: gabisonfire/knightcrawler-metadata:2.0.21
networks:
- knightcrawler-network
restart: "no"
@@ -163,7 +163,7 @@ services:
postgres:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-migrator:2.0.20
image: gabisonfire/knightcrawler-migrator:2.0.21
networks:
- knightcrawler-network
restart: "no"
@@ -182,7 +182,7 @@ services:
redis:
condition: service_healthy
env_file: stack.env
image: gabisonfire/knightcrawler-producer:2.0.20
image: gabisonfire/knightcrawler-producer:2.0.21
labels:
logging: promtail
networks:
@@ -207,7 +207,7 @@ services:
deploy:
replicas: ${QBIT_REPLICAS:-0}
env_file: stack.env
image: gabisonfire/knightcrawler-qbit-collector:2.0.20
image: gabisonfire/knightcrawler-qbit-collector:2.0.21
labels:
logging: promtail
networks:
14 changes: 7 additions & 7 deletions deployment/docker/src/components/knightcrawler.yaml
@@ -20,7 +20,7 @@ x-depends: &knightcrawler-app-depends

services:
metadata:
image: gabisonfire/knightcrawler-metadata:2.0.20
image: gabisonfire/knightcrawler-metadata:2.0.21
env_file: ../../.env
networks:
- knightcrawler-network
@@ -30,7 +30,7 @@ services:
condition: service_completed_successfully

migrator:
image: gabisonfire/knightcrawler-migrator:2.0.20
image: gabisonfire/knightcrawler-migrator:2.0.21
env_file: ../../.env
networks:
- knightcrawler-network
@@ -40,30 +40,30 @@ services:
condition: service_healthy

addon:
image: gabisonfire/knightcrawler-addon:2.0.20
image: gabisonfire/knightcrawler-addon:2.0.21
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
hostname: knightcrawler-addon
ports:
- "7000:7000"

consumer:
image: gabisonfire/knightcrawler-consumer:2.0.20
image: gabisonfire/knightcrawler-consumer:2.0.21
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped

debridcollector:
image: gabisonfire/knightcrawler-debrid-collector:2.0.20
image: gabisonfire/knightcrawler-debrid-collector:2.0.21
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped

producer:
image: gabisonfire/knightcrawler-producer:2.0.20
image: gabisonfire/knightcrawler-producer:2.0.21
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped

qbitcollector:
image: gabisonfire/knightcrawler-qbit-collector:2.0.20
image: gabisonfire/knightcrawler-qbit-collector:2.0.21
<<: [*knightcrawler-app, *knightcrawler-app-depends]
restart: unless-stopped
depends_on:
@@ -0,0 +1,43 @@
-- Drop Duplicate Files in Files Table
DELETE FROM public.files
WHERE id NOT IN (
SELECT MAX(id)
FROM public.files
GROUP BY "infoHash", "fileIndex"
);

-- Add Index to files table
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_constraint
WHERE conname = 'files_unique_infohash_fileindex'
) THEN
ALTER TABLE public.files
ADD CONSTRAINT files_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
END IF;
END $$;


-- Drop Duplicate subtitles in Subtitles Table
DELETE FROM public.subtitles
WHERE id NOT IN (
SELECT MAX(id)
FROM public.subtitles
GROUP BY "infoHash", "fileIndex"
);

-- Add Index to subtitles table
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_constraint
WHERE conname = 'subtitles_unique_infohash_fileindex'
) THEN
ALTER TABLE public.subtitles
ADD CONSTRAINT subtitles_unique_infohash_fileindex UNIQUE ("infoHash", "fileIndex");
END IF;
END $$;

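As a sanity check (not part of the commit), a query of the following shape should return no rows once the DELETE statements above have run, confirming that each ("infoHash", "fileIndex") pair is unique before the constraints are added:

-- Illustrative check only: list any ("infoHash", "fileIndex") pairs that are still duplicated.
SELECT "infoHash", "fileIndex", COUNT(*) AS copies
FROM public.files
GROUP BY "infoHash", "fileIndex"
HAVING COUNT(*) > 1;

The same query against public.subtitles verifies the second cleanup.
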
6 changes: 4 additions & 2 deletions src/shared/Dapper/DapperDataStorage.cs
@@ -152,7 +152,8 @@ public Task InsertFiles(IEnumerable<TorrentFile> files, CancellationToken cancel
INSERT INTO files
("infoHash", "fileIndex", title, "size", "imdbId", "imdbSeason", "imdbEpisode", "kitsuId", "kitsuEpisode", "createdAt", "updatedAt")
VALUES
(@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now());
(@InfoHash, @FileIndex, @Title, @Size, @ImdbId, @ImdbSeason, @ImdbEpisode, @KitsuId, @KitsuEpisode, Now(), Now())
ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
""";

await connection.ExecuteAsync(query, files);
@@ -167,7 +168,8 @@ public Task InsertSubtitles(IEnumerable<SubtitleFile> subtitles, CancellationTok
INSERT INTO subtitles
("infoHash", "fileIndex", "fileId", "title")
VALUES
(@InfoHash, @FileIndex, @FileId, @Title);
(@InfoHash, @FileIndex, @FileId, @Title)
ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;
""";

await connection.ExecuteAsync(query, subtitles);
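
Both ON CONFLICT clauses rely on the unique constraints created by the migration above: an insert that collides on ("infoHash", "fileIndex") is now skipped silently instead of failing with a duplicate-key error. A minimal illustration, using made-up values rather than anything from the commit:

-- Hypothetical example: running this statement twice leaves a single row;
-- the second run inserts nothing because of ON CONFLICT ... DO NOTHING.
INSERT INTO files
    ("infoHash", "fileIndex", title, "size", "imdbId", "imdbSeason", "imdbEpisode", "kitsuId", "kitsuEpisode", "createdAt", "updatedAt")
VALUES
    ('0123456789abcdef0123456789abcdef01234567', 0, 'Example.S01E01.mkv', 123456789, 'tt0000001', 1, 1, NULL, NULL, Now(), Now())
ON CONFLICT ("infoHash", "fileIndex") DO NOTHING;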
