From 7b96dd5cf9de4117c7303fb73cb5a2e8f3f5959c Mon Sep 17 00:00:00 2001
From: mykola2312 <49044616+mykola2312@users.noreply.github.com>
Date: Mon, 22 Apr 2024 21:55:17 +0300
Subject: [PATCH] add new tables to sql migration, begin working on more
 proper relations for channels and crawls

---
 .../mykola2312/mptv/config/SourceItem.java   | 10 ++++++-
 .../com/mykola2312/mptv/crawler/Crawler.java | 26 ++++++++++++++--
 .../resources/db/migration/V001.01__init.sql | 30 +++++++++++++++++--
 3 files changed, 60 insertions(+), 6 deletions(-)

diff --git a/src/main/java/com/mykola2312/mptv/config/SourceItem.java b/src/main/java/com/mykola2312/mptv/config/SourceItem.java
index d5f41e7..87149ed 100644
--- a/src/main/java/com/mykola2312/mptv/config/SourceItem.java
+++ b/src/main/java/com/mykola2312/mptv/config/SourceItem.java
@@ -10,7 +10,15 @@ public class SourceItem {
         M3U,
 
         @JsonProperty("m3u-local")
-        M3U_LOCAL
+        M3U_LOCAL;
+
+        public String getSqlName() {
+            switch (this) {
+                case M3U: return "m3u";
+                case M3U_LOCAL: return "m3u-local";
+                default: throw new RuntimeException("no sql alias for " + this.toString());
+            }
+        }
     }
 
     @NonNull
diff --git a/src/main/java/com/mykola2312/mptv/crawler/Crawler.java b/src/main/java/com/mykola2312/mptv/crawler/Crawler.java
index 052a1ef..fe3a76c 100644
--- a/src/main/java/com/mykola2312/mptv/crawler/Crawler.java
+++ b/src/main/java/com/mykola2312/mptv/crawler/Crawler.java
@@ -16,6 +16,7 @@ import org.jooq.exception.NoDataFoundException;
 import org.jooq.impl.*;
 import static com.mykola2312.mptv.tables.Category.*;
 import static com.mykola2312.mptv.tables.Channel.*;
+import static com.mykola2312.mptv.tables.Source.*;
 
 import com.mykola2312.mptv.config.SourceItem;
 import com.mykola2312.mptv.parser.M3U;
@@ -23,6 +24,7 @@ import com.mykola2312.mptv.parser.M3UException;
 import com.mykola2312.mptv.parser.M3UParser;
 import com.mykola2312.mptv.db.DB;
 import com.mykola2312.mptv.tables.records.ChannelRecord;
+import com.mykola2312.mptv.tables.records.SourceRecord;
 
 public class Crawler {
     private static final Logger logger = Logger.getLogger(Crawler.class);
@@ -33,7 +35,25 @@ public class Crawler {
         this.sources = sources;
     }
 
-    private static Integer ensureRootCategory(String rootName) {
+    public void updateSources(List<SourceItem> sourceItems) {
+        ArrayList<UpdatableRecord<SourceRecord>> sources = new ArrayList<>();
+        for (SourceItem item : sourceItems) {
+            UpdatableRecord<SourceRecord> source = new UpdatableRecordImpl<>(SOURCE);
+            source.set(SOURCE.TYPE, item.type.getSqlName());
+            source.set(SOURCE.ROOT_NAME, item.rootCategory);
+            source.set(SOURCE.URL, item.url);
+            source.set(SOURCE.PATH, item.path);
+            source.set(SOURCE.COOKIES, item.cookies);
+
+            sources.add(source);
+        }
+
+        DSL.using(DB.CONFIG)
+            .batchMerge(sources)
+            .execute();
+    }
+
+    private Integer ensureRootCategory(String rootName) {
         try {
             return DSL.using(DB.CONFIG)
                 .select(CATEGORY.ID)
@@ -50,7 +70,7 @@
         }
     }
 
-    public static void loadAll(ArrayList<M3U> items, String rootName) {
+    public void updateAllChannels(ArrayList<M3U> items, String rootName) {
         Integer rootCategoryId = ensureRootCategory(rootName);
         HashMap<String, Integer> categories = new HashMap<>();
         // collect all groups, find or create them, cache their ids
@@ -116,7 +136,7 @@
                 String m3uData = Files.readString(Paths.get(source.path), StandardCharsets.UTF_8);
                 ArrayList<M3U> m3u = M3UParser.parse(m3uData);
 
-                loadAll(m3u, source.rootCategory);
+                updateAllChannels(m3u, source.rootCategory);
             } catch (IOException e) {
                 logger.error(e);
                 logger.error(String.format("failed to read local m3u file: %s", e.getMessage()));
diff --git a/src/main/resources/db/migration/V001.01__init.sql b/src/main/resources/db/migration/V001.01__init.sql
index 2f814de..8484bde 100644
--- a/src/main/resources/db/migration/V001.01__init.sql
+++ b/src/main/resources/db/migration/V001.01__init.sql
@@ -1,3 +1,19 @@
+CREATE TABLE source (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    type TEXT NOT NULL,
+    root_name TEXT NOT NULL,
+    url TEXT,
+    path TEXT,
+    cookies TEXT
+);
+
+CREATE UNIQUE INDEX idx_source_url_path ON source(url,path);
+
+CREATE TABLE crawl (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    crawled_at INTEGER NOT NULL
+);
+
 CREATE TABLE category (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     title TEXT NOT NULL
@@ -11,8 +27,18 @@ CREATE TABLE channel (
     title TEXT NOT NULL,
     url TEXT NOT NULL,
     logo TEXT,
+    crawl INTEGER NOT NULL,
 
-    FOREIGN KEY (category) REFERENCES category(id)
+    FOREIGN KEY (category) REFERENCES category(id),
+    FOREIGN KEY (crawl) REFERENCES crawl(id)
 );
 
-CREATE UNIQUE INDEX idx_channel_category_title ON channel(category,title);
\ No newline at end of file
+CREATE UNIQUE INDEX idx_channel_category_title ON channel(category,title);
+
+CREATE TABLE task (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    name TEXT NOT NULL,
+    last_time INTEGER NOT NULL
+);
+
+CREATE UNIQUE INDEX idx_task_name ON task(name);
\ No newline at end of file
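
Usage sketch (illustrative, not part of the patch): one way the new updateSources
method might be wired in before a crawl runs. Only Crawler, SourceItem, and
updateSources(List<SourceItem>) come from the patch; CrawlerBootstrap, the
constructor argument type, and the placeholder list are assumptions.

    // CrawlerBootstrap.java -- hypothetical entry point, not in this patch
    import java.util.List;

    import com.mykola2312.mptv.config.SourceItem;
    import com.mykola2312.mptv.crawler.Crawler;

    public class CrawlerBootstrap {
        public static void main(String[] args) {
            // assumption: the JSON config (see @JsonProperty in SourceItem) has
            // already been deserialized into a source list somewhere upstream
            List<SourceItem> sourceItems = List.of(); // placeholder for real config

            // assumption: Crawler's constructor takes the source list, as the
            // `this.sources = sources;` assignment in the patch suggests
            Crawler crawler = new Crawler(sourceItems);

            // upsert the configured sources first, so that later crawl and
            // channel rows have source rows to reference
            crawler.updateSources(sourceItems);
        }
    }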