add new tables to sql migration, begin working on more proper relations for channels and crawls

This commit is contained in:
mykola2312 2024-04-22 21:55:17 +03:00
parent 845d15fc3f
commit 7b96dd5cf9
3 changed files with 60 additions and 6 deletions

View file

@@ -10,7 +10,15 @@ public class SourceItem {
M3U,
@JsonProperty("m3u-local")
M3U_LOCAL
M3U_LOCAL;
/**
 * Returns the identifier stored in the {@code source.type} column for this
 * source kind. Values mirror the JSON aliases declared via
 * {@code @JsonProperty} on the constants ("m3u", "m3u-local").
 *
 * @return the SQL alias for this enum constant
 * @throws IllegalStateException if a constant has no SQL alias mapped —
 *         a programming error indicating this switch is out of date
 */
public String getSqlName() {
    switch (this) {
        case M3U: return "m3u";
        case M3U_LOCAL: return "m3u-local";
        // IllegalStateException (still a RuntimeException) is the idiomatic
        // type for "enum constant not handled" programming errors.
        default: throw new IllegalStateException("no sql alias for " + this.toString());
    }
}
}
@NonNull

View file

@@ -16,6 +16,7 @@ import org.jooq.exception.NoDataFoundException;
import org.jooq.impl.*;
import static com.mykola2312.mptv.tables.Category.*;
import static com.mykola2312.mptv.tables.Channel.*;
import static com.mykola2312.mptv.tables.Source.*;
import com.mykola2312.mptv.config.SourceItem;
import com.mykola2312.mptv.parser.M3U;
@@ -23,6 +24,7 @@ import com.mykola2312.mptv.parser.M3UException;
import com.mykola2312.mptv.parser.M3UParser;
import com.mykola2312.mptv.db.DB;
import com.mykola2312.mptv.tables.records.ChannelRecord;
import com.mykola2312.mptv.tables.records.SourceRecord;
public class Crawler {
private static final Logger logger = Logger.getLogger(Crawler.class);
@@ -33,7 +35,25 @@ public class Crawler {
this.sources = sources;
}
private static Integer ensureRootCategory(String rootName) {
/**
 * Persists the configured source items into the SOURCE table via a batch
 * merge, so that crawls can later reference sources by id.
 *
 * @param sourceItems sources read from the configuration; one record is
 *                    merged per item
 */
public void updateSources(List<SourceItem> sourceItems) {
    // Renamed from "sources": the original local shadowed the instance
    // field assigned in the constructor (this.sources), which is error-prone.
    // Declared as List (program to the interface) and presized.
    List<UpdatableRecord<SourceRecord>> records = new ArrayList<>(sourceItems.size());
    for (SourceItem item : sourceItems) {
        UpdatableRecord<SourceRecord> record = new UpdatableRecordImpl<>(SOURCE);
        record.set(SOURCE.TYPE, item.type.getSqlName());
        record.set(SOURCE.ROOT_NAME, item.rootCategory);
        record.set(SOURCE.URL, item.url);
        record.set(SOURCE.PATH, item.path);
        record.set(SOURCE.COOKIES, item.cookies);
        records.add(record);
    }
    // NOTE(review): batchMerge matches records on the primary key; these
    // records never set SOURCE.ID, so confirm this actually upserts against
    // the idx_source_url_path unique key rather than inserting duplicates.
    DSL.using(DB.CONFIG)
        .batchMerge(records)
        .execute();
}
private Integer ensureRootCategory(String rootName) {
try {
return DSL.using(DB.CONFIG)
.select(CATEGORY.ID)
@@ -50,7 +70,7 @@ public class Crawler {
}
}
public static void loadAll(ArrayList<M3U> items, String rootName) {
public void updateAllChannels(ArrayList<M3U> items, String rootName) {
Integer rootCategoryId = ensureRootCategory(rootName);
HashMap<String, Integer> categories = new HashMap<>();
// collect all groups, find or create them, cache their ids
@@ -116,7 +136,7 @@ public class Crawler {
String m3uData = Files.readString(Paths.get(source.path), StandardCharsets.UTF_8);
ArrayList<M3U> m3u = M3UParser.parse(m3uData);
loadAll(m3u, source.rootCategory);
updateAllChannels(m3u, source.rootCategory);
} catch (IOException e) {
logger.error(e);
logger.error(String.format("failed to read local m3u file: %s", e.getMessage()));

View file

@@ -1,3 +1,19 @@
-- Crawl sources as defined in configuration. "type" holds the SQL alias of
-- the source kind ("m3u", "m3u-local"); "url" or "path" is populated
-- depending on whether the source is remote or a local file.
CREATE TABLE source (
id INTEGER PRIMARY KEY AUTOINCREMENT,
type TEXT NOT NULL,
root_name TEXT NOT NULL,
url TEXT,
path TEXT,
cookies TEXT
);
-- NOTE(review): in SQLite, NULLs compare distinct in unique indexes, so
-- multiple rows with url AND path both NULL would all be accepted — confirm
-- that is intended for this dedup key.
CREATE UNIQUE INDEX idx_source_url_path ON source(url,path);
-- One row per crawl run. crawled_at is an INTEGER timestamp — presumably
-- unix epoch seconds; confirm against the code that writes it.
CREATE TABLE crawl (
id INTEGER PRIMARY KEY AUTOINCREMENT,
crawled_at INTEGER NOT NULL
);
CREATE TABLE category (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT NOT NULL
@@ -11,8 +27,18 @@ CREATE TABLE channel (
title TEXT NOT NULL,
url TEXT NOT NULL,
logo TEXT,
crawl INTEGER NOT NULL,
FOREIGN KEY (category) REFERENCES category(id)
FOREIGN KEY (category) REFERENCES category(id),
FOREIGN KEY (crawl) REFERENCES crawl(id)
);
CREATE UNIQUE INDEX idx_channel_category_title ON channel(category,title);
CREATE UNIQUE INDEX idx_channel_category_title ON channel(category,title);
-- Named recurring tasks; last_time records when each task last ran
-- (INTEGER timestamp — unit not shown here, confirm with the scheduler code).
CREATE TABLE task (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
last_time INTEGER NOT NULL
);
-- Task names are looked up individually, hence the unique index.
CREATE UNIQUE INDEX idx_task_name ON task(name);