implement proper file tree parsing from torrent files

parent d98deec9e0
commit 5ce5f97924

2 changed files with 61 additions and 23 deletions
@@ -42,6 +42,13 @@ impl Value {
         }
     }
 
+    pub fn to_dict(&self) -> Option<&BTreeMap<Value, Value>> {
+        match self {
+            Value::Dict(dict) => Some(dict),
+            _ => None
+        }
+    }
+
     pub fn get_value(&self, key: &str) -> Option<&Value> {
         if let Value::Dict(dict) = self {
             dict.get(&Value::String(ByteString::String(key.to_owned())))
@@ -64,6 +71,20 @@ impl Value {
         }
     }
 
+    pub fn get_list(&self, key: &str) -> Option<&Vec<Value>> {
+        match self.get_value(key) {
+            Some(value) => value.to_list(),
+            None => None
+        }
+    }
+
+    pub fn get_dict(&self, key: &str) -> Option<&BTreeMap<Value, Value>> {
+        match self.get_value(key) {
+            Some(value) => value.to_dict(),
+            None => None
+        }
+    }
+
     pub fn get_string_list(&self, key: &str) -> Option<Vec<String>> {
         match self.get_value(key) {
             Some(value) => {
@@ -91,7 +112,6 @@ impl Value {
 
 #[derive(Debug)]
 pub enum ParseError {
-    WrongType,
     UtfError,
     ConvertError,
     NoTerminator
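Note on the hunks above: the new accessors (`to_dict` on the value itself, plus the key-based `get_list` and `get_dict`) complete the typed-lookup family on the bencode `Value` type, so call sites can drill into nested dictionaries without matching on variants by hand. A minimal sketch of how they chain, using only the API shown in this diff (the `torrent` binding is illustrative):

    // Count the entries of the v1 "files" list, if present.
    let file_count = torrent
        .get_value("info")                        // Option<&Value>
        .and_then(|info| info.get_list("files"))  // Option<&Vec<Value>>
        .map(|files| files.len())
        .unwrap_or(0);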
src/main.rs (62 changed lines)
@@ -23,36 +23,57 @@ struct Args {
     command: Command
 }
 
+#[derive(Debug)]
 enum TorrentLogicError {
     NoTorrentName,
     NoInfoDict,
-    FileValueIsNotList,
     NoPathList
 }
 
-fn get_torrent_files(torrent: &Value, hash: &str) -> Result<Vec<String>, TorrentLogicError> {
+fn get_torrent_files(torrent: &Value) -> Result<Vec<String>, TorrentLogicError> {
     let info = torrent.get_value("info")
         .ok_or(TorrentLogicError::NoInfoDict)?;
 
-    // TODO: when torrent has single file in "files" that means "name" of the torrent is dir name
     let mut files: Vec<String> = Vec::new();
-    if let Some(file_list_value) = info.get_value("files") {
-        let file_list = file_list_value.to_list()
-            .ok_or(TorrentLogicError::FileValueIsNotList)?;
-
-        for file_object in file_list {
-            let path_list = file_object.get_string_list("path")
+    if let Some(file_list_v1) = info.get_list("files") {
+        // multiple files v1
+        let root = info.get_string("name")
+            .ok_or(TorrentLogicError::NoTorrentName)?;
+        for file_object in file_list_v1 {
+            let mut path_list = file_object.get_string_list("path")
                 .ok_or(TorrentLogicError::NoPathList)?;
-            // we will join with unixy / path separator since I dont want to make clusterfuck tables
-            // for multi-dimensional file list nor it would be ran on windows
-            let file_name = path_list.join("/");
-            files.push(file_name);
+            path_list.insert(0, root.clone());
+            files.push(path_list.join("/"));
+        }
+    } else if let Some(file_dict_v2) = info.get_dict("file tree") {
+        // single file / multiple files v2
+        let root = info.get_string("name")
+            .ok_or(TorrentLogicError::NoTorrentName)?;
+
+        let files_v2: Vec<String> = file_dict_v2
+            .keys()
+            .into_iter()
+            .filter_map(|k| k.to_string())
+            .collect();
+        if files_v2.len() > 1 {
+            // multiple
+            for file in files_v2 {
+                let mut path = root.clone();
+                path.push_str("/");
+                path.push_str(&file);
+
+                files.push(path);
+            }
+        } else {
+            // single
+            files.push(files_v2.first().unwrap().clone());
         }
     } else {
-        // when we don't have "files" list in "info" block,
-        // that means "name" of torrent is the file name
-        files.push(torrent.get_string("name")
-            .ok_or(TorrentLogicError::NoTorrentName)?);
+        // single file v1
+        let single = info.get_string("name")
+            .ok_or(TorrentLogicError::NoTorrentName)?;
+        files.push(single);
     }
 
     Ok(files)
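One caveat on the v2 branch above: per BEP 52 the "file tree" value is a nested dictionary in which directory names map to sub-dictionaries and each file is marked by an inner empty-string key holding its length and pieces root, so collecting only the top-level `.keys()` will miss files inside subdirectories. A recursive walk is one possible follow-up; this sketch reuses the `Value` accessors from this commit but is an untested assumption, not part of the diff:

    // Hypothetical helper (not in this commit): flatten a BEP 52 "file tree"
    // into "root/dir/file" paths, recursing through nested dictionaries.
    fn walk_file_tree(tree: &BTreeMap<Value, Value>, prefix: &str, files: &mut Vec<String>) {
        for (key, value) in tree {
            // Assumes Value::to_string -> Option<String>, as used in the diff above.
            let Some(name) = key.to_string() else { continue };
            if let Some(subtree) = value.to_dict() {
                let path = format!("{}/{}", prefix, name);
                // An inner "" key marks a file entry; anything else is a directory.
                if subtree.keys().any(|k| k.to_string().as_deref() == Some("")) {
                    files.push(path);
                } else {
                    walk_file_tree(subtree, &path, files);
                }
            }
        }
    }

Called as `walk_file_tree(file_dict_v2, &root, &mut files)`, this could replace the `files_v2` flattening in the hunk above.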
@@ -147,19 +168,16 @@ fn index(db: Connection, path: &String) {
         let publisher_url = torrent.get_string("publisher-url");
 
         // get torrent files
-        // TODO: when torrent has single file in "files" that means "name" of the torrent is dir name
-        let files = match get_torrent_files(&torrent, &hash) {
+        let files = match get_torrent_files(&torrent) {
             Ok(files) => files,
             Err(e) => {
-                eprintln!("can't get file list for {}", hash);
+                eprintln!("can't get file list for {}: {:#?}", hash, e);
                 continue;
             }
         };
 
         dbg!(hash, name, destination, downloaded, uploaded, announce, comment, created_by, creation_date, publisher, publisher_url);
         dbg!(files);
-
-        break
     }
 }
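Worth noting how the two src/main.rs hunks connect: the richer log line only compiles because of the `#[derive(Debug)]` added to `TorrentLogicError` in the first hunk, since `{:#?}` pretty-prints through the `Debug` impl. A self-contained illustration (names are illustrative, not from the repo):

    #[derive(Debug)]
    enum TorrentLogicError { NoInfoDict }

    fn main() {
        let hash = "deadbeef";
        let e = TorrentLogicError::NoInfoDict;
        // {:#?} requires Debug; the derive above provides it.
        eprintln!("can't get file list for {}: {:#?}", hash, e);
    }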