// NOTE: for rusqlite, don't use plain BEGIN and COMMIT as rollback will never be called if an
// error occurs there.
use std::{
    collections::{HashMap, HashSet},
    fs,
    path::{Path, PathBuf},
    time::{SystemTime, UNIX_EPOCH},
};

use anyhow::{Context, Result, ensure};
use iced::{
    Alignment, Element,
    Length::Fill,
    Subscription, Task,
    widget::{
        Container, button, center, checkbox, column, container, horizontal_space, keyed_column,
        row, scrollable, text, text_input, tooltip,
    },
};
mod migrations;
/// One chart of a difficulty table: the chart's hash plus the level label it
/// is filed under. `Ord` derive enables sort+dedup in `fetch_table`.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct TableEntry {
    // 32-hex-char chart hash; how LR2 identifies a chart in song.db.
    md5: String,
    // Level label (e.g. "1", "?"); becomes the folder name in LR2.
    level: String,
}
/// Resolve `data_url` against `html_url`: absolute URLs pass through
/// unchanged, relative ones replace everything after the page URL's last '/'.
fn resolve_json_url(html_url: &str, data_url: &str) -> Result<String> {
    let is_absolute = ["https://", "http://"]
        .iter()
        .any(|scheme| data_url.starts_with(scheme));
    if is_absolute {
        return Ok(data_url.to_string());
    }
    let slash = html_url.rfind('/').context("No slash in URL")?;
    let prefix = &html_url[..=slash];
    Ok(format!("{prefix}{data_url}"))
}
/// Pull the (possibly relative) header URL out of the page's
/// `<meta name="bmstable" content="...">` tag.
fn extract_raw_header_url(html: &str) -> Result<&str> {
    const MARKER: &str = r#"<meta name="bmstable" content=""#;
    let start = html.find(MARKER).context("missing bmstable meta")? + MARKER.len();
    let tail = &html[start..];
    let end = tail.find('"').context("missing bmstable meta rhs")?;
    Ok(&tail[..end])
}
// Table as fetched from the internet.
#[derive(Clone, Debug)]
struct TableData {
    /// URL to the html page. That page contains header URL in its meta tags.
    /// Tables can be uniquely identified by this field. Other fields may get updated.
    web_url: String,
    // Display name of the table.
    name: String,
    // Short marker prefixed to level labels (e.g. in tags and folder titles).
    symbol: String,
    data_url: String, // Resolved URL
    // All charts of the table, filtered to valid md5s, sorted and deduplicated.
    entries: Vec<TableEntry>,
    // Preferred ordering of level labels, from the header's `level_order`.
    folder_order: Vec<String>,
    header_url: String, // Resolved URL
}
/// ID of the playlist in the database.
/// NOTE: ID must be preserved between database updates to avoid triggering folder reloading
/// related bugs in LR2.
//
// FIXME: lr2folder files must be preserved in the same way. It just happens that these don't
// appear/disappear in folder often. This case is not handled yet.
// Newtype over the SQLite `playlist_id` rowid.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct PlaylistId(pub usize);
// Additional data we associate with a given table.
#[derive(Clone, Debug)]
struct TableAddData {
    // Unix timestamp of the last successful fetch; `None` = never fetched.
    last_update: Option<UnixEpochTs>,
    // DB row id; only exposed for assertions in tests.
    #[cfg(test)]
    playlist_id: Option<PlaylistId>,
    /// Table symbol defined by the user.
    user_symbol: Option<String>,
    // UI edit buffers: `Some` while the corresponding inline editor is open.
    edited_symbol: Option<String>,
    edited_url: Option<String>,
}
/// A fetched table (`.0`) paired with the locally-tracked extra state (`.1`).
#[derive(Clone, Debug)]
struct Table(TableData, TableAddData);
/// Top-level iced application state.
struct App {
    /// LR2beta3/LR2files/Database/song.db
    lr2_db: PathBuf,
    // Folder where per-table .lr2folder files are written.
    playlists_folder: PathBuf,
    // When true, saving also rewrites the `tag` column of matching songs.
    write_tags_on_save: bool,
    // Open connection to `lr2_db`; `None` until a valid db is chosen.
    db: Option<rusqlite::Connection>,
    /// Urls
    // All tracked tables; kept sorted by name (see add_table/apply_tables).
    tables: Vec<Table>,
    // Contents of the "add table" URL text input.
    new_table_text: String,
    // Cached wall clock, refreshed via Message::Tick for the "ago" labels.
    now: UnixEpochTs,
    // Shared HTTP client used for all downloads.
    reqwest: reqwest::Client,
}
fn extract_levels(entries: &[TableEntry], folder_order: &[String]) -> Vec<String> {
let mut levels = entries
.iter()
.map(|e| e.level.clone())
.collect::<HashSet<String>>()
.into_iter()
.collect::<Vec<String>>();
let key_by_order = |e: &str| {
folder_order
.iter()
.position(|o| *o == *e)
.unwrap_or(usize::MAX)
};
let cmp_by_order =
|a: &String, b: &String| -> std::cmp::Ordering { key_by_order(a).cmp(&key_by_order(b)) };
levels.sort_by(|a, b| cmp_by_order(a, b).then_with(|| alphanumeric_sort::compare_str(a, b)));
levels
}
/// Download `url` as text. `file://` URLs are read from the local filesystem
/// (useful for tests and local tables).
///
/// NOTE(review): the file:// branch performs blocking I/O inside an async fn;
/// fine for small local files but would stall the executor on slow media.
async fn fetch_url(client: &reqwest::Client, url: &str) -> Result<String> {
    log::info!("getting url={url}");
    if let Some(url) = url.strip_prefix("file://") {
        std::fs::read_to_string(url).with_context(|| format!("failed to read file {url}"))
    } else {
        // 30s timeout per request so a dead mirror can't hang an update.
        let body = client
            .get(url)
            .timeout(std::time::Duration::from_secs(30))
            .send()
            .await
            .with_context(|| format!("failed to get url {url}"))?
            .text()
            .await
            .context("failed to decode downloaded data")?;
        Ok(body)
    }
}
/// Download and parse one difficulty table: web page -> header JSON -> data
/// JSON, producing a fully populated `Table` stamped with fetch time `now`.
async fn fetch_table(client: &reqwest::Client, web_url: String, now: UnixEpochTs) -> Result<Table> {
    // Helper for table headers that mix numeric and string level labels.
    #[derive(Debug, serde::Deserialize)]
    #[serde(untagged)]
    enum NumOrString {
        Num(i64),
        String(String),
    }
    // Shape of the table header JSON (only the fields we consume).
    #[derive(Debug, serde::Deserialize)]
    struct JsonTableHeader {
        name: String,
        symbol: String,
        /// Relative to current URL. For example:
        /// `https://example.com/table/header.json`
        /// `score.json` would refer to `https://example.com/table/score.json`.
        data_url: String,
        /// Aliased as `folder_order` in some tables. idc
        /// Dystopia uses a mix of numbers and strings.
        /// TODO: test with a mix of numbers and strings, e.g. [0, 1, 2, "?"]
        level_order: Option<Vec<NumOrString>>,
    }
    // Shape of one entry in the data JSON.
    #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, serde::Deserialize)]
    struct JsonTableDataEntry {
        // Some new tables omit 'md5' and only include 'sha256', especially for bmson files.
        md5: Option<String>,
        level: String,
    }
    let web_page = fetch_url(client, &web_url)
        .await
        .context("Failed to fetch table web page")?;
    // The page normally embeds the header URL in a meta tag; if extraction
    // fails, assume `web_url` already points at the header JSON itself.
    let (header, header_url) = {
        let (header, header_url) = match extract_raw_header_url(&web_page) {
            Ok(header_url) => {
                let header_url = resolve_json_url(&web_url, header_url)
                    .context("Failed to resolve header URL")?;
                let header = fetch_url(client, &header_url)
                    .await
                    .context("Failed to fetch header")?;
                (header, header_url)
            }
            Err(e) => {
                log::debug!(
                    "Failed to extract table header URL, this is fine if this URL, {web_url}, is already a header URL. Error: {e:?}"
                );
                (web_page, web_url.clone())
            }
        };
        let header: JsonTableHeader =
            serde_json::from_str(&header).context("Failed to parse header")?;
        (header, header_url)
    };
    let data_url = resolve_json_url(&header_url, &header.data_url)?;
    let entries = fetch_url(client, &data_url)
        .await
        .context("Failed to fetch data")?;
    let data = serde_json::from_str::<Vec<JsonTableDataEntry>>(&entries)
        .context("Failed to parse data")?;
    let orig_len = data.len();
    // Keep only entries carrying a well-formed 32-char md5.
    let mut data = data
        .into_iter()
        .filter_map(|t| {
            const MD5_LEN: usize = 32;
            // Normal 2, Gachimijoy, Mini-jack tables, all have empty entries.
            if t.md5.as_ref()?.len() == MD5_LEN {
                Some(TableEntry {
                    md5: t.md5?,
                    level: t.level,
                })
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    // Some tables have completely duplicated entries which is against our DB schema constraint.
    data.sort();
    data.dedup();
    log::debug!(
        "{}: parsed {} unique (out of {}) entries",
        web_url,
        data.len(),
        orig_len
    );
    Ok(Table(
        TableData {
            web_url,
            name: header.name,
            symbol: header.symbol,
            header_url,
            data_url,
            entries: data,
            // Normalize numeric level labels to their string form.
            folder_order: header
                .level_order
                .unwrap_or_default()
                .into_iter()
                .map(|v| match v {
                    NumOrString::Num(v) => v.to_string(),
                    NumOrString::String(v) => v,
                })
                .collect(),
        },
        TableAddData {
            last_update: Some(now),
            // FIXME: use previous values.
            #[cfg(test)]
            playlist_id: None,
            user_symbol: None,
            edited_symbol: None,
            edited_url: None,
        },
    ))
}
/// Read all (md5, folder) entries belonging to one stored playlist.
fn load_db_entries(
    conn: &rusqlite::Connection,
    playlist_id: PlaylistId,
) -> Result<Vec<TableEntry>> {
    let mut stmt = conn
        .prepare("SELECT md5, folder FROM lr2oxytabler_playlist_entry WHERE playlist_id = ?")
        .context("failed to prepare read existing playlist entries")?;
    let entries = stmt
        .query_map([playlist_id.0], |row| {
            Ok(TableEntry {
                md5: row.get(0)?,
                level: row.get(1)?,
            })
        })?
        .collect::<Result<Vec<_>, _>>()?;
    Ok(entries)
}
/// Load every stored table (including its entries) from the DB, sorted by name.
fn load_db_tables(conn: &rusqlite::Connection) -> Result<Vec<Table>> {
    let mut stmt = conn
        .prepare(
            "SELECT
playlist_id,
name,
symbol,
folder_order,
page_url,
header_url,
data_url,
last_update,
user_symbol
FROM
lr2oxytabler_playlist
ORDER BY
name",
        )
        .context("failed to prepare read existing playlists")?;
    let mut rows = stmt.query([])?;
    let mut out = Vec::<Table>::new();
    while let Some(row) = rows.next()? {
        // Column indices follow the SELECT above:
        // 0=playlist_id 1=name 2=symbol 3=folder_order 4=page_url
        // 5=header_url 6=data_url 7=last_update 8=user_symbol
        let playlist_id: usize = row.get(0)?;
        let folder_order: String = row.get(3)?;
        out.push(Table(
            TableData {
                web_url: row.get(4)?,
                name: row.get(1)?,
                symbol: row.get(2)?,
                data_url: row.get(6)?,
                entries: load_db_entries(conn, PlaylistId(playlist_id))
                    .context("failed to load playlist entries")?,
                // folder_order is persisted as a JSON array of strings.
                folder_order: serde_json::from_str(&folder_order)
                    .context("failed to parse folder_order")?,
                header_url: row.get(5)?,
            },
            TableAddData {
                last_update: row.get(7)?,
                #[cfg(test)]
                playlist_id: Some(PlaylistId(playlist_id)),
                user_symbol: row.get(8)?,
                edited_symbol: None,
                edited_url: None,
            },
        ));
    }
    Ok(out)
}
/// Cheap enumeration of stored tables: just their page URLs, no entries.
fn load_db_tables_fast(conn: &rusqlite::Connection) -> Result<Vec<String>> {
    let mut stmt = conn
        .prepare("SELECT page_url FROM lr2oxytabler_playlist")
        .context("failed to prepare SELECT page_url")?;
    let urls = stmt
        .query_map([], |row| row.get::<_, String>(0))?
        .collect::<Result<Vec<_>, _>>()?;
    Ok(urls)
}
/// Upsert one table (keyed by `page_url`) and replace all of its entries.
/// Returns the row's `playlist_id`, which the upsert keeps stable across
/// updates (see the note on `PlaylistId` about why that matters to LR2).
fn save_table(conn: &rusqlite::Connection, table: &Table) -> Result<PlaylistId> {
    let add_data = &table.1;
    let table = &table.0;
    // folder_order is persisted as a JSON array of strings.
    let folder_order: String = serde_json::to_string(&table.folder_order)
        .context("failed to format table folder order")?;
    // RETURNING hands back the id whether the row was inserted or updated.
    let id = conn
        .query_row(
            "INSERT INTO
lr2oxytabler_playlist (
name,
symbol,
folder_order,
page_url,
header_url,
data_url,
last_update,
user_symbol
)
VALUES
(?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)
ON CONFLICT (page_url) DO UPDATE
SET
name = ?1,
symbol = ?2,
folder_order = ?3,
header_url = ?5,
data_url = ?6,
last_update = ?7,
user_symbol = ?8 RETURNING playlist_id",
            rusqlite::params![
                &table.name,
                &table.symbol,
                &folder_order,
                &table.web_url,
                &table.header_url,
                &table.data_url,
                &add_data.last_update,
                &add_data.user_symbol,
            ],
            |row| row.get(0),
        )
        .with_context(|| format!("failed to insert playlist into db; {:?}", &table))?;
    // Entries are replaced wholesale: delete old rows, then insert the new set.
    conn.execute(
        "DELETE FROM lr2oxytabler_playlist_entry WHERE playlist_id = ?",
        rusqlite::params![id],
    )
    .with_context(|| format!("failed to delete old entries for playlist_id={id}"))?;
    let mut insert_entry = conn
        .prepare(
            "INSERT INTO lr2oxytabler_playlist_entry(playlist_id, md5, folder) VALUES (?, ?, ?)",
        )
        .context("failed to prepare statement")?;
    for entry in &table.entries {
        insert_entry
            .execute(rusqlite::params![id, &entry.md5, &entry.level])
            .with_context(|| format!("failed to insert playlist entry into db; {:?}", &entry))?;
    }
    Ok(PlaylistId(id))
}
// TODO(v0.1.0): remove after releasing
fn run_alpha_migrations(conn: &rusqlite::Connection) -> Result<()> {
if let Err(e) = conn.query_row(
"SELECT 1 FROM sqlite_master WHERE type = 'table' and name = '__lr2oxytable_migrations'",
[],
|row| row.get::<_, usize>(0),
) {
match e {
rusqlite::Error::QueryReturnedNoRows => return Ok(()), // didn't run the tabler before
_ => Err(e).context("")?,
};
};
conn.execute_batch("ALTER TABLE __lr2oxytable_migrations RENAME TO lr2oxytabler_migrations")?;
Ok(())
}
/// Check that a valid song.db was supplied and not some other random database file.
fn validate_song_db(conn: &rusqlite::Connection) -> Result<()> {
match conn.query_row(
"SELECT 1 FROM sqlite_master WHERE type = 'table' and name = 'song'",
[],
|row| row.get::<_, usize>(0),
) {
Ok(_) => Ok(()),
Err(rusqlite::Error::QueryReturnedNoRows) => {
anyhow::bail!("supplied database seems not to be an LR2 song.db")
}
Err(e) => Err(e).context("failed to check song.db validity"),
}
}
/// Apply all schema migrations in order. Presumably each migration runs at
/// most once, tracked by `migrations::maybe_apply_migration` — see `mod migrations`.
fn run_db_migrations(conn: &rusqlite::Connection) -> Result<()> {
    // Initial schema: playlist + entry tables and their lookup indexes.
    migrations::maybe_apply_migration(
        conn,
        &migrations::Migration {
            #[allow(clippy::unreadable_literal)]
            id: 20250101,
            description: "init",
            sql: "
CREATE TABLE IF NOT EXISTS lr2oxytable_playlist (
playlist_id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
name VARCHAR NOT NULL,
symbol VARCHAR NOT NULL,
folder_order JSONB NOT NULL,
page_url VARCHAR NOT NULL,
header_url VARCHAR NOT NULL,
data_url VARCHAR NOT NULL,
last_update BIGINT,
UNIQUE (page_url)
);
CREATE TABLE IF NOT EXISTS lr2oxytable_playlist_entry (
playlist_id INTEGER NOT NULL REFERENCES lr2oxytable_playlist (playlist_id) ON DELETE CASCADE,
md5 VARCHAR NOT NULL,
folder VARCHAR NOT NULL,
UNIQUE (md5, playlist_id, folder)
);
CREATE INDEX IF NOT EXISTS lr2oxytable_playlist_entry_folder ON lr2oxytable_playlist_entry (
playlist_id, folder
);
CREATE INDEX IF NOT EXISTS lr2oxytable_playlist_entry_md5 ON lr2oxytable_playlist_entry (
playlist_id, md5
);
",
        },
    )
    .context("db migration 'init' failed")?;
    // Adds the optional per-table symbol override chosen by the user.
    migrations::maybe_apply_migration(
        conn,
        &migrations::Migration {
            #[allow(clippy::unreadable_literal)]
            id: 20250517,
            description: "user symbol",
            // nullable
            sql: "ALTER TABLE lr2oxytable_playlist ADD COLUMN user_symbol VARCHAR;",
        },
    )
    .context("db migration 'user symbol' failed")?;
    // Project rename: lr2oxytable -> lr2oxytabler.
    migrations::maybe_apply_migration(
        conn,
        &migrations::Migration {
            #[allow(clippy::unreadable_literal)]
            id: 20250602,
            description: "rename to tabler",
            // NOTE: can't rename indexes in SQLite conveniently. Just ignore them.
            sql: "
ALTER TABLE lr2oxytable_playlist RENAME TO lr2oxytabler_playlist;
ALTER TABLE lr2oxytable_playlist_entry RENAME TO lr2oxytabler_playlist_entry;
",
        },
    )
    .context("db migration 'rename to tabler' failed")?;
    Ok(())
}
/// * `tables` - all tables to be present in DB. Those not in the list will be removed from db.
fn save_db(conn: &rusqlite::Connection, tables: &[Table]) -> Result<Vec<PlaylistId>> {
    // First pass: delete DB rows whose page_url no longer appears in `tables`.
    let existing = load_db_tables_fast(conn).context("table enumerating failed")?;
    for page_url in existing {
        if tables.iter().any(|new| new.0.web_url == page_url) {
            continue;
        }
        log::debug!("Deleting table from DB, page_url={page_url}");
        conn.execute(
            "DELETE FROM lr2oxytabler_playlist WHERE page_url = ?",
            rusqlite::params![page_url],
        )
        .with_context(|| format!("failed to delete playlist WHERE page_url={page_url}"))
        .context("table enumerating failed 2")?;
    }
    // Second pass: upsert every requested table, collecting the stable ids.
    tables
        .iter()
        .map(|table| save_table(conn, table).context("failed to upsert playlist"))
        .collect()
}
// Marker file proving a playlists folder is managed by this tool; checked by
// validate_good_playlists_folder before we delete anything in it.
const README_FILE_NAME: &str = "README-lr2oxytable.txt";
/// Regenerate all `.lr2folder` files under `playlists_folder`.
///
/// `tables` and `table_ids` are parallel slices (the DB id of each table).
/// The folder must be "ours" (empty or holding our README marker); existing
/// generated files are wiped first, then one file per table level is written.
fn write_lr2folder_files(
    playlists_folder: &Path,
    tables: &[Table],
    table_ids: &[PlaylistId],
) -> Result<()> {
    ensure!(
        tables.len() == table_ids.len(),
        "mismatching amount of tables and their IDs"
    );
    ensure!(
        !playlists_folder.as_os_str().is_empty(),
        "path to playlists folder must not be empty"
    );
    validate_good_playlists_folder(playlists_folder)?; // TOCTOU
    // Empty every table directory, refusing to touch anything that doesn't
    // look like it was generated by us (non-readme files, foreign extensions).
    for entry in std::fs::read_dir(playlists_folder).context("failed to read dir")? {
        let entry = entry.context("failed to stat")?;
        let path = entry.path();
        if path.is_file() {
            ensure!(
                path.file_name().is_some_and(|f| f == README_FILE_NAME),
                "Non-readme file found: {path:?}",
            );
            continue;
        }
        for file in fs::read_dir(&path).context("failed to read lr2folder dir")? {
            let path = file.context("failed to stat file")?.path();
            ensure!(
                path.extension().is_some_and(|ext| ext == "lr2folder"),
                "Non-.lr2folder file found in a level: {path:?}",
            );
            fs::remove_file(path).context("failed to remove file")?;
        }
        log::debug!("Emptied {path:?}");
    }
    // (Re)write the marker README so future runs know this folder is ours.
    {
        let mut path = PathBuf::new();
        path.push(playlists_folder);
        path.push(README_FILE_NAME);
        fs::write(
            &path,
            "This folder is now managed by lr2oxytable.
Do NOT put anything here you don't want removed by lr2oxytable.
If you've pressed F8 (song update) on a playlist in-game,
and now the playlist doesn't open, the easiest fix is to delete this folder altogether, then
save playlists again in lr2oxytable.
You may need to set song reload type in LR2's launcher to 'Manual reload'.",
        )?;
    }
    for (Table(table, add_data), playlist_id) in tables.iter().zip(table_ids) {
        ensure!(!table.name.is_empty(), "table name must not be empty");
        ensure!(
            !table.name.contains('\n'),
            "table name must not contain new-lines"
        );
        // User override wins over the table's own symbol.
        let effective_symbol = add_data.user_symbol.as_ref().unwrap_or(&table.symbol);
        ensure!(
            !effective_symbol.contains('\n'),
            "table symbol must not contain new-lines"
        );
        if table.entries.is_empty() {
            log::info!("Table {} is empty, skipping folder creation", table.name);
            continue;
        }
        let mut path = PathBuf::new();
        path.push(playlists_folder);
        path.push(table.name.clone());
        fs::create_dir_all(&path).with_context(|| format!("failed to create dir {path:?}"))?;
        for (i, folder) in extract_levels(&table.entries, &table.folder_order)
            .into_iter()
            .enumerate()
        {
            ensure!(
                !folder.contains('\n'),
                "folder name must not contain new-lines"
            );
            // The folder name is spliced into the #COMMAND SQL below; a single
            // quote would break (or inject into) that query.
            ensure!(
                !folder.contains('\''),
                "folder name must not contain single quotes"
            );
            let contents = format!("
#COMMAND song.hash in (SELECT md5 FROM lr2oxytabler_playlist_entry WHERE playlist_id = {} AND folder = '{}')
#MAXTRACKS 0
#CATEGORY {}
#TITLE {}{}
#INFORMATION_A {}
#INFORMATION_B {}", playlist_id.0, folder, table.name, effective_symbol, folder, "", "");
            // NOTE: LR2 always expects CP932 in text files.
            let (contents, _, had_errors) = encoding_rs::SHIFT_JIS.encode(&contents);
            ensure!(!had_errors, "failed to encode .lr2folder file");
            debug_assert!(path.exists()); // Created above.
            // Zero-padded index keeps files in level order, e.g. 0003.lr2folder.
            path.push(format!("{i:0>4}.lr2folder"));
            fs::write(&path, contents).context("failed to write lr2folder file")?;
            path.pop();
        }
        log::debug!("Filled {path:?}");
    }
    // Finally, drop table directories that ended up empty (removed tables).
    for entry in std::fs::read_dir(playlists_folder).context("failed to read dir")? {
        let path = entry.context("failed to stat")?.path();
        if path.is_dir()
            && fs::read_dir(&path)
                .context("failed to read lr2folder dir")?
                .next()
                .is_none()
        {
            std::fs::remove_dir(&path).context("failed to remove empty playlist directory")?;
            log::info!("Removed empty playlist directory: {path:?}");
        }
    }
    Ok(())
}
/// Rewrite the `tag` column of songs to a comma-separated concatenation of
/// (user_symbol-or-symbol + folder) for every playlist containing the song.
///
/// NOTE: the UPDATE has no WHERE clause, so every song row is touched; songs
/// that belong to no playlist get their tag set to NULL by the empty subquery.
fn update_tags_inplace(conn: &rusqlite::Connection) -> Result<()> {
    let write_tags = r#"
UPDATE song SET tag = (
SELECT GROUP_CONCAT(TRIM(COALESCE(user_symbol, symbol) || folder), ", ")
FROM lr2oxytabler_playlist_entry
INNER JOIN
lr2oxytabler_playlist
ON
lr2oxytabler_playlist_entry.playlist_id
= lr2oxytabler_playlist.playlist_id
WHERE song.hash = lr2oxytabler_playlist_entry.md5
)
"#;
    let rows = conn
        .execute(write_tags, [])
        .context("failed to set song tags to playlist entry levels")?;
    log::debug!("Wrote tags to {rows} songs");
    Ok(())
}
/// Seconds since the Unix epoch.
type UnixEpochTs = u64;

/// Current wall-clock time as a Unix timestamp.
fn now() -> UnixEpochTs {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("UNIX_EPOCH")
        .as_secs()
}

/// Human-readable "N <units> ago" description of `time` relative to `now`.
///
/// NOTE(review): hours are shown until 60 hours have passed (not 24) before
/// switching to days; looks deliberate (mirrors the 60s/60m breakpoints) but
/// worth confirming.
fn since_string(time: UnixEpochTs, now: UnixEpochTs) -> String {
    // Signed difference so a `time` slightly ahead of `now` can't underflow.
    // (expect messages previously read "u64 from i64" — the conversion is the
    // other way around.)
    let diff =
        i64::try_from(now).expect("i64 from u64") - i64::try_from(time).expect("i64 from u64");
    if diff < 60 {
        format!("{diff} seconds ago")
    } else if diff < 60 * 60 {
        format!("{} minutes ago", diff / 60)
    } else if diff < 60 * 60 * 60 {
        format!("{} hours ago", diff / 60 / 60)
    } else {
        format!("{} days ago", diff / 60 / 60 / 24)
    }
}
/// A playlists folder is acceptable when it is empty or already contains our
/// README marker file (i.e. this tool manages it).
fn validate_good_playlists_folder(path: &Path) -> Result<()> {
    let mut seen_any = false;
    for entry in std::fs::read_dir(path).context("failed to read dir")? {
        seen_any = true;
        if entry.context("failed to read dir entry")?.file_name() == README_FILE_NAME {
            return Ok(());
        }
    }
    anyhow::ensure!(
        !seen_any,
        "directory isn't empty and doesn't contain {README_FILE_NAME}"
    );
    Ok(())
}
impl App {
    /// Build initial state. CLI-provided paths are validated here; failures
    /// are logged (not fatal) so the user can re-pick them in the UI.
    fn new(
        db: Option<PathBuf>,
        playlists_folder: Option<PathBuf>,
        write_tags_on_save: bool,
    ) -> (Self, Task<Message>) {
        let mut this = Self {
            lr2_db: PathBuf::new(),
            playlists_folder: PathBuf::new(),
            write_tags_on_save,
            db: None,
            tables: vec![],
            new_table_text: String::new(),
            now: now(),
            reqwest: reqwest::ClientBuilder::new()
                // jounin.jp, site with the new overjoy/insane 2 tables, seems to block requests
                // with no user-agent.
                .user_agent("curl/8.12.1")
                .build()
                .expect("Failed to build reqwest client"),
        };
        if let Some(db) = db {
            if let Err(e) = this.set_lr2_db(&db) {
                log::error!("uh oh {e}");
            };
        }
        if let Some(playlists_folder) = playlists_folder {
            if let Err(e) = validate_good_playlists_folder(&playlists_folder) {
                log::error!("bad playlists folder: {e}");
            } else {
                this.playlists_folder = playlists_folder;
            }
        }
        (this, Task::none())
    }

    /// A URL may be added when it has a supported scheme and isn't tracked yet.
    fn validate_table_url_for_adding(tables: &[Table], url: &str) -> Result<()> {
        ensure!(
            url.starts_with("https://") || url.starts_with("http://") || url.starts_with("file://"),
            "Bad table URL"
        );
        ensure!(
            !tables.iter().any(|t| t.0.web_url == url),
            "Table already exists"
        );
        Ok(())
    }

    /// Async wrapper over the free `fetch_table`, stamping the current time.
    async fn fetch_table(client: reqwest::Client, table_to_fetch: String) -> Result<Table> {
        fetch_table(&client, table_to_fetch, now()).await
    }

    /// Insert a placeholder table for `url`; real data arrives on next fetch.
    fn add_table(&mut self, url: &str) -> Result<()> {
        Self::validate_table_url_for_adding(&self.tables, url)?;
        let table = Table(
            TableData {
                web_url: url.to_string(),
                name: "NEW TABLE".to_string(),
                symbol: "?".to_string(),
                header_url: String::new(),
                data_url: String::new(),
                entries: vec![],
                folder_order: vec![],
            },
            TableAddData {
                last_update: None,
                #[cfg(test)]
                playlist_id: None,
                user_symbol: None,
                edited_symbol: None,
                edited_url: None,
            },
        );
        // Keep `tables` sorted by name.
        self.tables.insert(
            self.tables.partition_point(|t| t.0.name < table.0.name),
            table,
        );
        // `<=`: is_sorted_by's closure must report whether adjacent pairs are
        // IN ORDER, which includes equal names; the previous strict `<` would
        // trip this assert as soon as two tables shared a name (e.g. two
        // freshly added "NEW TABLE" placeholders).
        debug_assert!(self.tables.is_sorted_by(|l, r| l.0.name <= r.0.name));
        Ok(())
    }

    /// Native file-picker dialog for song.db.
    fn choose_lr2_db() -> Option<PathBuf> {
        rfd::FileDialog::new()
            .set_title("Choose song.db")
            .add_filter("song.db", &["db"])
            .pick_file()
    }

    /// Open + validate `db_path`, run migrations, and load stored tables.
    fn set_lr2_db(&mut self, db_path: &Path) -> Result<()> {
        ensure!(
            db_path.extension().is_some_and(|ext| ext == "db"),
            "suspicious db path extension: {db_path:?}"
        );
        // no SQLITE_OPEN_CREATE and SQLITE_OPEN_URI
        let db = rusqlite::Connection::open_with_flags(
            db_path,
            rusqlite::OpenFlags::SQLITE_OPEN_READ_WRITE | rusqlite::OpenFlags::SQLITE_OPEN_NO_MUTEX,
        )
        .context("failed to open db")?;
        validate_song_db(&db).context("likely an invalid song.db file")?;
        run_alpha_migrations(&db)?;
        run_db_migrations(&db)?;
        let tables = load_db_tables(&db).context("Failed to read tables from DB")?;
        // Only commit the new state to `self` once everything above succeeded.
        self.db = Some(db);
        self.lr2_db = db_path.to_path_buf();
        self.tables = tables;
        Ok(())
    }

    /// Native folder picker; rejects folders that aren't empty and aren't ours.
    fn choose_playlists_folder() -> Option<PathBuf> {
        let path = rfd::FileDialog::new()
            .set_title("Choose playlists folder")
            .pick_folder()?;
        match validate_good_playlists_folder(&path) {
            Ok(()) => Some(path),
            Err(e) => {
                log::error!("Bad playlists folder selected: {e}");
                None
            }
        }
    }

    /// Merge a freshly fetched `table` into the sorted list, carrying over the
    /// user-defined symbol from any previous version of the same table.
    fn apply_tables(&mut self, mut table: Table) {
        log::info!("Table '{}' was succesfully updated", table.0.name);
        if let Some(prev) = self
            .tables
            .iter()
            .position(|t| t.0.web_url == table.0.web_url)
        {
            let prev = self.tables.remove(prev);
            table.1.user_symbol = prev.1.user_symbol;
            // FIXME: unspaghettify. Do this in fetch_table (which also has a note for this).
        };
        self.tables.insert(
            self.tables.partition_point(|t| t.0.name < table.0.name),
            table,
        );
        // `<=`: equal names are still "sorted"; see the matching assert in add_table.
        debug_assert!(self.tables.is_sorted_by(|l, r| l.0.name <= r.0.name));
    }

    /// Central message handler. Any `Err` returned here is routed back into
    /// the app as `Message::DisplayError` by `update`.
    fn handle_message(&mut self, message: Message) -> Result<Task<Message>> {
        match message {
            Message::AddTable => {
                // Clone: add_table needs &mut self while the text lives in self.
                self.add_table(&self.new_table_text.clone())?;
                Ok(Task::none())
            }
            Message::ApplyTables(table) => {
                self.apply_tables(table);
                Ok(Task::none())
            }
            Message::RemoveTable(idx) => {
                self.tables.remove(idx);
                Ok(Task::none())
            }
            Message::ClearTables => {
                self.tables.clear();
                Ok(Task::none())
            }
            Message::DisplayError(e) => {
                // Must never return errors from this handler or there will be a loop.
                log::error!("Error: {e}");
                Ok(Task::none())
            }
            Message::EditTableFinish(idx, apply) => {
                let new_url = self.tables[idx]
                    .1
                    .edited_url
                    .take()
                    .expect("finished editing table but there is no edited url");
                if apply && new_url != self.tables[idx].0.web_url {
                    Self::validate_table_url_for_adding(&self.tables, &new_url)?;
                    self.tables[idx].0.web_url = new_url;
                }
                Ok(Task::none())
            }
            Message::EditTableStart(idx) => {
                let t = &mut self.tables[idx];
                t.1.edited_url = Some(t.0.web_url.clone());
                Ok(text_input::focus(format!("edit-{idx}")))
            }
            Message::EditTableSymbolFinish(idx, apply) => {
                let new_symbol = self.tables[idx]
                    .1
                    .edited_symbol
                    .take()
                    .expect("finished editing table but there is no edited symbol");
                if apply {
                    // Empty input clears the user override, falling back to
                    // the table's own symbol.
                    self.tables[idx].1.user_symbol = match new_symbol.as_ref() {
                        "" => None,
                        _ => Some(new_symbol),
                    };
                }
                Ok(Task::none())
            }
            Message::EditTableSymbolStart(idx) => {
                let t = &mut self.tables[idx];
                t.1.edited_symbol = Some(t.1.user_symbol.as_ref().unwrap_or(&t.0.symbol).clone());
                Ok(text_input::focus(format!("edit-{idx}")))
            }
            Message::EditTableSymbolText(idx, text) => {
                let t = &mut self.tables[idx];
                t.1.edited_symbol = Some(text);
                Ok(Task::none())
            }
            Message::EditTableText(idx, text) => {
                let t = &mut self.tables[idx];
                t.1.edited_url = Some(text);
                Ok(Task::none())
            }
            // One async fetch task per requested URL, run as a batch.
            Message::FetchTables(task) => Ok(Task::batch(task.table_urls.into_iter().map(|url| {
                Task::perform(Self::fetch_table(self.reqwest.clone(), url), |f| match f {
                    Ok(table) => Message::ApplyTables(table),
                    Err(e) => {
                        Message::DisplayError(format!("{:?}", e.context("fetch table error")))
                    }
                })
            }))),
            Message::PickPlaylistsFolder => {
                if let Some(playlists_folder) = Self::choose_playlists_folder() {
                    self.playlists_folder = playlists_folder;
                }
                Ok(Task::none())
            }
            Message::PickSongDb => {
                if let Some(db) = Self::choose_lr2_db() {
                    self.set_lr2_db(&db)?;
                }
                Ok(Task::none())
            }
            Message::TableTextUpdate(turl) => {
                self.new_table_text = turl;
                Ok(Task::none())
            }
            Message::SaveDb => {
                // A rusqlite Transaction (not plain BEGIN/COMMIT) so a failure
                // anywhere below rolls back automatically on drop — see the
                // note at the top of this file.
                let tx = self
                    .db
                    .as_mut()
                    .context("no db opened")?
                    .transaction()
                    .context("failed to start transaction for updating db")?;
                let table_ids = save_db(&tx, &self.tables)?;
                if self.write_tags_on_save {
                    update_tags_inplace(&tx)?;
                }
                write_lr2folder_files(&self.playlists_folder, &self.tables, &table_ids)?;
                tx.commit()
                    .context("failed to commit after writing playlists")?;
                Ok(Task::none())
            }
            Message::Tick(now) => {
                self.now = now;
                Ok(Task::none())
            }
            Message::ToggleTagUpdating(on) => {
                self.write_tags_on_save = on;
                Ok(Task::none())
            }
        }
    }

    /// iced update entry point; converts handler errors into DisplayError.
    fn update(&mut self, message: Message) -> Task<Message> {
        match self.handle_message(message) {
            Ok(task) => task,
            Err(e) => Task::done(Message::DisplayError(format!(
                "{:?}",
                e.context("action error")
            ))),
        }
    }

    /// Periodic tick so the "N minutes ago" labels stay fresh.
    fn subscription(&self) -> Subscription<Message> {
        use iced::time::{Duration, every};
        const TICK_INTERVAL: Duration = Duration::from_secs(5);
        every(TICK_INTERVAL).map(|_| Message::Tick(now()))
    }

    /// Build the whole UI for the current state.
    fn view(&self) -> Element<Message> {
        const NEGATIVE_NEVER: UnixEpochTs = 0;
        const UPDATE_INTERVAL: UnixEpochTs = 60 * 60 * 12;
        let db_ui = row![
            button("Pick song.db").on_press(Message::PickSongDb),
            text(if self.lr2_db.as_os_str().is_empty() {
                "/path/to/song.db".to_string()
            } else {
                self.lr2_db.display().to_string()
            })
        ]
        .align_y(Alignment::Center);
        let playlist_ui = row![
            button("Pick playlists folder").on_press(Message::PickPlaylistsFolder),
            text(if self.playlists_folder.as_os_str().is_empty() {
                "/path/to/playlists/".to_string()
            } else {
                self.playlists_folder.display().to_string()
            })
        ]
        .align_y(Alignment::Center);
        const SETUP_HINT: &str = "HINT: you can pass settings through the command line.
Creating a shortcut to the executable file is a good way.
Arguments are <song.db> <playlists folder> <'yes' to update tags on save>.";
        // TODO: type state?
        // Setup screen until both paths are configured.
        if self.lr2_db.as_os_str().is_empty() || self.playlists_folder.as_os_str().is_empty() {
            return column![db_ui, playlist_ui, SETUP_HINT].into();
        }
        let table_url_hint = "https://example.com/table";
        let new_table = text_input(table_url_hint, &self.new_table_text)
            .on_input(Message::TableTextUpdate)
            .on_submit(Message::AddTable);
        let add_table = button("Add").on_press(Message::AddTable);
        let tables: Element<_> = if self.tables.is_empty() {
            center(text("Go on, add some tables!")).height(100).into()
        } else {
            keyed_column(self.tables.iter().enumerate().map(|(i, table)| {
                (
                    i,
                    // Each row has three states: symbol editor open, URL
                    // editor open, or the normal read-only row.
                    if self.tables[i].1.edited_symbol.is_some() {
                        row![
                            text_input(
                                &self.tables[i].0.symbol,
                                self.tables[i].1.edited_symbol.as_ref().unwrap()
                            )
                            .id(format!("edit-{i}"))
                            .width(Fill)
                            .on_input(move |text| Message::EditTableSymbolText(i, text))
                            .on_submit(Message::EditTableSymbolFinish(i, true)),
                            button("Cancel")
                                .on_press(Message::EditTableSymbolFinish(i, false))
                                .style(button::secondary),
                            button("Apply")
                                .on_press(Message::EditTableSymbolFinish(i, true))
                                .style(button::primary),
                        ]
                        .into()
                    } else if self.tables[i].1.edited_url.is_some() {
                        row![
                            text_input(
                                table_url_hint,
                                self.tables[i].1.edited_url.as_ref().unwrap()
                            )
                            .id(format!("edit-{i}"))
                            .width(Fill)
                            .on_input(move |text| Message::EditTableText(i, text))
                            .on_submit(Message::EditTableFinish(i, true)),
                            button("Cancel")
                                .on_press(Message::EditTableFinish(i, false))
                                .style(button::secondary),
                            button("Apply")
                                .on_press(Message::EditTableFinish(i, true))
                                .style(button::primary),
                        ]
                        .into()
                    } else {
                        row![
                            text(format!(
                                "{} ({}) {}",
                                table.0.name,
                                table.1.user_symbol.as_ref().unwrap_or(&table.0.symbol),
                                table.0.web_url
                            ))
                            .shaping(text::Shaping::Advanced) // CJK support.
                            .width(Fill),
                            text(table.1.last_update.map_or_else(
                                || "Never".to_string(),
                                |last_update| since_string(last_update, self.now)
                            )),
                            container("").padding(5),
                            button("Force-update")
                                .on_press_with(|| Message::FetchTables(FetchTables {
                                    table_urls: vec![table.0.web_url.clone()],
                                }))
                                .style(button::secondary),
                            button("Edit symbol")
                                .on_press(Message::EditTableSymbolStart(i))
                                .style(button::secondary),
                            button("Edit URL")
                                .on_press(Message::EditTableStart(i))
                                .style(button::secondary),
                            button("Remove")
                                .on_press(Message::RemoveTable(i))
                                .style(button::danger),
                        ]
                        .align_y(Alignment::Center)
                        .into()
                    },
                )
            }))
            .into()
        };
        // "Update" only refetches tables whose data is older than UPDATE_INTERVAL.
        let update_tables = button(Container::new(text("Update")).align_x(Alignment::Start))
            .on_press_with(|| {
                Message::FetchTables(FetchTables {
                    table_urls: self
                        .tables
                        .iter()
                        .filter(|t| {
                            t.1.last_update.unwrap_or(NEGATIVE_NEVER) + UPDATE_INTERVAL < self.now
                        })
                        .map(|t| t.0.web_url.clone())
                        .collect::<Vec<String>>(),
                })
            })
            .padding(10);
        let update_tables = tooltip(
            update_tables,
            text(format!(
                "Tables are updated once in {} hours",
                UPDATE_INTERVAL / 60 / 60
            )),
            tooltip::Position::default(),
        )
        .style(container::rounded_box);
        let clear_tables = button(Container::new(text("Clear")).align_x(Alignment::End))
            .on_press(Message::ClearTables)
            .style(button::danger)
            .padding(10);
        // Save is disabled until a db connection exists.
        let save = button("Save song.db").on_press_maybe(if self.db.is_some() {
            Some(Message::SaveDb)
        } else {
            None
        });
        let write_tags =
            checkbox("Write tags", self.write_tags_on_save).on_toggle(Message::ToggleTagUpdating);
        let interface = column![
            db_ui,
            playlist_ui,
            row![new_table, add_table].align_y(Alignment::Center),
            tables,
            row![update_tables, horizontal_space(), clear_tables].align_y(Alignment::Center),
            row![save, write_tags].align_y(Alignment::Center),
            SETUP_HINT,
        ];
        scrollable(interface).into()
    }

    /// Hard-coded dark theme.
    const fn theme(&self) -> iced::Theme {
        iced::Theme::Dark
    }
}
/// Payload of `Message::FetchTables`: which table page URLs to (re)download.
#[derive(Debug, Clone)]
struct FetchTables {
    table_urls: Vec<String>,
}
/// All UI / async events handled by `App::handle_message`.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone)]
enum Message {
    /// Add the URL currently typed into the "new table" input.
    AddTable,
    /// A fetch finished; merge the table into state.
    ApplyTables(Table),
    /// Remove every tracked table.
    ClearTables,
    /// Log an error. Terminal message: its handler never fails.
    DisplayError(String),
    /// Finish URL editing at index; bool = apply (vs. cancel).
    EditTableFinish(usize, bool),
    /// Open the URL editor for the table at index.
    EditTableStart(usize),
    /// Finish symbol editing at index; bool = apply (vs. cancel).
    EditTableSymbolFinish(usize, bool),
    /// Open the symbol editor for the table at index.
    EditTableSymbolStart(usize),
    /// Symbol editor text changed.
    EditTableSymbolText(usize, String),
    /// URL editor text changed.
    EditTableText(usize, String),
    /// Start downloading the given table URLs.
    FetchTables(FetchTables),
    /// Show the playlists-folder picker dialog.
    PickPlaylistsFolder,
    /// Show the song.db picker dialog.
    PickSongDb,
    /// Remove the table at index.
    RemoveTable(usize),
    /// Persist tables to song.db and rewrite the .lr2folder files.
    SaveDb,
    /// "New table" input text changed.
    TableTextUpdate(String),
    /// Periodic clock refresh for the "ago" labels.
    Tick(UnixEpochTs),
    /// Checkbox toggling tag rewriting on save.
    ToggleTagUpdating(bool),
}
/// Entry point: positional CLI args are <song.db> <playlists folder>
/// <'yes' to update tags on save>, all optional.
fn main() -> iced::Result {
    env_logger::init();
    let mut db: Option<PathBuf> = None;
    let mut playlists_folder: Option<PathBuf> = None;
    let mut write_tags_on_save: Option<bool> = None;
    // TODO(0.2.0): use proper arguments parsing with something like 'clap'.
    for (i, arg) in std::env::args().enumerate() {
        match i {
            0 => (),
            1 => db = Some(arg.into()),
            2 => playlists_folder = Some(arg.into()),
            3 => write_tags_on_save = Some(arg == "yes"),
            _ => log::warn!("too many args"),
        }
    }
    iced::application("LR2 OxyTabler", App::update, App::view)
        .subscription(App::subscription)
        .theme(App::theme)
        .run_with(move || App::new(db, playlists_folder, write_tags_on_save.unwrap_or(false)))
}
#[cfg(test)]
mod tests {
use crate::{
PlaylistId, Table, TableEntry, extract_raw_header_url, load_db_tables, resolve_json_url,
save_db,
};
use anyhow::Result;
use test_log::test;
// u64::MAX: "far future" timestamp sentinel for tests.
const NEVER: u64 = u64::MAX;
/// In-memory SQLite db with our migrations applied plus LR2's `song` table.
fn create_song_db() -> rusqlite::Connection {
    let db = rusqlite::Connection::open_in_memory().unwrap();
    crate::run_db_migrations(&db).unwrap();
    // LR2 copy-paste
    db.execute_batch("
CREATE TABLE song(hash TEXT ,title TEXT ,subtitle TEXT ,genre TEXT,artist TEXT,subartist TEXT,tag TEXT ,path TEXT primary key ,type INTEGER,folder TEXT,stagefile TEXT,banner TEXT,backbmp TEXT,parent TEXT,level INTEGER,difficulty INTEGER,maxbpm INTEGER,minbpm INTEGER,mode INTEGER,judge INTEGER,longnote INTEGER,bga INTEGER,random INTEGER,date INTEGER,favorite INTEGER,txt INTEGER,karinotes INTEGER,adddate INTEGER,exlevel INTEGER)
").unwrap();
    db
}
/// Insert one song row with the given hash; all other columns are canned data.
fn insert_song(conn: &rusqlite::Connection, md5: &str) -> Result<()> {
    // loh
    conn.execute(r#"
INSERT INTO "main"."song" ("hash", "title", "subtitle", "genre", "artist", "subartist", "tag", "path", "type", "folder", "stagefile", "banner", "backbmp", "parent", "level", "difficulty", "maxbpm", "minbpm", "mode", "judge", "longnote", "bga", "random", "date", "favorite", "txt", "karinotes", "adddate", "exlevel") VALUES (?, '3y3s', '', 'DANCE SPEED', '青龍', '', NULL, 'C:\Microbot\Bimbows', 0, 'deadbeef', '', '', '', 'deadbeef', 12, 4, 191, 191, 14, 2, 0, 1, 0, 1111111111, 0, 0, 3132, 1111111111, 0);
"#, rusqlite::params![md5])?;
    Ok(())
}
/// Test-only builder helpers for assembling `Table` fixtures piecemeal.
impl Table {
    /// A table with every field blank.
    #[must_use]
    const fn empty() -> Self {
        Self(
            crate::TableData {
                name: String::new(),
                symbol: String::new(),
                web_url: String::new(),
                data_url: String::new(),
                header_url: String::new(),
                entries: vec![],
                folder_order: vec![],
            },
            crate::TableAddData {
                last_update: None,
                playlist_id: None,
                user_symbol: None,
                edited_symbol: None,
                edited_url: None,
            },
        )
    }
    /// Appends one level-"1" entry whose md5 is made unique per call by
    /// embedding the current entry count (space-padded to width 4).
    #[must_use]
    fn with_entry(mut self) -> Self {
        let idx = self.0.entries.len();
        let entry = TableEntry {
            md5: format!("foodfoodfoodfoodfoodfoodfood{idx:4}"),
            level: "1".to_string(),
        };
        self.0.entries.push(entry);
        self
    }
    /// Sets the table name.
    #[must_use]
    fn with_name(mut self, value: impl Into<String>) -> Self {
        self.0.name = value.into();
        self
    }
    /// Sets the table's own symbol.
    #[must_use]
    fn with_symbol(mut self, value: impl Into<String>) -> Self {
        self.0.symbol = value.into();
        self
    }
    /// Sets the table's web URL (its unique identifier).
    #[must_use]
    fn with_url(mut self, value: impl Into<String>) -> Self {
        self.0.web_url = value.into();
        self
    }
    /// Sets the user-chosen symbol override.
    #[must_use]
    fn with_user_symbol(mut self, value: impl Into<String>) -> Self {
        self.1.user_symbol = Some(value.into());
        self
    }
}
/// Lists the file names directly inside `path`, sorted and comma-joined —
/// a compact fingerprint of a directory for assertions.
#[must_use]
fn ls_files<P: AsRef<std::path::Path>>(path: P) -> String {
    let mut names: Vec<String> = std::fs::read_dir(path)
        .unwrap()
        .map(|entry| entry.unwrap().file_name().into_string().unwrap())
        .collect();
    names.sort();
    names.join(",")
}
/// `extract_raw_header_url` finds the bmstable meta tag by literal match and
/// returns its `content` attribute; exercises hit, miss, and truncated cases.
// Idiom fix: `format!("{}", err)` replaced with `err.to_string()` throughout.
#[test]
fn test_extract_header_url() {
    // Found among other meta tags, across a line break.
    assert_eq!(
        extract_raw_header_url(
            r#"<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"><html><head>
<meta name"blahblah" /><meta name="bmstable" content="header.json" />"#
        )
        .ok(),
        Some("header.json")
    );
    assert_eq!(
        extract_raw_header_url(r#"<meta name="bmstable" content="header.json"/>"#).ok(),
        Some("header.json")
    );
    // No meta tag at all.
    assert_eq!(
        extract_raw_header_url("").unwrap_err().to_string(),
        "missing bmstable meta"
    );
    // Opening attribute found but closing quote missing.
    assert_eq!(
        extract_raw_header_url(r#"<meta name="bmstable" content="header.json"#)
            .unwrap_err()
            .to_string(),
        "missing bmstable meta rhs"
    );
    // In practice, looks like it's fair to check for literal match like we do, instead of
    // parsing.
    // NOTE(review): this input renders identically to the accepted one above yet
    // is expected to fail — it likely differs in an invisible/odd character in
    // the original source; verify before touching this literal.
    assert_eq!(
        extract_raw_header_url(r#"<meta name="bmstable" content="header.json" />"#)
            .unwrap_err()
            .to_string(),
        "missing bmstable meta"
    );
}
/// `resolve_json_url` resolves a data URL against the HTML page's directory,
/// but passes absolute URLs through untouched.
#[test]
fn test_json_table_header_score_url() {
    // Relative URL: joined onto the page's parent path.
    let relative = resolve_json_url("https://stellabms.xyz/fr/table.html", "score.json");
    assert_eq!(
        relative.ok(),
        Some("https://stellabms.xyz/fr/score.json".to_string())
    );
    // Already-absolute URL: returned as-is.
    let absolute = resolve_json_url(
        "http://flowermaster.web.fc2.com/lrnanido/gla/LN.html",
        "http://flowermaster.web.fc2.com/lrnanido/gla/score.json",
    );
    assert_eq!(
        absolute.ok(),
        Some("http://flowermaster.web.fc2.com/lrnanido/gla/score.json".to_string())
    );
    // idc
}
/// `update_tags_inplace` writes "<symbol><level>" (whitespace-normalized, per
/// the expected strings below) into `song.tag`; a user-set symbol overrides
/// the table's own.
#[test]
fn updates_tags() {
    use crate::update_tags_inplace;
    let md5 = "feedfeedfeedfeedfeedfeedfeedfeed";
    let entry = TableEntry {
        md5: md5.to_string(),
        level: " DELAYMASTER".to_string(),
    };
    // Save the table, insert a matching song, run the tag update, and read
    // back the resulting tag column for our song.
    let tag_after_update = |table| {
        let db = create_song_db();
        save_db(&db, &[table]).unwrap();
        insert_song(&db, md5).unwrap();
        update_tags_inplace(&db).unwrap();
        db.query_row(
            "SELECT tag FROM song WHERE hash = ?",
            rusqlite::params![md5],
            |row| Ok(row.get::<_, String>(0).unwrap()),
        )
        .unwrap()
    };
    // No symbol: the trimmed level alone.
    let mut plain = Table::empty();
    plain.0.entries = vec![entry.clone()];
    assert_eq!(tag_after_update(plain), "DELAYMASTER");
    // Table symbol prepended.
    let mut symbolled = Table::empty().with_symbol("omg ");
    symbolled.0.entries = vec![entry.clone()];
    assert_eq!(tag_after_update(symbolled), "omg DELAYMASTER");
    // User symbol takes precedence over the table's own symbol.
    let mut overridden = Table::empty().with_symbol("nope").with_user_symbol(" omg");
    overridden.0.entries = vec![entry];
    assert_eq!(tag_after_update(overridden), "omg DELAYMASTER");
}
// Necessary to work-around buggy folder updating in LR2.
#[test]
fn playlist_id_preserved_between_updates() {
    let db = rusqlite::Connection::open_in_memory().unwrap();
    crate::run_db_migrations(&db).unwrap();
    // Looks up the assigned playlist id for the table with the given web URL.
    let playlist_id = |tables: &[Table], url: &str| {
        tables
            .iter()
            .find(|t| t.0.web_url == url)
            .map(|t| t.1.playlist_id.unwrap())
            .unwrap()
    };
    // First save: tables "1" and "2" each get an id assigned.
    save_db(&db, &[Table::empty().with_url("1"), Table::empty().with_url("2")]).unwrap();
    let loaded = load_db_tables(&db).unwrap();
    let id1 = playlist_id(&loaded, "1");
    let id2 = playlist_id(&loaded, "2");
    // Second save drops "1" and introduces "3": "2" must keep its id, and
    // "1"'s id must not be handed out to anyone.
    save_db(&db, &[Table::empty().with_url("2"), Table::empty().with_url("3")]).unwrap();
    let loaded = load_db_tables(&db).unwrap();
    assert!(!loaded.iter().any(|t| t.1.playlist_id.unwrap() == id1));
    assert!(loaded.iter().any(|t| t.1.playlist_id.unwrap() == id2));
}
/// Tables removed from the saved set disappear from the database (entries are
/// cascade-deleted along with their table).
#[test]
fn cascade_deletes_entries() {
    let conn = rusqlite::Connection::open_in_memory().unwrap();
    crate::run_db_migrations(&conn).unwrap();
    let table = Table::empty().with_url("1").with_entry();
    save_db(&conn, &[table]).unwrap();
    assert_eq!(load_db_tables(&conn).unwrap().len(), 1);
    // Saving an empty set wipes the previously stored table.
    save_db(&conn, &[]).unwrap();
    assert_eq!(load_db_tables(&conn).unwrap().len(), 0);
}
/// Happy path of `write_lr2folder_files`: directory layout plus the exact
/// bytes of a generated .lr2folder file.
#[test]
fn writes_files() {
    use crate::write_lr2folder_files;
    let tmp = tempfile::tempdir().unwrap();
    let tables = &[Table::empty()
        .with_url("http://url")
        .with_name("somename")
        .with_symbol("nahfam")
        .with_user_symbol("YO")
        .with_entry()];
    let table_ids = &[PlaylistId(0)];
    write_lr2folder_files(tmp.path(), tables, table_ids).unwrap();
    // One directory per table, next to a marker README.
    assert_eq!(ls_files(tmp.path()), "README-lr2oxytable.txt,somename");
    // One zero-padded .lr2folder file per folder (the single entry is level "1").
    assert_eq!(ls_files(tmp.path().join("somename")), "0000.lr2folder");
    // #TITLE is user symbol + folder name ("YO" + "1"); the user symbol
    // overrides the table's own "nahfam". #COMMAND filters by playlist id 0.
    assert_eq!(
        std::fs::read_to_string(tmp.path().join("somename").join("0000.lr2folder")).unwrap(),
        "
#COMMAND song.hash in (SELECT md5 FROM lr2oxytabler_playlist_entry WHERE playlist_id = 0 AND folder = '1')
#MAXTRACKS 0
#CATEGORY somename
#TITLE YO1
#INFORMATION_A
#INFORMATION_B "
    );
}
/// E.g. 'Stella' got renamed to 'Stella Verified', also we will have such functionality in the
/// GUI later.
///
/// Re-writing the folder tree after a table rename must remove the stale
/// directory for the old name.
#[test]
fn writing_files_removes_old_table_on_table_name_changed() {
    use crate::write_lr2folder_files;
    let tmp = tempfile::tempdir().unwrap();
    // Builders take `impl Into<String>`: plain &str literals suffice (the
    // previous `.to_string()` calls were redundant allocations).
    let old_table = Table::empty()
        .with_url("http://url")
        .with_name("oldname")
        .with_entry();
    let new_table = old_table.clone().with_name("newname");
    write_lr2folder_files(tmp.path(), &[old_table], &[PlaylistId(1)]).unwrap();
    assert_eq!(ls_files(tmp.path()), "README-lr2oxytable.txt,oldname");
    // assume(is_good(oldname)); // per other tests
    // Same playlist id, new name: "oldname" must be gone afterwards.
    write_lr2folder_files(tmp.path(), &[new_table], &[PlaylistId(1)]).unwrap();
    assert_eq!(ls_files(tmp.path()), "README-lr2oxytable.txt,newname");
    assert_eq!(ls_files(tmp.path().join("newname")), "0000.lr2folder");
    // No user symbol set, so #TITLE is just the folder name "1".
    assert_eq!(
        std::fs::read_to_string(tmp.path().join("newname").join("0000.lr2folder")).unwrap(),
        "
#COMMAND song.hash in (SELECT md5 FROM lr2oxytabler_playlist_entry WHERE playlist_id = 1 AND folder = '1')
#MAXTRACKS 0
#CATEGORY newname
#TITLE 1
#INFORMATION_A
#INFORMATION_B "
    );
}
/// Error contract of `write_lr2folder_files`: each invalid input produces a
/// specific, user-readable message.
#[test]
fn file_writing_disallows_bad_things() {
    use crate::write_lr2folder_files;
    // An empty destination path is rejected.
    let no_path = std::path::PathBuf::new();
    assert_eq!(
        write_lr2folder_files(&no_path, &[], &[])
            .unwrap_err()
            .to_string(),
        "path to playlists folder must not be empty"
    );
    // Table list and ID list must have matching lengths.
    assert_eq!(
        write_lr2folder_files(&no_path, &[], &[PlaylistId(1)])
            .unwrap_err()
            .to_string(),
        "mismatching amount of tables and their IDs"
    );
    // Refuses to write into a non-empty directory we didn't create ourselves
    // (missing the marker README).
    {
        let occupied = tempfile::tempdir().unwrap();
        std::fs::write(occupied.path().join("some-file"), "").unwrap();
        assert_eq!(
            write_lr2folder_files(occupied.path(), &[], &[])
                .unwrap_err()
                .to_string(),
            "directory isn't empty and doesn't contain README-lr2oxytable.txt"
        );
    }
    let tmp = tempfile::tempdir().unwrap();
    let attempt = |table| write_lr2folder_files(tmp.path(), &[table], &[PlaylistId(0)]);
    // New-lines would corrupt the line-oriented .lr2folder format: checked in
    // the table name, the table symbol, and the user symbol override.
    assert_eq!(
        attempt(Table::empty().with_name("bad\n"))
            .unwrap_err()
            .to_string(),
        "table name must not contain new-lines"
    );
    assert_eq!(
        attempt(Table::empty().with_name("good").with_symbol("bad\n"))
            .unwrap_err()
            .to_string(),
        "table symbol must not contain new-lines"
    );
    assert_eq!(
        attempt(Table::empty().with_name("good").with_user_symbol("bad\n"))
            .unwrap_err()
            .to_string(),
        "table symbol must not contain new-lines"
    );
    // Folder names (entry levels) are embedded single-quoted in the #COMMAND
    // SQL, so new-lines and single quotes are both refused.
    {
        let mut table = Table::empty().with_entry().with_name("good");
        table.0.entries[0].level = "1\n".to_string();
        assert_eq!(
            attempt(table).unwrap_err().to_string(),
            "folder name must not contain new-lines"
        );
    }
    {
        let mut table = Table::empty().with_entry().with_name("good");
        table.0.entries[0].level = "1'".to_string();
        assert_eq!(
            attempt(table).unwrap_err().to_string(),
            "folder name must not contain single quotes"
        );
    }
}
/// End-to-end fetch over HTTP: the page's bmstable meta tag points at the
/// header JSON, which in turn points at the data JSON; entries get normalized.
#[test(tokio::test)]
async fn fetches_table_from_web_page() {
    use crate::fetch_table;
    let mut server = mockito::Server::new_async().await;
    // HTML page carrying only the bmstable meta tag.
    let mock1 = server
        .mock("GET", "/bmstable/")
        .with_body(r#"<meta name="bmstable" content="header.json"/>"#)
        .create();
    // Header JSON with a data URL relative to the header's location.
    let mock2 = server
        .mock("GET", "/bmstable/header.json")
        .with_body(
            r#"{
"name" : "マイクールなテイブル",
"symbol" : "草",
"data_url" : "data"
}"#,
        )
        .create();
    let mock3 = server
        .mock("GET", "/bmstable/data")
        .with_body(
            // Also test deduplication, missing and empty md5
            r#"[
{ "level": "0", "md5": "feedfeedfeedfeedfeedfeedfeedfeed" },
{ "level": "0", "md5": "feedfeedfeedfeedfeedfeedfeedfeed" },
{ "level": "2", "md5": "" },
{ "level": "2", "md5": null },
{ "level": "2" },
{ "level": "1", "md5": "feedfeedfeedfeedfeedfeedfeedfeed" }
]"#,
        )
        .create();
    let reqwest = reqwest::Client::new();
    // Shared assertions, run once per input URL variant below.
    let test = async |url: String| {
        let table = fetch_table(&reqwest, url.clone(), NEVER).await.unwrap();
        assert_eq!(table.0.web_url, url);
        assert_eq!(table.0.name, "マイクールなテイブル");
        assert_eq!(table.0.symbol, "草");
        assert!(table.0.data_url.ends_with("/bmstable/data"));
        // Exact (level, md5) duplicates collapse to one; entries with empty,
        // null, or missing md5 are dropped; the same md5 under a different
        // level is kept.
        assert_eq!(
            table.0.entries,
            &[
                TableEntry {
                    md5: "feedfeedfeedfeedfeedfeedfeedfeed".to_string(),
                    level: "0".to_string(),
                },
                TableEntry {
                    md5: "feedfeedfeedfeedfeedfeedfeedfeed".to_string(),
                    level: "1".to_string(),
                },
            ]
        );
    };
    test(format!("http://{}/bmstable/", server.host_with_port())).await;
    // Can pass header URL directly.
    test(format!(
        "http://{}/bmstable/header.json",
        server.host_with_port()
    ))
    .await;
    // Page fetched once; header and data fetched once per invocation above.
    mock1.assert();
    mock2.expect(2).assert();
    mock3.expect(2).assert();
}
/// file:// URLs work too: header and data are read off the local filesystem,
/// and the relative data URL resolves against the header file's directory.
#[test(tokio::test)]
async fn fetches_table_from_local_file() {
    use crate::fetch_table;
    let client = reqwest::Client::new();
    // A missing file surfaces as the generic fetch error.
    assert_eq!(
        fetch_table(&client, "file:///no/such/file".to_string(), NEVER)
            .await
            .unwrap_err()
            .to_string(),
        "Failed to fetch table web page"
    );
    let dir = tempfile::tempdir().unwrap();
    let header = dir.path().join("hdr");
    std::fs::write(
        &header,
        r#"{ "name": "マイクールなテイブル", "symbol": "草", "data_url": "dt" }"#,
    )
    .unwrap();
    let data = dir.path().join("dt");
    std::fs::write(
        &data,
        r#"[ { "level": "0", "md5": "feedfeedfeedfeedfeedfeedfeedfeed" } ]"#,
    )
    .unwrap();
    let url = format!("file://{}", header.to_str().unwrap());
    let table = fetch_table(&client, url.clone(), NEVER).await.unwrap();
    assert_eq!(table.0.web_url, url);
    assert_eq!(table.0.name, "マイクールなテイブル");
    assert_eq!(table.0.symbol, "草");
    // Relative "dt" resolved to a file:// URL next to the header.
    assert_eq!(
        table.0.data_url,
        format!("file://{}", data.to_str().unwrap())
    );
    assert_eq!(
        table.0.entries,
        &[TableEntry {
            md5: "feedfeedfeedfeedfeedfeedfeedfeed".to_string(),
            level: "0".to_string(),
        }]
    );
}
/// `validate_song_db` accepts our LR2-shaped fixture DB and rejects a database
/// without the LR2 schema, with a clear message.
#[test]
fn disallows_non_song_db() {
    use crate::validate_song_db;
    // Fixture DB (migrations + LR2 `song` table) passes validation.
    assert!(validate_song_db(&create_song_db()).is_ok());
    // A blank in-memory database is refused.
    let blank = rusqlite::Connection::open_in_memory().unwrap();
    assert_eq!(
        validate_song_db(&blank).unwrap_err().to_string(),
        "supplied database seems not to be an LR2 song.db"
    );
}
}