diff --git a/.gitignore b/.gitignore index 23eb8a7a..1952496d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ /.env /data.db* /config.toml +/uploads/ diff --git a/Cargo.lock b/Cargo.lock index 00b33a02..6ad2f989 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -382,9 +382,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.51" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44318e776df68115a881de9a8fd1b9e53368d7a4a5ce4cc48517da3393233a5e" +checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" dependencies = [ "proc-macro2", "quote", @@ -406,6 +406,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "065374052e7df7ee4047b1160cca5e1467a12351a40b3da123c870ba0b8eda2a" +[[package]] +name = "autocfg" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" + [[package]] name = "autocfg" version = "1.0.1" @@ -987,7 +993,7 @@ version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4c40298486cdf52cc00cd6d6987892ba502c7656a16a4192a9992b1ccedd121" dependencies = [ - "autocfg", + "autocfg 1.0.1", "proc-macro-hack", "proc-macro2", "quote", @@ -1012,7 +1018,7 @@ version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "feb5c238d27e2bf94ffdfd27b2c29e3df4a68c4193bb6427384259e2bf191967" dependencies = [ - "autocfg", + "autocfg 1.0.1", "futures-channel", "futures-core", "futures-io", @@ -1246,7 +1252,7 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" dependencies = [ - "autocfg", + "autocfg 1.0.1", "hashbrown", ] @@ -1306,7 +1312,7 @@ dependencies = [ "ring", "serde 1.0.126", "serde_json", - "simple_asn1", + "simple_asn1 0.4.1", ] [[package]] @@ -1329,6 +1335,9 @@ name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +dependencies = [ + "spin", +] [[package]] name = "lettre" @@ -1377,6 +1386,12 @@ version = "0.2.101" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cb00336871be5ed2c8ed44b60ae9959dc5b9f08539422ed43f09e34ecaeba21" +[[package]] +name = "libm" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" + [[package]] name = "libsqlite3-sys" version = "0.22.2" @@ -1474,7 +1489,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" dependencies = [ "adler", - "autocfg", + "autocfg 1.0.1", ] [[package]] @@ -1566,18 +1581,69 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" dependencies = [ - "autocfg", + "autocfg 1.0.1", "num-integer", "num-traits 0.2.14", ] +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg 1.0.1", + "num-integer", + "num-traits 0.2.14", +] + 
+[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg 1.0.1", + "num-integer", + "num-traits 0.2.14", +] + +[[package]] +name = "num-bigint-dig" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4547ee5541c18742396ae2c895d0717d0f886d8823b8399cdaf7b07d63ad0480" +dependencies = [ + "autocfg 0.1.7", + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits 0.2.14", + "rand", + "smallvec", + "zeroize", +] + [[package]] name = "num-integer" version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" dependencies = [ - "autocfg", + "autocfg 1.0.1", + "num-traits 0.2.14", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg 1.0.1", + "num-integer", "num-traits 0.2.14", ] @@ -1596,7 +1662,8 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ - "autocfg", + "autocfg 1.0.1", + "libm", ] [[package]] @@ -1647,7 +1714,7 @@ version = "0.9.66" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1996d2d305e561b70d1ee0c53f1542833f4e1ac6ce9a6708b6ff2738ca67dc82" dependencies = [ - "autocfg", + "autocfg 1.0.1", "cc", "libc", "pkg-config", @@ -1956,6 +2023,26 @@ dependencies = [ "winapi", ] +[[package]] +name = "rsa" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0aeddcca1082112a6eeb43bf25fd7820b066aaf6eaef776e19d0a1febe38fe" +dependencies = [ + "byteorder", + "digest", + "lazy_static", + "num-bigint-dig", + "num-integer", + "num-iter", + "num-traits 0.2.14", + "pem", + "rand", + "simple_asn1 0.5.4", + "subtle", + "zeroize", +] + [[package]] name = "rust-ini" version = "0.13.0" @@ -2250,10 +2337,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "692ca13de57ce0613a363c8c2f1de925adebc81b04c923ac60c5488bb44abe4b" dependencies = [ "chrono", - "num-bigint", + "num-bigint 0.2.6", "num-traits 0.2.14", ] +[[package]] +name = "simple_asn1" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eb4ea60fb301dc81dfc113df680571045d375ab7345d171c5dc7d7e13107a80" +dependencies = [ + "chrono", + "num-bigint 0.4.3", + "num-traits 0.2.14", + "thiserror", +] + [[package]] name = "slab" version = "0.4.3" @@ -2313,6 +2412,7 @@ checksum = "ec28b91a01e1fe286d6ba66f68289a2286df023fc97444e1fd86c2fd6d5dc026" dependencies = [ "ahash", "atoi", + "base64 0.13.0", "bitflags", "byteorder", "bytes", @@ -2320,11 +2420,13 @@ dependencies = [ "crossbeam-channel", "crossbeam-queue", "crossbeam-utils", + "digest", "either", "futures-channel", "futures-core", "futures-intrusive", "futures-util", + "generic-array", "hashlink", "hex", "itoa", @@ -2332,10 +2434,14 @@ dependencies = [ "libsqlite3-sys", "log", "memchr", + "num-bigint 0.3.3", "once_cell", "parking_lot", "percent-encoding", + "rand", + "rsa", "rustls", + "sha-1", "sha2", "smallvec", "sqlformat", @@ -2479,6 +2585,18 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "synstructure" 
+version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "unicode-xid", +] + [[package]] name = "tap" version = "1.0.1" @@ -2588,7 +2706,7 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92036be488bb6594459f2e03b60e42df6f937fe6ca5c5ffdcb539c6b84dc40f5" dependencies = [ - "autocfg", + "autocfg 1.0.1", "bytes", "libc", "memchr", @@ -2676,16 +2794,16 @@ dependencies = [ "actix-multipart", "actix-web", "async-std", + "async-trait", "binascii", + "chrono", "config", "derive_builder", "derive_more", "futures", "jsonwebtoken", - "lazy_static", "lettre", "pbkdf2", - "rand", "rand_core", "reqwest", "sailfish", @@ -3023,6 +3141,27 @@ dependencies = [ "linked-hash-map", ] +[[package]] +name = "zeroize" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f8f187641dad4f680d25c4bfc4225b418165984179f26ca76ec4fb6441d3a17" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "zstd" version = "0.7.0+zstd.1.4.9" diff --git a/Cargo.toml b/Cargo.toml index d50ef6ec..8e832719 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,12 +12,11 @@ actix-web = "4.0.0-beta.8" actix-multipart = "0.4.0-beta.5" actix-cors = "0.6.0-beta.2" +async-trait = "0.1.52" async-std = "1.10.0" futures = "0.3.5" -lazy_static = "1.4" - -sqlx = { version = "0.5.7", features = [ "runtime-actix-rustls", "sqlite", "migrate", "time" ] } +sqlx = { version = "0.5.7", features = [ "runtime-actix-rustls", "sqlite", "mysql", "migrate", "time" ] } config = "0.11" toml = "0.5" @@ -36,7 +35,8 @@ urlencoding = "2.1.0" pbkdf2 = "0.9" rand_core = { version = "0.6", features = ["std"] } -rand = "0.8.5" + +chrono = "0.4.19" sanitize-filename = "0.3.0" diff --git a/migrations/20210831113004_torrust_users.sql b/migrations/20210831113004_torrust_users.sql deleted file mode 100644 index a4ad5e4a..00000000 --- a/migrations/20210831113004_torrust_users.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE IF NOT EXISTS torrust_users ( - user_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - username VARCHAR(32) NOT NULL UNIQUE, - email VARCHAR(100) NOT NULL UNIQUE, - email_verified BOOLEAN NOT NULL DEFAULT FALSE, - password TEXT NOT NULL -) diff --git a/migrations/20210904135524_torrust_tracker_keys.sql b/migrations/20210904135524_torrust_tracker_keys.sql deleted file mode 100644 index ef6f6865..00000000 --- a/migrations/20210904135524_torrust_tracker_keys.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE IF NOT EXISTS torrust_tracker_keys ( - key_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - user_id INTEGER, - key VARCHAR(32) NOT NULL, - valid_until INT(10) NOT NULL, - FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) -) diff --git a/migrations/20210905160623_torrust_categories.sql b/migrations/20210905160623_torrust_categories.sql deleted file mode 100644 index c88abfe2..00000000 --- a/migrations/20210905160623_torrust_categories.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE torrust_categories ( - category_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - name VARCHAR(64) NOT NULL UNIQUE -); - -INSERT INTO 
torrust_categories (name) VALUES -('movies'), ('tv shows'), ('games'), ('music'), ('software'); diff --git a/migrations/20210907083424_torrust_torrent_files.sql b/migrations/20210907083424_torrust_torrent_files.sql deleted file mode 100644 index aeb3135a..00000000 --- a/migrations/20210907083424_torrust_torrent_files.sql +++ /dev/null @@ -1,8 +0,0 @@ -CREATE TABLE IF NOT EXISTS torrust_torrent_files ( - file_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, - torrent_id INTEGER NOT NULL, - number INTEGER NOT NULL, - path VARCHAR(255) NOT NULL, - length INTEGER NOT NULL, - FOREIGN KEY(torrent_id) REFERENCES torrust_torrents(torrent_id) -) diff --git a/migrations/20211208143338_torrust_users.sql b/migrations/20211208143338_torrust_users.sql deleted file mode 100644 index 0b574c69..00000000 --- a/migrations/20211208143338_torrust_users.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE torrust_users -ADD COLUMN administrator BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/migrations/20220308170028_torrust_categories.sql b/migrations/20220308170028_torrust_categories.sql deleted file mode 100644 index b786dcd2..00000000 --- a/migrations/20220308170028_torrust_categories.sql +++ /dev/null @@ -1,2 +0,0 @@ -ALTER TABLE torrust_categories -ADD COLUMN icon VARCHAR(32); diff --git a/migrations/mysql/20220721205537_torrust_users.sql b/migrations/mysql/20220721205537_torrust_users.sql new file mode 100644 index 00000000..480cc718 --- /dev/null +++ b/migrations/mysql/20220721205537_torrust_users.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS torrust_users ( + user_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, + date_registered DATETIME NOT NULL, + administrator BOOLEAN NOT NULL DEFAULT FALSE +) diff --git a/migrations/mysql/20220721210530_torrust_user_authentication.sql b/migrations/mysql/20220721210530_torrust_user_authentication.sql new file mode 100644 index 00000000..08620f16 --- /dev/null +++ b/migrations/mysql/20220721210530_torrust_user_authentication.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS torrust_user_authentication ( + user_id INTEGER NOT NULL PRIMARY KEY, + password_hash TEXT NOT NULL, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/mysql/20220727213942_torrust_user_profiles.sql b/migrations/mysql/20220727213942_torrust_user_profiles.sql new file mode 100644 index 00000000..f2325a56 --- /dev/null +++ b/migrations/mysql/20220727213942_torrust_user_profiles.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS torrust_user_profiles ( + user_id INTEGER NOT NULL PRIMARY KEY, + username VARCHAR(24) NOT NULL UNIQUE, + email VARCHAR(320) UNIQUE, + email_verified BOOL NOT NULL DEFAULT FALSE, + bio TEXT, + avatar TEXT, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/mysql/20220727222313_torrust_tracker_keys.sql b/migrations/mysql/20220727222313_torrust_tracker_keys.sql new file mode 100644 index 00000000..696863b7 --- /dev/null +++ b/migrations/mysql/20220727222313_torrust_tracker_keys.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS torrust_tracker_keys ( + tracker_key_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, + user_id INTEGER NOT NULL, + tracker_key CHAR(32) NOT NULL, + date_expiry DATETIME NOT NULL, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/mysql/20220730102607_torrust_user_public_keys.sql b/migrations/mysql/20220730102607_torrust_user_public_keys.sql new file mode 100644 index 00000000..6b5f5f9e --- /dev/null +++ 
b/migrations/mysql/20220730102607_torrust_user_public_keys.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS torrust_user_public_keys ( + public_key_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, + user_id INTEGER NOT NULL, + public_key CHAR(32) UNIQUE NOT NULL, + date_registered DATETIME NOT NULL, + date_expiry DATETIME NOT NULL, + revoked BOOLEAN NOT NULL DEFAULT FALSE, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/mysql/20220730104552_torrust_user_invitations.sql b/migrations/mysql/20220730104552_torrust_user_invitations.sql new file mode 100644 index 00000000..fd081f9e --- /dev/null +++ b/migrations/mysql/20220730104552_torrust_user_invitations.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS torrust_user_invitations ( + invitation_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, + user_id INTEGER NOT NULL, + public_key CHAR(32) NOT NULL, + signed_digest CHAR(32) NOT NULL, + date_begin DATETIME NOT NULL, + date_expiry DATETIME NOT NULL, + max_uses INTEGER NOT NULL, + personal_message VARCHAR(512), + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE, + FOREIGN KEY(public_key) REFERENCES torrust_user_public_keys(public_key) ON DELETE CASCADE +) diff --git a/migrations/mysql/20220730105501_torrust_user_invitation_uses.sql b/migrations/mysql/20220730105501_torrust_user_invitation_uses.sql new file mode 100644 index 00000000..94ffc3b7 --- /dev/null +++ b/migrations/mysql/20220730105501_torrust_user_invitation_uses.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS torrust_user_invitation_uses ( + invitation_use_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, + invitation_id INTEGER NOT NULL, + registered_user_id INTEGER NOT NULL, + date_used DATETIME NOT NULL, + FOREIGN KEY(invitation_id) REFERENCES torrust_user_invitations(invitation_id) ON DELETE CASCADE, + FOREIGN KEY(registered_user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/mysql/20220801201435_torrust_user_bans.sql b/migrations/mysql/20220801201435_torrust_user_bans.sql new file mode 100644 index 00000000..189d686f --- /dev/null +++ b/migrations/mysql/20220801201435_torrust_user_bans.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS torrust_user_bans ( + ban_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, + user_id INTEGER NOT NULL, + reason TEXT NOT NULL, + date_expiry DATETIME NOT NULL, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/mysql/20220802161524_torrust_categories.sql b/migrations/mysql/20220802161524_torrust_categories.sql new file mode 100644 index 00000000..1d98c46f --- /dev/null +++ b/migrations/mysql/20220802161524_torrust_categories.sql @@ -0,0 +1,6 @@ +CREATE TABLE torrust_categories ( + category_id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, + name VARCHAR(64) NOT NULL UNIQUE +); + +INSERT INTO torrust_categories (name) VALUES ('movies'), ('tv shows'), ('games'), ('music'), ('software'); diff --git a/migrations/20220308083424_torrust_torrents.sql b/migrations/mysql/20220802193037_torrust_torrents.sql similarity index 62% rename from migrations/20220308083424_torrust_torrents.sql rename to migrations/mysql/20220802193037_torrust_torrents.sql index 413539a4..561c915b 100644 --- a/migrations/20220308083424_torrust_torrents.sql +++ b/migrations/mysql/20220802193037_torrust_torrents.sql @@ -1,14 +1,14 @@ CREATE TABLE IF NOT EXISTS torrust_torrents ( - torrent_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + torrent_id INTEGER NOT NULL PRIMARY KEY 
AUTO_INCREMENT, uploader VARCHAR(32) NOT NULL, - info_hash VARCHAR(20) UNIQUE NOT NULL, + info_hash CHAR(40) UNIQUE NOT NULL, title VARCHAR(256) UNIQUE NOT NULL, category_id INTEGER NOT NULL, description TEXT, - upload_date INT(10) NOT NULL, + upload_date BIGINT NOT NULL, file_size BIGINT NOT NULL, seeders INTEGER NOT NULL, leechers INTEGER NOT NULL, - FOREIGN KEY(uploader) REFERENCES torrust_users(username) ON DELETE CASCADE, + FOREIGN KEY(uploader) REFERENCES torrust_user_profiles(username) ON DELETE CASCADE, FOREIGN KEY(category_id) REFERENCES torrust_categories(category_id) ON DELETE CASCADE ) diff --git a/migrations/sqlite3/20220721205537_torrust_users.sql b/migrations/sqlite3/20220721205537_torrust_users.sql new file mode 100644 index 00000000..a68ba7a3 --- /dev/null +++ b/migrations/sqlite3/20220721205537_torrust_users.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS torrust_users ( + user_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + date_registered TEXT NOT NULL, + administrator BOOL NOT NULL DEFAULT FALSE +) diff --git a/migrations/sqlite3/20220721210530_torrust_user_authentication.sql b/migrations/sqlite3/20220721210530_torrust_user_authentication.sql new file mode 100644 index 00000000..08620f16 --- /dev/null +++ b/migrations/sqlite3/20220721210530_torrust_user_authentication.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS torrust_user_authentication ( + user_id INTEGER NOT NULL PRIMARY KEY, + password_hash TEXT NOT NULL, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/sqlite3/20220727213942_torrust_user_profiles.sql b/migrations/sqlite3/20220727213942_torrust_user_profiles.sql new file mode 100644 index 00000000..5e67760d --- /dev/null +++ b/migrations/sqlite3/20220727213942_torrust_user_profiles.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS torrust_user_profiles ( + user_id INTEGER NOT NULL PRIMARY KEY, + username TEXT NOT NULL UNIQUE, + email TEXT UNIQUE, + email_verified BOOL NOT NULL DEFAULT FALSE, + bio TEXT, + avatar TEXT, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/sqlite3/20220727222313_torrust_tracker_keys.sql b/migrations/sqlite3/20220727222313_torrust_tracker_keys.sql new file mode 100644 index 00000000..5452667a --- /dev/null +++ b/migrations/sqlite3/20220727222313_torrust_tracker_keys.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS torrust_tracker_keys ( + tracker_key_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL, + tracker_key TEXT NOT NULL, + date_expiry TEXT NOT NULL, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/sqlite3/20220730102607_torrust_user_public_keys.sql b/migrations/sqlite3/20220730102607_torrust_user_public_keys.sql new file mode 100644 index 00000000..51a4846f --- /dev/null +++ b/migrations/sqlite3/20220730102607_torrust_user_public_keys.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS torrust_user_public_keys ( + public_key_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL, + public_key TEXT UNIQUE NOT NULL, + date_registered TEXT NOT NULL, + date_expiry TEXT NOT NULL, + revoked INTEGER NOT NULL DEFAULT 0, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/sqlite3/20220730104552_torrust_user_invitations.sql b/migrations/sqlite3/20220730104552_torrust_user_invitations.sql new file mode 100644 index 00000000..e6a9a49e --- /dev/null +++ 
b/migrations/sqlite3/20220730104552_torrust_user_invitations.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS torrust_user_invitations ( + invitation_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL, + public_key TEXT NOT NULL, + signed_digest TEXT NOT NULL, + date_begin TEXT NOT NULL, + date_expiry TEXT NOT NULL, + max_uses INTEGER NOT NULL, + personal_message TEXT, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE, + FOREIGN KEY(public_key) REFERENCES torrust_user_public_keys(public_key) ON DELETE CASCADE +) diff --git a/migrations/sqlite3/20220730105501_torrust_user_invitation_uses.sql b/migrations/sqlite3/20220730105501_torrust_user_invitation_uses.sql new file mode 100644 index 00000000..69d9745c --- /dev/null +++ b/migrations/sqlite3/20220730105501_torrust_user_invitation_uses.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS torrust_user_invitation_uses ( + invitation_use_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + invitation_id INTEGER NOT NULL, + registered_user_id INTEGER NOT NULL, + date_used TEXT NOT NULL, + FOREIGN KEY(invitation_id) REFERENCES torrust_user_invitations(invitation_id) ON DELETE CASCADE, + FOREIGN KEY(registered_user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/sqlite3/20220801201435_torrust_user_bans.sql b/migrations/sqlite3/20220801201435_torrust_user_bans.sql new file mode 100644 index 00000000..92d8e1d6 --- /dev/null +++ b/migrations/sqlite3/20220801201435_torrust_user_bans.sql @@ -0,0 +1,7 @@ +CREATE TABLE IF NOT EXISTS torrust_user_bans ( + ban_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + user_id INTEGER NOT NULL, + reason TEXT NOT NULL, + date_expiry TEXT NOT NULL, + FOREIGN KEY(user_id) REFERENCES torrust_users(user_id) ON DELETE CASCADE +) diff --git a/migrations/sqlite3/20220802161524_torrust_categories.sql b/migrations/sqlite3/20220802161524_torrust_categories.sql new file mode 100644 index 00000000..0a0d82e0 --- /dev/null +++ b/migrations/sqlite3/20220802161524_torrust_categories.sql @@ -0,0 +1,6 @@ +CREATE TABLE torrust_categories ( + category_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL UNIQUE +); + +INSERT INTO torrust_categories (name) VALUES ('movies'), ('tv shows'), ('games'), ('music'), ('software'); diff --git a/migrations/sqlite3/20220802193037_torrust_torrents.sql b/migrations/sqlite3/20220802193037_torrust_torrents.sql new file mode 100644 index 00000000..74be0bef --- /dev/null +++ b/migrations/sqlite3/20220802193037_torrust_torrents.sql @@ -0,0 +1,15 @@ +CREATE TABLE IF NOT EXISTS torrust_torrents ( + torrent_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + uploader VARCHAR(32) NOT NULL, + info_hash CHAR(40) UNIQUE NOT NULL, + title VARCHAR(256) UNIQUE NOT NULL, + category_id INTEGER NOT NULL, + description TEXT, + upload_date BIGINT NOT NULL, + file_size BIGINT NOT NULL, + seeders INTEGER NOT NULL, + leechers INTEGER NOT NULL, + FOREIGN KEY(uploader) REFERENCES torrust_user_profiles(username) ON DELETE CASCADE, + FOREIGN KEY(category_id) REFERENCES torrust_categories(category_id) ON DELETE CASCADE +) + diff --git a/src/auth.rs b/src/auth.rs index 559db46f..75233e51 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1,35 +1,34 @@ use actix_web::HttpRequest; -use crate::models::user::{Claims, User}; +use crate::models::user::{UserClaims, UserCompact}; use jsonwebtoken::{decode, DecodingKey, Validation, Algorithm, encode, Header, EncodingKey}; use crate::utils::time::current_time; use crate::errors::ServiceError; use 
std::sync::Arc; -use crate::database::Database; use crate::config::Configuration; +use crate::databases::database::Database; pub struct AuthorizationService { cfg: Arc<Configuration>, - database: Arc<Database>, + database: Arc<Box<dyn Database>> } impl AuthorizationService { - pub fn new(cfg: Arc<Configuration>, database: Arc<Database>) -> AuthorizationService { + pub fn new(cfg: Arc<Configuration>, database: Arc<Box<dyn Database>>) -> AuthorizationService { AuthorizationService { cfg, database } } - pub async fn sign_jwt(&self, user: User) -> String { + pub async fn sign_jwt(&self, user: UserCompact) -> String { let settings = self.cfg.settings.read().await; // create JWT that expires in two weeks let key = settings.auth.secret_key.as_bytes(); let exp_date = current_time() + 1_209_600; // two weeks from now - let claims = Claims { - sub: user.username, - admin: user.administrator, + let claims = UserClaims { + user, exp: exp_date, }; @@ -43,10 +42,10 @@ impl AuthorizationService { token } - pub async fn verify_jwt(&self, token: &str) -> Result<Claims, ServiceError> { + pub async fn verify_jwt(&self, token: &str) -> Result<UserClaims, ServiceError> { let settings = self.cfg.settings.read().await; - match decode::<Claims>( + match decode::<UserClaims>( token, &DecodingKey::from_secret(settings.auth.secret_key.as_bytes()), &Validation::new(Algorithm::HS256), @@ -61,7 +60,7 @@ impl AuthorizationService { } } - pub async fn get_claims_from_request(&self, req: &HttpRequest) -> Result<Claims, ServiceError> { + pub async fn get_claims_from_request(&self, req: &HttpRequest) -> Result<UserClaims, ServiceError> { let _auth = req.headers().get("Authorization"); match _auth { Some(_) => { @@ -77,15 +76,11 @@ impl AuthorizationService { } } - pub async fn get_user_from_request(&self, req: &HttpRequest) -> Result<User, ServiceError> { - let claims = match self.get_claims_from_request(req).await { - Ok(claims) => Ok(claims), - Err(e) => Err(e) - }?; + pub async fn get_user_compact_from_request(&self, req: &HttpRequest) -> Result<UserCompact, ServiceError> { + let claims = self.get_claims_from_request(req).await?; - match self.database.get_user_with_username(&claims.sub).await { - Some(user) => Ok(user), - None => Err(ServiceError::AccountNotFound) - } + self.database.get_user_compact_from_id(claims.user.user_id) + .await + .map_err(|_| ServiceError::UserNotFound) } } diff --git a/src/common.rs b/src/common.rs index 17653826..2f11f6ec 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use crate::config::Configuration; -use crate::database::Database; use crate::auth::AuthorizationService; +use crate::databases::database::Database; use crate::tracker::TrackerService; use crate::mailer::MailerService; @@ -11,14 +11,14 @@ pub type WebAppData = actix_web::web::Data<Arc<AppData>>; pub struct AppData { pub cfg: Arc<Configuration>, - pub database: Arc<Database>, + pub database: Arc<Box<dyn Database>>, pub auth: Arc<AuthorizationService>, pub tracker: Arc<TrackerService>, pub mailer: Arc<MailerService> } impl AppData { - pub fn new(cfg: Arc<Configuration>, database: Arc<Database>, auth: Arc<AuthorizationService>, tracker: Arc<TrackerService>, mailer: Arc<MailerService>) -> AppData { + pub fn new(cfg: Arc<Configuration>, database: Arc<Box<dyn Database>>, auth: Arc<AuthorizationService>, tracker: Arc<TrackerService>, mailer: Arc<MailerService>) -> AppData { AppData { cfg, database, diff --git a/src/config.rs b/src/config.rs index 871274dd..f73618f0 100644 --- a/src/config.rs +++ b/src/config.rs @@ -3,6 +3,7 @@ use config::{ConfigError, Config, File}; use std::path::Path; use serde::{Serialize, Deserialize}; use tokio::sync::RwLock; +use crate::databases::database::DatabaseDrivers; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Website { @@ -49,6 +50,7 @@ pub struct Auth { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Database { + pub db_driver: DatabaseDrivers, pub connect_url: String, pub torrent_info_update_interval: u64, } @@ -109,6 +111,7 @@ impl Configuration { secret_key: 
"MaxVerstappenWC2021".to_string() }, database: Database { + db_driver: DatabaseDrivers::Sqlite3, connect_url: "sqlite://data.db?mode=rwc".to_string(), torrent_info_update_interval: 3600 }, diff --git a/src/database.rs b/src/database.rs deleted file mode 100644 index 3f4160d9..00000000 --- a/src/database.rs +++ /dev/null @@ -1,202 +0,0 @@ -use sqlx::SqlitePool; -use sqlx::sqlite::SqlitePoolOptions; -use crate::models::user::User; -use crate::errors::ServiceError; -use crate::models::torrent::TorrentListing; -use crate::utils::time::current_time; -use crate::models::tracker_key::TrackerKey; -use serde::{Serialize, Deserialize}; - -#[derive(Debug, Serialize)] -pub struct TorrentCompact { - pub torrent_id: i64, - pub info_hash: String, -} - -pub struct Database { - pub pool: SqlitePool -} - -#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)] -pub struct Category { - pub name: String, - pub icon: Option, - pub num_torrents: i64 -} - -impl Database { - pub async fn new(database_url: &str) -> Database { - let db = SqlitePoolOptions::new() - .connect(database_url) - .await - .expect("Unable to create database pool"); - - Database { - pool: db - } - } - - pub async fn get_user_with_username(&self, username: &str) -> Option { - let res = sqlx::query_as!( - User, - "SELECT * FROM torrust_users WHERE username = ?", - username, - ) - .fetch_one(&self.pool) - .await; - - match res { - Ok(user) => Some(user), - _ => None - } - } - - pub async fn delete_user(&self, user_id: i64) -> Result<(), sqlx::Error> { - let _res = sqlx::query!( - "DELETE FROM torrust_users WHERE rowid = ?", - user_id - ) - .execute(&self.pool) - .await?; - - Ok(()) - } - - pub async fn insert_torrent_and_get_id(&self, username: String, info_hash: String, title: String, category_id: i64, description: String, file_size: i64, seeders: i64, leechers: i64) -> Result { - let current_time = current_time() as i64; - - let res = sqlx::query!( - r#"INSERT INTO torrust_torrents (uploader, info_hash, title, category_id, description, upload_date, file_size, seeders, leechers) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) - RETURNING torrent_id as "torrent_id: i64""#, - username, - info_hash, - title, - category_id, - description, - current_time, - file_size, - seeders, - leechers - ) - .fetch_one(&self.pool) - .await?; - - Ok(res.torrent_id) - } - - pub async fn get_torrent_by_id(&self, torrent_id: i64) -> Result { - let res = sqlx::query_as!( - TorrentListing, - r#"SELECT * FROM torrust_torrents - WHERE torrent_id = ?"#, - torrent_id - ) - .fetch_one(&self.pool) - .await; - - match res { - Ok(torrent) => Ok(torrent), - _ => Err(ServiceError::TorrentNotFound) - } - } - - pub async fn get_all_torrent_ids(&self) -> Result, ()> { - let res = sqlx::query_as!( - TorrentCompact, - r#"SELECT torrent_id, info_hash FROM torrust_torrents"# - ) - .fetch_all(&self.pool) - .await; - - match res { - Ok(torrents) => Ok(torrents), - Err(e) => { - println!("{:?}", e); - Err(()) - } - } - } - - pub async fn update_tracker_info(&self, info_hash: &str, seeders: i64, leechers: i64) -> Result<(), ()> { - let res = sqlx::query!( - "UPDATE torrust_torrents SET seeders = $1, leechers = $2 WHERE info_hash = $3", - seeders, - leechers, - info_hash - ) - .execute(&self.pool) - .await; - - match res { - Ok(_) => Ok(()), - _ => Err(()) - } - } - - pub async fn get_valid_tracker_key(&self, user_id: i64) -> Option { - const WEEK: i64 = 604_800; - let current_time_plus_week = (current_time() as i64) + WEEK; - - let res = sqlx::query_as!( - TrackerKey, - r#"SELECT key, 
valid_until FROM torrust_tracker_keys - WHERE user_id = $1 AND valid_until > $2"#, - user_id, - current_time_plus_week - ) - .fetch_one(&self.pool) - .await; - - match res { - Ok(tracker_key) => Some(tracker_key), - _ => None - } - } - - pub async fn issue_tracker_key(&self, tracker_key: &TrackerKey, user_id: i64) -> Result<(), ServiceError> { - let res = sqlx::query!( - "INSERT INTO torrust_tracker_keys (user_id, key, valid_until) VALUES ($1, $2, $3)", - user_id, - tracker_key.key, - tracker_key.valid_until, - ) - .execute(&self.pool) - .await; - - match res { - Ok(_) => Ok(()), - Err(_) => Err(ServiceError::InternalServerError) - } - } - - pub async fn get_category(&self, id: i64) -> Option { - let res = sqlx::query_as!( - Category, - "SELECT name, icon, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE category_id = ?", - id - ) - .fetch_one(&self.pool) - .await; - - match res { - Ok(v) => Some(v), - Err(_) => None - } - } - - pub async fn get_category_by_name(&self, category: &str) -> Option { - let res = sqlx::query_as!( - Category, - "SELECT name, icon, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE name = ?", - category - ) - .fetch_one(&self.pool) - .await; - - match res { - Ok(v) => Some(v), - Err(_) => None - } - } -} diff --git a/src/databases/database.rs b/src/databases/database.rs new file mode 100644 index 00000000..34f3cfc4 --- /dev/null +++ b/src/databases/database.rs @@ -0,0 +1,162 @@ +use async_trait::async_trait; +use chrono::{NaiveDateTime}; +use serde::{Serialize, Deserialize}; +use crate::databases::mysql::MysqlDatabase; +use crate::databases::sqlite::SqliteDatabase; +use crate::models::response::{TorrentsResponse}; +use crate::models::torrent::TorrentListing; +use crate::models::tracker_key::TrackerKey; +use crate::models::user::{User, UserAuthentication, UserCompact, UserProfile}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum DatabaseDrivers { + Sqlite3, + Mysql +} + +#[derive(Debug, Serialize, sqlx::FromRow)] +pub struct TorrentCompact { + pub torrent_id: i64, + pub info_hash: String, +} + +#[derive(Debug, Serialize, Deserialize, sqlx::FromRow)] +pub struct Category { + pub category_id: i64, + pub name: String, + pub num_torrents: i64 +} + +#[derive(Clone, Copy, Debug, Deserialize)] +pub enum Sorting { + UploadedAsc, + UploadedDesc, + SeedersAsc, + SeedersDesc, + LeechersAsc, + LeechersDesc, + NameAsc, + NameDesc, + SizeAsc, + SizeDesc, +} + +#[derive(Debug)] +pub enum DatabaseError { + Error, + UsernameTaken, + EmailTaken, + UserNotFound, + CategoryAlreadyExists, + CategoryNotFound, + TorrentNotFound, + TorrentAlreadyExists, // when uploading an already uploaded info_hash + TorrentTitleAlreadyExists, +} + +pub async fn connect_database(db_driver: &DatabaseDrivers, db_path: &str) -> Box { + // match &db_path.chars().collect::>() as &[char] { + // ['s', 'q', 'l', 'i', 't', 'e', ..] => { + // let db = SqliteDatabase::new(db_path).await; + // Ok(Box::new(db)) + // } + // ['m', 'y', 's', 'q', 'l', ..] 
=> { + // let db = MysqlDatabase::new(db_path).await; + // Ok(Box::new(db)) + // } + // _ => { + // Err(()) + // } + // } + + match db_driver { + DatabaseDrivers::Sqlite3 => { + let db = SqliteDatabase::new(db_path).await; + Box::new(db) + } + DatabaseDrivers::Mysql => { + let db = MysqlDatabase::new(db_path).await; + Box::new(db) + } + } +} + +#[async_trait] +pub trait Database: Sync + Send { + // add new user and get the newly inserted user_id + async fn insert_user_and_get_id(&self, username: &str, email: &str, password: &str) -> Result<i64, DatabaseError>; + + // get user profile by user_id + async fn get_user_from_id(&self, user_id: i64) -> Result<User, DatabaseError>; + + // get user authentication by user_id + async fn get_user_authentication_from_id(&self, user_id: i64) -> Result<UserAuthentication, DatabaseError>; + + // get user profile by username + async fn get_user_profile_from_username(&self, username: &str) -> Result<UserProfile, DatabaseError>; + + // get user compact by user_id + async fn get_user_compact_from_id(&self, user_id: i64) -> Result<UserCompact, DatabaseError>; + + // todo: change to get all tracker keys of user, no matter if they are still valid + // get a user's tracker key + async fn get_user_tracker_key(&self, user_id: i64) -> Option<TrackerKey>; + + // count users + async fn count_users(&self) -> Result<i64, DatabaseError>; + + // todo: make DateTime struct for the date_expiry + // ban user + async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), DatabaseError>; + + // give a user administrator rights + async fn grant_admin_role(&self, user_id: i64) -> Result<(), DatabaseError>; + + // verify email + async fn verify_email(&self, user_id: i64) -> Result<(), DatabaseError>; + + // create a new tracker key for a certain user + async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), DatabaseError>; + + // delete user + async fn delete_user(&self, user_id: i64) -> Result<(), DatabaseError>; + + // add new category + async fn add_category(&self, category_name: &str) -> Result<(), DatabaseError>; + + // get category by id + async fn get_category_from_id(&self, id: i64) -> Result<Category, DatabaseError>; + + // get category by name + async fn get_category_from_name(&self, category: &str) -> Result<Category, DatabaseError>; + + // get all categories + async fn get_categories(&self) -> Result<Vec<Category>, DatabaseError>; + + // delete category + async fn delete_category(&self, category_name: &str) -> Result<(), DatabaseError>; + + // get results of a torrent search in a paginated and sorted form + async fn get_torrents_search_sorted_paginated(&self, search: &Option<String>, categories: &Option<Vec<String>>, sort: &Sorting, offset: u64, page_size: u8) -> Result<TorrentsResponse, DatabaseError>; + + // add new torrent and get the newly inserted torrent_id + async fn insert_torrent_and_get_id(&self, username: String, info_hash: String, title: String, category_id: i64, description: String, file_size: i64, seeders: i64, leechers: i64) -> Result<i64, DatabaseError>; + + // get torrent by id + async fn get_torrent_from_id(&self, torrent_id: i64) -> Result<TorrentListing, DatabaseError>; + + // get all torrents (torrent_id + info_hash) + async fn get_all_torrents_compact(&self) -> Result<Vec<TorrentCompact>, DatabaseError>; + + // update a torrent's title + async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), DatabaseError>; + + // update a torrent's description + async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), DatabaseError>; + + // update the seeders and leechers info for a particular torrent + async fn update_tracker_info(&self, info_hash: &str, seeders: i64, leechers: i64) -> Result<(), DatabaseError>; + + // delete a torrent + async fn delete_torrent(&self, torrent_id: 
i64) -> Result<(), DatabaseError>; +} diff --git a/src/databases/mod.rs b/src/databases/mod.rs new file mode 100644 index 00000000..9340e821 --- /dev/null +++ b/src/databases/mod.rs @@ -0,0 +1,3 @@ +pub mod database; +pub mod sqlite; +pub mod mysql; diff --git a/src/databases/mysql.rs b/src/databases/mysql.rs new file mode 100644 index 00000000..4778d7a1 --- /dev/null +++ b/src/databases/mysql.rs @@ -0,0 +1,459 @@ +use sqlx::{Acquire, MySqlPool, query, query_as}; +use async_trait::async_trait; +use chrono::{NaiveDateTime}; +use sqlx::mysql::MySqlPoolOptions; + +use crate::models::user::{User, UserAuthentication, UserCompact, UserProfile}; +use crate::models::torrent::TorrentListing; +use crate::utils::time::current_time; +use crate::models::tracker_key::TrackerKey; +use crate::databases::database::{Category, Database, DatabaseError, Sorting, TorrentCompact}; +use crate::handlers::torrent::TorrentCount; +use crate::models::response::{TorrentsResponse}; + +pub struct MysqlDatabase { + pub pool: MySqlPool +} + +impl MysqlDatabase { + pub async fn new(database_url: &str) -> Self { + let db = MySqlPoolOptions::new() + .connect(database_url) + .await + .expect("Unable to create database pool."); + + sqlx::migrate!("migrations/mysql") + .run(&db) + .await + .expect("Could not run database migrations."); + + Self { + pool: db + } + } +} + +#[async_trait] +impl Database for MysqlDatabase { + async fn insert_user_and_get_id(&self, username: &str, email: &str, password_hash: &str) -> Result { + + // open pool connection + let mut conn = self.pool.acquire() + .await + .map_err(|_| DatabaseError::Error)?; + + // start db transaction + let mut tx = conn.begin() + .await + .map_err(|_| DatabaseError::Error)?; + + // create the user account and get the user id + let user_id = query("INSERT INTO torrust_users (date_registered) VALUES (UTC_TIMESTAMP())") + .execute(&mut tx) + .await + .map(|v| v.last_insert_id()) + .map_err(|_| DatabaseError::Error)?; + + // add password hash for account + let insert_user_auth_result = query("INSERT INTO torrust_user_authentication (user_id, password_hash) VALUES (?, ?)") + .bind(user_id) + .bind(password_hash) + .execute(&mut tx) + .await + .map_err(|_| DatabaseError::Error); + + // rollback transaction on error + if let Err(e) = insert_user_auth_result { + let _ = tx.rollback().await; + return Err(e) + } + + // add account profile details + let insert_user_profile_result = query(r#"INSERT INTO torrust_user_profiles (user_id, username, email, email_verified, bio, avatar) VALUES (?, ?, NULLIF(?, ""), 0, NULL, NULL)"#) + .bind(user_id) + .bind(username) + .bind(email) + .execute(&mut tx) + .await + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("username") { + DatabaseError::UsernameTaken + } else if err.message().contains("email") { + DatabaseError::EmailTaken + } else { + DatabaseError::Error + } + } + _ => DatabaseError::Error + }); + + // commit or rollback transaction and return user_id on success + match insert_user_profile_result { + Ok(_) => { + let _ = tx.commit().await; + Ok(user_id as i64) + } + Err(e) => { + let _ = tx.rollback().await; + Err(e) + } + } + } + + async fn get_user_from_id(&self, user_id: i64) -> Result { + query_as::<_, User>("SELECT * FROM torrust_users WHERE user_id = ?") + .bind(user_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_authentication_from_id(&self, user_id: i64) -> Result { + query_as::<_, UserAuthentication>("SELECT * FROM 
torrust_user_authentication WHERE user_id = ?") + .bind(user_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_profile_from_username(&self, username: &str) -> Result { + query_as::<_, UserProfile>(r#"SELECT user_id, username, COALESCE(email, "") as email, email_verified, COALESCE(bio, "") as bio, COALESCE(avatar, "") as avatar FROM torrust_user_profiles WHERE username = ?"#) + .bind(username) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_compact_from_id(&self, user_id: i64) -> Result { + query_as::<_, UserCompact>("SELECT tu.user_id, tp.username, tu.administrator FROM torrust_users tu INNER JOIN torrust_user_profiles tp ON tu.user_id = tp.user_id WHERE tu.user_id = ?") + .bind(user_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_tracker_key(&self, user_id: i64) -> Option { + const HOUR_IN_SECONDS: i64 = 3600; + + // casting current_time() to i64 will overflow in the year 2262 + let current_time_plus_hour = (current_time() as i64) + HOUR_IN_SECONDS; + + // get tracker key that is valid for at least one hour from now + query_as::<_, TrackerKey>("SELECT tracker_key, date_expiry FROM torrust_tracker_keys WHERE user_id = ? AND date_expiry > ? ORDER BY date_expiry DESC") + .bind(user_id) + .bind(current_time_plus_hour) + .fetch_one(&self.pool) + .await + .ok() + } + + async fn count_users(&self) -> Result { + query_as("SELECT COUNT(*) FROM torrust_users") + .fetch_one(&self.pool) + .await + .map(|(v,)| v) + .map_err(|_| DatabaseError::Error) + } + + async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), DatabaseError> { + // date needs to be in ISO 8601 format + let date_expiry_string = date_expiry.format("%Y-%m-%d %H:%M:%S").to_string(); + + query("INSERT INTO torrust_user_bans (user_id, reason, date_expiry) VALUES (?, ?, ?)") + .bind(user_id) + .bind(reason) + .bind(date_expiry_string) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|_| DatabaseError::Error) + } + + async fn grant_admin_role(&self, user_id: i64) -> Result<(), DatabaseError> { + query("UPDATE torrust_users SET administrator = TRUE WHERE user_id = ?") + .bind(user_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::UserNotFound) + }) + } + + async fn verify_email(&self, user_id: i64) -> Result<(), DatabaseError> { + query("UPDATE torrust_user_profiles SET email_verified = TRUE WHERE user_id = ?") + .bind(user_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::UserNotFound) + }) + } + + async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), DatabaseError> { + let key = tracker_key.key.clone(); + + // date needs to be in ISO 8601 format + let date_expiry = NaiveDateTime::from_timestamp(tracker_key.valid_until, 0) + .format("%Y-%m-%d %H:%M:%S") + .to_string(); + + query("INSERT INTO torrust_tracker_keys (user_id, tracker_key, date_expiry) VALUES (?, ?, ?)") + .bind(user_id) + .bind(key) + .bind(date_expiry) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|_| DatabaseError::Error) + } + + async fn delete_user(&self, user_id: i64) -> Result<(), DatabaseError> { + query("DELETE FROM torrust_users WHERE user_id = ?") + .bind(user_id) + .execute(&self.pool) + .await + 
.map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::UserNotFound) + }) + } + + async fn add_category(&self, category_name: &str) -> Result<(), DatabaseError> { + query("INSERT INTO torrust_categories (name) VALUES (?)") + .bind(category_name) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("UNIQUE") { + DatabaseError::CategoryAlreadyExists + } else { + DatabaseError::Error + } + }, + _ => DatabaseError::Error + }) + } + + async fn get_category_from_id(&self, category_id: i64) -> Result { + query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE category_id = ?") + .bind(category_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::CategoryNotFound) + } + + async fn get_category_from_name(&self, category_name: &str) -> Result { + query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE name = ?") + .bind(category_name) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::CategoryNotFound) + } + + async fn get_categories(&self) -> Result, DatabaseError> { + query_as::<_, Category>("SELECT tc.category_id, tc.name, COUNT(tt.category_id) as num_torrents FROM torrust_categories tc LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id GROUP BY tc.name") + .fetch_all(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + } + + async fn delete_category(&self, category_name: &str) -> Result<(), DatabaseError> { + query("DELETE FROM torrust_categories WHERE name = ?") + .bind(category_name) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::CategoryNotFound) + }) + } + + // todo: refactor this + async fn get_torrents_search_sorted_paginated(&self, search: &Option, categories: &Option>, sort: &Sorting, offset: u64, page_size: u8) -> Result { + let title = match search { + None => "%".to_string(), + Some(v) => format!("%{}%", v) + }; + + let sort_query: String = match sort { + Sorting::UploadedAsc => "upload_date ASC".to_string(), + Sorting::UploadedDesc => "upload_date DESC".to_string(), + Sorting::SeedersAsc => "seeders ASC".to_string(), + Sorting::SeedersDesc => "seeders DESC".to_string(), + Sorting::LeechersAsc => "leechers ASC".to_string(), + Sorting::LeechersDesc => "leechers DESC".to_string(), + Sorting::NameAsc => "title ASC".to_string(), + Sorting::NameDesc => "title DESC".to_string(), + Sorting::SizeAsc => "file_size ASC".to_string(), + Sorting::SizeDesc => "file_size DESC".to_string(), + }; + + let category_filter_query = if let Some(c) = categories { + let mut i = 0; + let mut category_filters = String::new(); + for category in c.iter() { + // don't take user input in the db query + if let Ok(sanitized_category) = self.get_category_from_name(category).await { + let mut str = format!("tc.name = '{}'", sanitized_category.name); + if i > 0 { str = format!(" OR {}", str); } + category_filters.push_str(&str); + i += 1; + } + } + if category_filters.len() > 0 { + format!("INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({}) ", category_filters) + } else { + String::new() + } + } else { + 
String::new() + }; + + let mut query_string = format!("SELECT tt.* FROM torrust_torrents tt {}WHERE title LIKE ?", category_filter_query); + + let count_query = format!("SELECT COUNT(*) as count FROM ({}) AS count_table", query_string); + + let count_result: Result = query_as(&count_query) + .bind(title.clone()) + .fetch_one(&self.pool) + .await + .map(|(v,)| v) + .map_err(|_| DatabaseError::Error); + + let count = count_result?; + + query_string = format!("{} ORDER BY {} LIMIT ?, ?", query_string, sort_query); + + let res: Vec = sqlx::query_as::<_, TorrentListing>(&query_string) + .bind(title) + .bind(offset as i64) + .bind(page_size) + .fetch_all(&self.pool) + .await + .map_err(|_| DatabaseError::Error)?; + + Ok(TorrentsResponse { + total: count as u32, + results: res + }) + } + + async fn insert_torrent_and_get_id(&self, username: String, info_hash: String, title: String, category_id: i64, description: String, file_size: i64, seeders: i64, leechers: i64) -> Result { + let current_time = current_time() as i64; + + query(r#"INSERT INTO torrust_torrents (uploader, info_hash, title, category_id, description, upload_date, file_size, seeders, leechers) VALUES (?, ?, ?, NULLIF(?, ""), ?, ?, ?, ?, ?)"#) + .bind(username) + .bind(info_hash) + .bind(title) + .bind(category_id) + .bind(description) + .bind(current_time) + .bind(file_size) + .bind(seeders) + .bind(leechers) + .execute(&self.pool) + .await + .map(|v| v.last_insert_id() as i64) + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("info_hash") { + DatabaseError::TorrentAlreadyExists + } else if err.message().contains("title") { + DatabaseError::TorrentTitleAlreadyExists + } else { + DatabaseError::Error + } + } + _ => DatabaseError::Error + }) + } + + async fn get_torrent_from_id(&self, torrent_id: i64) -> Result { + query_as::<_, TorrentListing>("SELECT * FROM torrust_torrents WHERE torrent_id = ?") + .bind(torrent_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::TorrentNotFound) + } + + async fn get_all_torrents_compact(&self) -> Result, DatabaseError> { + query_as::<_, TorrentCompact>("SELECT torrent_id, info_hash FROM torrust_torrents") + .fetch_all(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + } + + async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), DatabaseError> { + query("UPDATE torrust_torrents SET title = ? WHERE torrent_id = ?") + .bind(title) + .bind(torrent_id) + .execute(&self.pool) + .await + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("UNIQUE") { + DatabaseError::TorrentTitleAlreadyExists + } else { + DatabaseError::Error + } + } + _ => DatabaseError::Error + }) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::TorrentNotFound) + }) + } + + async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), DatabaseError> { + query("UPDATE torrust_torrents SET description = ? WHERE torrent_id = ?") + .bind(description) + .bind(torrent_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::TorrentNotFound) + }) + } + + async fn update_tracker_info(&self, info_hash: &str, seeders: i64, leechers: i64) -> Result<(), DatabaseError> { + query("UPDATE torrust_torrents SET seeders = ?, leechers = ? 
WHERE info_hash = ?") + .bind(seeders) + .bind(leechers) + .bind(info_hash) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|_| DatabaseError::TorrentNotFound) + } + + async fn delete_torrent(&self, torrent_id: i64) -> Result<(), DatabaseError> { + query("DELETE FROM torrust_torrents WHERE torrent_id = ?") + .bind(torrent_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::TorrentNotFound) + }) + } +} diff --git a/src/databases/sqlite.rs b/src/databases/sqlite.rs new file mode 100644 index 00000000..212d67b2 --- /dev/null +++ b/src/databases/sqlite.rs @@ -0,0 +1,455 @@ +use sqlx::{Acquire, query, query_as, SqlitePool}; +use sqlx::sqlite::SqlitePoolOptions; +use async_trait::async_trait; +use chrono::{NaiveDateTime}; + +use crate::models::torrent::TorrentListing; +use crate::utils::time::current_time; +use crate::models::tracker_key::TrackerKey; +use crate::databases::database::{Category, Database, DatabaseError, Sorting, TorrentCompact}; +use crate::handlers::torrent::TorrentCount; +use crate::models::response::{TorrentsResponse}; +use crate::models::user::{User, UserAuthentication, UserCompact, UserProfile}; + +pub struct SqliteDatabase { + pub pool: SqlitePool +} + +impl SqliteDatabase { + pub async fn new(database_url: &str) -> Self { + let db = SqlitePoolOptions::new() + .connect(database_url) + .await + .expect("Unable to create database pool."); + + sqlx::migrate!("migrations/sqlite3") + .run(&db) + .await + .expect("Could not run database migrations."); + + Self { + pool: db + } + } +} + +#[async_trait] +impl Database for SqliteDatabase { + async fn insert_user_and_get_id(&self, username: &str, email: &str, password_hash: &str) -> Result { + + // open pool connection + let mut conn = self.pool.acquire() + .await + .map_err(|_| DatabaseError::Error)?; + + // start db transaction + let mut tx = conn.begin() + .await + .map_err(|_| DatabaseError::Error)?; + + // create the user account and get the user id + let user_id = query("INSERT INTO torrust_users (date_registered) VALUES (strftime('%Y-%m-%d %H:%M:%S',DATETIME('now', 'utc')))") + .execute(&mut tx) + .await + .map(|v| v.last_insert_rowid()) + .map_err(|_| DatabaseError::Error)?; + + // add password hash for account + let insert_user_auth_result = query("INSERT INTO torrust_user_authentication (user_id, password_hash) VALUES (?, ?)") + .bind(user_id) + .bind(password_hash) + .execute(&mut tx) + .await + .map_err(|_| DatabaseError::Error); + + // rollback transaction on error + if let Err(e) = insert_user_auth_result { + let _ = tx.rollback().await; + return Err(e) + } + + // add account profile details + let insert_user_profile_result = query(r#"INSERT INTO torrust_user_profiles (user_id, username, email, email_verified, bio, avatar) VALUES (?, ?, NULLIF(?, ""), 0, NULL, NULL)"#) + .bind(user_id) + .bind(username) + .bind(email) + .execute(&mut tx) + .await + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("username") { + DatabaseError::UsernameTaken + } else if err.message().contains("email") { + DatabaseError::EmailTaken + } else { + DatabaseError::Error + } + } + _ => DatabaseError::Error + }); + + // commit or rollback transaction and return user_id on success + match insert_user_profile_result { + Ok(_) => { + let _ = tx.commit().await; + Ok(user_id as i64) + } + Err(e) => { + let _ = tx.rollback().await; + Err(e) + } + } + } + + async fn get_user_from_id(&self, user_id: i64) 
-> Result { + query_as::<_, User>("SELECT * FROM torrust_users WHERE user_id = ?") + .bind(user_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_authentication_from_id(&self, user_id: i64) -> Result { + query_as::<_, UserAuthentication>("SELECT * FROM torrust_user_authentication WHERE user_id = ?") + .bind(user_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_profile_from_username(&self, username: &str) -> Result { + query_as::<_, UserProfile>("SELECT * FROM torrust_user_profiles WHERE username = ?") + .bind(username) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_compact_from_id(&self, user_id: i64) -> Result { + query_as::<_, UserCompact>("SELECT tu.user_id, tp.username, tu.administrator FROM torrust_users tu INNER JOIN torrust_user_profiles tp ON tu.user_id = tp.user_id WHERE tu.user_id = ?") + .bind(user_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::UserNotFound) + } + + async fn get_user_tracker_key(&self, user_id: i64) -> Option { + const HOUR_IN_SECONDS: i64 = 3600; + + // casting current_time() to i64 will overflow in the year 2262 + let current_time_plus_hour = (current_time() as i64) + HOUR_IN_SECONDS; + + // get tracker key that is valid for at least one hour from now + query_as::<_, TrackerKey>("SELECT tracker_key, date_expiry FROM torrust_tracker_keys WHERE user_id = $1 AND date_expiry > $2 ORDER BY date_expiry DESC") + .bind(user_id) + .bind(current_time_plus_hour) + .fetch_one(&self.pool) + .await + .ok() + } + + async fn count_users(&self) -> Result { + query_as("SELECT COUNT(*) FROM torrust_users") + .fetch_one(&self.pool) + .await + .map(|(v,)| v) + .map_err(|_| DatabaseError::Error) + } + + async fn ban_user(&self, user_id: i64, reason: &str, date_expiry: NaiveDateTime) -> Result<(), DatabaseError> { + // date needs to be in ISO 8601 format + let date_expiry_string = date_expiry.format("%Y-%m-%d %H:%M:%S").to_string(); + + query("INSERT INTO torrust_user_bans (user_id, reason, date_expiry) VALUES ($1, $2, $3)") + .bind(user_id) + .bind(reason) + .bind(date_expiry_string) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|_| DatabaseError::Error) + } + + async fn grant_admin_role(&self, user_id: i64) -> Result<(), DatabaseError> { + query("UPDATE torrust_users SET administrator = TRUE WHERE user_id = ?") + .bind(user_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::UserNotFound) + }) + } + + async fn verify_email(&self, user_id: i64) -> Result<(), DatabaseError> { + query("UPDATE torrust_user_profiles SET email_verified = TRUE WHERE user_id = ?") + .bind(user_id) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|_| DatabaseError::Error) + } + + async fn add_tracker_key(&self, user_id: i64, tracker_key: &TrackerKey) -> Result<(), DatabaseError> { + let key = tracker_key.key.clone(); + + // date needs to be in ISO 8601 format + let date_expiry = NaiveDateTime::from_timestamp(tracker_key.valid_until, 0) + .format("%Y-%m-%d %H:%M:%S") + .to_string(); + + query("INSERT INTO torrust_tracker_keys (user_id, tracker_key, date_expiry) VALUES ($1, $2, $3)") + .bind(user_id) + .bind(key) + .bind(date_expiry) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|_| DatabaseError::Error) + } + + async fn delete_user(&self, user_id: i64) -> Result<(), DatabaseError> 
{ + query("DELETE FROM torrust_users WHERE user_id = ?") + .bind(user_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::UserNotFound) + }) + } + + async fn add_category(&self, category_name: &str) -> Result<(), DatabaseError> { + query("INSERT INTO torrust_categories (name) VALUES (?)") + .bind(category_name) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("UNIQUE") { + DatabaseError::CategoryAlreadyExists + } else { + DatabaseError::Error + } + }, + _ => DatabaseError::Error + }) + } + + async fn get_category_from_id(&self, category_id: i64) -> Result { + query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE category_id = ?") + .bind(category_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::CategoryNotFound) + } + + async fn get_category_from_name(&self, category_name: &str) -> Result { + query_as::<_, Category>("SELECT category_id, name, (SELECT COUNT(*) FROM torrust_torrents WHERE torrust_torrents.category_id = torrust_categories.category_id) AS num_torrents FROM torrust_categories WHERE name = ?") + .bind(category_name) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::CategoryNotFound) + } + + async fn get_categories(&self) -> Result, DatabaseError> { + query_as::<_, Category>("SELECT tc.category_id, tc.name, COUNT(tt.category_id) as num_torrents FROM torrust_categories tc LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id GROUP BY tc.name") + .fetch_all(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + } + + async fn delete_category(&self, category_name: &str) -> Result<(), DatabaseError> { + query("DELETE FROM torrust_categories WHERE name = ?") + .bind(category_name) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::CategoryNotFound) + }) + } + + // todo: refactor this + async fn get_torrents_search_sorted_paginated(&self, search: &Option, categories: &Option>, sort: &Sorting, offset: u64, page_size: u8) -> Result { + let title = match search { + None => "%".to_string(), + Some(v) => format!("%{}%", v) + }; + + let sort_query: String = match sort { + Sorting::UploadedAsc => "upload_date ASC".to_string(), + Sorting::UploadedDesc => "upload_date DESC".to_string(), + Sorting::SeedersAsc => "seeders ASC".to_string(), + Sorting::SeedersDesc => "seeders DESC".to_string(), + Sorting::LeechersAsc => "leechers ASC".to_string(), + Sorting::LeechersDesc => "leechers DESC".to_string(), + Sorting::NameAsc => "title ASC".to_string(), + Sorting::NameDesc => "title DESC".to_string(), + Sorting::SizeAsc => "file_size ASC".to_string(), + Sorting::SizeDesc => "file_size DESC".to_string(), + }; + + let category_filter_query = if let Some(c) = categories { + let mut i = 0; + let mut category_filters = String::new(); + for category in c.iter() { + // don't take user input in the db query + if let Ok(sanitized_category) = self.get_category_from_name(category).await { + let mut str = format!("tc.name = '{}'", sanitized_category.name); + if i > 0 { str = format!(" OR {}", str); } + category_filters.push_str(&str); + i += 1; + } + } + if category_filters.len() > 0 { + format!("INNER JOIN torrust_categories tc ON 
tt.category_id = tc.category_id AND ({}) ", category_filters) + } else { + String::new() + } + } else { + String::new() + }; + + let mut query_string = format!("SELECT tt.* FROM torrust_torrents tt {}WHERE title LIKE ?", category_filter_query); + + let count_query = format!("SELECT COUNT(*) as count FROM ({}) AS count_table", query_string); + + let count_result: Result = query_as(&count_query) + .bind(title.clone()) + .fetch_one(&self.pool) + .await + .map(|(v,)| v) + .map_err(|_| DatabaseError::Error); + + let count = count_result?; + + query_string = format!("{} ORDER BY {} LIMIT ?, ?", query_string, sort_query); + + let res: Vec = sqlx::query_as::<_, TorrentListing>(&query_string) + .bind(title) + .bind(offset as i64) + .bind(page_size) + .fetch_all(&self.pool) + .await + .map_err(|_| DatabaseError::Error)?; + + Ok(TorrentsResponse { + total: count as u32, + results: res + }) + } + + async fn insert_torrent_and_get_id(&self, username: String, info_hash: String, title: String, category_id: i64, description: String, file_size: i64, seeders: i64, leechers: i64) -> Result { + let current_time = current_time() as i64; + + query("INSERT INTO torrust_torrents (uploader, info_hash, title, category_id, description, upload_date, file_size, seeders, leechers) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)") + .bind(username) + .bind(info_hash) + .bind(title) + .bind(category_id) + .bind(description) + .bind(current_time) + .bind(file_size) + .bind(seeders) + .bind(leechers) + .execute(&self.pool) + .await + .map(|v| v.last_insert_rowid()) + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("info_hash") { + DatabaseError::TorrentAlreadyExists + } else if err.message().contains("title") { + DatabaseError::TorrentTitleAlreadyExists + } else { + DatabaseError::Error + } + } + _ => DatabaseError::Error + }) + } + + async fn get_torrent_from_id(&self, torrent_id: i64) -> Result { + query_as::<_, TorrentListing>("SELECT * FROM torrust_torrents WHERE torrent_id = ?") + .bind(torrent_id) + .fetch_one(&self.pool) + .await + .map_err(|_| DatabaseError::TorrentNotFound) + } + + async fn get_all_torrents_compact(&self) -> Result, DatabaseError> { + query_as::<_, TorrentCompact>("SELECT torrent_id, info_hash FROM torrust_torrents") + .fetch_all(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + } + + async fn update_torrent_title(&self, torrent_id: i64, title: &str) -> Result<(), DatabaseError> { + query("UPDATE torrust_torrents SET title = $1 WHERE torrent_id = $2") + .bind(title) + .bind(torrent_id) + .execute(&self.pool) + .await + .map_err(|e| match e { + sqlx::Error::Database(err) => { + if err.message().contains("UNIQUE") { + DatabaseError::TorrentTitleAlreadyExists + } else { + DatabaseError::Error + } + } + _ => DatabaseError::Error + }) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::TorrentNotFound) + }) + } + + async fn update_torrent_description(&self, torrent_id: i64, description: &str) -> Result<(), DatabaseError> { + query("UPDATE torrust_torrents SET description = $1 WHERE torrent_id = $2") + .bind(description) + .bind(torrent_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::TorrentNotFound) + }) + } + + async fn update_tracker_info(&self, info_hash: &str, seeders: i64, leechers: i64) -> Result<(), DatabaseError> { + query("UPDATE torrust_torrents SET seeders = $1, leechers = $2 WHERE info_hash = $3") + 
.bind(seeders) + .bind(leechers) + .bind(info_hash) + .execute(&self.pool) + .await + .map(|_| ()) + .map_err(|_| DatabaseError::TorrentNotFound) + } + + async fn delete_torrent(&self, torrent_id: i64) -> Result<(), DatabaseError> { + query("DELETE FROM torrust_torrents WHERE torrent_id = ?") + .bind(torrent_id) + .execute(&self.pool) + .await + .map_err(|_| DatabaseError::Error) + .and_then(|v| if v.rows_affected() > 0 { + Ok(()) + } else { + Err(DatabaseError::TorrentNotFound) + }) + } +} diff --git a/src/errors.rs b/src/errors.rs index efe173a7..2ce3ec81 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -4,8 +4,9 @@ use actix_web::{ResponseError, HttpResponse, HttpResponseBuilder}; use actix_web::http::{header, StatusCode}; use serde::{Deserialize, Serialize}; use std::error; +use crate::databases::database::DatabaseError; -pub type ServiceResult = std::result::Result; +pub type ServiceResult = Result; #[derive(Debug, Display, PartialEq, Error)] #[allow(dead_code)] @@ -14,7 +15,7 @@ pub enum ServiceError { InternalServerError, #[display( - fmt = "This server is is closed for registration. Contact admin if this is unexpecter" + fmt = "This server is is closed for registration. Contact admin if this is unexpected" )] ClosedForRegistration, @@ -30,12 +31,15 @@ pub enum ServiceError { WrongPasswordOrUsername, #[display(fmt = "Username not found")] UsernameNotFound, + #[display(fmt = "User not found")] + UserNotFound, + #[display(fmt = "Account not found")] AccountNotFound, - /// when the value passed contains profainity + /// when the value passed contains profanity #[display(fmt = "Can't allow profanity in usernames")] - ProfainityError, + ProfanityError, /// when the value passed contains blacklisted words /// see [blacklist](https://github.com/shuttlecraft/The-Big-Username-Blacklist) #[display(fmt = "Username contains blacklisted words")] @@ -46,7 +50,7 @@ pub enum ServiceError { #[display(fmt = "username_case_mapped violation")] UsernameCaseMappedError, - #[display(fmt = "Passsword too short")] + #[display(fmt = "Password too short")] PasswordTooShort, #[display(fmt = "Username too long")] PasswordTooLong, @@ -101,6 +105,9 @@ pub enum ServiceError { #[display(fmt = "This torrent already exists in our database.")] InfoHashAlreadyExists, + #[display(fmt = "This torrent title has already been used.")] + TorrentTitleAlreadyExists, + #[display(fmt = "Sorry, we have an error with our tracker connection.")] TrackerOffline, @@ -124,9 +131,10 @@ impl ResponseError for ServiceError { ServiceError::NotAUrl => StatusCode::BAD_REQUEST, ServiceError::WrongPasswordOrUsername => StatusCode::FORBIDDEN, ServiceError::UsernameNotFound => StatusCode::NOT_FOUND, + ServiceError::UserNotFound => StatusCode::NOT_FOUND, ServiceError::AccountNotFound => StatusCode::NOT_FOUND, - ServiceError::ProfainityError => StatusCode::BAD_REQUEST, + ServiceError::ProfanityError => StatusCode::BAD_REQUEST, ServiceError::BlacklistError => StatusCode::BAD_REQUEST, ServiceError::UsernameCaseMappedError => StatusCode::BAD_REQUEST, @@ -156,6 +164,8 @@ impl ResponseError for ServiceError { ServiceError::InfoHashAlreadyExists => StatusCode::BAD_REQUEST, + ServiceError::TorrentTitleAlreadyExists => StatusCode::BAD_REQUEST, + ServiceError::TrackerOffline => StatusCode::INTERNAL_SERVER_ERROR, ServiceError::CategoryExists => StatusCode::BAD_REQUEST, @@ -197,6 +207,22 @@ impl From for ServiceError { } } +impl From for ServiceError { + fn from(e: DatabaseError) -> Self { + match e { + DatabaseError::Error => 
ServiceError::InternalServerError, + DatabaseError::UsernameTaken => ServiceError::UsernameTaken, + DatabaseError::EmailTaken => ServiceError::EmailTaken, + DatabaseError::UserNotFound => ServiceError::UserNotFound, + DatabaseError::CategoryAlreadyExists => ServiceError::CategoryExists, + DatabaseError::CategoryNotFound => ServiceError::InvalidCategory, + DatabaseError::TorrentNotFound => ServiceError::TorrentNotFound, + DatabaseError::TorrentAlreadyExists => ServiceError::InfoHashAlreadyExists, + DatabaseError::TorrentTitleAlreadyExists => ServiceError::TorrentTitleAlreadyExists + } + } +} + impl From for ServiceError { fn from(e: pbkdf2::password_hash::Error) -> Self { eprintln!("{}", e); diff --git a/src/handlers/category.rs b/src/handlers/category.rs index b863fbbc..bc595227 100644 --- a/src/handlers/category.rs +++ b/src/handlers/category.rs @@ -3,7 +3,7 @@ use serde::{Serialize, Deserialize}; use crate::common::WebAppData; use crate::errors::{ServiceError, ServiceResult}; -use crate::models::response::{CategoryResponse, OkResponse}; +use crate::models::response::{OkResponse}; pub fn init_routes(cfg: &mut web::ServiceConfig) { cfg.service( @@ -17,18 +17,10 @@ pub fn init_routes(cfg: &mut web::ServiceConfig) { } pub async fn get_categories(app_data: WebAppData) -> ServiceResult { - // Count torrents with category - let res = sqlx::query_as::<_, CategoryResponse>( - r#"SELECT tc.category_id, tc.name, tc.icon, COUNT(tt.category_id) as num_torrents - FROM torrust_categories tc - LEFT JOIN torrust_torrents tt on tc.category_id = tt.category_id - GROUP BY tc.name"# - ) - .fetch_all(&app_data.database.pool) - .await?; + let categories = app_data.database.get_categories().await?; Ok(HttpResponse::Ok().json(OkResponse { - data: res + data: categories })) } @@ -40,25 +32,12 @@ pub struct Category { pub async fn add_category(req: HttpRequest, payload: web::Json, app_data: WebAppData) -> ServiceResult { // check for user - let user = app_data.auth.get_user_from_request(&req).await?; + let user = app_data.auth.get_user_compact_from_request(&req).await?; // check if user is administrator if !user.administrator { return Err(ServiceError::Unauthorized) } - let res = sqlx::query!( - "INSERT INTO torrust_categories (name) VALUES ($1)", - payload.name, - ) - .execute(&app_data.database.pool) - .await; - - if let Err(sqlx::Error::Database(err)) = res { - return if err.message().contains("UNIQUE") { - Err(ServiceError::CategoryExists) - } else { - Err(ServiceError::InternalServerError) - } - } + let _ = app_data.database.add_category(&payload.name).await?; Ok(HttpResponse::Ok().json(OkResponse { data: payload.name.clone() @@ -67,17 +46,12 @@ pub async fn add_category(req: HttpRequest, payload: web::Json, app_da pub async fn delete_category(req: HttpRequest, payload: web::Json, app_data: WebAppData) -> ServiceResult { // check for user - let user = app_data.auth.get_user_from_request(&req).await?; + let user = app_data.auth.get_user_compact_from_request(&req).await?; // check if user is administrator if !user.administrator { return Err(ServiceError::Unauthorized) } - let _res = sqlx::query!( - "DELETE FROM torrust_categories WHERE name = $1", - payload.name, - ) - .execute(&app_data.database.pool) - .await?; + let _ = app_data.database.delete_category(&payload.name).await?; Ok(HttpResponse::Ok().json(OkResponse { data: payload.name.clone() diff --git a/src/handlers/settings.rs b/src/handlers/settings.rs index a639b732..fc48a7e1 100644 --- a/src/handlers/settings.rs +++ b/src/handlers/settings.rs @@ -22,7 
+22,7 @@ pub fn init_routes(cfg: &mut web::ServiceConfig) { pub async fn get_settings(req: HttpRequest, app_data: WebAppData) -> ServiceResult { // check for user - let user = app_data.auth.get_user_from_request(&req).await?; + let user = app_data.auth.get_user_compact_from_request(&req).await?; // check if user is administrator if !user.administrator { return Err(ServiceError::Unauthorized) } @@ -52,7 +52,7 @@ pub async fn get_site_name(app_data: WebAppData) -> ServiceResult, app_data: WebAppData) -> ServiceResult { // check for user - let user = app_data.auth.get_user_from_request(&req).await?; + let user = app_data.auth.get_user_compact_from_request(&req).await?; // check if user is administrator if !user.administrator { return Err(ServiceError::Unauthorized) } diff --git a/src/handlers/torrent.rs b/src/handlers/torrent.rs index 32b18318..68b85069 100644 --- a/src/handlers/torrent.rs +++ b/src/handlers/torrent.rs @@ -4,8 +4,8 @@ use actix_web::web::{Query}; use futures::{AsyncWriteExt, StreamExt, TryStreamExt}; use serde::{Deserialize}; use crate::errors::{ServiceError, ServiceResult}; -use crate::models::response::{NewTorrentResponse, OkResponse, TorrentResponse, TorrentsResponse}; -use crate::models::torrent::{TorrentListing, TorrentRequest}; +use crate::models::response::{NewTorrentResponse, OkResponse, TorrentResponse}; +use crate::models::torrent::TorrentRequest; use crate::utils::parse_torrent; use crate::common::{WebAppData}; use std::io::Cursor; @@ -14,6 +14,7 @@ use crate::models::torrent_file::{Torrent, File}; use crate::AsCSV; use std::option::Option::Some; use sqlx::{FromRow}; +use crate::databases::database::Sorting; pub fn init_routes(cfg: &mut web::ServiceConfig) { cfg.service( @@ -34,16 +35,6 @@ pub fn init_routes(cfg: &mut web::ServiceConfig) { ); } -#[derive(Debug, Deserialize)] -pub struct DisplayInfo { - page_size: Option, - page: Option, - sort: Option, - // expects comma separated string, eg: "?categories=movie,other,app" - categories: Option, - search: Option, -} - #[derive(FromRow)] pub struct TorrentCount { pub count: i32, @@ -66,94 +57,139 @@ impl CreateTorrent { } } -// eg: /torrents?categories=music,other,movie&search=bunny&sort=size_DESC -pub async fn get_torrents(params: Query, app_data: WebAppData) -> ServiceResult { - let page = params.page.unwrap_or(0); - let page_size = params.page_size.unwrap_or(30); - let offset = page * page_size; - let categories = params.categories.as_csv::().unwrap_or(None); - let search = match ¶ms.search { - None => "%".to_string(), - Some(v) => format!("%{}%", v) - }; +#[derive(Debug, Deserialize)] +pub struct TorrentSearch { + page_size: Option, + page: Option, + sort: Option, + // expects comma separated string, eg: "?categories=movie,other,app" + categories: Option, + search: Option, +} - let sort_query: String = match ¶ms.sort { - Some(sort) => { - match sort.as_str() { - "uploaded_ASC" => "upload_date ASC".to_string(), - "uploaded_DESC" => "upload_date DESC".to_string(), - "seeders_ASC" => "seeders ASC".to_string(), - "seeders_DESC" => "seeders DESC".to_string(), - "leechers_ASC" => "leechers ASC".to_string(), - "leechers_DESC" => "leechers DESC".to_string(), - "name_ASC" => "title ASC".to_string(), - "name_DESC" => "title DESC".to_string(), - "size_ASC" => "file_size ASC".to_string(), - "size_DESC" => "file_size DESC".to_string(), - _ => "upload_date DESC".to_string() - } - } - None => "upload_date DESC".to_string() - }; +#[derive(Debug, Deserialize)] +pub struct TorrentUpdate { + title: Option, + description: Option +} - 
let category_filter_query = if let Some(c) = categories { - let mut i = 0; - let mut category_filters = String::new(); - for category in c.iter() { - // don't take user input in the db query - if let Some(sanitized_category) = &app_data.database.get_category_by_name(category).await { - let mut str = format!("tc.name = '{}'", sanitized_category.name); - if i > 0 { str = format!(" OR {}", str); } - category_filters.push_str(&str); - i += 1; - } - } - if category_filters.len() > 0 { - format!("INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({})", category_filters) - } else { - String::new() - } - } else { - String::new() - }; +pub async fn upload_torrent(req: HttpRequest, payload: Multipart, app_data: WebAppData) -> ServiceResult { + let user = app_data.auth.get_user_compact_from_request(&req).await?; + + let mut torrent_request = get_torrent_request_from_payload(payload).await?; - let mut query_string = format!("SELECT tt.* FROM torrust_torrents tt {} WHERE title LIKE ?", category_filter_query); - let count_query_string = format!("SELECT COUNT(torrent_id) as count FROM ({})", query_string); + // update announce url to our own tracker url + torrent_request.torrent.set_torrust_config(&app_data.cfg).await; + + let category = app_data.database.get_category_from_name(&torrent_request.fields.category).await + .map_err(|_| ServiceError::InvalidCategory)?; - let count: TorrentCount = sqlx::query_as::<_, TorrentCount>(&count_query_string) - .bind(search.clone()) - .fetch_one(&app_data.database.pool) - .await?; + let username = user.username; + let info_hash = torrent_request.torrent.info_hash(); + let title = torrent_request.fields.title; + //let category = torrent_request.fields.category; + let description = torrent_request.fields.description; + //let current_time = current_time() as i64; + let file_size = torrent_request.torrent.file_size(); + let mut seeders = 0; + let mut leechers = 0; - query_string = format!("{} ORDER BY {} LIMIT ?, ?", query_string, sort_query); + if let Ok(torrent_info) = app_data.tracker.get_torrent_info(&info_hash).await { + seeders = torrent_info.seeders; + leechers = torrent_info.leechers; + } - let res: Vec = sqlx::query_as::<_, TorrentListing>(&query_string) - .bind(search) - .bind(offset) - .bind(page_size) - .fetch_all(&app_data.database.pool).await?; + let torrent_id = app_data.database.insert_torrent_and_get_id(username, info_hash, title, category.category_id, description, file_size, seeders, leechers).await?; - let torrents_response = TorrentsResponse { - total: count.count as u32, - results: res - }; + // whitelist info hash on tracker + let _ = app_data.tracker.whitelist_info_hash(torrent_request.torrent.info_hash()).await; + + let settings = app_data.cfg.settings.read().await; + + let upload_folder = settings.storage.upload_path.clone(); + let filepath = format!("{}/{}", upload_folder, torrent_id.to_string() + ".torrent"); + + drop(settings); + + // save torrent file to uploads folder + // if fails, delete torrent from database and return error + if save_torrent_file(&upload_folder, &filepath, &torrent_request.torrent).await.is_err() { + let _ = app_data.database.delete_torrent(torrent_id).await; + return Err(ServiceError::InternalServerError) + } Ok(HttpResponse::Ok().json(OkResponse { - data: torrents_response + data: NewTorrentResponse { + torrent_id + } })) } +pub async fn download_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceResult { + let torrent_id = get_torrent_id_from_request(&req)?; + + let settings = 
app_data.cfg.settings.read().await; + + // optional + let user = app_data.auth.get_user_compact_from_request(&req).await; + + let filepath = format!("{}/{}", settings.storage.upload_path, torrent_id.to_string() + ".torrent"); + + let mut torrent = match parse_torrent::read_torrent_from_file(&filepath) { + Ok(torrent) => Ok(torrent), + Err(e) => { + println!("{:?}", e); + Err(ServiceError::InternalServerError) + } + }?; + + let tracker_url = settings.tracker.url.clone(); + + drop(settings); + + // add personal tracker url or default tracker url + match user { + Ok(user) => { + let personal_announce_url = app_data.tracker.get_personal_announce_url(user.user_id).await.unwrap_or(tracker_url); + torrent.announce = Some(personal_announce_url.clone()); + if let Some(list) = &mut torrent.announce_list { + let vec = vec![personal_announce_url]; + list.insert(0, vec); + } + }, + Err(_) => { + torrent.announce = Some(tracker_url); + } + } + + let buffer = match parse_torrent::encode_torrent(&torrent) { + Ok(v) => Ok(v), + Err(e) => { + println!("{:?}", e); + Err(ServiceError::InternalServerError) + } + }?; + + Ok(HttpResponse::Ok() + .content_type("application/x-bittorrent") + .body(buffer) + ) +} + pub async fn get_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceResult { // optional - let user = app_data.auth.get_user_from_request(&req).await; + let user = app_data.auth.get_user_compact_from_request(&req).await; let settings = app_data.cfg.settings.read().await; let torrent_id = get_torrent_id_from_request(&req)?; - let torrent_listing = app_data.database.get_torrent_by_id(torrent_id).await?; - let category = app_data.database.get_category(torrent_listing.category_id).await.unwrap(); + let torrent_listing = app_data.database.get_torrent_from_id(torrent_id).await?; + + let category = app_data.database.get_category_from_id(torrent_listing.category_id).await?; + let mut torrent_response = TorrentResponse::from_listing(torrent_listing); + torrent_response.category = category; let filepath = format!("{}/{}", settings.storage.upload_path, torrent_response.torrent_id.to_string() + ".torrent"); @@ -186,22 +222,26 @@ pub async fn get_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceResul } // add self-hosted tracker url - if user.is_ok() { - let unwrapped_user = user.unwrap(); - let personal_announce_url = app_data.tracker.get_personal_announce_url(&unwrapped_user).await?; - // add personal tracker url to front of vec - torrent_response.trackers.insert(0, personal_announce_url); - } else { - // add tracker to front of vec - torrent_response.trackers.insert(0, tracker_url); + match user { + Ok(user) => { + // if no user owned tracker key can be found, use default tracker url + let personal_announce_url = app_data.tracker.get_personal_announce_url(user.user_id).await.unwrap_or(tracker_url); + // add personal tracker url to front of vec + torrent_response.trackers.insert(0, personal_announce_url); + }, + Err(_) => { + torrent_response.trackers.insert(0, tracker_url); + } } // add magnet link let mut magnet = format!("magnet:?xt=urn:btih:{}&dn={}", torrent_response.info_hash, urlencoding::encode(&torrent_response.title)); + // add trackers from torrent file to magnet link for tracker in &torrent_response.trackers { magnet.push_str(&format!("&tr={}", urlencoding::encode(tracker))); } + torrent_response.magnet_link = magnet; // get realtime seeders and leechers @@ -215,51 +255,27 @@ pub async fn get_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceResul })) } -#[derive(Debug, 
Deserialize)] -pub struct TorrentUpdate { - title: Option, - description: Option -} - pub async fn update_torrent(req: HttpRequest, payload: web::Json, app_data: WebAppData) -> ServiceResult { - let user = app_data.auth.get_user_from_request(&req).await?; + let user = app_data.auth.get_user_compact_from_request(&req).await?; let torrent_id = get_torrent_id_from_request(&req)?; - let torrent_listing = app_data.database.get_torrent_by_id(torrent_id).await?; + let torrent_listing = app_data.database.get_torrent_from_id(torrent_id).await?; // check if user is owner or administrator if torrent_listing.uploader != user.username && !user.administrator { return Err(ServiceError::Unauthorized) } // update torrent title if let Some(title) = &payload.title { - let res = sqlx::query!( - "UPDATE torrust_torrents SET title = $1 WHERE torrent_id = $2", - title, - torrent_id - ) - .execute(&app_data.database.pool) - .await; - - if let Err(_) = res { return Err(ServiceError::TorrentNotFound) } - if res.unwrap().rows_affected() == 0 { return Err(ServiceError::TorrentNotFound) } + let _res = app_data.database.update_torrent_title(torrent_id, title).await?; } // update torrent description if let Some(description) = &payload.description { - let res = sqlx::query!( - "UPDATE torrust_torrents SET description = $1 WHERE torrent_id = $2", - description, - torrent_id - ) - .execute(&app_data.database.pool) - .await; - - if let Err(_) = res { return Err(ServiceError::TorrentNotFound) } - if res.unwrap().rows_affected() == 0 { return Err(ServiceError::TorrentNotFound) } + let _res = app_data.database.update_torrent_description(torrent_id, description).await?; } - let torrent_listing = app_data.database.get_torrent_by_id(torrent_id).await?; + let torrent_listing = app_data.database.get_torrent_from_id(torrent_id).await?; let torrent_response = TorrentResponse::from_listing(torrent_listing); Ok(HttpResponse::Ok().json(OkResponse { @@ -268,7 +284,7 @@ pub async fn update_torrent(req: HttpRequest, payload: web::Json, } pub async fn delete_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceResult { - let user = app_data.auth.get_user_from_request(&req).await?; + let user = app_data.auth.get_user_compact_from_request(&req).await?; // check if user is administrator if !user.administrator { return Err(ServiceError::Unauthorized) } @@ -276,17 +292,9 @@ pub async fn delete_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceRe let torrent_id = get_torrent_id_from_request(&req)?; // needed later for removing torrent from tracker whitelist - let torrent_listing = app_data.database.get_torrent_by_id(torrent_id).await?; - - let res = sqlx::query!( - "DELETE FROM torrust_torrents WHERE torrent_id = ?", - torrent_id - ) - .execute(&app_data.database.pool) - .await; + let torrent_listing = app_data.database.get_torrent_from_id(torrent_id).await?; - if let Err(_) = res { return Err(ServiceError::TorrentNotFound) } - if res.unwrap().rows_affected() == 0 { return Err(ServiceError::TorrentNotFound) } + let _res = app_data.database.delete_torrent(torrent_id).await?; // remove info_hash from tracker whitelist let _ = app_data.tracker.remove_info_hash_from_whitelist(torrent_listing.info_hash).await; @@ -298,116 +306,29 @@ pub async fn delete_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceRe })) } -pub async fn upload_torrent(req: HttpRequest, payload: Multipart, app_data: WebAppData) -> ServiceResult { - let user = app_data.auth.get_user_from_request(&req).await?; - - let mut torrent_request = 
get_torrent_request_from_payload(payload).await?; - - // update announce url to our own tracker url - torrent_request.torrent.set_torrust_config(&app_data.cfg).await; +// eg: /torrents?categories=music,other,movie&search=bunny&sort=size_DESC +pub async fn get_torrents(params: Query, app_data: WebAppData) -> ServiceResult { + let sort = params.sort.unwrap_or(Sorting::UploadedDesc); - let res = sqlx::query!( - "SELECT category_id FROM torrust_categories WHERE name = ?", - torrent_request.fields.category - ) - .fetch_one(&app_data.database.pool) - .await; + let page = params.page.unwrap_or(0); - let row = match res { - Ok(row) => row, - Err(_) => return Err(ServiceError::InvalidCategory), + // make sure the min page size = 10 + let page_size = match params.page_size.unwrap_or(30) { + 0 ..= 9 => 10, + v => v }; - let username = user.username; - let info_hash = torrent_request.torrent.info_hash(); - let title = torrent_request.fields.title; - //let category = torrent_request.fields.category; - let description = torrent_request.fields.description; - //let current_time = current_time() as i64; - let file_size = torrent_request.torrent.file_size(); - let mut seeders = 0; - let mut leechers = 0; - - if let Ok(torrent_info) = app_data.tracker.get_torrent_info(&info_hash).await { - seeders = torrent_info.seeders; - leechers = torrent_info.leechers; - } - - let torrent_id = app_data.database.insert_torrent_and_get_id(username, info_hash, title, row.category_id, description, file_size, seeders, leechers).await?; - - // whitelist info hash on tracker - let _ = app_data.tracker.whitelist_info_hash(torrent_request.torrent.info_hash()).await; + let offset = (page * page_size as u32) as u64; - let settings = app_data.cfg.settings.read().await; - - let upload_folder = settings.storage.upload_path.clone(); - let filepath = format!("{}/{}", upload_folder, torrent_id.to_string() + ".torrent"); - - drop(settings); + let categories = params.categories.as_csv::().unwrap_or(None); - save_torrent_file(&upload_folder, &filepath, &torrent_request.torrent).await?; + let torrents_response = app_data.database.get_torrents_search_sorted_paginated(¶ms.search, &categories, &sort, offset, page_size as u8).await?; Ok(HttpResponse::Ok().json(OkResponse { - data: NewTorrentResponse { - torrent_id - } + data: torrents_response })) } -pub async fn download_torrent(req: HttpRequest, app_data: WebAppData) -> ServiceResult { - let torrent_id = get_torrent_id_from_request(&req)?; - - let settings = app_data.cfg.settings.read().await; - - // optional - let user = app_data.auth.get_user_from_request(&req).await; - - let filepath = format!("{}/{}", settings.storage.upload_path, torrent_id.to_string() + ".torrent"); - - let mut torrent = match parse_torrent::read_torrent_from_file(&filepath) { - Ok(torrent) => Ok(torrent), - Err(e) => { - println!("{:?}", e); - Err(ServiceError::InternalServerError) - } - }?; - - if user.is_ok() { - let unwrapped_user = user.unwrap(); - let personal_announce_url = app_data.tracker.get_personal_announce_url(&unwrapped_user).await?; - torrent.announce = Some(personal_announce_url.clone()); - if let Some(list) = &mut torrent.announce_list { - let mut vec = Vec::new(); - vec.push(personal_announce_url); - list.insert(0, vec); - } - } else { - torrent.announce = Some(settings.tracker.url.clone()); - } - - drop(settings); - - let buffer = match parse_torrent::encode_torrent(&torrent) { - Ok(v) => Ok(v), - Err(e) => { - println!("{:?}", e); - Err(ServiceError::InternalServerError) - } - }?; - - 
Ok(HttpResponse::Ok() - .content_type("application/x-bittorrent") - .body(buffer) - ) -} - -// async fn verify_torrent_ownership(user: &User, torrent_listing: &TorrentListing) -> Result<(), ServiceError> { -// match torrent_listing.uploader == user.username { -// true => Ok(()), -// false => Err(ServiceError::BadRequest) -// } -// } - async fn save_torrent_file(upload_folder: &str, filepath: &str, torrent: &Torrent) -> Result<(), ServiceError> { let torrent_bytes = match parse_torrent::encode_torrent(torrent) { Ok(v) => Ok(v), diff --git a/src/handlers/user.rs b/src/handlers/user.rs index 9f9a48f5..eb579e37 100644 --- a/src/handlers/user.rs +++ b/src/handlers/user.rs @@ -1,21 +1,15 @@ use actix_web::{web, Responder, HttpResponse, HttpRequest}; use serde::{Deserialize, Serialize}; -use pbkdf2::{ - password_hash::{ - rand_core::OsRng, - PasswordHash, PasswordHasher, PasswordVerifier, SaltString, - }, - Pbkdf2, -}; -use std::borrow::Cow; +use jsonwebtoken::{DecodingKey, decode, Validation, Algorithm}; +use pbkdf2::password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString}; +use pbkdf2::Pbkdf2; + use crate::errors::{ServiceResult, ServiceError}; use crate::common::WebAppData; -use jsonwebtoken::{DecodingKey, decode, Validation, Algorithm}; use crate::config::EmailOnSignup; use crate::models::response::OkResponse; use crate::models::response::TokenResponse; use crate::mailer::VerifyClaims; -use crate::utils::random::random_string; pub fn init_routes(cfg: &mut web::ServiceConfig) { cfg.service( @@ -59,124 +53,102 @@ pub async fn register(req: HttpRequest, mut payload: web::Json, app_da } if payload.password != payload.confirm_password { - return Err(ServiceError::PasswordsDontMatch); + return Err(ServiceError::PasswordsDontMatch) } let password_length = payload.password.len(); + if password_length <= settings.auth.min_password_length { - return Err(ServiceError::PasswordTooShort); + return Err(ServiceError::PasswordTooShort) } + if password_length >= settings.auth.max_password_length { - return Err(ServiceError::PasswordTooLong); + return Err(ServiceError::PasswordTooLong) } let salt = SaltString::generate(&mut OsRng); - let password_hash; - if let Ok(password) = Pbkdf2.hash_password(payload.password.as_bytes(), &salt) { - password_hash = password.to_string(); - } else { - return Err(ServiceError::InternalServerError); - } + let password_hash = Pbkdf2.hash_password(payload.password.as_bytes(), &salt)?.to_string(); if payload.username.contains('@') { return Err(ServiceError::UsernameInvalid) } - // can't drop not null constraint on sqlite, so we fill the email with unique junk :) - let email = payload.email.as_ref().unwrap_or(&format!("EMPTY_EMAIL_{}", random_string(16))).to_string(); - - let res = sqlx::query!( - "INSERT INTO torrust_users (username, email, password) VALUES ($1, $2, $3)", - payload.username, - email, - password_hash, - ) - .execute(&app_data.database.pool) - .await; - - if let Err(sqlx::Error::Database(err)) = res { - return if err.code() == Some(Cow::from("2067")) { - if err.message().contains("torrust_users.username") { - Err(ServiceError::UsernameTaken) - } else if err.message().contains("torrust_users.email") { - Err(ServiceError::EmailTaken) - } else { - Err(ServiceError::InternalServerError) - } - } else { - Err(sqlx::Error::Database(err).into()) - }; - } + let email = payload.email.as_ref().unwrap_or(&"".to_string()).to_string(); - // count accounts - let res_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM torrust_users") - 
.fetch_one(&app_data.database.pool) - .await?; - - // make admin if first account - if res_count.0 == 1 { - let _res_make_admin = sqlx::query!("UPDATE torrust_users SET administrator = 1") - .execute(&app_data.database.pool) - .await; - } + let user_id = app_data.database.insert_user_and_get_id(&payload.username, &email, &password_hash).await?; let conn_info = req.connection_info(); if settings.mail.email_verification_enabled && payload.email.is_some() { let mail_res = app_data.mailer.send_verification_mail( - &payload.email.as_ref().unwrap(), + payload.email.as_ref().unwrap(), &payload.username, + user_id, format!("{}://{}", conn_info.scheme(), conn_info.host()).as_str() ) .await; - // get user id from user insert res - let user_id = res.unwrap().last_insert_rowid(); - if mail_res.is_err() { let _ = app_data.database.delete_user(user_id).await; return Err(ServiceError::FailedToSendVerificationEmail) } - } else { - } Ok(HttpResponse::Ok()) } -pub async fn login(payload: web::Json, app_data: WebAppData) -> ServiceResult { - let settings = app_data.cfg.settings.read().await; +async fn grant_admin_role(app_data: &WebAppData, user_id: i64) { + // count accounts + let user_count = app_data.database.count_users().await; - let res = app_data.database.get_user_with_username(&payload.login).await; + // make admin if first account + if let Ok(1) = user_count { + let _ = app_data.database.grant_admin_role(user_id).await; + } +} - match res { - Some(user) => { - if settings.mail.email_verification_enabled && !user.email_verified { - return Err(ServiceError::EmailNotVerified) - } +pub async fn login(payload: web::Json, app_data: WebAppData) -> ServiceResult { + // get the user profile from database + let user_profile = app_data.database.get_user_profile_from_username(&payload.login) + .await + .map_err(|_| ServiceError::WrongPasswordOrUsername)?; + + // should not be able to fail if user_profile succeeded + let user_authentication = app_data.database.get_user_authentication_from_id(user_profile.user_id) + .await + .map_err(|_| ServiceError::InternalServerError)?; + + // wrap string of the hashed password into a PasswordHash struct for verification + let parsed_hash = PasswordHash::new(&user_authentication.password_hash)?; + + // verify if the user supplied and the database supplied passwords match + if Pbkdf2.verify_password(payload.password.as_bytes(), &parsed_hash).is_err() { + return Err(ServiceError::WrongPasswordOrUsername) + } + + let settings = app_data.cfg.settings.read().await; - drop(settings); + // fail login if email verification is required and this email is not verified + if settings.mail.email_verification_enabled && !user_profile.email_verified { + return Err(ServiceError::EmailNotVerified) + } - let parsed_hash = PasswordHash::new(&user.password)?; + // drop read lock on settings + drop(settings); - if !Pbkdf2.verify_password(payload.password.as_bytes(), &parsed_hash).is_ok() { - return Err(ServiceError::WrongPasswordOrUsername); - } + let user_compact = app_data.database.get_user_compact_from_id(user_profile.user_id).await?; - let username = user.username.clone(); - let token = app_data.auth.sign_jwt(user.clone()).await; + // sign jwt with compact user details as payload + let token = app_data.auth.sign_jwt(user_compact.clone()).await; - Ok(HttpResponse::Ok().json(OkResponse { - data: TokenResponse { - token, - username, - admin: user.administrator - } - })) + Ok(HttpResponse::Ok().json(OkResponse { + data: TokenResponse { + token, + username: user_compact.username, + admin: 
user_compact.administrator } - None => Err(ServiceError::WrongPasswordOrUsername) - } + })) } pub async fn verify_user(req: HttpRequest, app_data: WebAppData) -> String { @@ -200,57 +172,32 @@ pub async fn verify_user(req: HttpRequest, app_data: WebAppData) -> String { drop(settings); - let res = sqlx::query!( - "UPDATE torrust_users SET email_verified = TRUE WHERE username = ?", - token_data.sub - ) - .execute(&app_data.database.pool) - .await; - - if let Err(_) = res { + if app_data.database.verify_email(token_data.sub).await.is_err() { return ServiceError::InternalServerError.to_string() - } + }; String::from("Email verified, you can close this page.") } +// todo: add reason and date_expiry parameters to request pub async fn ban_user(req: HttpRequest, app_data: WebAppData) -> ServiceResult { - let user = app_data.auth.get_user_from_request(&req).await?; + let user = app_data.auth.get_user_compact_from_request(&req).await?; // check if user is administrator if !user.administrator { return Err(ServiceError::Unauthorized) } let to_be_banned_username = req.match_info().get("user").unwrap(); - let res = sqlx::query!( - "DELETE FROM torrust_users WHERE username = ? AND administrator = 0", - to_be_banned_username - ) - .execute(&app_data.database.pool) - .await; + let user_profile = app_data.database.get_user_profile_from_username(to_be_banned_username).await?; - if let Err(_) = res { return Err(ServiceError::UsernameNotFound) } - if res.unwrap().rows_affected() == 0 { return Err(ServiceError::UsernameNotFound) } + let reason = "no reason".to_string(); - Ok(HttpResponse::Ok().json(OkResponse { - data: format!("Banned user: {}", to_be_banned_username) - })) -} + // user will be banned until the year 9999 + let date_expiry = chrono::NaiveDateTime::parse_from_str("9999-01-01 00:00:00", "%Y-%m-%d %H:%M:%S").expect("Could not parse date from 9999-01-01 00:00:00."); -pub async fn me(req: HttpRequest, app_data: WebAppData) -> ServiceResult { - let user = match app_data.auth.get_user_from_request(&req).await { - Ok(user) => Ok(user), - Err(e) => Err(e) - }?; - - let username = user.username.clone(); - let token = app_data.auth.sign_jwt(user.clone()).await; + let _ = app_data.database.ban_user(user_profile.user_id, &reason, date_expiry).await?; Ok(HttpResponse::Ok().json(OkResponse { - data: TokenResponse { - token, - username, - admin: user.administrator - } + data: format!("Banned user: {}", to_be_banned_username) })) } diff --git a/src/lib.rs b/src/lib.rs index 76beae4d..63e2edf5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,13 +1,13 @@ pub mod handlers; pub mod models; pub mod utils; -pub mod database; pub mod config; pub mod errors; pub mod common; pub mod auth; pub mod tracker; pub mod mailer; +pub mod databases; trait AsCSV { fn as_csv(&self) -> Result>, ()> diff --git a/src/mailer.rs b/src/mailer.rs index e39fc5d1..6168f1af 100644 --- a/src/mailer.rs +++ b/src/mailer.rs @@ -17,7 +17,7 @@ pub struct MailerService { #[derive(Debug, Serialize, Deserialize)] pub struct VerifyClaims { pub iss: String, - pub sub: String, + pub sub: i64, pub exp: u64, } @@ -55,18 +55,18 @@ impl MailerService { .build() } - pub async fn send_verification_mail(&self, to: &str, username: &str, base_url: &str) -> Result<(), ServiceError> { + pub async fn send_verification_mail(&self, to: &str, username: &str, user_id: i64, base_url: &str) -> Result<(), ServiceError> { let builder = self.get_builder(to).await; - let verification_url = self.get_verification_url(username, base_url).await; + let verification_url = 
self.get_verification_url(user_id, base_url).await; let mail_body = format!( r#" -Welcome to Torrust, {}! + Welcome to Torrust, {}! -Please click the confirmation link below to verify your account. -{} + Please click the confirmation link below to verify your account. + {} -If this account wasn't made by you, you can ignore this email. + If this account wasn't made by you, you can ignore this email. "#, username, verification_url @@ -112,7 +112,7 @@ If this account wasn't made by you, you can ignore this email. .to(to.parse().unwrap()) } - async fn get_verification_url(&self, username: &str, base_url: &str) -> String { + async fn get_verification_url(&self, user_id: i64, base_url: &str) -> String { let settings = self.cfg.settings.read().await; // create verification JWT @@ -121,7 +121,7 @@ If this account wasn't made by you, you can ignore this email. // Create non expiring token that is only valid for email-verification let claims = VerifyClaims { iss: String::from("email-verification"), - sub: String::from(username), + sub: user_id, exp: current_time() + 315_569_260 // 10 years from now }; diff --git a/src/main.rs b/src/main.rs index 78d4ef32..297377f1 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,11 +1,11 @@ use std::sync::Arc; use actix_web::{App, HttpServer, middleware, web}; use actix_cors::Cors; -use torrust_index_backend::database::Database; use torrust_index_backend::{handlers}; use torrust_index_backend::config::{Configuration}; use torrust_index_backend::common::AppData; use torrust_index_backend::auth::AuthorizationService; +use torrust_index_backend::databases::database::connect_database; use torrust_index_backend::tracker::TrackerService; use torrust_index_backend::mailer::MailerService; @@ -20,7 +20,7 @@ async fn main() -> std::io::Result<()> { let settings = cfg.settings.read().await; - let database = Arc::new(Database::new(&settings.database.connect_url).await); + let database = Arc::new(connect_database(&settings.database.db_driver, &settings.database.connect_url).await); let auth = Arc::new(AuthorizationService::new(cfg.clone(), database.clone())); let tracker_service = Arc::new(TrackerService::new(cfg.clone(), database.clone())); let mailer_service = Arc::new(MailerService::new(cfg.clone()).await); @@ -34,9 +34,6 @@ async fn main() -> std::io::Result<()> { ) ); - // create/update database tables - let _ = sqlx::migrate!().run(&database.pool).await; - // create torrent upload folder async_std::fs::create_dir_all(&settings.storage.upload_path).await?; diff --git a/src/models/date_time.rs b/src/models/date_time.rs new file mode 100644 index 00000000..51661201 --- /dev/null +++ b/src/models/date_time.rs @@ -0,0 +1,119 @@ +use std::fmt; +use chrono::{Datelike, Timelike}; + +pub struct DateTime { + pub year: u16, + pub month: u8, + pub day: u8, + pub hours: u8, + pub minutes: u8, + pub seconds: u8 +} + +impl DateTime { + pub fn now() -> Self { + let dt = chrono::offset::Utc::now(); + + Self { + year: dt.year() as u16, + month: dt.month() as u8, + day: dt.day() as u8, + hours: dt.hour() as u8, + minutes: dt.minute() as u8, + seconds: dt.second() as u8 + } + } + + // min 0000 max 9999 + pub fn year(&self) -> String { + let mut year_string = match self.year { + 10000 ..= u16::MAX => "9999".to_string(), + year => year.to_string() + }; + + while year_string.len() < 4 { + year_string = format!("0{}", year_string); + } + + year_string + } + + // min 01 max 12 + pub fn month(&self) -> String { + let mut month_string = match self.month { + 13 ..= u8::MAX => "12".to_string(), + 0 
=> "01".to_string(), + month => month.to_string() + }; + + while month_string.len() < 2 { + month_string = format!("0{}", month_string); + } + + month_string + } + + // min 01 max 31 + pub fn day(&self) -> String { + let mut day_string = match self.day { + 32 ..= u8::MAX => "31".to_string(), + 0 => "01".to_string(), + day => day.to_string() + }; + + while day_string.len() < 2 { + day_string = format!("0{}", day_string); + } + + day_string + } + + // min 00 max 23 + pub fn hours(&self) -> String { + let mut hours_string = match self.hours { + 24 ..= u8::MAX => "23".to_string(), + hours => hours.to_string() + }; + + while hours_string.len() < 2 { + hours_string = format!("0{}", hours_string); + } + + hours_string + } + + // min 00 max 59 + pub fn minutes(&self) -> String { + let mut minutes_string = match self.minutes { + 60 ..= u8::MAX => "59".to_string(), + minutes => minutes.to_string() + }; + + while minutes_string.len() < 2 { + minutes_string = format!("0{}", minutes_string); + } + + minutes_string + } + + // min 00 max 59 + pub fn seconds(&self) -> String { + let mut seconds_string = match self.seconds { + 60 ..= u8::MAX => "59".to_string(), + seconds => seconds.to_string() + }; + + while seconds_string.len() < 2 { + seconds_string = format!("0{}", seconds_string); + } + + seconds_string + } +} + +// display in 0000-00-00 00:00:00 format (ISO 8601) +impl fmt::Display for DateTime { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}-{}-{} {}:{}:{}", self.year(), self.month(), self.day(), self.hours(), self.minutes(), self.seconds()) + } +} diff --git a/src/models/mod.rs b/src/models/mod.rs index cb31379a..0a66b9ce 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -3,3 +3,4 @@ pub mod torrent; pub mod torrent_file; pub mod response; pub mod tracker_key; +pub mod date_time; diff --git a/src/models/response.rs b/src/models/response.rs index 76858bf3..668d8bc1 100644 --- a/src/models/response.rs +++ b/src/models/response.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Serialize}; -use crate::database::Category; +use crate::databases::database::Category; use crate::models::torrent::TorrentListing; use crate::models::torrent_file::File; @@ -29,14 +29,6 @@ pub struct NewTorrentResponse { pub torrent_id: i64, } -#[derive(Serialize, Deserialize, Debug, sqlx::FromRow)] -pub struct CategoryResponse { - pub category_id: i64, - pub name: String, - pub icon: Option, - pub num_torrents: Option, -} - #[derive(Debug, Serialize, Deserialize, sqlx::FromRow)] pub struct TorrentResponse { pub torrent_id: i64, @@ -62,7 +54,7 @@ impl TorrentResponse { info_hash: torrent_listing.info_hash, title: torrent_listing.title, description: torrent_listing.description, - category: Category { name: "".to_string(), icon: None, num_torrents: 0 }, + category: Category { category_id: 0, name: "".to_string(), num_torrents: 0 }, upload_date: torrent_listing.upload_date, file_size: torrent_listing.file_size, seeders: torrent_listing.seeders, diff --git a/src/models/tracker_key.rs b/src/models/tracker_key.rs index 71ba04f2..d130cd6d 100644 --- a/src/models/tracker_key.rs +++ b/src/models/tracker_key.rs @@ -1,6 +1,7 @@ use serde::{Serialize, Deserialize}; +use sqlx::FromRow; -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, FromRow)] pub struct TrackerKey { pub key: String, pub valid_until: i64, diff --git a/src/models/user.rs b/src/models/user.rs index 403a54f3..7b3aa1da 100644 --- a/src/models/user.rs +++ b/src/models/user.rs @@ -1,18 +1,49 @@ use serde::{Serialize, 
Deserialize}; -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] pub struct User { + pub user_id: i64, + pub date_registered: String, + pub administrator: bool, +} + +#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] +pub struct UserAuthentication { + pub user_id: i64, + pub password_hash: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] +pub struct UserProfile { pub user_id: i64, pub username: String, pub email: String, pub email_verified: bool, - pub password: String, + pub bio: String, + pub avatar: String, +} + +#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] +pub struct UserCompact { + pub user_id: i64, + pub username: String, pub administrator: bool, } +#[derive(Debug, Serialize, Deserialize, Clone, sqlx::FromRow)] +pub struct UserFull { + pub user_id: i64, + pub date_registered: String, + pub administrator: bool, + pub username: String, + pub email: String, + pub email_verified: bool, + pub bio: String, + pub avatar: String, +} + #[derive(Debug, Serialize, Deserialize, Clone)] -pub struct Claims { - pub sub: String, // username - pub admin: bool, +pub struct UserClaims { + pub user: UserCompact, pub exp: u64, // epoch in seconds } diff --git a/src/tracker.rs b/src/tracker.rs index 7eecfafb..c3c7eb0f 100644 --- a/src/tracker.rs +++ b/src/tracker.rs @@ -1,10 +1,11 @@ -use crate::config::Configuration; use std::sync::Arc; -use crate::database::Database; + +use serde::{Serialize, Deserialize}; + +use crate::config::Configuration; +use crate::databases::database::Database; use crate::models::tracker_key::TrackerKey; use crate::errors::ServiceError; -use crate::models::user::User; -use serde::{Serialize, Deserialize}; #[derive(Debug, Serialize, Deserialize)] pub struct TorrentInfo { @@ -34,11 +35,11 @@ pub struct PeerId { pub struct TrackerService { cfg: Arc, - database: Arc, + database: Arc> } impl TrackerService { - pub fn new(cfg: Arc, database: Arc) -> TrackerService { + pub fn new(cfg: Arc, database: Arc>) -> TrackerService { TrackerService { cfg, database @@ -89,15 +90,15 @@ impl TrackerService { Err(ServiceError::InternalServerError) } - pub async fn get_personal_announce_url(&self, user: &User) -> Result { + pub async fn get_personal_announce_url(&self, user_id: i64) -> Result { let settings = self.cfg.settings.read().await; - let tracker_key = self.database.get_valid_tracker_key(user.user_id).await; + let tracker_key = self.database.get_user_tracker_key(user_id).await; match tracker_key { Some(v) => { Ok(format!("{}/{}", settings.tracker.url, v.key)) } None => { - match self.retrieve_new_tracker_key(user.user_id).await { + match self.retrieve_new_tracker_key(user_id).await { Ok(v) => { Ok(format!("{}/{}", settings.tracker.url, v.key)) }, Err(_) => { Err(ServiceError::TrackerOffline) } } @@ -128,7 +129,7 @@ impl TrackerService { println!("{:?}", tracker_key); - self.database.issue_tracker_key(&tracker_key, user_id).await?; + self.database.add_tracker_key(user_id, &tracker_key).await?; Ok(tracker_key) } @@ -165,9 +166,9 @@ impl TrackerService { Ok(torrent_info) } - pub async fn update_torrents(&self) -> Result<(), ()> { + pub async fn update_torrents(&self) -> Result<(), ServiceError> { println!("Updating torrents.."); - let torrents = self.database.get_all_torrent_ids().await?; + let torrents = self.database.get_all_torrents_compact().await?; for torrent in torrents { let _ = self.get_torrent_info(&torrent.info_hash).await; diff --git a/src/utils/mod.rs 
b/src/utils/mod.rs index 5b62b2e9..e14efa08 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -1,3 +1,2 @@ pub mod parse_torrent; pub mod time; -pub mod random; diff --git a/src/utils/random.rs b/src/utils/random.rs deleted file mode 100644 index 84b14948..00000000 --- a/src/utils/random.rs +++ /dev/null @@ -1,10 +0,0 @@ -use rand::distributions::Alphanumeric; -use rand::Rng; - -pub fn random_string(size: usize) -> String { - rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(size) - .map(char::from) - .collect() -} diff --git a/src/utils/time.rs b/src/utils/time.rs index 34aabf8e..45f60cb4 100644 --- a/src/utils/time.rs +++ b/src/utils/time.rs @@ -1,7 +1,3 @@ -use std::time::SystemTime; - pub fn current_time() -> u64 { - SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH).unwrap() - .as_secs() + chrono::prelude::Utc::now().timestamp() as u64 }
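The new SQLite driver in src/databases/sqlite.rs is built almost entirely on one sqlx idiom: derive `FromRow` on a model struct, then `query_as` + `bind` + `fetch_one`/`fetch_all`, mapping the `sqlx::Error` into a domain error. A minimal, self-contained sketch of that idiom (hypothetical table, struct and error names, not the project's; assumes sqlx with its `sqlite` feature):

```rust
use sqlx::{query_as, sqlite::SqlitePool, FromRow};

// Hypothetical domain error; the real driver maps into DatabaseError variants.
#[derive(Debug)]
enum DbError {
    NotFound,
    Other,
}

// Hypothetical row type; the real code maps UserProfile, TorrentListing, Category, ...
#[derive(Debug, FromRow)]
struct Profile {
    user_id: i64,
    username: String,
}

async fn get_profile(pool: &SqlitePool, username: &str) -> Result<Profile, DbError> {
    query_as::<_, Profile>("SELECT user_id, username FROM profiles WHERE username = ?")
        .bind(username) // parameters are bound, never formatted into the SQL string
        .fetch_one(pool)
        .await
        .map_err(|e| match e {
            sqlx::Error::RowNotFound => DbError::NotFound,
            _ => DbError::Other,
        })
}
```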
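Handlers now reach the database through a driver-agnostic trait handle: `connect_database` in main.rs chooses a backend from `settings.database.db_driver`, and tracker.rs appears to hold an `Arc<Box<dyn Database>>` (treat the exact type as an assumption). A deliberately tiny sketch of that trait-object pattern with `async_trait`, reduced to one method and with all names hypothetical:

```rust
use std::sync::Arc;
use async_trait::async_trait;

// Tiny stand-in for the project's Database trait (the real one has many methods).
#[async_trait]
trait Database: Send + Sync {
    async fn count_users(&self) -> Result<i64, ()>;
}

// Would wrap an sqlx SqlitePool in the real driver.
struct SqliteDatabase;

#[async_trait]
impl Database for SqliteDatabase {
    async fn count_users(&self) -> Result<i64, ()> {
        Ok(0) // placeholder instead of an actual SELECT COUNT(*)
    }
}

// Everything downstream shares one driver-agnostic handle.
fn connect(_db_driver: &str) -> Arc<Box<dyn Database>> {
    // a real implementation would match on the driver string here
    Arc::new(Box::new(SqliteDatabase))
}
```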
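`get_torrent` in src/handlers/torrent.rs builds the magnet link by URL-encoding the title and appending every tracker as a `&tr=` parameter. The same assembly pulled out as a standalone function, with made-up hash and tracker values:

```rust
fn build_magnet(info_hash: &str, title: &str, trackers: &[String]) -> String {
    let mut magnet = format!(
        "magnet:?xt=urn:btih:{}&dn={}",
        info_hash,
        urlencoding::encode(title)
    );

    // each tracker becomes an &tr= parameter (the handler puts the announce URL first in this list)
    for tracker in trackers {
        magnet.push_str(&format!("&tr={}", urlencoding::encode(tracker)));
    }

    magnet
}

fn main() {
    let trackers = vec!["udp://tracker.example.com:6969".to_string()];
    println!(
        "{}",
        build_magnet("0123456789abcdef0123456789abcdef01234567", "Big Buck Bunny", &trackers)
    );
}
```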
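The rewritten register/login flow in src/handlers/user.rs splits hashing and verification across the new `torrust_user_authentication` table: registration stores the PHC string produced by `Pbkdf2.hash_password`, and login parses it back with `PasswordHash::new` before `verify_password`. A standalone round trip using the same calls (password value made up, error handling reduced to `expect`):

```rust
use pbkdf2::password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString};
use pbkdf2::Pbkdf2;

fn main() {
    // register: hash the password with a random salt and keep only the PHC string
    let salt = SaltString::generate(&mut OsRng);
    let password_hash = Pbkdf2
        .hash_password(b"correct horse battery staple", &salt)
        .expect("hashing failed")
        .to_string();

    // login: parse the stored string and verify the supplied password against it
    let parsed_hash = PasswordHash::new(&password_hash).expect("invalid stored hash");
    assert!(Pbkdf2.verify_password(b"correct horse battery staple", &parsed_hash).is_ok());
    assert!(Pbkdf2.verify_password(b"wrong password", &parsed_hash).is_err());
}
```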
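Email verification now signs the user's numeric id (`sub: i64`) into the JWT instead of the username, and verify_user decodes it before calling `database.verify_email(user_id)`. A sketch of the encode/decode cycle such a token goes through with jsonwebtoken; the secret and claim values below are made up:

```rust
use jsonwebtoken::{decode, encode, Algorithm, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct VerifyClaims {
    iss: String,
    sub: i64, // the user_id, no longer a username string
    exp: u64,
}

fn main() {
    let secret = b"hypothetical-secret"; // stand-in; the application reads its secret from configuration

    let claims = VerifyClaims {
        iss: "email-verification".to_string(),
        sub: 42,
        exp: 4_000_000_000, // far-future expiry, mirroring the long-lived verification token
    };

    // mailer side: embed the claims in the verification URL's token
    let token = encode(&Header::default(), &claims, &EncodingKey::from_secret(secret)).unwrap();

    // handler side: decode the token and pull the user_id back out
    let decoded = decode::<VerifyClaims>(
        &token,
        &DecodingKey::from_secret(secret),
        &Validation::new(Algorithm::HS256),
    )
    .unwrap();

    assert_eq!(decoded.claims.sub, 42);
}
```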
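Dates handed to SQLite (tracker-key expiry, user bans) are stored as `%Y-%m-%d %H:%M:%S` strings, and `current_time()` now comes from chrono. A small example of the timestamp-to-ISO-8601 conversion performed in `add_tracker_key`; the timestamp value is arbitrary:

```rust
use chrono::NaiveDateTime;

fn main() {
    // pretend this came from a TrackerKey's valid_until field (epoch seconds)
    let valid_until: i64 = 1_700_000_000;

    // same conversion add_tracker_key performs before inserting the row
    let date_expiry = NaiveDateTime::from_timestamp(valid_until, 0)
        .format("%Y-%m-%d %H:%M:%S")
        .to_string();

    assert_eq!(date_expiry, "2023-11-14 22:13:20");
}
```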