4 changes: 3 additions & 1 deletion .github/workflows/develop.yml
@@ -18,7 +18,9 @@ jobs:
- name: Check
run: cargo check --all-targets
- name: Clippy
run: cargo clippy --all-targets
run: cargo clippy --version && cargo clippy --all-targets -- -D clippy::pedantic
env:
CARGO_INCREMENTAL: 0
- name: Install torrent edition tool (needed for testing)
run: cargo install imdl
- name: Unit and integration tests
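Note: the stricter Clippy invocation above (adding -- -D clippy::pedantic) is what motivates most of the source changes below. A minimal sketch of the style being adopted, assuming the pedantic group includes lints such as clippy::uninlined_format_args; the snippet is illustrative and not part of this diff:

fn main() {
    let socket_address = "0.0.0.0:3000";

    // Positional argument: the style this PR moves away from.
    let old_style = format!("Listening on http://{}", socket_address);

    // Inlined capture: the variable is named directly inside the braces.
    let new_style = format!("Listening on http://{socket_address}");

    assert_eq!(old_style, new_style); // same output, only the call style changes
}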
4 changes: 2 additions & 2 deletions src/app.rs
@@ -99,10 +99,10 @@ pub async fn run(configuration: Configuration) -> Running {

let running_server = server.run();

let starting_message = format!("Listening on http://{}", socket_address);
let starting_message = format!("Listening on http://{socket_address}");
info!("{}", starting_message);
// Logging could be disabled or redirected to file. So print to stdout too.
println!("{}", starting_message);
println!("{starting_message}");

Running {
api_server: running_server,
4 changes: 2 additions & 2 deletions src/bootstrap/config.rs
@@ -25,13 +25,13 @@ use crate::config::Configuration;
/// Will panic if configuration is not found or cannot be parsed
pub async fn init_configuration() -> Configuration {
if env::var(ENV_VAR_CONFIG).is_ok() {
println!("Loading configuration from env var `{}`", ENV_VAR_CONFIG);
println!("Loading configuration from env var `{ENV_VAR_CONFIG}`");

Configuration::load_from_env_var(ENV_VAR_CONFIG).unwrap()
} else {
let config_path = env::var(ENV_VAR_CONFIG_PATH).unwrap_or_else(|_| ENV_VAR_DEFAULT_CONFIG_PATH.to_string());

println!("Loading configuration from config file `{}`", config_path);
println!("Loading configuration from config file `{config_path}`");

match Configuration::load_from_file(&config_path).await {
Ok(config) => config,
2 changes: 1 addition & 1 deletion src/config.rs
@@ -236,7 +236,7 @@ impl Configuration {

let torrust_config: TorrustBackend = match config.try_deserialize() {
Ok(data) => Ok(data),
Err(e) => Err(ConfigError::Message(format!("Errors while processing config: {}.", e))),
Err(e) => Err(ConfigError::Message(format!("Errors while processing config: {e}."))),
}?;

Ok(Configuration {
18 changes: 7 additions & 11 deletions src/databases/mysql.rs
@@ -298,7 +298,7 @@ impl Database for Mysql {
) -> Result<TorrentsResponse, database::Error> {
let title = match search {
None => "%".to_string(),
Some(v) => format!("%{}%", v),
Some(v) => format!("%{v}%"),
};

let sort_query: String = match sort {
@@ -322,7 +322,7 @@
if let Ok(sanitized_category) = self.get_category_from_name(category).await {
let mut str = format!("tc.name = '{}'", sanitized_category.name);
if i > 0 {
str = format!(" OR {}", str);
str = format!(" OR {str}");
}
category_filters.push_str(&str);
i += 1;
@@ -331,10 +331,7 @@
if category_filters.is_empty() {
String::new()
} else {
format!(
"INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({}) ",
category_filters
)
format!("INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({category_filters}) ")
}
} else {
String::new()
@@ -344,16 +341,15 @@
"SELECT tt.torrent_id, tp.username AS uploader, tt.info_hash, ti.title, ti.description, tt.category_id, DATE_FORMAT(tt.date_uploaded, '%Y-%m-%d %H:%i:%s') AS date_uploaded, tt.size AS file_size,
CAST(COALESCE(sum(ts.seeders),0) as signed) as seeders,
CAST(COALESCE(sum(ts.leechers),0) as signed) as leechers
FROM torrust_torrents tt {}
FROM torrust_torrents tt {category_filter_query}
INNER JOIN torrust_user_profiles tp ON tt.uploader_id = tp.user_id
INNER JOIN torrust_torrent_info ti ON tt.torrent_id = ti.torrent_id
LEFT JOIN torrust_torrent_tracker_stats ts ON tt.torrent_id = ts.torrent_id
WHERE title LIKE ?
GROUP BY tt.torrent_id",
category_filter_query
GROUP BY tt.torrent_id"
);

let count_query = format!("SELECT COUNT(*) as count FROM ({}) AS count_table", query_string);
let count_query = format!("SELECT COUNT(*) as count FROM ({query_string}) AS count_table");

let count_result: Result<i64, database::Error> = query_as(&count_query)
.bind(title.clone())
@@ -364,7 +360,7 @@

let count = count_result?;

query_string = format!("{} ORDER BY {} LIMIT ?, ?", query_string, sort_query);
query_string = format!("{query_string} ORDER BY {sort_query} LIMIT ?, ?");

let res: Vec<TorrentListing> = sqlx::query_as::<_, TorrentListing>(&query_string)
.bind(title)
18 changes: 7 additions & 11 deletions src/databases/sqlite.rs
@@ -288,7 +288,7 @@ impl Database for Sqlite {
) -> Result<TorrentsResponse, database::Error> {
let title = match search {
None => "%".to_string(),
Some(v) => format!("%{}%", v),
Some(v) => format!("%{v}%"),
};

let sort_query: String = match sort {
@@ -312,7 +312,7 @@
if let Ok(sanitized_category) = self.get_category_from_name(category).await {
let mut str = format!("tc.name = '{}'", sanitized_category.name);
if i > 0 {
str = format!(" OR {}", str);
str = format!(" OR {str}");
}
category_filters.push_str(&str);
i += 1;
@@ -321,10 +321,7 @@
if category_filters.is_empty() {
String::new()
} else {
format!(
"INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({}) ",
category_filters
)
format!("INNER JOIN torrust_categories tc ON tt.category_id = tc.category_id AND ({category_filters}) ")
}
} else {
String::new()
@@ -334,16 +331,15 @@
"SELECT tt.torrent_id, tp.username AS uploader, tt.info_hash, ti.title, ti.description, tt.category_id, tt.date_uploaded, tt.size AS file_size,
CAST(COALESCE(sum(ts.seeders),0) as signed) as seeders,
CAST(COALESCE(sum(ts.leechers),0) as signed) as leechers
FROM torrust_torrents tt {}
FROM torrust_torrents tt {category_filter_query}
INNER JOIN torrust_user_profiles tp ON tt.uploader_id = tp.user_id
INNER JOIN torrust_torrent_info ti ON tt.torrent_id = ti.torrent_id
LEFT JOIN torrust_torrent_tracker_stats ts ON tt.torrent_id = ts.torrent_id
WHERE title LIKE ?
GROUP BY tt.torrent_id",
category_filter_query
GROUP BY tt.torrent_id"
);

let count_query = format!("SELECT COUNT(*) as count FROM ({}) AS count_table", query_string);
let count_query = format!("SELECT COUNT(*) as count FROM ({query_string}) AS count_table");

let count_result: Result<i64, database::Error> = query_as(&count_query)
.bind(title.clone())
@@ -354,7 +350,7 @@

let count = count_result?;

query_string = format!("{} ORDER BY {} LIMIT ?, ?", query_string, sort_query);
query_string = format!("{query_string} ORDER BY {sort_query} LIMIT ?, ?");

let res: Vec<TorrentListing> = sqlx::query_as::<_, TorrentListing>(&query_string)
.bind(title)
11 changes: 5 additions & 6 deletions src/mailer.rs
@@ -74,14 +74,13 @@ impl Service {

let mail_body = format!(
r#"
Welcome to Torrust, {}!
Welcome to Torrust, {username}!

Please click the confirmation link below to verify your account.
{}
{verification_url}

If this account wasn't made by you, you can ignore this email.
"#,
username, verification_url
"#
);

let ctx = VerifyTemplate {
@@ -112,7 +111,7 @@ impl Service {
match self.mailer.send(mail).await {
Ok(_res) => Ok(()),
Err(e) => {
eprintln!("Failed to send email: {}", e);
eprintln!("Failed to send email: {e}");
Err(ServiceError::FailedToSendVerificationEmail)
}
}
@@ -147,7 +146,7 @@ impl Service {
base_url = cfg_base_url;
}

format!("{}/user/email/verify/{}", base_url, token)
format!("{base_url}/user/email/verify/{token}")
}
}

2 changes: 1 addition & 1 deletion src/routes/user.rs
@@ -333,7 +333,7 @@ pub async fn ban(req: HttpRequest, app_data: WebAppData) -> ServiceResult<impl R
app_data.database.ban_user(user_profile.user_id, &reason, date_expiry).await?;

Ok(HttpResponse::Ok().json(OkResponse {
data: format!("Banned user: {}", to_be_banned_username),
data: format!("Banned user: {to_be_banned_username}"),
}))
}

4 changes: 2 additions & 2 deletions src/upgrades/from_v1_0_0_to_v2_0_0/databases/mod.rs
@@ -7,12 +7,12 @@ pub mod sqlite_v1_0_0;
pub mod sqlite_v2_0_0;

pub async fn current_db(db_filename: &str) -> Arc<SqliteDatabaseV1_0_0> {
let source_database_connect_url = format!("sqlite://{}?mode=ro", db_filename);
let source_database_connect_url = format!("sqlite://{db_filename}?mode=ro");
Arc::new(SqliteDatabaseV1_0_0::new(&source_database_connect_url).await)
}

pub async fn new_db(db_filename: &str) -> Arc<SqliteDatabaseV2_0_0> {
let target_database_connect_url = format!("sqlite://{}?mode=rwc", db_filename);
let target_database_connect_url = format!("sqlite://{db_filename}?mode=rwc");
Arc::new(SqliteDatabaseV2_0_0::new(&target_database_connect_url).await)
}

@@ -11,7 +11,7 @@ pub async fn transfer_categories(source_database: Arc<SqliteDatabaseV1_0_0>, tar
println!("[v1] categories: {:?}", &source_categories);

let result = target_database.reset_categories_sequence().await.unwrap();
println!("[v2] reset categories sequence result: {:?}", result);
println!("[v2] reset categories sequence result: {result:?}");

for cat in &source_categories {
println!("[v2] adding category {:?} with id {:?} ...", &cat.name, &cat.category_id);
7 changes: 2 additions & 5 deletions tests/e2e/config.rs
@@ -25,14 +25,11 @@ pub const ENV_VAR_E2E_DEFAULT_CONFIG_PATH: &str = "./config-idx-back.local.toml"
/// Will panic if configuration is not found or cannot be parsed
pub async fn init_shared_env_configuration() -> Configuration {
if env::var(ENV_VAR_E2E_CONFIG).is_ok() {
println!("Loading configuration for E2E env from env var `{}`", ENV_VAR_E2E_CONFIG);
println!("Loading configuration for E2E env from env var `{ENV_VAR_E2E_CONFIG}`");

Configuration::load_from_env_var(ENV_VAR_E2E_CONFIG).unwrap()
} else {
println!(
"Loading configuration for E2E env from config file `{}`",
ENV_VAR_E2E_DEFAULT_CONFIG_PATH
);
println!("Loading configuration for E2E env from config file `{ENV_VAR_E2E_DEFAULT_CONFIG_PATH}`");

match Configuration::load_from_file(ENV_VAR_E2E_DEFAULT_CONFIG_PATH).await {
Ok(config) => config,
4 changes: 2 additions & 2 deletions tests/e2e/contexts/torrent/asserts.rs
@@ -17,7 +17,7 @@ pub async fn expected_torrent(mut uploaded_torrent: Torrent, env: &TestEnv, down
// by the backend. For some of them it makes sense (`announce` and `announce_list`),
// for others it does not.

let tracker_url = format!("{}", env.server_settings().unwrap().tracker.url);
let tracker_url = env.server_settings().unwrap().tracker.url.to_string();

let tracker_key = match downloader {
Some(logged_in_user) => get_user_tracker_key(logged_in_user, env).await,
@@ -65,7 +65,7 @@ pub fn build_announce_url(tracker_url: &str, tracker_key: &Option<TrackerKey>) -
if let Some(key) = &tracker_key {
format!("{tracker_url}/{}", key.key)
} else {
format!("{tracker_url}")
tracker_url.to_string()
}
}

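The second hunk above drops a format! call that only reproduces a single string in favour of to_string(). A small standalone sketch of the pattern; the function name and signature are illustrative, not the repository's build_announce_url, and the lint name (Clippy's useless_format) is assumed rather than shown in the diff:

fn build_url(tracker_url: &str, key: Option<&str>) -> String {
    match key {
        // Interpolation earns its keep when two pieces are joined.
        Some(key) => format!("{tracker_url}/{key}"),
        // format!("{tracker_url}") would only copy the string;
        // to_string() states that intent directly.
        None => tracker_url.to_string(),
    }
}

Called as build_url("udp://tracker.example:6969", None), it simply returns the tracker URL unchanged.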
13 changes: 5 additions & 8 deletions tests/e2e/contexts/torrent/contract.rs
@@ -107,11 +107,7 @@ mod for_guests {
// When we request more torrents than the page size limit
let response = client
.get_torrents(Query::with_params(
[QueryParam::new(
"page_size",
&format!("{}", (max_torrent_page_size + 1).to_string()),
)]
.to_vec(),
[QueryParam::new("page_size", &format!("{}", (max_torrent_page_size + 1)))].to_vec(),
))
.await;

@@ -172,7 +168,7 @@ mod for_guests {

let torrent_details_response: TorrentDetailsResponse = serde_json::from_str(&response.body).unwrap();

let tracker_url = format!("{}", env.server_settings().unwrap().tracker.url);
let tracker_url = env.server_settings().unwrap().tracker.url;
let encoded_tracker_url = urlencoding::encode(&tracker_url);

let expected_torrent = TorrentDetails {
@@ -396,7 +392,7 @@ mod for_authenticated_users {
// Upload the second torrent with the same infohash as the first one.
// We need to change the title otherwise the torrent will be rejected
// because of the duplicate title.
first_torrent_clone.index_info.title = format!("{}-clone", first_torrent_title);
first_torrent_clone.index_info.title = format!("{first_torrent_title}-clone");
let form: UploadTorrentMultipartForm = first_torrent_clone.index_info.into();
let response = client.upload_torrent(form.into()).await;

@@ -430,7 +426,8 @@
let tracker_key = get_user_tracker_key(&downloader, &env)
.await
.expect("uploader should have a valid tracker key");
let tracker_url = format!("{}", env.server_settings().unwrap().tracker.url);

let tracker_url = env.server_settings().unwrap().tracker.url;

assert_eq!(
torrent.announce.unwrap(),
2 changes: 1 addition & 1 deletion tests/e2e/contexts/torrent/steps.rs
@@ -23,7 +23,7 @@ pub async fn upload_torrent(uploader: &LoggedInUserData, torrent: &TorrentIndexI
let res = serde_json::from_str::<UploadedTorrentResponse>(&response.body);

if res.is_err() {
println!("Error deserializing response: {:?}", res);
println!("Error deserializing response: {res:?}");
}

TorrentListedInIndex::from(torrent.clone(), res.unwrap().data.torrent_id)
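Several of the test and upgrade helpers above and below switch debug prints from println!("{:?}", x) to the inlined {x:?} form. A brief standalone sketch, with made-up types and names, showing that inline capture also accepts format specifiers such as :? :

#[derive(Debug)]
struct UploadOutcome {
    torrent_id: i64,
}

fn report(res: &Result<UploadOutcome, String>) {
    // {res:?} captures the local binding and applies the Debug formatter,
    // replacing the older println!("{:?}", res) spelling.
    println!("upload result: {res:?}");
}

fn main() {
    report(&Ok(UploadOutcome { torrent_id: 1 }));
    report(&Err("missing torrent_id field".to_string()));
}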
8 changes: 4 additions & 4 deletions tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v1_0_0.rs
@@ -14,7 +14,7 @@ pub struct SqliteDatabaseV1_0_0 {

impl SqliteDatabaseV1_0_0 {
pub async fn db_connection(database_file: &str) -> Self {
let connect_url = format!("sqlite://{}?mode=rwc", database_file);
let connect_url = format!("sqlite://{database_file}?mode=rwc");
Self::new(&connect_url).await
}

@@ -28,7 +28,7 @@ impl SqliteDatabaseV1_0_0 {

/// Execute migrations for database in version v1.0.0
pub async fn migrate(&self, fixtures_dir: &str) {
let migrations_dir = format!("{}database/v1.0.0/migrations/", fixtures_dir);
let migrations_dir = format!("{fixtures_dir}database/v1.0.0/migrations/");

let migrations = vec![
"20210831113004_torrust_users.sql",
@@ -47,13 +47,13 @@ impl SqliteDatabaseV1_0_0 {
}

async fn run_migration_from_file(&self, migration_file_path: &str) {
println!("Executing migration: {:?}", migration_file_path);
println!("Executing migration: {migration_file_path:?}");

let sql = fs::read_to_string(migration_file_path).expect("Should have been able to read the file");

let res = sqlx::query(&sql).execute(&self.pool).await;

println!("Migration result {:?}", res);
println!("Migration result {res:?}");
}

pub async fn insert_category(&self, category: &CategoryRecordV1) -> Result<i64, sqlx::Error> {
2 changes: 1 addition & 1 deletion tests/upgrades/from_v1_0_0_to_v2_0_0/sqlite_v2_0_0.rs
@@ -72,7 +72,7 @@ pub struct SqliteDatabaseV2_0_0 {

impl SqliteDatabaseV2_0_0 {
pub async fn db_connection(database_file: &str) -> Self {
let connect_url = format!("sqlite://{}?mode=rwc", database_file);
let connect_url = format!("sqlite://{database_file}?mode=rwc");
Self::new(&connect_url).await
}
