diff --git a/apps/daedalus_client/.env.example b/apps/daedalus_client/.env.local
similarity index 100%
rename from apps/daedalus_client/.env.example
rename to apps/daedalus_client/.env.local
diff --git a/apps/docs/src/content/docs/contributing/labrinth.md b/apps/docs/src/content/docs/contributing/labrinth.md
index 8ce0ac8817..155e854cbd 100644
--- a/apps/docs/src/content/docs/contributing/labrinth.md
+++ b/apps/docs/src/content/docs/contributing/labrinth.md
@@ -7,7 +7,7 @@ This project is part of our [monorepo](https://github.com/modrinth/code). You ca
 [labrinth] is the Rust-based backend serving Modrinth's API with the help of the [Actix](https://actix.rs) framework. To get started with a labrinth instance, install docker, docker-compose (which comes with Docker), and [Rust]. The initial startup can be done simply with the command `docker-compose up`, or with `docker compose up` (Compose V2 and later). That will deploy a PostgreSQL database on port 5432 and a MeiliSearch instance on port 7700. To run the API itself, you'll need to use the `cargo run` command, this will deploy the API on port 8000.
 
-To get a basic configuration, copy the `.env.example` file to `.env`. Now, you'll have to install the sqlx CLI, which can be done with cargo:
+To get a basic configuration, copy the `.env.local` file to `.env`. Now, you'll have to install the sqlx CLI, which can be done with cargo:
 
 ```bash
 cargo install --git https://github.com/launchbadge/sqlx sqlx-cli --no-default-features --features postgres,rustls
 ```
diff --git a/apps/frontend/.env.example b/apps/frontend/.env.example
deleted file mode 100644
index 43ceb1d532..0000000000
--- a/apps/frontend/.env.example
+++ /dev/null
@@ -1,3 +0,0 @@
-BASE_URL=https://api.modrinth.com/v2/
-BROWSER_BASE_URL=https://api.modrinth.com/v2/
-PYRO_BASE_URL=https://archon.modrinth.com/
diff --git a/apps/frontend/.env.local b/apps/frontend/.env.local
new file mode 100644
index 0000000000..f764f85137
--- /dev/null
+++ b/apps/frontend/.env.local
@@ -0,0 +1,5 @@
+BASE_URL=http://127.0.0.1:8000/v2/
+BROWSER_BASE_URL=http://127.0.0.1:8000/v2/
+PYRO_BASE_URL=https://staging-archon.modrinth.com
+PROD_OVERRIDE=true
+
diff --git a/apps/labrinth/.env.example b/apps/labrinth/.env.local
similarity index 95%
rename from apps/labrinth/.env.example
rename to apps/labrinth/.env.local
index 1fbb8eebdf..8675bcd697 100644
--- a/apps/labrinth/.env.example
+++ b/apps/labrinth/.env.local
@@ -2,7 +2,7 @@ DEBUG=true
 RUST_LOG=info,sqlx::query=warn
 SENTRY_DSN=none
 
-SITE_URL=https://modrinth.com
+SITE_URL=http://localhost:3000
 CDN_URL=https://staging-cdn.modrinth.com
 LABRINTH_ADMIN_KEY=feedbeef
 RATE_LIMIT_IGNORE_KEY=feedbeef
@@ -87,8 +87,8 @@ SMTP_HOST=none
 SMTP_PORT=465
 SMTP_TLS=tls
 
-SITE_VERIFY_EMAIL_PATH=none
-SITE_RESET_PASSWORD_PATH=none
+SITE_VERIFY_EMAIL_PATH=auth/verify-email
+SITE_RESET_PASSWORD_PATH=auth/reset-password
 SITE_BILLING_PATH=none
 
 SENDY_URL=none
diff --git a/apps/labrinth/src/models/v3/projects.rs b/apps/labrinth/src/models/v3/projects.rs
index 55631a6f63..6e9f17cf29 100644
--- a/apps/labrinth/src/models/v3/projects.rs
+++ b/apps/labrinth/src/models/v3/projects.rs
@@ -1,4 +1,5 @@
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
+use std::mem;
 
 use crate::database::models::loader_fields::VersionField;
 use crate::database::models::project_item::{LinkUrl, ProjectQueryResult};
@@ -8,6 +9,7 @@ use crate::models::ids::{
 };
 use ariadne::ids::UserId;
 use chrono::{DateTime, Utc};
+use itertools::Itertools;
 use serde::{Deserialize, Serialize};
 use validator::Validate;
 
@@ -95,19 +97,6 @@ pub struct Project {
     pub fields: HashMap<String, Vec<serde_json::Value>>,
 }
 
-fn remove_duplicates(values: Vec<serde_json::Value>) -> Vec<serde_json::Value> {
-    let mut seen = HashSet::new();
-    values
-        .into_iter()
-        .filter(|value| {
-            // Convert the JSON value to a string for comparison
-            let as_string = value.to_string();
-            // Check if the string is already in the set
-            seen.insert(as_string)
-        })
-        .collect()
-}
-
 // This is a helper function to convert a list of VersionFields into a HashMap of field name to vecs of values
 // This allows for removal of duplicates
 pub fn from_duplicate_version_fields(
@@ -132,9 +121,9 @@ pub fn from_duplicate_version_fields(
         }
     }
 
-    // Remove duplicates by converting to string and back
+    // Remove duplicates
     for (_, v) in fields.iter_mut() {
-        *v = remove_duplicates(v.clone());
+        *v = mem::take(v).into_iter().unique().collect_vec();
     }
     fields
 }
@@ -624,7 +613,7 @@ pub struct Version {
     pub downloads: u32,
     /// The type of the release - `Alpha`, `Beta`, or `Release`.
    pub version_type: VersionType,
-    /// The status of tne version
+    /// The status of the version
     pub status: VersionStatus,
     /// The requested status of the version (used for scheduling)
     pub requested_status: Option<VersionStatus>,
@@ -880,7 +869,7 @@ impl std::fmt::Display for DependencyType {
 }
 
 impl DependencyType {
-    // These are constant, so this can remove unneccessary allocations (`to_string`)
+    // These are constant, so this can remove unnecessary allocations (`to_string`)
     pub fn as_str(&self) -> &'static str {
         match self {
             DependencyType::Required => "required",
diff --git a/apps/labrinth/src/routes/v2_reroute.rs b/apps/labrinth/src/routes/v2_reroute.rs
index a73baac4d3..b6a1937578 100644
--- a/apps/labrinth/src/routes/v2_reroute.rs
+++ b/apps/labrinth/src/routes/v2_reroute.rs
@@ -264,11 +264,11 @@ pub fn convert_side_types_v2_bools(
 }
 
 pub fn capitalize_first(input: &str) -> String {
-    let mut result = input.to_owned();
-    if let Some(first_char) = result.get_mut(0..1) {
-        first_char.make_ascii_uppercase();
-    }
-    result
+    input
+        .chars()
+        .enumerate()
+        .map(|(i, c)| if i == 0 { c.to_ascii_uppercase() } else { c })
+        .collect()
 }
 
 #[cfg(test)]
diff --git a/apps/labrinth/src/routes/v3/version_file.rs b/apps/labrinth/src/routes/v3/version_file.rs
index 744aa8d9be..4c7133221b 100644
--- a/apps/labrinth/src/routes/v3/version_file.rs
+++ b/apps/labrinth/src/routes/v3/version_file.rs
@@ -52,10 +52,9 @@ pub async fn get_version_from_hash(
         .map(|x| x.1)
         .ok();
     let hash = info.into_inner().0.to_lowercase();
-    let algorithm = hash_query
-        .algorithm
-        .clone()
-        .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
+    let algorithm = hash_query.algorithm.clone().unwrap_or_else(|| {
+        default_algorithm_from_hashes(std::slice::from_ref(&hash))
+    });
     let file = database::models::DBVersion::get_file_from_hash(
         algorithm,
         hash,
@@ -140,10 +139,9 @@ pub async fn get_update_from_hash(
         .ok();
     let hash = info.into_inner().0.to_lowercase();
     if let Some(file) = database::models::DBVersion::get_file_from_hash(
-        hash_query
-            .algorithm
-            .clone()
-            .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])),
+        hash_query.algorithm.clone().unwrap_or_else(|| {
+            default_algorithm_from_hashes(std::slice::from_ref(&hash))
+        }),
         hash,
         hash_query.version_id.map(|x| x.into()),
         &**pool,
@@ -577,10 +575,9 @@ pub async fn delete_file(
         .1;
 
     let hash = info.into_inner().0.to_lowercase();
-    let algorithm = hash_query
-        .algorithm
-        .clone()
-        .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
+    let algorithm = hash_query.algorithm.clone().unwrap_or_else(|| {
+        default_algorithm_from_hashes(std::slice::from_ref(&hash))
+    });
     let file = database::models::DBVersion::get_file_from_hash(
         algorithm.clone(),
         hash,
@@ -709,10 +706,9 @@ pub async fn download_version(
         .ok();
 
     let hash = info.into_inner().0.to_lowercase();
-    let algorithm = hash_query
-        .algorithm
-        .clone()
-        .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
+    let algorithm = hash_query.algorithm.clone().unwrap_or_else(|| {
+        default_algorithm_from_hashes(std::slice::from_ref(&hash))
+    });
     let file = database::models::DBVersion::get_file_from_hash(
         algorithm.clone(),
         hash,
diff --git a/apps/labrinth/tests/search.rs b/apps/labrinth/tests/search.rs
index e05b13defb..e8562f5c74 100644
--- a/apps/labrinth/tests/search.rs
+++ b/apps/labrinth/tests/search.rs
@@ -151,7 +151,7 @@ async fn index_swaps() {
         test_env.api.remove_project("alpha", USER_USER_PAT).await;
     assert_status!(&resp, StatusCode::NO_CONTENT);
 
-    // We should not get any results, because the project has been deleted
+    // Deletions should not be indexed immediately
     let projects = test_env
         .api
         .search_deserialized(
@@ -160,7 +160,8 @@ async fn index_swaps() {
             USER_USER_PAT,
         )
         .await;
-    assert_eq!(projects.total_hits, 0);
+    assert_eq!(projects.total_hits, 1);
+    assert!(projects.hits[0].slug.as_ref().unwrap().contains("alpha"));
 
     // But when we reindex, it should be gone
     let resp = test_env.api.reset_search_index().await;
diff --git a/packages/app-lib/.env.example b/packages/app-lib/.env.local
similarity index 100%
rename from packages/app-lib/.env.example
rename to packages/app-lib/.env.local