diff --git a/.github/workflows/loco-rs-ci.yml b/.github/workflows/loco-rs-ci.yml index 5129cfc2b..c055fe22b 100644 --- a/.github/workflows/loco-rs-ci.yml +++ b/.github/workflows/loco-rs-ci.yml @@ -44,6 +44,21 @@ jobs: permissions: contents: read + services: + postgres: + image: postgres + env: + POSTGRES_DB: postgres_test + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + ports: + - "5432:5432" + # Set health checks to wait until postgres has started + options: --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: - name: Checkout the code uses: actions/checkout@v4 @@ -58,3 +73,5 @@ jobs: with: command: test args: --all-features --workspace --exclude loco-gen --exclude loco + env: + DATABASE_URL: postgres://postgres:postgres@localhost:5432/postgres_test diff --git a/CHANGELOG.md b/CHANGELOG.md index 385b91ae0..c128ab926 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,37 @@ ## Unreleased +* feat: `cargo loco routes` will now pretty-print routes +* fix: guard jwt error behind feature flag. [https://github.com/loco-rs/loco/pull/1032](https://github.com/loco-rs/loco/pull/1032) +* fix: logger file_appender not using the separated format setting. [https://github.com/loco-rs/loco/pull/1036](https://github.com/loco-rs/loco/pull/1036) +* Added a `seed` CLI command. [https://github.com/loco-rs/loco/pull/1046](https://github.com/loco-rs/loco/pull/1046) +* Updated validator to 0.19. [https://github.com/loco-rs/loco/pull/993](https://github.com/loco-rs/loco/pull/993) +* Testing helpers: simplified function calls + adding html selector. [https://github.com/loco-rs/loco/pull/1047](https://github.com/loco-rs/loco/pull/1047) + ### Breaking Changes + #### Updated Import Paths + The testing module import path has been updated. To adapt your code, update imports from: + ```rust + use loco_rs::testing; + ``` + to: + ```rust + use loco_rs::testing::prelude::*; + ``` + #### Simplified Function Calls + Function calls within the testing module no longer require the testing:: prefix. Update your code accordingly. For example: + + Before: + ```rust + let boot = testing::boot_test::<App>().await.unwrap(); + ``` + + After: + ```rust + let boot = boot_test::<App>().await.unwrap(); + ``` +* implement commands to manage background jobs. [https://github.com/loco-rs/loco/pull/1071](https://github.com/loco-rs/loco/pull/1071) + + ## v0.13.2 * static fallback now returns 200 and not 404 [https://github.com/loco-rs/loco/pull/991](https://github.com/loco-rs/loco/pull/991) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9115ea351..3447e9adb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,12 +54,19 @@ Just clone the project and run `cargo test`. You can see how we test in [.github/workflows](.github/workflows/) #### Snapshots -To update/create a snapshots we are using [insta](https://github.com/mitsuhiko/insta). all you need to do is install insta (cargo install cargo-insta) and run the following command: +We use [insta](https://github.com/mitsuhiko/insta) for snapshot testing, which helps us detect changes in output formats and behavior. To work with snapshots: + +1. Install the insta CLI tool: +```sh +cargo install cargo-insta ``` + +2. Run tests and review/update snapshots: +```sh cargo insta test --review ``` -In case of cli changes we snapshot the binary commands. in case of changes run the following command yo update the CLI snapshot +For CLI-related changes, we maintain separate snapshots of binary command outputs.
To update these CLI snapshots: ```sh LOCO_CI_MODE=true TRYCMD=overwrite cargo test ``` diff --git a/Cargo.toml b/Cargo.toml index fe46694c0..be586f6e0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -33,7 +33,7 @@ default = [ ] auth_jwt = ["dep:jsonwebtoken"] cli = ["dep:clap"] -testing = ["dep:axum-test"] +testing = ["dep:axum-test", "dep:scraper"] with-db = ["dep:sea-orm", "dep:sea-orm-migration", "loco-gen/with-db"] # Storage features all_storage = ["storage_aws_s3", "storage_azure", "storage_gcp"] @@ -45,6 +45,8 @@ cache_inmem = ["dep:moka"] bg_redis = ["dep:rusty-sidekiq", "dep:bb8"] bg_pg = ["dep:sqlx", "dep:ulid"] bg_sqlt = ["dep:sqlx", "dep:ulid"] +## Testing feature flags +integration_test = [] [dependencies] loco-gen = { version = "0.13.2", path = "./loco-gen" } @@ -53,6 +55,13 @@ backtrace_printer = { version = "1.3.0" } # cli clap = { version = "4.4.7", features = ["derive"], optional = true } colored = "2" +reqwest = { version = "0.12.7", features = [ + "charset", + "http2", + "json", + "macos-system-configuration", + "rustls-tls", +], default-features = false } sea-orm = { version = "1.1.0", features = [ @@ -104,7 +113,7 @@ byte-unit = "4.0.19" argon2 = { version = "0.5.2", features = ["std"] } rand = { version = "0.8.5", features = ["std"] } jsonwebtoken = { version = "9.3.0", optional = true } -validator = { version = "0.18.1", features = ["derive"] } +validator = { version = "0.19.0", features = ["derive"] } futures-util = "0.3" tower = { workspace = true } hyper = "1.1" @@ -143,6 +152,8 @@ ulid = { version = "1", optional = true } rusty-sidekiq = { version = "0.11.0", default-features = false, optional = true } bb8 = { version = "0.8.1", optional = true } +scraper = { version = "0.21.0", optional = true } + [workspace.dependencies] chrono = { version = "0.4", features = ["serde"] } @@ -188,3 +199,10 @@ tree-fs = { version = "0.2.1" } reqwest = { version = "0.12.7" } serial_test = "3.1.1" tower = { workspace = true, features = ["util"] } +sqlx = { version = "0.8.2", default-features = false, features = [ + "macros", + "json", + "postgres", + "chrono", + "sqlite", +] } diff --git a/README.ru.md b/README.ru.md index a4e8aae1c..3a68237b8 100644 --- a/README.ru.md +++ b/README.ru.md @@ -6,7 +6,7 @@

-🚂 *Loco* - Rust on Rails. +🚂 Loco is Rust on Rails.

@@ -45,7 +45,7 @@ ```sh cargo install loco -cargo install sea-orm-cli # Для работы с базами данных +cargo install sea-orm-cli # Only when DB is needed ``` @@ -56,13 +56,18 @@ cargo install sea-orm-cli # Для работы с базами данных ```sh ❯ loco new ✔ ❯ App name? · myapp -✔ ❯ What would you like to build? · SaaS app (with DB and user auth) +✔ ❯ What would you like to build? · Saas App with client side rendering ✔ ❯ Select a DB Provider · Sqlite ✔ ❯ Select your background worker type · Async (in-process tokio async tasks) -✔ ❯ Select an asset serving configuration · Client (configures assets for frontend serving) 🚂 Loco app generated successfully in: myapp/ + +- assets: You've selected `clientside` for your asset serving configuration. + +Next step, build your frontend: + $ cd frontend/ + $ npm install && npm run build ``` diff --git a/docs-site/config.toml b/docs-site/config.toml index 3133b8eeb..de12a6648 100644 --- a/docs-site/config.toml +++ b/docs-site/config.toml @@ -1,7 +1,7 @@ # The URL the site will be built for base_url = "https://loco.rs" -title = "Loco" -description = "Loco is a productivity-first web and service framework in Rust" +title = "Loco.rs - Productivity-first Rust Fullstack Web Framework" +description = "Loco.rs is like Ruby on Rails for Rust. Use it to quickly build and deploy Rust based apps from zero to production." # Whether to automatically compile all Sass files in the sass directory diff --git a/docs-site/content/casts/003-scaffolding-crud-with-html.md b/docs-site/content/casts/003-scaffolding-crud-with-html.md index cdee166f7..d9f7395b4 100644 --- a/docs-site/content/casts/003-scaffolding-crud-with-html.md +++ b/docs-site/content/casts/003-scaffolding-crud-with-html.md @@ -17,6 +17,6 @@ id = "EircfwF8c0E" Reference material for this episode: -* Loco.rs docs: [routes in controllers](https://loco.rs/docs/getting-started/scaffold/) +* Loco.rs docs: [routes in controllers](https://loco.rs/docs/the-app/controller/#routes-in-controllers) * The [SaaS starter](https://loco.rs/docs/starters/saas/) * The [REST API starter](https://loco.rs/docs/starters/rest-api/) diff --git a/docs-site/content/casts/005-testing-tasks.md b/docs-site/content/casts/005-testing-tasks.md index 6ae56741e..19c339879 100644 --- a/docs-site/content/casts/005-testing-tasks.md +++ b/docs-site/content/casts/005-testing-tasks.md @@ -17,7 +17,7 @@ id = "485JlLA-T6U" Reference material for this episode: -* Loco.rs docs: [routes in controllers](https://loco.rs/docs/the-app/task/) -* The [SaaS starter](https://loco.rs/docs/starters/saas/) -* The [REST API starter](https://loco.rs/docs/starters/rest-api/) -* The [Lightweight starter](https://loco.rs/docs/starters/service/) +* Loco.rs docs: [routes in controllers](https://loco.rs/docs/processing/task/) +* The [SaaS starter](https://loco.rs/docs/getting-started/starters/#saas-starter) +* The [REST API starter](https://loco.rs/docs/getting-started/starters/#rest-api-starter) +* The [Lightweight starter](https://loco.rs/docs/getting-started/starters/#lightweight-service-starter) diff --git a/docs-site/content/casts/006-mailers.md b/docs-site/content/casts/006-mailers.md index 9ee3a8431..3d9501ee7 100644 --- a/docs-site/content/casts/006-mailers.md +++ b/docs-site/content/casts/006-mailers.md @@ -16,4 +16,4 @@ id = "ieGeihxLGC8" Reference material for this episode: -* Loco.rs docs: [Mailers](https://loco.rs/docs/the-app/mailers/) +* Loco.rs docs: [Mailers](https://loco.rs/docs/processing/mailers/) diff --git a/docs-site/content/docs/getting-started/guide.md 
b/docs-site/content/docs/getting-started/guide.md index 4f2f11465..79cf889ba 100644 --- a/docs-site/content/docs/getting-started/guide.md +++ b/docs-site/content/docs/getting-started/guide.md @@ -550,9 +550,7 @@ pub async fn list(State(ctx): State) -> Result { } pub async fn add(State(ctx): State, Json(params): Json) -> Result { - let mut item = ActiveModel { - ..Default::default() - }; + let mut item: ActiveModel = Default::default(); params.update(&mut item); let item = item.insert(&ctx.db).await?; format::json(item) @@ -586,7 +584,7 @@ pub fn routes() -> Routes { .add("/", post(add)) .add("/:id", get(get_one)) .add("/:id", delete(remove)) - .add("/:id", post(update)) + .add("/:id", patch(update)) } ``` @@ -678,7 +676,7 @@ pub fn routes() -> Routes { // .add("/", get(list)) // .add("/:id", get(get_one)) // .add("/:id", delete(remove)) - // .add("/:id", post(update)) + // .add("/:id", patch(update)) } ``` @@ -800,6 +798,8 @@ use loco_rs::task::Vars; use crate::models::users; +pub struct UserReport; + #[async_trait] impl Task for UserReport { fn task(&self) -> TaskInfo { @@ -855,9 +855,7 @@ Go back to `src/controllers/comments.rs` and take a look at the `add` function: ```rust pub async fn add(State(ctx): State, Json(params): Json) -> Result { - let mut item = ActiveModel { - ..Default::default() - }; + let mut item: ActiveModel = Default::default(); params.update(&mut item); let item = item.insert(&ctx.db).await?; format::json(item) @@ -872,16 +870,14 @@ async fn add( State(ctx): State, Json(params): Json, ) -> Result { - // we only want to make sure it exists - let _current_user = crate::models::users::Model::find_by_pid(&ctx.db, &auth.claims.pid).await?; - - // next, update - // homework/bonus: make a comment _actually_ belong to user (user_id) - let mut item = ActiveModel { - ..Default::default() - }; - params.update(&mut item); - let item = item.insert(&ctx.db).await?; - format::json(item) + // we only want to make sure it exists + let _current_user = crate::models::users::Model::find_by_pid(&ctx.db, &auth.claims.pid).await?; + + // next, update + // homework/bonus: make a comment _actually_ belong to user (user_id) + let mut item: ActiveModel = Default::default(); + params.update(&mut item); + let item = item.insert(&ctx.db).await?; + format::json(item) } ``` diff --git a/docs-site/content/docs/infrastructure/storage.md b/docs-site/content/docs/infrastructure/storage.md index 6eee6aa04..7a0f5a596 100644 --- a/docs-site/content/docs/infrastructure/storage.md +++ b/docs-site/content/docs/infrastructure/storage.md @@ -190,10 +190,12 @@ async fn upload_file( By testing file storage in your controller you can follow this example: ```rust +use loco_rs::testing::prelude::*; + #[tokio::test] #[serial] async fn can_register() { - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let file_content = "loco file upload"; let file_part = Part::bytes(file_content.as_bytes()).file_name("loco.txt"); diff --git a/docs-site/content/docs/processing/mailers.md b/docs-site/content/docs/processing/mailers.md index 0f7e02b07..80be540a3 100644 --- a/docs-site/content/docs/processing/mailers.md +++ b/docs-site/content/docs/processing/mailers.md @@ -218,18 +218,19 @@ Test Description: - Retrieve the mailer instance from the context and call the deliveries() function, which contains information about the number of sent emails and their content. 
```rust +use loco_rs::testing::prelude::*; #[tokio::test] #[serial] async fn can_register() { configure_insta!(); - testing::request::<App, _, _>(|request, ctx| async move { + request::<App, _, _>(|request, ctx| async move { // Create a request for user registration. // Now you can call the context mailer and use the deliveries function. with_settings!({ - filters => testing::cleanup_email() + filters => cleanup_email() }, { assert_debug_snapshot!(ctx.mailer.unwrap().deliveries()); }); diff --git a/docs-site/content/docs/processing/task.md b/docs-site/content/docs/processing/task.md index 114d9f582..51d88c540 100644 --- a/docs-site/content/docs/processing/task.md +++ b/docs-site/content/docs/processing/task.md @@ -32,7 +32,17 @@ Generate the task: ```sh -cd ./examples/demo && cargo loco generate task --help +Generate a Task based on the given name + +Usage: demo_app-cli generate task [OPTIONS] + +Arguments: + Name of the thing to generate + +Options: + -e, --environment Specify the environment [default: development] + -h, --help Print help + -V, --version Print version ``` diff --git a/docs-site/content/docs/processing/workers.md b/docs-site/content/docs/processing/workers.md index 3ced08f27..49ecf5765 100644 --- a/docs-site/content/docs/processing/workers.md +++ b/docs-site/content/docs/processing/workers.md @@ -187,6 +187,52 @@ workers: mode: BackgroundQueue ``` +## Managing Workers from the UI +You can manage the jobs queue with the [Loco admin job project](https://github.com/loco-rs/admin-jobs). +![](https://github.com/loco-rs/admin-jobs/raw/main/media/screenshot.png) + +### Managing Job Queues via CLI + +The job queue management feature provides a powerful and flexible way to handle the lifecycle of jobs in your application. It allows you to cancel, clean up, remove outdated jobs, export job details, and import jobs, ensuring efficient and organized job processing. + +## Features Overview + +- **Cancel Jobs** + Provides the ability to cancel specific jobs by name, updating their status to `cancelled`. This is useful for stopping jobs that are no longer needed, relevant, or if you want to prevent them from being processed when a bug is detected. +- **Clean Up Jobs** + Enables the removal of jobs that have already been completed or cancelled. This helps maintain a clean and efficient job queue by eliminating unnecessary entries. +- **Purge Outdated Jobs** + Allows you to delete jobs based on their age, measured in days. This is particularly useful for maintaining a lean job queue by removing older, irrelevant jobs. + **Note**: You can use the `--dump` option to export job details to a file, manually modify the job parameters in the exported file, and then use the `import` feature to reintroduce the updated jobs into the system. +- **Export Job Details** + Supports exporting the details of all jobs to a specified location in file format. This feature is valuable for backups, audits, or further analysis. +- **Import Jobs** + Facilitates importing jobs from external files, making it easy to restore or add new jobs to the system. This ensures seamless integration of external job data into your application's workflow.
+ +To access the job management commands, use the following CLI structure: + +```sh +Managing jobs queue + +Usage: demo_app-cli jobs [OPTIONS] + +Commands: + cancel Cancels jobs with the specified names, setting their status to `cancelled` + tidy Deletes jobs that are either completed or cancelled + purge Deletes jobs based on their age in days + dump Saves the details of all jobs to files in the specified folder + import Imports jobs from a file + help Print this message or the help of the given subcommand(s) + +Options: + -e, --environment Specify the environment [default: development] + -h, --help Print help + -V, --version Print version +``` + + + + ## Testing a Worker You can easily test your worker background jobs using `Loco`. Ensure that your worker is set to the `ForegroundBlocking` mode, which blocks the job, ensuring it runs synchronously. When testing the worker, the test will wait until your worker is completed, allowing you to verify if the worker accomplished its intended tasks. @@ -199,11 +245,13 @@ Here's an example of how the test should be structured: ```rust +use loco_rs::testing::prelude::*; + #[tokio::test] #[serial] async fn test_run_report_worker_worker() { // Set up the test environment - let boot = testing::boot_test::<App>().await.unwrap(); + let boot = boot_test::<App>().await.unwrap(); // Execute the worker in 'ForegroundBlocking' mode, preventing it from running asynchronously assert!( diff --git a/docs-site/content/docs/the-app/controller.md b/docs-site/content/docs/the-app/controller.md index cbf35b9ea..c4f4b6233 100644 --- a/docs-site/content/docs/the-app/controller.md +++ b/docs-site/content/docs/the-app/controller.md @@ -722,7 +722,7 @@ middlewares: `Loco` also allow us to apply [layers](https://docs.rs/tower/latest/tower/trait.Layer.html) to specific handlers or routes. -For more information on handler and route based middleware, refer to the [middleware](/docs/the-app/middlewares) +For more information on handler and route based middleware, refer to the [middleware](/docs/the-app/controller/#middleware) documentation. @@ -858,18 +858,19 @@ impl PaginationResponse { # Testing When testing controllers, the goal is to call the router's controller endpoint and verify the HTTP response, including the status code, response content, headers, and more. -To initialize a test request, use `testing::request`, which prepares your app routers, providing the request instance and the application context. +To initialize a test request, bring the testing helpers into scope with `use loco_rs::testing::prelude::*;` and call `request`, which prepares your app routers, providing the request instance and the application context. In the following example, we have a POST endpoint that returns the data sent in the POST request. ```rust +use loco_rs::testing::prelude::*; #[tokio::test] #[serial] async fn can_print_echo() { configure_insta!(); - testing::request::<App, _, _>(|request, _ctx| async move { + request::<App, _, _>(|request, _ctx| async move { let response = request .post("/example") .json(&serde_json::json!({"site": "Loco"})) diff --git a/docs-site/content/docs/the-app/models.md b/docs-site/content/docs/the-app/models.md index 54a8bcc16..a8f66df10 100644 --- a/docs-site/content/docs/the-app/models.md +++ b/docs-site/content/docs/the-app/models.md @@ -526,44 +526,31 @@ impl Hooks for App { This implementation ensures that the seed is executed when the seed function is called. Adjust the specifics based on your application's structure and requirements. -## Running seeds +## Managing Seed via CLI -The seed process is not executed automatically.
You can trigger the seed process either through a task or during testing. +- **Reset the Database** + Clear all existing data before importing seed files. This is useful when you want to start with a fresh database state, ensuring no old data remains. +- **Dump Database Tables to Files** + Export the contents of your database tables to files. This feature allows you to back up the current state of your database or prepare data for reuse across environments. -### Using a Task - -1. Create a seeding task by following the instructions in the [Task Documentation](@/docs/processing/task.md). -2. Configure the task to execute the `seed` function, as demonstrated in the example below: - -```rust -use std::collections::BTreeMap; - -use async_trait::async_trait; -use loco_rs::{ - app::AppContext, - db, - task::{Task, TaskInfo}, - Result, -}; -use sea_orm::EntityTrait; +To access the seed commands, use the following CLI structure: + +```sh +Seed your database with initial data or dump tables to files -use crate::{app::App, models::_entities::users}; +Usage: demo_app-cli db seed [OPTIONS] -pub struct SeedData; -#[async_trait] -impl Task for SeedData { - fn task(&self) -> TaskInfo { - TaskInfo { - name: "seed".to_string(), - detail: "Seeding data".to_string(), - } - } - async fn run(&self, app_context: &AppContext, vars: &BTreeMap) -> Result<()> { - let path = std::path::Path::new("src/fixtures"); - db::run_app_seed::(&app_context.db, path).await - } -} +Options: + -r, --reset Clears all data in the database before seeding + -d, --dump Dumps all database tables to files + --dump-tables Specifies specific tables to dump + --from Specifies the folder containing seed files (defaults to 'src/fixtures') [default: src/fixtures] + -e, --environment Specify the environment [default: development] + -h, --help Print help + -V, --version Print version ``` + + ### Using a Test @@ -572,12 +559,14 @@ impl Task for SeedData { 2. In your test section, follow the example below: ```rust +use loco_rs::testing::prelude::*; + #[tokio::test] #[serial] async fn handle_create_with_password_with_duplicate() { - let boot = testing::boot_test::().await; - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await; + seed::(&boot.app_context.db).await.unwrap(); assert!(get_user_by_id(1).ok()); } ``` @@ -695,11 +684,13 @@ If you used the generator to crate a model migration, you should also have an au A typical test contains everything you need to set up test data, boot the app, and reset the database automatically before the testing code runs. It looks like this: ```rust +use loco_rs::testing::prelude::*; + async fn can_find_by_pid() { configure_insta!(); - let boot = testing::boot_test::().await; - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await; + seed::(&boot.app_context.db).await.unwrap(); let existing_user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111").await; @@ -747,13 +738,15 @@ impl Hooks for App { ## Seeding ```rust +use loco_rs::testing::prelude::*; + #[tokio::test] #[serial] async fn is_user_exists() { configure_insta!(); - let boot = testing::boot_test::().await; - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await; + seed::(&boot.app_context.db).await.unwrap(); assert!(get_user_by_id(1).ok()); } @@ -770,14 +763,15 @@ Example using [insta](https://crates.io/crates/insta) for snapshots. in the following example you can use `cleanup_user_model` which clean all user model data. 
```rust +use loco_rs::testing::prelude::*; #[tokio::test] #[serial] async fn can_create_user() { - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { // create user test with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!(current_user_request.text()); }); diff --git a/docs-site/content/docs/the-app/your-project.md b/docs-site/content/docs/the-app/your-project.md index 3fc7f647d..bada3ef0a 100644 --- a/docs-site/content/docs/the-app/your-project.md +++ b/docs-site/content/docs/the-app/your-project.md @@ -48,7 +48,28 @@ cargo loco --help ```sh -cd ./examples/demo && cargo loco --help +The one-person framework for Rust + +Usage: demo_app-cli [OPTIONS] + +Commands: + start Start an app + db Perform DB operations + routes Describe all application endpoints + middleware Describe all application middlewares + task Run a custom task + jobs Managing jobs queue + scheduler Run the scheduler + generate code generation creates a set of files and code templates based on a predefined set of rules + doctor Validate and diagnose configurations + version Display the app version + watch Watch and restart the app + help Print this message or the help of the given subcommand(s) + +Options: + -e, --environment Specify the environment [default: development] + -h, --help Print help + -V, --version Print version ``` @@ -119,7 +140,22 @@ Scaffolding is an efficient and speedy method for generating key components of a See scaffold command: ```sh -cd ./examples/demo && cargo loco generate scaffold --help +Generates a CRUD scaffold, model and controller + +Usage: demo_app-cli generate scaffold [OPTIONS] [FIELDS]... + +Arguments: + Name of the thing to generate + [FIELDS]... Model fields, eg. title:string hits:int + +Options: + -k, --kind The kind of scaffold to generate [possible values: api, html, htmx] + --htmx Use HTMX scaffold + --html Use HTML scaffold + --api Use API scaffold + -e, --environment Specify the environment [default: development] + -h, --help Print help + -V, --version Print version ``` @@ -157,7 +193,7 @@ The scaffold generator will build several files in your application: | `assets/views/posts/show.html` | Show post template. only for HTML and HTMX templates. | ## Your app configuration -Configuration in `loco` lives in `config/` and by default sets up 3 different environments: +By default, loco stores its configuration files in the config/ directory. It provides predefined configurations for three environments: ``` config/ @@ -175,6 +211,9 @@ When nothing is given, the default value is `development`. The `Loco` framework allows support for custom environments in addition to the default environment. To add a custom environment, create a configuration file with a name matching the environment identifier used in the preceding example. +### Overriding the Default Configuration Path +To use a custom configuration directory, set the `LOCO_CONFIG_FOLDER` environment variable to the desired folder path. This will instruct `loco` to load configuration files from the specified directory instead of the default `config/` folder. + ### Placeholders / variables in config It is possible to inject values into a configuration file. In this example, we get a port value from the `NODE_PORT` environment variable: @@ -260,9 +299,9 @@ Here is a detailed description of the interface (listening, etc.) parameters und * `port:` as the name says, for changing ports, mostly when behind a load balancer, etc. 
-* `binding:` for changing what the IP interface "binds" to, mostly, when you are behind a load balancer like `ngnix` you bind to a local address (when the LB is also there). However you can also bind to "world" (`0.0.0.0`). You can set the binding: field via config, or via the CLI (using the `-b` flag) -- which is what Rails is doing. +* `binding:` for changing what the IP interface "binds" to, mostly, when you are behind a load balancer like `nginx` you bind to a local address (when the LB is also there). However, you can also bind to "world" (`0.0.0.0`). You can set the binding: field via config, or via the CLI (using the `-b` flag) -- which is what Rails is doing. -* `host:` - for "visibility" use cases or out-of-band use cases. For example, some times you want to display the current server host (in terms of domain name, etc.), which serves for visibility. And some times, as in the case of emails -- your server address is "out of band", meaning when I open my gmail account and I have your email -- I have to click what looks like your external address or visible address (official domain name, etc), and not an internal "host" address which is what may be the wrong thing to do (imagine an email link pointing to "http://127.0.0.1/account/verify") +* `host:` - for "visibility" use cases or out-of-band use cases. For example, sometimes you want to display the current server host (in terms of domain name, etc.), which serves for visibility. And sometimes, as in the case of emails -- your server address is "out of band", meaning when I open my gmail account and I have your email -- I have to click what looks like your external address or visible address (official domain name, etc), and not an internal "host" address which is what may be the wrong thing to do (imagine an email link pointing to "http://127.0.0.1/account/verify") diff --git a/docs-site/templates/base.html b/docs-site/templates/base.html index 82da86e0d..0d7c06b05 100644 --- a/docs-site/templates/base.html +++ b/docs-site/templates/base.html @@ -12,18 +12,74 @@ + {% if page.extra.meta %} + + {% for data in page.extra.meta %} + + {% endfor %} + {% endif %} + + {# Site title #} + {% set current_path = current_path | default(value="/") %} + {% if current_path == "/" %} + + {{ config.title | default(value="Home") }} + + + {% else %} + + {% if page.title %} {{ page.title ~ " - Loco.rs" }} + {% elif section.title %} {{ section.title ~ " - Loco.rs" }} + {% elif config.title %} {{ config.title }} + {% else %} Post ~ " - Loco.rs" {% endif %} + + + {% if not page_has_og_title %} + + + + {% endif %} + {% endif %} + + + {% if not page_has_og_description %} + {% if page.description %} + + {% elif config.description %} + + {% endif %} + {% endif %} + + {% if not page_has_description %} + {% if page.description %} + + {% elif config.description %} + + {% endif %} + {% endif %} + + - - - - + - diff --git a/examples/demo/Cargo.lock b/examples/demo/Cargo.lock index 218fb20da..1c18600ef 100644 --- a/examples/demo/Cargo.lock +++ b/examples/demo/Cargo.lock @@ -4,18 +4,18 @@ version = 3 [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "aead" @@ -70,6 +70,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", + "getrandom", "once_cell", "version_check", "zerocopy", @@ -107,9 +108,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" [[package]] name = "android-tzdata" @@ -128,9 +129,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.14" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -143,43 +144,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.3" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" [[package]] name = "arc-swap" @@ -201,9 +202,9 @@ dependencies = [ [[package]] name = "arrayvec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "assert-json-diff" @@ -250,9 +251,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.11" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd066d0b4ef8ecb03a55319dc13aa6910616d0f44008a045bb1835af830abff5" +checksum = 
"0cb8f1d480b0ea3783ab015936d2a55c87e219676f0c0b7dec61494043f21857" dependencies = [ "brotli", "flate2", @@ -266,14 +267,14 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.12.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8828ec6e544c02b0d6691d21ed9f9218d0384a82542855073c2a3f58304aaf0" +checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" dependencies = [ "async-task", "concurrent-queue", - "fastrand 2.1.0", - "futures-lite 2.3.0", + "fastrand", + "futures-lite", "slab", ] @@ -285,60 +286,31 @@ checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ "async-channel 2.3.1", "async-executor", - "async-io 2.3.3", - "async-lock 3.4.0", + "async-io", + "async-lock", "blocking", - "futures-lite 2.3.0", + "futures-lite", "once_cell", "tokio", ] [[package]] name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" +checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" dependencies = [ - "async-lock 3.4.0", + "async-lock", "cfg-if", "concurrent-queue", "futures-io", - "futures-lite 2.3.0", + "futures-lite", "parking", - "polling 3.7.2", - "rustix 0.38.34", + "polling", + "rustix", "slab", "tracing", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", + "windows-sys 0.59.0", ] [[package]] @@ -354,20 +326,20 @@ dependencies = [ [[package]] name = "async-std" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" dependencies = [ "async-attributes", "async-channel 1.9.0", "async-global-executor", - "async-io 1.13.0", - "async-lock 2.8.0", + "async-io", + "async-lock", "crossbeam-utils", "futures-channel", "futures-core", "futures-io", - "futures-lite 1.13.0", + "futures-lite", "gloo-timers", "kv-log-macro", "log", @@ -381,9 +353,9 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" dependencies = [ "async-stream-impl", "futures-core", @@ -392,13 +364,13 @@ dependencies = [ [[package]] name = "async-stream-impl" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 
2.0.66", + "syn 2.0.87", ] [[package]] @@ -409,13 +381,13 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.80" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -441,15 +413,15 @@ checksum = "3c1e7e457ea78e524f48639f551fd79703ac3f2237f5ecccdf4708f8a75ad373" [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "axum" -version = "0.7.7" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", "axum-core", @@ -504,25 +476,26 @@ dependencies = [ [[package]] name = "axum-extra" -version = "0.9.3" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0be6ea09c9b96cb5076af0de2e383bd2bc0c18f827cf1967bdd353e0b910d733" +checksum = "c794b30c904f0a1c2fb7740f7df7f7972dfaa14ef6f57cb6178dc63e5dca2f04" dependencies = [ "axum", "axum-core", "bytes", "cookie", + "fastrand", "futures-util", "http 1.1.0", "http-body", "http-body-util", "mime", + "multer", "pin-project-lite", "serde", - "tower 0.4.13", + "tower 0.5.1", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -533,20 +506,21 @@ checksum = "57d123550fa8d071b7255cb0cc04dc302baa6c8c4a79f55701552684d8399bce" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "axum-test" -version = "16.1.0" +version = "16.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020db7902508b764267eb61f5972129b3d5b6037632523af76ca8eb40504f89a" +checksum = "017cbca2776229a7100ebee44e065fcf5baccea6fc4cb9e5bea8328d83863a03" dependencies = [ "anyhow", "assert-json-diff", "auto-future", "axum", "bytes", + "bytesize", "cookie", "http 1.1.0", "http-body-util", @@ -595,17 +569,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -640,9 +614,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bb8" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10cf871f3ff2ce56432fddc2615ac7acc3aa22ca321f8fea800846fbb32f188" +checksum = "d89aabfae550a5c44b43ab941844ffcd2e993cb6900b342debf59e9ea74acdb8" dependencies = [ "async-trait", "futures-util", @@ -652,9 +626,9 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" +checksum = "8f850665a0385e070b64c38d2354e6c104c8479c59868d1e48a0c13ee2c7a1c1" dependencies = [ "autocfg", "libm", @@ -666,15 +640,9 @@ dependencies = [ [[package]] name = "bitflags" -version = "1.3.2" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" dependencies = [ "serde", ] @@ -718,15 +686,15 @@ dependencies = [ "async-channel 2.3.1", "async-task", "futures-io", - "futures-lite 2.3.0", + "futures-lite", "piper", ] [[package]] name = "borsh" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" +checksum = "2506947f73ad44e344215ccd6403ac2ae18cd8e046e581a441bf8d199f257f03" dependencies = [ "borsh-derive", "cfg_aliases", @@ -734,23 +702,22 @@ dependencies = [ [[package]] name = "borsh-derive" -version = "1.5.1" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" +checksum = "c2593a3b8b938bd68373196c9832f516be11fa487ef4ae745eb282e6a56a7244" dependencies = [ "once_cell", "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.66", - "syn_derive", + "syn 2.0.87", ] [[package]] name = "brotli" -version = "6.0.0" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -769,9 +736,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.9.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" +checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22" dependencies = [ "memchr", "serde", @@ -829,15 +796,21 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" + +[[package]] +name = "bytesize" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc" [[package]] name = "cc" -version = "1.1.30" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16803a61b81d9eabb7eae2588776c4c1e584b738ede45fdbb4c972cec1e9945" +checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" dependencies = [ "jobserver", "libc", @@ -868,7 +841,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -915,9 +888,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.7" +version = "4.5.21" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5db83dced34638ad474f39f250d7fea9598bdd239eaced1bdf45d597da0f433f" +checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f" dependencies = [ "clap_builder", "clap_derive", @@ -925,9 +898,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.7" +version = "4.5.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e204572485eb3fbf28f871612191521df159bc3e15a9f5064c66dba3a8c05f" +checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec" dependencies = [ "anstream", "anstyle", @@ -937,27 +910,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.5" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7" [[package]] name = "colorchoice" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" @@ -1044,17 +1017,27 @@ dependencies = [ "version_check", ] +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.12" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "0ca741a962e1b0bff6d724a1a0958b686406e853bb14061f218562e1896f95e6" dependencies = [ "libc", ] @@ -1169,6 +1152,29 @@ dependencies = [ "typenum", ] +[[package]] +name = "cssparser" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c66d1cd8ed61bf80b38432613a7a2f09401ab8d0501110655f8b341484a3e3" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa", + "phf", + "smallvec", +] + +[[package]] +name = "cssparser-macros" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" +dependencies = [ + "quote", + "syn 2.0.87", +] + [[package]] name = "ctr" version = "0.9.2" @@ -1199,7 +1205,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -1210,7 +1216,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.66", + "syn 2.0.87", ] 
[[package]] @@ -1280,6 +1286,17 @@ dependencies = [ "serde", ] +[[package]] +name = "derive_more" +version = "0.99.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "deunicode" version = "1.6.0" @@ -1340,13 +1357,13 @@ dependencies = [ [[package]] name = "displaydoc" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -1361,6 +1378,21 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "dtoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" + +[[package]] +name = "dtoa-short" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87" +dependencies = [ + "dtoa", +] + [[package]] name = "duct" version = "0.13.7" @@ -1384,15 +1416,21 @@ dependencies = [ [[package]] name = "dunce" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "ego-tree" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c6ba7d4eec39eaa9ab24d44a0e73a7949a1095a8b3f3abb11eddf27dbb56a53" [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" dependencies = [ "serde", ] @@ -1409,9 +1447,9 @@ dependencies = [ [[package]] name = "email_address" -version = "0.2.4" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2153bd83ebc09db15bcbdc3e2194d901804952e3dc96967e1cd3b0c5c32d112" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" [[package]] name = "encode_unicode" @@ -1421,9 +1459,9 @@ checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] @@ -1494,36 +1532,27 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - -[[package]] -name = "fastrand" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", + "libredox", + "windows-sys 0.59.0", ] [[package]] name = "flate2" -version = "1.0.30" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -1624,9 +1653,9 @@ dependencies = [ [[package]] name = "flume" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" dependencies = [ "futures-core", "futures-sink", @@ -1639,6 +1668,21 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1663,11 +1707,21 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" +dependencies = [ + "mac", + "new_debug_unreachable", +] + [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -1680,9 +1734,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -1690,15 +1744,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = 
"0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -1718,32 +1772,17 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - -[[package]] -name = "futures-lite" -version = "2.3.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" dependencies = [ - "fastrand 2.1.0", + "fastrand", "futures-core", "futures-io", "parking", @@ -1752,26 +1791,26 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-timer" @@ -1781,9 +1820,9 @@ checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -1797,6 +1836,15 @@ dependencies = [ "slab", ] +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + [[package]] name = "generic-array" version = "0.14.7" @@ -1817,6 +1865,15 @@ dependencies = [ "winapi", ] +[[package]] +name = "getopts" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" +dependencies = [ + "unicode-width", +] + [[package]] name = "getrandom" version = "0.2.15" @@ -1842,9 +1899,9 @@ dependencies = [ 
[[package]] name = "gimli" -version = "0.29.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -1854,15 +1911,15 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] @@ -1871,16 +1928,16 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" dependencies = [ - "bitflags 2.5.0", + "bitflags", "ignore", "walkdir", ] [[package]] name = "gloo-timers" -version = "0.2.6" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" dependencies = [ "futures-channel", "futures-core", @@ -1888,6 +1945,25 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "h2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1907,6 +1983,12 @@ dependencies = [ "allocator-api2", ] +[[package]] +name = "hashbrown" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" + [[package]] name = "hashlink" version = "0.9.1" @@ -1984,6 +2066,20 @@ dependencies = [ "windows", ] +[[package]] +name = "html5ever" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e15626aaf9c351bc696217cbe29cb9b5e86c43f8a46b5e2f5c6c5cf7cb904ce" +dependencies = [ + "log", + "mac", + "markup5ever", + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "http" version = "0.2.12" @@ -2008,9 +2104,9 @@ dependencies = [ [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http 1.1.0", @@ -2037,9 +2133,9 @@ checksum = "08a397c49fec283e3d6211adbe480be95aae5f304cfb923e9970e08956d5168a" [[package]] name = "httparse" -version = "1.9.3" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0e7a4dd27b9476dc40cb050d3632d3bba3a70ddbff012285f7f8559a1e7e545" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -2074,13 +2170,14 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = 
"1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" dependencies = [ "bytes", "futures-channel", "futures-util", + "h2", "http 1.1.0", "http-body", "httparse", @@ -2092,11 +2189,44 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-util" -version = "0.1.5" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b875924a60b96e5d7b9ae7b066540b1dd1cbd90d1828f54c92e02a283351c56" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", @@ -2105,18 +2235,17 @@ dependencies = [ "http-body", "hyper", "pin-project-lite", - "socket2 0.5.7", + "socket2", "tokio", - "tower 0.4.13", "tower-service", "tracing", ] [[package]] name = "iana-time-zone" -version = "0.1.60" +version = "0.1.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2206,9 +2335,9 @@ checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" [[package]] name = "icu_properties" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f8ac670d7422d7f76b32e17a5db556510825b29ec9154f235977c9caba61036" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" dependencies = [ "displaydoc", "icu_collections", @@ -2250,7 +2379,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -2261,37 +2390,36 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", ] [[package]] -name = "idna" -version = "1.0.0" +name = "idna_adapter" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4716a3a0933a1d01c2f72450e89596eb51dd34ef3c211ccd875acdf1f8fe47ed" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" dependencies = [ "icu_normalizer", "icu_properties", - "smallvec", - "utf8_iter", ] [[package]] name = "ignore" -version = 
"0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", "log", "memchr", - "regex-automata 0.4.7", + "regex-automata 0.4.9", "same-file", "walkdir", "winapi-util", @@ -2299,18 +2427,18 @@ dependencies = [ [[package]] name = "include_dir" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd" dependencies = [ "include_dir_macros", ] [[package]] name = "include_dir_macros" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75" dependencies = [ "proc-macro2", "quote", @@ -2318,12 +2446,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" dependencies = [ "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.1", "serde", ] @@ -2335,7 +2463,7 @@ checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -2349,9 +2477,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.39.0" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "810ae6042d48e2c9e9215043563a58a80b877bc863228a74cf10c49d4620a6f5" +checksum = "7e9ffc4d4892617c50a928c52b2961cb5174b6fc6ebf252b2fac9d21955c48b8" dependencies = [ "console", "lazy_static", @@ -2363,15 +2491,6 @@ dependencies = [ "similar", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - [[package]] name = "intl-memoizer" version = "0.5.2" @@ -2392,15 +2511,10 @@ dependencies = [ ] [[package]] -name = "io-lifetimes" -version = "1.0.11" +name = "ipnet" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" [[package]] name = "ipnetwork" @@ -2413,20 +2527,20 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi 0.4.0", "libc", "windows-sys 0.52.0", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.0" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" @@ -2454,18 +2568,18 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" dependencies = [ "wasm-bindgen", ] @@ -2505,28 +2619,29 @@ dependencies = [ [[package]] name = "lettre" -version = "0.11.7" +version = "0.11.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a62049a808f1c4e2356a2a380bd5f2aca3b011b0b482cf3b914ba1731426969" +checksum = "0161e452348e399deb685ba05e55ee116cae9410f4f51fe42d597361444521d9" dependencies = [ "async-trait", "base64 0.22.1", "chumsky", "email-encoding", "email_address", - "fastrand 2.1.0", + "fastrand", "futures-io", "futures-util", "hostname", "httpdate", - "idna 0.5.0", + "idna", "mime", "nom", "percent-encoding", "quoted_printable", "rustls", "rustls-pemfile", - "socket2 0.5.7", + "rustls-pki-types", + "socket2", "tokio", "tokio-rustls", "url", @@ -2535,15 +2650,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.155" +version = "0.2.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" [[package]] name = "libm" -version = "0.2.8" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libredox" @@ -2551,8 +2666,9 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags", "libc", + "redox_syscall", ] [[package]] @@ -2572,12 +2688,6 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -2602,7 +2712,7 @@ dependencies = [ [[package]] name = "loco-gen" -version = "0.13.0" +version = "0.13.2" dependencies = [ "chrono", "clap", @@ -2618,7 +2728,7 @@ dependencies = [ [[package]] name = "loco-rs" -version = "0.13.1" +version = "0.13.2" dependencies = [ "argon2", "async-trait", @@ -2650,7 +2760,9 @@ dependencies = [ "object_store", "rand", "regex", + "reqwest", "rusty-sidekiq", + "scraper", "sea-orm", "sea-orm-migration", "semver", @@ -2661,6 +2773,7 @@ dependencies = [ "sqlx", "tera", "thiserror", + "thousands", "tokio", 
"tokio-cron-scheduler", "tokio-util", @@ -2677,13 +2790,33 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" dependencies = [ "value-bag", ] +[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" + +[[package]] +name = "markup5ever" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82c88c6129bd24319e62a0359cb6b958fa7e8be6e19bb1663bc396b90883aca5" +dependencies = [ + "log", + "phf", + "phf_codegen", + "string_cache", + "string_cache_codegen", + "tendril", +] + [[package]] name = "matchers" version = "0.1.0" @@ -2732,9 +2865,9 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" dependencies = [ "mime", "unicase", @@ -2748,11 +2881,11 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ - "adler", + "adler2", ] [[package]] @@ -2769,9 +2902,9 @@ dependencies = [ [[package]] name = "moka" -version = "0.12.7" +version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0d88686dc561d743b40de8269b26eaf0dc58781bde087b0984646602021d08" +checksum = "32cf62eb4dd975d2dde76432fb1075c49e3ee2331cf36f1f8fd4b66550d32b6f" dependencies = [ "crossbeam-channel", "crossbeam-epoch", @@ -2804,6 +2937,29 @@ dependencies = [ "version_check", ] +[[package]] +name = "native-tls" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + [[package]] name = "nom" version = "7.1.3" @@ -2832,9 +2988,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", "num-traits", @@ -2871,7 +3027,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -2916,18 +3072,18 @@ dependencies = [ [[package]] name = "object" -version = 
"0.36.0" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "576dfe1fc8f9df304abb159d767a29d0476f7750fbf8aa7ad07816004a207434" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "object_store" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25a0c4b3a0e31f8b66f71ad8064521efa773910196e2cde791436f13409f3b45" +checksum = "6eb4c22c6154a1e759d7099f9ffad7cc5ef8245f9efbab4a41b92623079c82f3" dependencies = [ "async-trait", "bytes", @@ -2946,9 +3102,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "opaque-debug" @@ -2956,6 +3112,50 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +[[package]] +name = "openssl" +version = "0.10.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "ordered-float" version = "3.9.2" @@ -2967,12 +3167,12 @@ dependencies = [ [[package]] name = "os_pipe" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29d73ba8daf8fac13b0501d1abeddcfe21ba7401ada61a819144b6c2a4f32209" +checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2997,7 +3197,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -3008,9 +3208,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -3030,9 +3230,9 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.2", + "redox_syscall", "smallvec", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -3088,9 +3288,9 @@ 
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.10" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "560131c633294438da9f7c4b08189194b20946c8274c6b9e38881a7874dc8ee8" +checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" dependencies = [ "memchr", "thiserror", @@ -3099,9 +3299,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.10" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26293c9193fbca7b1a3bf9b79dc1e388e927e6cacaa78b4a3ab705a1d3d41459" +checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" dependencies = [ "pest", "pest_generator", @@ -3109,22 +3309,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.10" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ec22af7d3fb470a85dd2ca96b7c577a1eb4ef6f1683a9fe9a8c16e136c04687" +checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "pest_meta" -version = "2.7.10" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a240022f37c361ec1878d646fc5b7d7c4d28d5946e1a80ad5a7a4f4ca0bdcd" +checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" dependencies = [ "once_cell", "pest", @@ -3137,7 +3337,8 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" dependencies = [ - "phf_shared", + "phf_macros", + "phf_shared 0.11.2", ] [[package]] @@ -3146,8 +3347,18 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" dependencies = [ - "phf_generator", - "phf_shared", + "phf_generator 0.11.2", + "phf_shared 0.11.2", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared 0.10.0", + "rand", ] [[package]] @@ -3156,44 +3367,46 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ - "phf_shared", + "phf_shared 0.11.2", "rand", ] [[package]] -name = "phf_shared" +name = "phf_macros" version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" dependencies = [ - "siphasher", + "phf_generator 0.11.2", + "phf_shared 0.11.2", + "proc-macro2", + "quote", + "syn 2.0.87", ] [[package]] -name = "pin-project" -version = "1.1.5" +name = "phf_shared" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "pin-project-internal", + "siphasher", ] [[package]] -name = "pin-project-internal" -version = "1.1.5" +name = "phf_shared" +version = "0.11.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.66", + "siphasher", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -3203,12 +3416,12 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "piper" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" dependencies = [ "atomic-waker", - "fastrand 2.1.0", + "fastrand", "futures-io", ] @@ -3235,39 +3448,23 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "polling" -version = "2.8.0" +version = "3.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" +checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" dependencies = [ "cfg-if", "concurrent-queue", "hermit-abi 0.4.0", "pin-project-lite", - "rustix 0.38.34", + "rustix", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3290,27 +3487,36 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" dependencies = [ "diff", - "yansi 0.5.1", + "yansi", ] [[package]] name = "proc-macro-crate" -version = "3.1.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +checksum = 
"8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.21.1", + "toml_edit", ] [[package]] @@ -3337,6 +3543,28 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "proc-macro-hack" version = "0.5.20+deprecated" @@ -3345,9 +3573,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.85" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" dependencies = [ "unicode-ident", ] @@ -3360,16 +3588,16 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", "version_check", - "yansi 1.0.1", + "yansi", ] [[package]] name = "psm" -version = "0.1.21" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +checksum = "200b9ff220857e53e184257720a14553b2f4aa02577d2ed9842d45d4b9654810" dependencies = [ "cc", ] @@ -3411,18 +3639,18 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] [[package]] name = "quoted_printable" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79ec282e887b434b68c18fe5c121d38e72a5cf35119b59e54ec5b992ea9c8eb0" +checksum = "640c9bd8497b02465aeef5375144c26062e0dcd5939dfcbb0f5db76cb8c17c73" [[package]] name = "radium" @@ -3462,11 +3690,11 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "11.0.2" +version = "11.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e29830cbb1290e404f24c73af91c5d8d631ce7e128691e9477556b540cd01ecd" +checksum = "1ab240315c661615f2ee9f0f2cd32d5a7343a84d5ebcccb99d46e6637565e7b0" dependencies = [ - "bitflags 2.5.0", + "bitflags", ] [[package]] @@ -3511,27 +3739,18 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.5.2" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ - "bitflags 2.5.0", + "bitflags", ] [[package]] name = "redox_users" 
-version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom", "libredox", @@ -3540,14 +3759,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.6" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.7", - "regex-syntax 0.8.4", + "regex-automata 0.4.9", + "regex-syntax 0.8.5", ] [[package]] @@ -3561,13 +3780,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.4", + "regex-syntax 0.8.5", ] [[package]] @@ -3578,9 +3797,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "relative-path" @@ -3597,6 +3816,49 @@ dependencies = [ "bytecheck", ] +[[package]] +name = "reqwest" +version = "0.12.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +dependencies = [ + "base64 0.22.1", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http 1.1.0", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.1", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-registry", +] + [[package]] name = "reserve-port" version = "2.0.1" @@ -3624,9 +3886,9 @@ dependencies = [ [[package]] name = "rkyv" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" +checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" dependencies = [ "bitvec", "bytecheck", @@ -3642,9 +3904,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" +checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" dependencies = [ "proc-macro2", "quote", @@ -3716,7 +3978,7 @@ dependencies = [ "regex", "relative-path", "rustc_version", - "syn 2.0.66", + "syn 2.0.87", "unicode-ident", ] @@ -3738,9 +4000,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.35.0" +version = 
"1.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1790d1c4c0ca81211399e0e0af16333276f375209e71a37b67698a373db5b47a" +checksum = "b082d80e3e3cc52b2ed634388d436fe1f4de6af5786cc2de9ba9737527bdf555" dependencies = [ "arrayvec", "borsh", @@ -3766,45 +4028,31 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc_version" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ "semver", ] [[package]] name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.34" +version = "0.38.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0" dependencies = [ - "bitflags 2.5.0", + "bitflags", "errno", "libc", - "linux-raw-sys 0.4.14", + "linux-raw-sys", "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.23.15" +version = "0.23.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fbb44d7acc4e873d613422379f69f237a1b141928c02f6bc6ccfddddc2d7993" +checksum = "7f1a745511c54ba6d4465e8d5dfbd81b45791756de28d4981af70d6dca128f1e" dependencies = [ "log", "once_cell", @@ -3817,11 +4065,10 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" dependencies = [ - "base64 0.22.1", "rustls-pki-types", ] @@ -3844,15 +4091,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "rusty-sidekiq" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64e90d83ece20649320d4d7eab794c43390e3db4dd8e67a82e4c82419e3ac7b" +checksum = "15544f047600b602c7b11ff7ee0882f9034f9cbe2c205693edd5615e2a6c03ee" dependencies = [ "async-trait", "bb8", @@ -3893,43 +4140,68 @@ dependencies = [ [[package]] name = "scc" -version = "2.1.4" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4465c22496331e20eb047ff46e7366455bc01c0c02015c4a376de0b2cd3a1af" +checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" dependencies = [ "sdd", ] +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "scopeguard" version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "scraper" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0e749d29b2064585327af5038a5a8eb73aeebad4a3472e83531a436563f7208" +dependencies = [ + "ahash 0.8.11", + "cssparser", + "ego-tree", + "getopts", + "html5ever", + "precomputed-hash", + "selectors", + "tendril", +] + [[package]] name = "sdd" -version = "1.7.0" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85f05a494052771fc5bd0619742363b5e24e5ad72ab3111ec2e27925b8edc5f3" +checksum = "49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95" [[package]] name = "sea-bae" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bd3534a9978d0aa7edd2808dc1f8f31c4d0ecd31ddf71d997b3c98e9f3c9114" +checksum = "f694a6ab48f14bc063cfadff30ab551d3c7e46d8f81836c51989d548f44a2a25" dependencies = [ "heck 0.4.1", - "proc-macro-error", + "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "sea-orm" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c4872675cc5d5d399a2a202c60f3a393ec8d3f3307c36adb166517f348e4db5" +checksum = "d5680a8b686985116607ef5f5af2b1f9e1cc2c228330e93101816a0baa279afa" dependencies = [ "async-stream", "async-trait", @@ -3955,9 +4227,9 @@ dependencies = [ [[package]] name = "sea-orm-cli" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0aefbd960c9ed7b2dfbab97b11890f5d8c314ad6e2f68c7b36c73ea0967fcc25" +checksum = "70a157f42d291ccbd6e913b9d9b12dbe2ccbcf0472efc60c8715dd1254083aec" dependencies = [ "chrono", "clap", @@ -3972,23 +4244,23 @@ dependencies = [ [[package]] name = "sea-orm-macros" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85f714906b72e7265c0b2077d0ad8f235dabebda513c92f1326d5d40cef0dd01" +checksum = "3a239e3bb1b566ad4ec2654d0d193d6ceddfd733487edc9c21a64d214c773910" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", "sea-bae", - "syn 2.0.66", + "syn 2.0.87", "unicode-ident", ] [[package]] name = "sea-orm-migration" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa7bbfbe3bec60b5925193acc9c98b9f8ae9853f52c8004df0c1ea5193c01ea0" +checksum = "63ba07e9f2479cc671758fcb1edee42ff2e32c34b3e67ab41d0af1e41f73c74e" dependencies = [ "async-trait", "clap", @@ -4044,7 +4316,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", "thiserror", ] @@ -4068,7 +4340,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -4077,6 +4349,48 @@ version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "selectors" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd568a4c9bb598e291a08244a5c1f5a8a6650bee243b5b0f8dbb3d9cc1d87fe8" +dependencies = [ + "bitflags", + "cssparser", + "derive_more", + "fxhash", + "log", + "new_debug_unreachable", + "phf", + "phf_codegen", + "precomputed-hash", + "servo_arc", + "smallvec", +] + [[package]] name = "self_cell" version = "0.10.3" @@ -4100,31 +4414,32 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.203" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.203" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] @@ -4151,9 +4466,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" dependencies = [ "serde", ] @@ -4194,9 +4509,9 @@ dependencies = [ [[package]] name = "serial_test" -version = "3.1.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" +checksum = "1b258109f244e1d6891bf1053a55d63a5cd4f8f4c30cf9a1280989f80e7a1fa9" dependencies = [ "futures", "log", @@ -4208,13 +4523,22 @@ dependencies = [ [[package]] name = "serial_test_derive" -version = "3.1.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" +checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", +] + +[[package]] +name = "servo_arc" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae65c4249478a2647db249fb43e23cec56a2c8974a427e7bd8cb5a1d0964921a" +dependencies = [ + "stable_deref_trait", ] [[package]] @@ -4230,9 +4554,9 @@ dependencies = [ [[package]] name = "sha1_smol" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" [[package]] name = "sha2" @@ 
-4256,12 +4580,12 @@ dependencies = [ [[package]] name = "shared_child" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0d94659ad3c2137fef23ae75b03d5241d633f8acded53d672decfa0e6e0caef" +checksum = "09fa9338aed9a1df411814a5b2252f7cd206c55ae9bf2fa763f8de84603aa60c" dependencies = [ "libc", - "winapi", + "windows-sys 0.59.0", ] [[package]] @@ -4297,15 +4621,15 @@ dependencies = [ [[package]] name = "simdutf8" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" [[package]] name = "similar" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" +checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" [[package]] name = "simple_asn1" @@ -4355,9 +4679,9 @@ dependencies = [ [[package]] name = "slug" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bd94acec9c8da640005f8e135a39fc0372e74535e6b368b7a04b875f784c8c4" +checksum = "882a80f72ee45de3cc9a5afeb2da0331d58df69e4e7d8eeb5d3c7784ae67e724" dependencies = [ "deunicode", "wasm-bindgen", @@ -4412,7 +4736,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -4439,23 +4763,13 @@ dependencies = [ [[package]] name = "snapbox-macros" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f4c14672714436c09254801c934b203196a51182a5107fb76591c7cc56424d" +checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af" dependencies = [ "anstream", ] -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.7" @@ -4487,9 +4801,9 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f895e3734318cc55f1fe66258926c9b910c124d47520339efecbb6c59cec7c1f" +checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" dependencies = [ "nom", "unicode_categories", @@ -4565,7 +4879,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -4588,7 +4902,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.66", + "syn 2.0.87", "tempfile", "tokio", "url", @@ -4603,7 +4917,7 @@ dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.5.0", + "bitflags", "byteorder", "bytes", "chrono", @@ -4650,7 +4964,7 @@ dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.5.0", + "bitflags", "byteorder", "chrono", "crc", @@ -4693,7 +5007,7 @@ checksum = "d5b2cf34a45953bfd3daaf3db0f7a7878ab9b7a6b91b422d24a7a9e4c857b680" dependencies = [ "atoi", "chrono", - "flume 0.11.0", + "flume 0.11.1", "futures-channel", "futures-core", "futures-executor", @@ -4719,15 +5033,15 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "stacker" -version = "0.1.15" +version = "0.1.17" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce" +checksum = "799c883d55abdb5e98af1a7b3f23b9b6de8ecada0ecac058672d7635eb48ca7b" dependencies = [ "cc", "cfg-if", "libc", "psm", - "winapi", + "windows-sys 0.59.0", ] [[package]] @@ -4736,6 +5050,32 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared 0.10.0", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache_codegen" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", + "proc-macro2", + "quote", +] + [[package]] name = "stringprep" version = "0.1.5" @@ -4755,15 +5095,15 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" -version = "0.26.2" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8cec3501a5194c432b2b7976db6b7d10ec95c253208b45f83f7136aa985e29" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -4778,27 +5118,15 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.66" +version = "2.0.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 2.0.66", -] - [[package]] name = "sync_wrapper" version = "0.1.2" @@ -4810,6 +5138,9 @@ name = "sync_wrapper" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] [[package]] name = "synstructure" @@ -4819,7 +5150,28 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", ] [[package]] @@ -4836,14 +5188,26 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.10.1" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", - "fastrand 2.1.0", - "rustix 0.38.34", - "windows-sys 0.52.0", + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "tendril" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" +dependencies = [ + "futf", + "mac", + "utf-8", ] [[package]] @@ -4881,24 +5245,30 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] +[[package]] +name = "thousands" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf63baf9f5039dadc247375c29eb13706706cfde997d0330d05aa63a77d8820" + [[package]] name = "thread_local" version = "1.1.8" @@ -4952,9 +5322,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -4967,9 +5337,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.40.0" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33" dependencies = [ "backtrace", "bytes", @@ -4978,16 +5348,16 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.7", + "socket2", "tokio-macros", "windows-sys 0.52.0", ] [[package]] name = "tokio-cron-scheduler" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7b9480125554f0ace1c3c3797a24b5cc56c6a7cd82c739db35fb54c4dc046f3" +checksum = "f2594dd7c2abbbafbb1c78d167fd10860dc7bd75f814cb051a1e0d3e796b9702" dependencies = [ "chrono", "cron", @@ -5006,7 +5376,17 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", ] [[package]] @@ -5022,9 +5402,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" dependencies = [ "futures-core", "pin-project-lite", @@ -5033,9 +5413,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", @@ -5046,47 +5426,36 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.15" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac2caab0bf757388c6c0ae23b3293fdb463fee59434529014f85e3263b995c28" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.16", + "toml_edit", ] [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" -dependencies = [ - "indexmap", - "toml_datetime", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.16" +version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "278f3d518e152219c994ce877758516bca5e118eaed6996192a774fb9fbf0788" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.13", + "winnow", ] [[package]] @@ -5095,11 +5464,6 @@ version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ - "futures-core", - "futures-util", - "pin-project", - "pin-project-lite", - "tokio", "tower-layer", "tower-service", "tracing", @@ -5128,7 +5492,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8437150ab6bbc8c5f0f519e3d5ed4aa883a83dd4cdd3d1b21f9482936046cb97" dependencies = [ "async-compression", - "bitflags 2.5.0", + "bitflags", "bytes", "futures-core", "futures-util", @@ -5192,7 +5556,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -5249,9 +5613,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.12" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b2cb4fbb9995eeb36ac86fadf24031ccd58f99d6b4b2d7b911db70bddb80d90" +checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" [[package]] name = "try-lock" @@ -5272,7 +5636,7 @@ dependencies = [ "serde", 
"shlex", "snapbox", - "toml_edit 0.22.16", + "toml_edit", ] [[package]] @@ -5292,9 +5656,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "ulid" @@ -5367,7 +5731,7 @@ checksum = "1ed7f4237ba393424195053097c1516bd4590dc82b84f2f97c5c69e12704555b" dependencies = [ "proc-macro-hack", "quote", - "syn 2.0.66", + "syn 2.0.87", "unic-langid-impl", ] @@ -5402,51 +5766,48 @@ dependencies = [ [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" [[package]] name = "unicode-normalization" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-properties" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" [[package]] name = "unicode-segmentation" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-width" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode_categories" @@ -5478,15 +5839,21 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.1" +version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c25da092f0a868cdf09e8674cd3b7ef3a7d92a24253e663a2fb85e2496de56" +checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada" dependencies = [ "form_urlencoded", - "idna 1.0.0", + "idna", "percent-encoding", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "utf16_iter" version = "1.0.5" @@ -5532,14 +5899,14 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom", "rand", @@ -5548,11 +5915,11 @@ dependencies = [ [[package]] name = "validator" -version = "0.18.1" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db79c75af171630a3148bd3e6d7c4f42b6a9a014c2945bc5ed0020cbb8d9478e" +checksum = "d0b4a29d8709210980a09379f27ee31549b73292c87ab9899beee1c0d3be6303" dependencies = [ - "idna 0.5.0", + "idna", "once_cell", "regex", "serde", @@ -5564,16 +5931,16 @@ dependencies = [ [[package]] name = "validator_derive" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df0bcf92720c40105ac4b2dda2a4ea3aa717d4d6a862cc217da653a4bd5c6b10" +checksum = "bac855a2ce6f843beb229757e6e570a42e837bcb15e5f449dd48d5747d41bf77" dependencies = [ "darling", "once_cell", - "proc-macro-error", + "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -5584,9 +5951,9 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "value-bag" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" +checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" [[package]] name = "vcpkg" @@ -5596,9 +5963,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wait-timeout" @@ -5609,12 +5976,6 @@ dependencies = [ "libc", ] -[[package]] -name = "waker-fn" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" - [[package]] name = "walkdir" version = "2.5.0" @@ -5648,34 +6009,35 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", "wasm-bindgen-shared", ] [[package]] name = 
"wasm-bindgen-futures" -version = "0.4.42" +version = "0.4.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" dependencies = [ "cfg-if", "js-sys", @@ -5685,9 +6047,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5695,28 +6057,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" +version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" [[package]] name = "web-sys" -version = "0.3.69" +version = "0.3.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" dependencies = [ "js-sys", "wasm-bindgen", @@ -5734,20 +6096,20 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.2" +version = "0.26.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c452ad30530b54a4d8e71952716a212b08efd0f3562baa66c29a618b07da7c3" +checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" dependencies = [ "rustls-pki-types", ] [[package]] name = "whoami" -version = "1.5.1" +version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" dependencies = [ - "redox_syscall 0.4.1", + "redox_syscall", "wasite", ] @@ -5769,11 +6131,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5789,7 +6151,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ "windows-core", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -5798,7 +6160,37 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.5", + 
"windows-targets 0.52.6", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", ] [[package]] @@ -5816,7 +6208,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -5836,18 +6237,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -5858,9 +6259,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -5870,9 +6271,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -5882,15 +6283,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = 
"windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -5900,9 +6301,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -5912,9 +6313,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -5924,9 +6325,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -5936,24 +6337,15 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" - -[[package]] -name = "winnow" -version = "0.5.40" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" -dependencies = [ - "memchr", -] +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.13" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" +checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" dependencies = [ "memchr", ] @@ -5979,12 +6371,6 @@ dependencies = [ "tap", ] -[[package]] -name = "yansi" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" - [[package]] name = "yansi" version = "1.0.1" @@ -6011,28 +6397,29 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", "synstructure", ] [[package]] name = "zerocopy" -version = "0.7.34" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ + "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.34" +version = "0.7.35" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] @@ -6052,7 +6439,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", "synstructure", ] @@ -6081,32 +6468,32 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.66", + "syn 2.0.87", ] [[package]] name = "zstd" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d789b1514203a1120ad2429eae43a7bd32b90976a7bb8a05f7ec02fa88cc23a" +checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "7.1.0" +version = "7.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd99b45c6bc03a018c8b8a86025678c87e55526064e38f9df301989dce7ec0a" +checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.10+zstd.1.5.6" +version = "2.0.13+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" +checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" dependencies = [ "cc", "pkg-config", diff --git a/examples/demo/Cargo.toml b/examples/demo/Cargo.toml index d76d1f640..5b5d0d078 100644 --- a/examples/demo/Cargo.toml +++ b/examples/demo/Cargo.toml @@ -20,7 +20,7 @@ tokio = { version = "1.33.0", features = ["full"] } async-trait = "0.1.74" tracing = "0.1.40" chrono = "0.4" -validator = { version = "0.18" } +validator = { version = "0.19" } sea-orm = { version = "1.1.0", features = [ "sqlx-sqlite", "sqlx-postgres", diff --git a/examples/demo/src/models/roles.rs b/examples/demo/src/models/roles.rs index b591340cb..8c45bf9aa 100644 --- a/examples/demo/src/models/roles.rs +++ b/examples/demo/src/models/roles.rs @@ -1,5 +1,4 @@ use loco_rs::prelude::*; -use sea_orm::entity::prelude::*; pub use super::_entities::roles::{self, ActiveModel, Entity, Model}; use crate::models::{_entities::sea_orm_active_enums::RolesName, users, users_roles}; diff --git a/examples/demo/tests/cmd/cli.trycmd b/examples/demo/tests/cmd/cli.trycmd index 9320015b1..6eeff130a 100644 --- a/examples/demo/tests/cmd/cli.trycmd +++ b/examples/demo/tests/cmd/cli.trycmd @@ -11,6 +11,7 @@ Commands: routes Describe all application endpoints middleware Describe all application middlewares task Run a custom task + jobs Managing jobs queue scheduler Run the scheduler generate code generation creates a set of files and code templates based on a predefined set of rules doctor Validate and diagnose configurations @@ -86,43 +87,55 @@ user_report [output a user report] ```console $ demo_app-cli routes --environment test -[GET] /_health -[GET] /_ping -[POST] /auth/forgot -[POST] /auth/login -[POST] /auth/register -[POST] /auth/reset -[POST] /auth/verify -[GET] /cache -[GET] /cache/get_or_insert -[POST] /cache/insert -[GET] /mylayer/admin -[GET] /mylayer/echo -[GET] /mylayer/user -[GET] /mysession -[GET] /notes -[POST] /notes -[GET] /notes/:id -[DELETE] /notes/:id -[POST] /notes/:id -[GET] 
/response/album -[GET] /response/empty -[GET] /response/empty_json -[GET] /response/etag -[GET] /response/html -[GET] /response/json -[GET] /response/redirect -[GET] /response/render_with_status_code -[GET] /response/set_cookie -[GET] /response/text -[POST] /upload/file -[POST] /user/convert/admin -[POST] /user/convert/user -[GET] /user/current -[GET] /user/current_api_key -[GET] /view-engine/hello -[GET] /view-engine/home -[GET] /view-engine/simple +/_health + └─ GET /_health +/_ping + └─ GET /_ping +/auth + ├─ POST /auth/forgot + ├─ POST /auth/login + ├─ POST /auth/register + ├─ POST /auth/reset + └─ POST /auth/verify +/cache + ├─ GET /cache + ├─ GET /cache/get_or_insert + └─ POST /cache/insert +/mylayer + ├─ GET /mylayer/admin + ├─ GET /mylayer/echo + └─ GET /mylayer/user +/mysession + └─ GET /mysession +/notes + ├─ GET /notes + │ POST /notes + │ + ├─ GET /notes/:id + │ POST /notes/:id + └─ DELETE /notes/:id +/response + ├─ GET /response/album + ├─ GET /response/empty + ├─ GET /response/empty_json + ├─ GET /response/etag + ├─ GET /response/html + ├─ GET /response/json + ├─ GET /response/redirect + ├─ GET /response/render_with_status_code + ├─ GET /response/set_cookie + └─ GET /response/text +/upload + └─ POST /upload/file +/user + ├─ POST /user/convert/admin + ├─ POST /user/convert/user + ├─ GET /user/current + └─ GET /user/current_api_key +/view-engine + ├─ GET /view-engine/hello + ├─ GET /view-engine/home + └─ GET /view-engine/simple ``` @@ -133,7 +146,7 @@ $ demo_app-cli doctor ✅ DB connection: success ✅ redis queue: queue connection: success ✅ Dependencies - +✅ Loco version: latest ``` diff --git a/examples/demo/tests/models/roles.rs b/examples/demo/tests/models/roles.rs index 52f4d8af6..05b033100 100644 --- a/examples/demo/tests/models/roles.rs +++ b/examples/demo/tests/models/roles.rs @@ -1,9 +1,8 @@ use demo_app::{ app::App, - models::{roles, sea_orm_active_enums, users, users::RegisterParams, users_roles}, + models::{roles, sea_orm_active_enums, users, users::RegisterParams}, }; -use loco_rs::{prelude::*, testing}; -use sea_orm::DatabaseConnection; +use loco_rs::testing::prelude::*; use serial_test::serial; macro_rules! configure_insta { @@ -20,8 +19,8 @@ macro_rules! 
configure_insta { async fn can_add_user_to_admin() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - let new_user: Result = users::Model::create_with_password( + let boot = boot_test::().await.unwrap(); + let new_user = users::Model::create_with_password( &boot.app_context.db, &RegisterParams { email: "user1@example.com".to_string(), @@ -29,8 +28,8 @@ async fn can_add_user_to_admin() { name: "framework".to_string(), }, ) - .await; - let new_user = new_user.unwrap(); + .await + .unwrap(); let role = roles::Model::add_user_to_admin_role(&boot.app_context.db, &new_user) .await .unwrap(); @@ -42,8 +41,8 @@ async fn can_add_user_to_admin() { async fn can_add_user_to_user() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - let new_user: Result = users::Model::create_with_password( + let boot = boot_test::().await.unwrap(); + let new_user = users::Model::create_with_password( &boot.app_context.db, &RegisterParams { email: "user1@example.com".to_string(), @@ -51,8 +50,8 @@ async fn can_add_user_to_user() { name: "framework".to_string(), }, ) - .await; - let new_user = new_user.unwrap(); + .await + .unwrap(); let role = roles::Model::add_user_to_user_role(&boot.app_context.db, &new_user) .await .unwrap(); @@ -64,8 +63,8 @@ async fn can_add_user_to_user() { async fn can_convert_between_user_and_admin() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - let new_user: Result = users::Model::create_with_password( + let boot = boot_test::().await.unwrap(); + let new_user = users::Model::create_with_password( &boot.app_context.db, &RegisterParams { email: "user1@example.com".to_string(), @@ -73,8 +72,8 @@ async fn can_convert_between_user_and_admin() { name: "framework".to_string(), }, ) - .await; - let new_user = new_user.unwrap(); + .await + .unwrap(); let role = roles::Model::add_user_to_user_role(&boot.app_context.db, &new_user) .await .unwrap(); @@ -94,8 +93,8 @@ async fn can_convert_between_user_and_admin() { async fn can_find_user_roles() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - let new_user: Result = users::Model::create_with_password( + let boot = boot_test::().await.unwrap(); + let new_user = users::Model::create_with_password( &boot.app_context.db, &RegisterParams { email: "user1@example.com".to_string(), @@ -103,8 +102,8 @@ async fn can_find_user_roles() { name: "framework".to_string(), }, ) - .await; - let new_user = new_user.unwrap(); + .await + .unwrap(); let role = roles::Model::add_user_to_user_role(&boot.app_context.db, &new_user) .await .unwrap(); @@ -131,8 +130,8 @@ async fn can_find_user_roles() { async fn cannot_find_user_before_conversation() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - let new_user: Result = users::Model::create_with_password( + let boot = boot_test::().await.unwrap(); + let new_user = users::Model::create_with_password( &boot.app_context.db, &RegisterParams { email: "user1@example.com".to_string(), @@ -140,8 +139,8 @@ async fn cannot_find_user_before_conversation() { name: "framework".to_string(), }, ) - .await; - let new_user = new_user.unwrap(); + .await + .unwrap(); let role = roles::Model::find_by_user(&boot.app_context.db, &new_user).await; assert!(role.is_err()); } diff --git a/examples/demo/tests/models/users.rs b/examples/demo/tests/models/users.rs index d4d935bd2..4200f64a7 100644 --- a/examples/demo/tests/models/users.rs +++ b/examples/demo/tests/models/users.rs @@ -3,7 +3,7 @@ use demo_app::{ 
models::users::{self, Model, RegisterParams}, }; use insta::assert_debug_snapshot; -use loco_rs::{model::ModelError, testing}; +use loco_rs::{model::ModelError, prelude::*}; use sea_orm::{ActiveModelTrait, ActiveValue, IntoActiveModel}; use serial_test::serial; @@ -21,7 +21,7 @@ macro_rules! configure_insta { async fn test_can_validate_model() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); let res = users::ActiveModel { name: ActiveValue::set("1".to_string()), @@ -39,7 +39,7 @@ async fn test_can_validate_model() { async fn can_create_with_password() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); let params = RegisterParams { email: "test@framework.com".to_string(), @@ -49,7 +49,7 @@ async fn can_create_with_password() { let res = Model::create_with_password(&boot.app_context.db, ¶ms).await; insta::with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!(res); }); @@ -60,8 +60,8 @@ async fn can_create_with_password() { async fn handle_create_with_password_with_duplicate() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let new_user: Result = Model::create_with_password( &boot.app_context.db, @@ -80,8 +80,8 @@ async fn handle_create_with_password_with_duplicate() { async fn can_find_by_email() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let existing_user = Model::find_by_email(&boot.app_context.db, "user1@example.com").await; let non_existing_user_results = @@ -96,8 +96,8 @@ async fn can_find_by_email() { async fn can_find_by_pid() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let existing_user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111").await; @@ -113,8 +113,8 @@ async fn can_find_by_pid() { async fn can_verification_token() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") .await @@ -142,8 +142,8 @@ async fn can_verification_token() { async fn can_set_forgot_password_sent() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") .await @@ -171,8 +171,8 @@ async fn can_set_forgot_password_sent() { async fn can_verified() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") 
.await @@ -198,8 +198,8 @@ async fn can_verified() { async fn can_reset_password() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") .await diff --git a/examples/demo/tests/models/users_roles.rs b/examples/demo/tests/models/users_roles.rs index debacd2a8..619464fdb 100644 --- a/examples/demo/tests/models/users_roles.rs +++ b/examples/demo/tests/models/users_roles.rs @@ -2,8 +2,8 @@ use demo_app::{ app::App, models::{roles, sea_orm_active_enums, users, users::RegisterParams, users_roles}, }; -use loco_rs::{prelude::*, testing}; -use sea_orm::{ColumnTrait, DatabaseConnection}; +use loco_rs::prelude::*; +use sea_orm::ColumnTrait; use serial_test::serial; macro_rules! configure_insta { ($($expr:expr),*) => { @@ -19,7 +19,7 @@ macro_rules! configure_insta { async fn can_connect_user_to_user_role() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); let new_user: Result = users::Model::create_with_password( &boot.app_context.db, &RegisterParams { @@ -61,7 +61,7 @@ async fn can_connect_user_to_user_role() { async fn can_connect_user_to_admin_role() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); let new_user: Result = users::Model::create_with_password( &boot.app_context.db, &RegisterParams { diff --git a/examples/demo/tests/requests/auth.rs b/examples/demo/tests/requests/auth.rs index 957fb7632..93bb6bde4 100644 --- a/examples/demo/tests/requests/auth.rs +++ b/examples/demo/tests/requests/auth.rs @@ -1,6 +1,6 @@ use demo_app::{app::App, models::users}; use insta::{assert_debug_snapshot, with_settings}; -use loco_rs::testing; +use loco_rs::prelude::*; use rstest::rstest; use serial_test::serial; @@ -22,7 +22,7 @@ macro_rules! 
configure_insta { async fn can_register() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let email = "test@loco.com"; let payload = serde_json::json!({ "name": "loco", @@ -34,13 +34,13 @@ async fn can_register() { let saved_user = users::Model::find_by_email(&ctx.db, email).await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!(saved_user); }); with_settings!({ - filters => testing::cleanup_email() + filters => cleanup_email() }, { assert_debug_snapshot!(ctx.mailer.unwrap().deliveries()); }); @@ -56,7 +56,7 @@ async fn can_register() { async fn can_login_with_verify(#[case] test_name: &str, #[case] password: &str) { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let email = "test@loco.com"; let register_payload = serde_json::json!({ "name": "loco", @@ -90,7 +90,7 @@ async fn can_login_with_verify(#[case] test_name: &str, #[case] password: &str) .is_some()); with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!(test_name, (response.status_code(), response.text())); }); @@ -103,7 +103,7 @@ async fn can_login_with_verify(#[case] test_name: &str, #[case] password: &str) async fn can_login_without_verify() { configure_insta!(); - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let email = "test@loco.com"; let password = "12341234"; let register_payload = serde_json::json!({ @@ -125,7 +125,7 @@ async fn can_login_without_verify() { .await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!((response.status_code(), response.text())); }); @@ -138,7 +138,7 @@ async fn can_login_without_verify() { async fn can_reset_password() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let login_data = prepare_data::init_user_login(&request, &ctx).await; let forgot_payload = serde_json::json!({ @@ -180,7 +180,7 @@ async fn can_reset_password() { assert_eq!(response.status_code(), 200); with_settings!({ - filters => testing::cleanup_email() + filters => cleanup_email() }, { assert_debug_snapshot!(ctx.mailer.unwrap().deliveries()); }); diff --git a/examples/demo/tests/requests/cache.rs b/examples/demo/tests/requests/cache.rs index 6dc67c94b..e431f91d7 100644 --- a/examples/demo/tests/requests/cache.rs +++ b/examples/demo/tests/requests/cache.rs @@ -1,6 +1,6 @@ use demo_app::{app::App, models::users}; use insta::assert_debug_snapshot; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use sea_orm::ModelTrait; use serial_test::serial; @@ -20,7 +20,7 @@ macro_rules! 
configure_insta { async fn ping() { configure_insta!(); - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let response = request.get("cache").await; assert_debug_snapshot!("key_not_exists", (response.text(), response.status_code())); let response = request.post("/cache/insert").await; @@ -36,8 +36,8 @@ async fn ping() { async fn can_get_or_insert() { configure_insta!(); - testing::request::(|request, ctx| async move { - testing::seed::(&ctx.db).await.unwrap(); + request::(|request, ctx| async move { + seed::(&ctx.db).await.unwrap(); let response = request.get("/cache/get_or_insert").await; assert_eq!(response.text(), "user1"); diff --git a/examples/demo/tests/requests/mylayer.rs b/examples/demo/tests/requests/mylayer.rs index b78b45326..27b8a5e72 100644 --- a/examples/demo/tests/requests/mylayer.rs +++ b/examples/demo/tests/requests/mylayer.rs @@ -1,5 +1,5 @@ use demo_app::{app::App, views::user::UserResponse}; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use serial_test::serial; use crate::requests::prepare_data; @@ -15,7 +15,7 @@ macro_rules! configure_insta { #[serial] async fn cannot_get_echo_when_no_role_assigned() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request @@ -31,7 +31,7 @@ async fn cannot_get_echo_when_no_role_assigned() { #[serial] async fn can_get_echo_when_admin_role_assigned() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request @@ -54,7 +54,7 @@ async fn can_get_echo_when_admin_role_assigned() { #[serial] async fn can_get_echo_when_user_role_assigned() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request @@ -78,7 +78,7 @@ async fn can_get_echo_when_user_role_assigned() { #[serial] async fn cannot_get_admin_when_no_role() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request @@ -94,7 +94,7 @@ async fn cannot_get_admin_when_no_role() { #[serial] async fn cannot_get_admin_when_user_role_assigned() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request @@ -118,7 +118,7 @@ async fn cannot_get_admin_when_user_role_assigned() { #[serial] async fn can_get_admin_when_admin_role_assigned() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request @@ -142,7 +142,7 @@ async fn can_get_admin_when_admin_role_assigned() { #[serial] async fn cannot_get_user_when_no_role() { 
configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request @@ -158,7 +158,7 @@ async fn cannot_get_user_when_no_role() { #[serial] async fn can_get_user_when_user_role_assigned() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); let response = request diff --git a/examples/demo/tests/requests/notes.rs b/examples/demo/tests/requests/notes.rs index ec7ec2f83..5edd5d9a0 100644 --- a/examples/demo/tests/requests/notes.rs +++ b/examples/demo/tests/requests/notes.rs @@ -1,6 +1,6 @@ use demo_app::{app::App, models::_entities::notes::Entity}; use insta::{assert_debug_snapshot, with_settings}; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use rstest::rstest; use sea_orm::entity::prelude::*; use serial_test::serial; @@ -26,14 +26,14 @@ macro_rules! configure_insta { async fn can_get_notes(#[case] test_name: &str, #[case] params: serde_json::Value) { configure_insta!(); - testing::request::(|request, ctx| async move { - testing::seed::(&ctx.db).await.unwrap(); + request::(|request, ctx| async move { + seed::(&ctx.db).await.unwrap(); let notes = request.get("notes").add_query_params(params).await; with_settings!({ filters => { - let mut combined_filters = testing::get_cleanup_date().clone(); + let mut combined_filters = get_cleanup_date().clone(); combined_filters.extend(vec![(r#"\"id\\":\d+"#, r#""id\":ID"#)]); combined_filters } @@ -51,7 +51,7 @@ async fn can_get_notes(#[case] test_name: &str, #[case] params: serde_json::Valu async fn can_add_note() { configure_insta!(); - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let payload = serde_json::json!({ "title": "loco", "content": "loco note test", @@ -61,7 +61,7 @@ async fn can_add_note() { with_settings!({ filters => { - let mut combined_filters = testing::get_cleanup_date().clone(); + let mut combined_filters = get_cleanup_date().clone(); combined_filters.extend(vec![(r#"\"id\\":\d+"#, r#""id\":ID"#)]); combined_filters } @@ -79,14 +79,14 @@ async fn can_add_note() { async fn can_get_note() { configure_insta!(); - testing::request::(|request, ctx| async move { - testing::seed::(&ctx.db).await.unwrap(); + request::(|request, ctx| async move { + seed::(&ctx.db).await.unwrap(); let add_note_request = request.get("/notes/1").await; with_settings!({ filters => { - let mut combined_filters = testing::get_cleanup_date().clone(); + let mut combined_filters = get_cleanup_date().clone(); combined_filters.extend(vec![(r#"\"id\\":\d+"#, r#""id\":ID"#)]); combined_filters } @@ -104,15 +104,15 @@ async fn can_get_note() { async fn can_delete_note() { configure_insta!(); - testing::request::(|request, ctx| async move { - testing::seed::(&ctx.db).await.unwrap(); + request::(|request, ctx| async move { + seed::(&ctx.db).await.unwrap(); let count_before_delete = Entity::find().all(&ctx.db).await.unwrap().len(); let delete_note_request = request.delete("/notes/1").await; with_settings!({ filters => { - let mut combined_filters = testing::get_cleanup_date().clone(); + let mut combined_filters = get_cleanup_date().clone(); combined_filters.extend(vec![(r#"\"id\\":\d+"#, r#""id\":ID"#)]); combined_filters } diff --git 
a/examples/demo/tests/requests/ping.rs b/examples/demo/tests/requests/ping.rs index 1303301de..84bfae8f6 100644 --- a/examples/demo/tests/requests/ping.rs +++ b/examples/demo/tests/requests/ping.rs @@ -1,6 +1,6 @@ use demo_app::app::App; use insta::assert_debug_snapshot; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use rstest::rstest; // TODO: see how to dedup / extract this to app-local test utils @@ -22,7 +22,7 @@ macro_rules! configure_insta { async fn ping(#[case] test_name: &str, #[case] path: &str) { configure_insta!(); - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let response = request.get(path).await; assert_debug_snapshot!(test_name, (response.text(), response.status_code())); diff --git a/examples/demo/tests/requests/responses.rs b/examples/demo/tests/requests/responses.rs index e68b84c60..05ae12f61 100644 --- a/examples/demo/tests/requests/responses.rs +++ b/examples/demo/tests/requests/responses.rs @@ -1,7 +1,7 @@ use axum::http::HeaderMap; use demo_app::app::App; use insta::assert_debug_snapshot; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use rstest::rstest; use serial_test::serial; // TODO: see how to dedup / extract this to app-local test utils @@ -29,7 +29,7 @@ macro_rules! configure_insta { #[serial] async fn can_return_different_responses(#[case] uri: &str) { configure_insta!(); - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let response = request.get(uri).await; let mut headers = HeaderMap::new(); diff --git a/examples/demo/tests/requests/upload.rs b/examples/demo/tests/requests/upload.rs index 0c2a46667..a5c1fa2e8 100644 --- a/examples/demo/tests/requests/upload.rs +++ b/examples/demo/tests/requests/upload.rs @@ -1,12 +1,12 @@ use axum_test::multipart::{MultipartForm, Part}; use demo_app::{app::App, views}; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use serial_test::serial; #[tokio::test] #[serial] async fn can_upload_file() { - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let file_content = "loco file upload"; let file_part = Part::bytes(file_content.as_bytes()).file_name("loco.txt"); diff --git a/examples/demo/tests/requests/user.rs b/examples/demo/tests/requests/user.rs index a6112a718..4ca15477c 100644 --- a/examples/demo/tests/requests/user.rs +++ b/examples/demo/tests/requests/user.rs @@ -1,6 +1,6 @@ use demo_app::app::App; use insta::{assert_debug_snapshot, with_settings}; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use serial_test::serial; use super::prepare_data; @@ -21,7 +21,7 @@ macro_rules! 
configure_insta { async fn can_get_current_user() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); @@ -31,7 +31,7 @@ async fn can_get_current_user() { .await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!((response.status_code(), response.text())); }); @@ -44,7 +44,7 @@ async fn can_get_current_user() { async fn can_get_current_user_with_api_key() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user_data = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user_data.user.api_key); @@ -54,7 +54,7 @@ async fn can_get_current_user_with_api_key() { .await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!((response.status_code(), response.text())); }); @@ -67,7 +67,7 @@ async fn can_get_current_user_with_api_key() { async fn can_convert_user_to_user_role() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); @@ -77,7 +77,7 @@ async fn can_convert_user_to_user_role() { .await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!((response.status_code(), response.text())); }); @@ -90,7 +90,7 @@ async fn can_convert_user_to_user_role() { async fn can_convert_user_to_admin_role() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); @@ -100,7 +100,7 @@ async fn can_convert_user_to_admin_role() { .await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!((response.status_code(), response.text())); }); diff --git a/examples/demo/tests/requests/view_engine.rs b/examples/demo/tests/requests/view_engine.rs index 4f7422556..b8980e0e0 100644 --- a/examples/demo/tests/requests/view_engine.rs +++ b/examples/demo/tests/requests/view_engine.rs @@ -1,6 +1,6 @@ use demo_app::app::App; use insta::assert_debug_snapshot; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use rstest::rstest; use serial_test::serial; // TODO: see how to dedup / extract this to app-local test utils @@ -22,7 +22,7 @@ macro_rules! 
configure_insta { #[serial] async fn can_get_view_engine(#[case] uri: &str) { configure_insta!(); - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let response = request.get(&format!("/view-engine/{uri}")).await; assert_debug_snapshot!( diff --git a/examples/demo/tests/tasks/foo.rs b/examples/demo/tests/tasks/foo.rs index e45bbc085..4d19e4397 100644 --- a/examples/demo/tests/tasks/foo.rs +++ b/examples/demo/tests/tasks/foo.rs @@ -1,11 +1,11 @@ use demo_app::app::App; -use loco_rs::{boot::run_task, task, testing}; +use loco_rs::{boot::run_task, task, testing::prelude::*}; use serial_test::serial; #[tokio::test] #[serial] async fn test_can_run_foo_task() { - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); assert!(run_task::( &boot.app_context, diff --git a/examples/demo/tests/tasks/seed.rs b/examples/demo/tests/tasks/seed.rs index 2657bf2a4..f1666d08a 100644 --- a/examples/demo/tests/tasks/seed.rs +++ b/examples/demo/tests/tasks/seed.rs @@ -1,11 +1,11 @@ use demo_app::app::App; -use loco_rs::{boot::run_task, task, testing}; +use loco_rs::{boot::run_task, task, testing::prelude::*}; use serial_test::serial; #[tokio::test] #[serial] async fn test_can_seed_data() { - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); assert!(run_task::( &boot.app_context, diff --git a/examples/llm-candle-inference/Cargo.toml b/examples/llm-candle-inference/Cargo.toml index 1135718ec..caef5b977 100644 --- a/examples/llm-candle-inference/Cargo.toml +++ b/examples/llm-candle-inference/Cargo.toml @@ -17,7 +17,7 @@ serde_json = "1" tokio = { version = "1.33.0", default-features = false } async-trait = "0.1.74" tracing = "0.1.40" -validator = { version = "0.18.1" } +validator = { version = "0.19.0" } axum = "0.7.5" include_dir = "0.7.3" diff --git a/loco-gen/src/lib.rs b/loco-gen/src/lib.rs index 2f5f11daa..90ed56a93 100644 --- a/loco-gen/src/lib.rs +++ b/loco-gen/src/lib.rs @@ -15,30 +15,27 @@ mod scaffold; mod testutil; use std::{str::FromStr, sync::OnceLock}; -const CONTROLLER_T: &str = include_str!("templates/controller.t"); -const CONTROLLER_TEST_T: &str = include_str!("templates/request_test.t"); +const MAILER_T: &str = include_str!("templates/mailer/mailer.t"); +const MAILER_SUB_T: &str = include_str!("templates/mailer/subject.t"); +const MAILER_TEXT_T: &str = include_str!("templates/mailer/text.t"); +const MAILER_HTML_T: &str = include_str!("templates/mailer/html.t"); -const MAILER_T: &str = include_str!("templates/mailer.t"); -const MAILER_SUB_T: &str = include_str!("templates/mailer_sub.t"); -const MAILER_TEXT_T: &str = include_str!("templates/mailer_text.t"); -const MAILER_HTML_T: &str = include_str!("templates/mailer_html.t"); +const MIGRATION_T: &str = include_str!("templates/migration/migration.t"); -const MIGRATION_T: &str = include_str!("templates/migration.t"); +const TASK_T: &str = include_str!("templates/task/task.t"); +const TASK_TEST_T: &str = include_str!("templates/task/test.t"); -const TASK_T: &str = include_str!("templates/task.t"); -const TASK_TEST_T: &str = include_str!("templates/task_test.t"); +const SCHEDULER_T: &str = include_str!("templates/scheduler/scheduler.t"); -const SCHEDULER_T: &str = include_str!("templates/scheduler.t"); - -const WORKER_T: &str = include_str!("templates/worker.t"); -const WORKER_TEST_T: &str = include_str!("templates/worker_test.t"); +const WORKER_T: &str = include_str!("templates/worker/worker.t"); +const WORKER_TEST_T: &str = 
include_str!("templates/worker/test.t"); // Deployment templates -const DEPLOYMENT_DOCKER_T: &str = include_str!("templates/deployment_docker.t"); -const DEPLOYMENT_DOCKER_IGNORE_T: &str = include_str!("templates/deployment_docker_ignore.t"); -const DEPLOYMENT_SHUTTLE_T: &str = include_str!("templates/deployment_shuttle.t"); -const DEPLOYMENT_SHUTTLE_CONFIG_T: &str = include_str!("templates/deployment_shuttle_config.t"); -const DEPLOYMENT_NGINX_T: &str = include_str!("templates/deployment_nginx.t"); +const DEPLOYMENT_DOCKER_T: &str = include_str!("templates/deployment/docker/docker.t"); +const DEPLOYMENT_DOCKER_IGNORE_T: &str = include_str!("templates/deployment/docker/ignore.t"); +const DEPLOYMENT_SHUTTLE_T: &str = include_str!("templates/deployment/shuttle/shuttle.t"); +const DEPLOYMENT_SHUTTLE_CONFIG_T: &str = include_str!("templates/deployment/shuttle/config.t"); +const DEPLOYMENT_NGINX_T: &str = include_str!("templates/deployment/nginx/nginx.t"); const DEPLOYMENT_SHUTTLE_RUNTIME_VERSION: &str = "0.46.0"; diff --git a/loco-gen/src/model.rs b/loco-gen/src/model.rs index 0e1a74041..928f280be 100644 --- a/loco-gen/src/model.rs +++ b/loco-gen/src/model.rs @@ -8,8 +8,8 @@ use serde_json::json; use super::{Error, Result}; use crate::get_mappings; -const MODEL_T: &str = include_str!("templates/model.t"); -const MODEL_TEST_T: &str = include_str!("templates/model_test.t"); +const MODEL_T: &str = include_str!("templates/model/model.t"); +const MODEL_TEST_T: &str = include_str!("templates/model/test.t"); use super::{collect_messages, AppInfo}; diff --git a/loco-gen/src/templates/controller.t b/loco-gen/src/templates/controller.t deleted file mode 100644 index c1b4c202d..000000000 --- a/loco-gen/src/templates/controller.t +++ /dev/null @@ -1,35 +0,0 @@ -{% set file_name = name | snake_case -%} -{% set module_name = file_name | pascal_case -%} -to: src/controllers/{{ file_name }}.rs -skip_exists: true -message: "Controller `{{module_name}}` was added successfully." 
-injections: -- into: src/controllers/mod.rs - append: true - content: "pub mod {{ file_name }};" -- into: src/app.rs - after: "AppRoutes::" - content: " .add_route(controllers::{{ file_name }}::routes())" ---- -#![allow(clippy::unused_async)] -use loco_rs::prelude::*; -use axum::debug_handler; - - -#[debug_handler] -pub async fn echo(req_body: String) -> String { - req_body -} - -#[debug_handler] -pub async fn hello(State(_ctx): State) -> Result { - // do something with context (database, etc) - format::text("hello") -} - -pub fn routes() -> Routes { - Routes::new() - .prefix("{{ name | snake_case }}") - .add("/", get(hello)) - .add("/echo", post(echo)) -} diff --git a/loco-gen/src/templates/controller/api/test.t b/loco-gen/src/templates/controller/api/test.t index 006ccba28..9611bfb79 100644 --- a/loco-gen/src/templates/controller/api/test.t +++ b/loco-gen/src/templates/controller/api/test.t @@ -9,13 +9,13 @@ injections: content: "pub mod {{ file_name }};" --- use {{pkg_name}}::app::App; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use serial_test::serial; #[tokio::test] #[serial] async fn can_get_{{ name | plural | snake_case }}() { - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let res = request.get("/api/{{ name | plural | snake_case }}/").await; assert_eq!(res.status_code(), 200); @@ -29,7 +29,7 @@ async fn can_get_{{ name | plural | snake_case }}() { #[tokio::test] #[serial] async fn can_get_{{action}}() { - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let res = request.get("/{{ name | plural | snake_case }}/{{action}}").await; assert_eq!(res.status_code(), 200); }) diff --git a/loco-gen/src/templates/deployment_docker.t b/loco-gen/src/templates/deployment/docker/docker.t similarity index 100% rename from loco-gen/src/templates/deployment_docker.t rename to loco-gen/src/templates/deployment/docker/docker.t diff --git a/loco-gen/src/templates/deployment_docker_ignore.t b/loco-gen/src/templates/deployment/docker/ignore.t similarity index 100% rename from loco-gen/src/templates/deployment_docker_ignore.t rename to loco-gen/src/templates/deployment/docker/ignore.t diff --git a/loco-gen/src/templates/deployment_nginx.t b/loco-gen/src/templates/deployment/nginx/nginx.t similarity index 100% rename from loco-gen/src/templates/deployment_nginx.t rename to loco-gen/src/templates/deployment/nginx/nginx.t diff --git a/loco-gen/src/templates/deployment_shuttle_config.t b/loco-gen/src/templates/deployment/shuttle/config.t similarity index 100% rename from loco-gen/src/templates/deployment_shuttle_config.t rename to loco-gen/src/templates/deployment/shuttle/config.t diff --git a/loco-gen/src/templates/deployment_shuttle.t b/loco-gen/src/templates/deployment/shuttle/shuttle.t similarity index 100% rename from loco-gen/src/templates/deployment_shuttle.t rename to loco-gen/src/templates/deployment/shuttle/shuttle.t diff --git a/loco-gen/src/templates/mailer_html.t b/loco-gen/src/templates/mailer/html.t similarity index 100% rename from loco-gen/src/templates/mailer_html.t rename to loco-gen/src/templates/mailer/html.t diff --git a/loco-gen/src/templates/mailer.t b/loco-gen/src/templates/mailer/mailer.t similarity index 100% rename from loco-gen/src/templates/mailer.t rename to loco-gen/src/templates/mailer/mailer.t diff --git a/loco-gen/src/templates/mailer_sub.t b/loco-gen/src/templates/mailer/subject.t similarity index 100% rename from loco-gen/src/templates/mailer_sub.t rename to 
loco-gen/src/templates/mailer/subject.t diff --git a/loco-gen/src/templates/mailer_text.t b/loco-gen/src/templates/mailer/text.t similarity index 100% rename from loco-gen/src/templates/mailer_text.t rename to loco-gen/src/templates/mailer/text.t diff --git a/loco-gen/src/templates/migration.t b/loco-gen/src/templates/migration/migration.t similarity index 100% rename from loco-gen/src/templates/migration.t rename to loco-gen/src/templates/migration/migration.t diff --git a/loco-gen/src/templates/model.t b/loco-gen/src/templates/model/model.t similarity index 100% rename from loco-gen/src/templates/model.t rename to loco-gen/src/templates/model/model.t diff --git a/loco-gen/src/templates/model_test.t b/loco-gen/src/templates/model/test.t similarity index 86% rename from loco-gen/src/templates/model_test.t rename to loco-gen/src/templates/model/test.t index cde199460..cd7268999 100644 --- a/loco-gen/src/templates/model_test.t +++ b/loco-gen/src/templates/model/test.t @@ -9,7 +9,7 @@ injections: content: "mod {{plural_snake}};" --- use {{pkg_name}}::app::App; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use serial_test::serial; macro_rules! configure_insta { @@ -25,8 +25,8 @@ macro_rules! configure_insta { async fn test_model() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); // query your model, e.g.: // diff --git a/loco-gen/src/templates/request_test.t b/loco-gen/src/templates/request_test.t deleted file mode 100644 index 43302faf3..000000000 --- a/loco-gen/src/templates/request_test.t +++ /dev/null @@ -1,39 +0,0 @@ -{% set file_name = name | snake_case -%} -{% set module_name = file_name | pascal_case -%} -to: tests/requests/{{ file_name }}.rs -skip_exists: true -message: "Tests for controller `{{module_name}}` was added successfully. Run `cargo test`." 
-injections: -- into: tests/requests/mod.rs - append: true - content: "pub mod {{ file_name }};" ---- -use {{pkg_name}}::app::App; -use loco_rs::testing; -use serial_test::serial; - -#[tokio::test] -#[serial] -async fn can_get_echo() { - testing::request::(|request, _ctx| async move { - let payload = serde_json::json!({ - "foo": "bar", - }); - - let res = request.post("/{{ name | snake_case }}/echo").json(&payload).await; - assert_eq!(res.status_code(), 200); - assert_eq!(res.text(), serde_json::to_string(&payload).unwrap()); - }) - .await; -} - -#[tokio::test] -#[serial] -async fn can_request_root() { - testing::request::(|request, _ctx| async move { - let res = request.get("/{{ name | snake_case }}").await; - assert_eq!(res.status_code(), 200); - assert_eq!(res.text(), "hello"); - }) - .await; -} diff --git a/loco-gen/src/templates/scaffold/api/test.t b/loco-gen/src/templates/scaffold/api/test.t index a0c672c3f..f20cfa0c6 100644 --- a/loco-gen/src/templates/scaffold/api/test.t +++ b/loco-gen/src/templates/scaffold/api/test.t @@ -9,13 +9,13 @@ injections: content: "pub mod {{ file_name }};" --- use {{pkg_name}}::app::App; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use serial_test::serial; #[tokio::test] #[serial] async fn can_get_{{ name | plural | snake_case }}() { - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let res = request.get("/api/{{ name | plural | snake_case }}/").await; assert_eq!(res.status_code(), 200); diff --git a/loco-gen/src/templates/scheduler.t b/loco-gen/src/templates/scheduler/scheduler.t similarity index 100% rename from loco-gen/src/templates/scheduler.t rename to loco-gen/src/templates/scheduler/scheduler.t diff --git a/loco-gen/src/templates/task.t b/loco-gen/src/templates/task/task.t similarity index 100% rename from loco-gen/src/templates/task.t rename to loco-gen/src/templates/task/task.t diff --git a/loco-gen/src/templates/task_test.t b/loco-gen/src/templates/task/test.t similarity index 87% rename from loco-gen/src/templates/task_test.t rename to loco-gen/src/templates/task/test.t index 69eee2d05..096a435ee 100644 --- a/loco-gen/src/templates/task_test.t +++ b/loco-gen/src/templates/task/test.t @@ -9,7 +9,7 @@ injections: content: "pub mod {{ file_name }};" --- use {{pkg_name}}::app::App; -use loco_rs::{task, testing}; +use loco_rs::{task, testing::prelude::*}; use loco_rs::boot::run_task; use serial_test::serial; @@ -17,7 +17,7 @@ use serial_test::serial; #[tokio::test] #[serial] async fn test_can_run_{{name | snake_case}}() { - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); assert!( run_task::(&boot.app_context, Some(&"{{name}}".to_string()), &task::Vars::default()) diff --git a/loco-gen/src/templates/worker_test.t b/loco-gen/src/templates/worker/test.t similarity index 89% rename from loco-gen/src/templates/worker_test.t rename to loco-gen/src/templates/worker/test.t index d302ae079..8fc46954c 100644 --- a/loco-gen/src/templates/worker_test.t +++ b/loco-gen/src/templates/worker/test.t @@ -9,8 +9,7 @@ injections: content: "pub mod {{ name | snake_case }};" --- use {{pkg_name}}::app::App; -use loco_rs::prelude::*; -use loco_rs::testing; +use loco_rs::{bgworker::BackgroundWorker, testing::prelude::*}; use {{pkg_name}}::workers::{{module_name}}::{{struct_name}}Worker; use {{pkg_name}}::workers::{{module_name}}::{{struct_name}}WorkerArgs; @@ -20,7 +19,7 @@ use serial_test::serial; #[tokio::test] #[serial] async fn test_run_{{module_name}}_worker() { - let 
boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); // Execute the worker ensuring that it operates in 'ForegroundBlocking' mode, which prevents the addition of your worker to the background assert!( diff --git a/loco-gen/src/templates/worker.t b/loco-gen/src/templates/worker/worker.t similarity index 100% rename from loco-gen/src/templates/worker.t rename to loco-gen/src/templates/worker/worker.t diff --git a/loco-new/Cargo.toml b/loco-new/Cargo.toml index f4729930a..73b269a2a 100644 --- a/loco-new/Cargo.toml +++ b/loco-new/Cargo.toml @@ -8,6 +8,7 @@ description = "Loco new app generator" license = "Apache-2.0" homepage = "https://docs.rs/loco" documentation = "https://docs.rs/loco" +readme = "README.md" authors = ["Dotan Nahum ", "Elad Kaplan "] include = ["src/**", "base_template/**", "Cargo.toml", "setup.rhai"] diff --git a/loco-new/README.md b/loco-new/README.md new file mode 100644 index 000000000..448516861 --- /dev/null +++ b/loco-new/README.md @@ -0,0 +1,37 @@ +# Loco CLI + +Loco is a powerful framework designed to streamline the development of modern web applications with a focus on ease of use and flexibility. Whether you're building a SaaS app, a REST API, or a minimal service, Loco provides the tools you need to get started quickly and scale as your application grows. With built-in configuration for popular databases, background workers, and asset serving options, Loco gives you the power to customize your project to fit your needs. + +## Versatile Template Options +Loco empowers you to tailor your project to fit a variety of needs. Here are some of the versatile options it offers: + +### Application Types +* **SaaS Applications:** Create platforms with features like user authentication, database integration, and scalable background processing. +* **REST APIs:** Build robust APIs with database support, authentication, and modular controllers. +* **Lightweight Services:** Focus on simplicity with minimal setups that include only essential controllers and views. + +#### Advanced Customization +Loco is designed to offer advanced customization to meet the unique needs of your project. Whether you need a simple app that can evolve over time or a complex application that requires a specific configuration, Loco provides the flexibility to fine-tune your setup. + + +## Getting Started + +To install the Loco CLI on your machine, simply run the following command in your terminal: +```sh +cargo install loco +``` +This will install the latest version of Loco globally, making it accessible from anywhere in your terminal. + +## Create a New Project +Once installed, you can create a new Loco project by running the following command: +```sh +loco new +``` +This will launch a wizard that guides you through setting up your project. + +## Upgrade +The Loco CLI is bundled with the Loco framework crate version. To ensure you're using the latest version of Loco and to get the most up-to-date templates, simply run the following command: +```sh +cargo install loco +``` +This will update the Loco CLI to the latest version, replacing the existing loco binary with the newest release. After upgrading, any new templates you generate will reflect the latest features and improvements.
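For context on the new `loco-new` README added above, a typical end-to-end flow with the generator might look like the sketch below. This is illustrative only: the app name `myapp` stands in for whatever you answer in the wizard, `cargo loco start` is the usual command for booting a generated Loco app, and some wizard choices may additionally require a running database.

```sh
# Install the generator, scaffold a project, and run it locally.
cargo install loco      # installs the `loco` binary
loco new                # answer the wizard prompts; assume the app is named `myapp`
cd myapp
cargo loco start        # serves on the configured port (5150 by default)
```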
\ No newline at end of file diff --git a/loco-new/base_template/Cargo.toml.t b/loco-new/base_template/Cargo.toml.t index 94358209f..b54aae8d7 100644 --- a/loco-new/base_template/Cargo.toml.t +++ b/loco-new/base_template/Cargo.toml.t @@ -38,7 +38,7 @@ sea-orm = { version = "1.1.0", features = [ "macros", ] } chrono = "0.4" -validator = { version = "0.18" } +validator = { version = "0.19" } uuid = { version = "1.6.0", features = ["v4"] } {%- endif %} diff --git a/loco-new/base_template/config/development.yaml.t b/loco-new/base_template/config/development.yaml.t index cd1bf250d..682613ad8 100644 --- a/loco-new/base_template/config/development.yaml.t +++ b/loco-new/base_template/config/development.yaml.t @@ -7,7 +7,7 @@ logger: # Enable pretty backtrace (sets RUST_BACKTRACE=1) pretty_backtrace: true # Log level, options: trace, debug, info, warn or error. - level: debug + level: {{ get_env(name="LOG_LEVEL", default="debug") }} # Define the logging format. options: compact, pretty or json format: compact # By default the logger has filtering only logs that came from your code or logs that came from `loco` framework. to see all third party libraries @@ -17,13 +17,15 @@ logger: # Web server configuration server: # Port on which the server will listen. the server binding is 0.0.0.0:{PORT} - port: 5150 + port: {{ get_env(name="PORT", default="5150") }} + # Binding for the server (which interface to bind to) + binding: {{ get_env(name="BINDING", default="localhost") }} # The UI hostname or IP address that mailers will point to. host: http://localhost # Out of the box middleware configuration. to disable middleware you can changed the `enable` field to `false` of comment the middleware block middlewares: - {%- if settings.asset %} - {%- if settings.asset.kind == "server" %} + {%- if settings.asset %} + {%- if settings.asset.kind == "server" %} static: enable: true must_exist: true @@ -32,7 +34,7 @@ server: uri: "/static" path: "assets/static" fallback: "assets/static/404.html" - {%- elif settings.asset.kind == "client" %} + {%- elif settings.asset.kind == "client" %} static: enable: true must_exist: true @@ -42,9 +44,9 @@ server: path: "frontend/dist" fallback: "frontend/dist/index.html" {%- endif -%} - + {%- endif -%} - + {%- if settings.background%} # Worker Configuration @@ -83,6 +85,8 @@ mailer: # auth: # user: # password: + # Override the SMTP hello name (default is the machine's hostname) + # hello_name: {%- endif %} # Initializers Configuration @@ -99,7 +103,7 @@ database: # Database connection URI uri: {% raw %}{{{% endraw %} get_env(name="DATABASE_URL", default="{{settings.db.endpoint | replace(from='NAME', to=settings.package_name) | replace(from='ENV', to='development')}}") {% raw %}}}{% endraw %} # When enabled, the sql query will be logged. - enable_logging: false + enable_logging: {{ get_env(name="DB_LOGGING", default="false") }} # Set the timeout duration when acquiring a connection. connect_timeout: {% raw %}{{{% endraw %} get_env(name="DB_CONNECT_TIMEOUT", default="500") {% raw %}}}{% endraw %} # Set the idle duration before closing a connection. 
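Because the generated `config/development.yaml` now resolves the log level, port, binding, and SQL-logging flag through `get_env`, these settings can be overridden per environment without editing the file. A minimal sketch, assuming a generated app in the current working directory; the concrete values are arbitrary examples:

```sh
# Override the templated defaults at runtime via environment variables.
LOG_LEVEL=warn \
PORT=8080 \
BINDING=0.0.0.0 \
DB_LOGGING=true \
cargo loco start
```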
diff --git a/loco-new/base_template/tests/models/users.rs.t b/loco-new/base_template/tests/models/users.rs.t index e569a1c4c..7ace87eca 100644 --- a/loco-new/base_template/tests/models/users.rs.t +++ b/loco-new/base_template/tests/models/users.rs.t @@ -1,5 +1,5 @@ use insta::assert_debug_snapshot; -use loco_rs::{model::ModelError, testing}; +use loco_rs::{model::ModelError, testing::prelude::*}; use {{settings.module_name}}::{ app::App, models::users::{self, Model, RegisterParams}, @@ -21,7 +21,7 @@ macro_rules! configure_insta { async fn test_can_validate_model() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); let res = users::ActiveModel { name: ActiveValue::set("1".to_string()), @@ -39,7 +39,7 @@ async fn test_can_validate_model() { async fn can_create_with_password() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); + let boot = boot_test::().await.unwrap(); let params = RegisterParams { email: "test@framework.com".to_string(), @@ -49,7 +49,7 @@ async fn can_create_with_password() { let res = Model::create_with_password(&boot.app_context.db, ¶ms).await; insta::with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!(res); }); @@ -60,8 +60,8 @@ async fn can_create_with_password() { async fn handle_create_with_password_with_duplicate() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let new_user: Result = Model::create_with_password( &boot.app_context.db, @@ -80,8 +80,8 @@ async fn handle_create_with_password_with_duplicate() { async fn can_find_by_email() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let existing_user = Model::find_by_email(&boot.app_context.db, "user1@example.com").await; let non_existing_user_results = @@ -96,8 +96,8 @@ async fn can_find_by_email() { async fn can_find_by_pid() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let existing_user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111").await; @@ -113,8 +113,8 @@ async fn can_find_by_pid() { async fn can_verification_token() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") .await @@ -142,8 +142,8 @@ async fn can_verification_token() { async fn can_set_forgot_password_sent() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") .await @@ -171,8 +171,8 @@ async fn can_set_forgot_password_sent() { async fn can_verified() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - 
testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") .await @@ -198,8 +198,8 @@ async fn can_verified() { async fn can_reset_password() { configure_insta!(); - let boot = testing::boot_test::().await.unwrap(); - testing::seed::(&boot.app_context.db).await.unwrap(); + let boot = boot_test::().await.unwrap(); + seed::(&boot.app_context.db).await.unwrap(); let user = Model::find_by_pid(&boot.app_context.db, "11111111-1111-1111-1111-111111111111") .await diff --git a/loco-new/base_template/tests/requests/auth.rs.t b/loco-new/base_template/tests/requests/auth.rs.t index fbc2d2875..c7e37da7d 100644 --- a/loco-new/base_template/tests/requests/auth.rs.t +++ b/loco-new/base_template/tests/requests/auth.rs.t @@ -1,5 +1,5 @@ use insta::{assert_debug_snapshot, with_settings}; -use loco_rs::testing; +use loco_rs::testing::prelude::*; use {{settings.module_name}}::{app::App, models::users}; use rstest::rstest; use serial_test::serial; @@ -22,7 +22,7 @@ macro_rules! configure_insta { async fn can_register() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let email = "test@loco.com"; let payload = serde_json::json!({ "name": "loco", @@ -34,13 +34,13 @@ async fn can_register() { let saved_user = users::Model::find_by_email(&ctx.db, email).await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!(saved_user); }); with_settings!({ - filters => testing::cleanup_email() + filters => cleanup_email() }, { assert_debug_snapshot!(ctx.mailer.unwrap().deliveries()); }); @@ -56,7 +56,7 @@ async fn can_register() { async fn can_login_with_verify(#[case] test_name: &str, #[case] password: &str) { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let email = "test@loco.com"; let register_payload = serde_json::json!({ "name": "loco", @@ -93,7 +93,7 @@ async fn can_login_with_verify(#[case] test_name: &str, #[case] password: &str) .is_some()); with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!(test_name, (response.status_code(), response.text())); }); @@ -106,7 +106,7 @@ async fn can_login_with_verify(#[case] test_name: &str, #[case] password: &str) async fn can_login_without_verify() { configure_insta!(); - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let email = "test@loco.com"; let password = "12341234"; let register_payload = serde_json::json!({ @@ -131,7 +131,7 @@ async fn can_login_without_verify() { .await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!((response.status_code(), response.text())); }); @@ -144,7 +144,7 @@ async fn can_login_without_verify() { async fn can_reset_password() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let login_data = prepare_data::init_user_login(&request, &ctx).await; let forgot_payload = serde_json::json!({ @@ -186,7 +186,7 @@ async fn can_reset_password() { assert_eq!(response.status_code(), 200); with_settings!({ - filters => testing::cleanup_email() + filters => cleanup_email() }, { assert_debug_snapshot!(ctx.mailer.unwrap().deliveries()); }); @@ -199,7 +199,7 
@@ async fn can_reset_password() { async fn can_get_current_user() { configure_insta!(); - testing::request::(|request, ctx| async move { + request::(|request, ctx| async move { let user = prepare_data::init_user_login(&request, &ctx).await; let (auth_key, auth_value) = prepare_data::auth_header(&user.token); @@ -209,7 +209,7 @@ async fn can_get_current_user() { .await; with_settings!({ - filters => testing::cleanup_user_model() + filters => cleanup_user_model() }, { assert_debug_snapshot!((response.status_code(), response.text())); }); diff --git a/loco-new/base_template/tests/requests/home.rs.t b/loco-new/base_template/tests/requests/home.rs.t index 492acc632..fc33ebe8f 100644 --- a/loco-new/base_template/tests/requests/home.rs.t +++ b/loco-new/base_template/tests/requests/home.rs.t @@ -1,4 +1,4 @@ -use loco_rs::testing; +use loco_rs::testing::prelude::*; use {{settings.module_name}}::app::App; use serial_test::serial; @@ -6,7 +6,7 @@ use serial_test::serial; #[serial] async fn can_get_home() { - testing::request::(|request, _ctx| async move { + request::(|request, _ctx| async move { let res = request.get("/api").await; assert_eq!(res.status_code(), 200); diff --git a/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Postgres.snap b/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Postgres.snap index 1049a56a8..859cb0fdf 100644 --- a/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Postgres.snap +++ b/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Postgres.snap @@ -2,4 +2,4 @@ source: loco-new/tests/templates/db.rs expression: "content.get(\"dependencies\").unwrap()" --- -{ async-trait = "0.1.74", axum = "0.7.5", chrono = "0.4", serde_json = "1", tracing = "0.1.40", loco-rs = { workspace = true }, migration = { path = "migration" }, sea-orm = { features = ["sqlx-sqlite", "sqlx-postgres", "runtime-tokio-rustls", "macros"], version = "1.1.0" }, serde = { features = ["derive"], version = "1" }, tokio = { default-features = false, features = ["rt-multi-thread"], version = "1.33.0" }, tracing-subscriber = { features = ["env-filter", "json"], version = "0.3.17" }, uuid = { features = ["v4"], version = "1.6.0" }, validator = { version = "0.18" } } +{ async-trait = "0.1.74", axum = "0.7.5", chrono = "0.4", serde_json = "1", tracing = "0.1.40", loco-rs = { workspace = true }, migration = { path = "migration" }, sea-orm = { features = ["sqlx-sqlite", "sqlx-postgres", "runtime-tokio-rustls", "macros"], version = "1.1.0" }, serde = { features = ["derive"], version = "1" }, tokio = { default-features = false, features = ["rt-multi-thread"], version = "1.33.0" }, tracing-subscriber = { features = ["env-filter", "json"], version = "0.3.17" }, uuid = { features = ["v4"], version = "1.6.0" }, validator = { version = "0.19" } } diff --git a/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Sqlite.snap b/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Sqlite.snap index 1049a56a8..859cb0fdf 100644 --- a/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Sqlite.snap +++ b/loco-new/tests/templates/snapshots/r#mod__templates__db__cargo_dependencies_Sqlite.snap @@ -2,4 +2,4 @@ source: loco-new/tests/templates/db.rs expression: "content.get(\"dependencies\").unwrap()" --- -{ async-trait = "0.1.74", axum = "0.7.5", chrono = "0.4", serde_json = "1", tracing = "0.1.40", loco-rs = { workspace = true }, migration = { path = 
"migration" }, sea-orm = { features = ["sqlx-sqlite", "sqlx-postgres", "runtime-tokio-rustls", "macros"], version = "1.1.0" }, serde = { features = ["derive"], version = "1" }, tokio = { default-features = false, features = ["rt-multi-thread"], version = "1.33.0" }, tracing-subscriber = { features = ["env-filter", "json"], version = "0.3.17" }, uuid = { features = ["v4"], version = "1.6.0" }, validator = { version = "0.18" } } +{ async-trait = "0.1.74", axum = "0.7.5", chrono = "0.4", serde_json = "1", tracing = "0.1.40", loco-rs = { workspace = true }, migration = { path = "migration" }, sea-orm = { features = ["sqlx-sqlite", "sqlx-postgres", "runtime-tokio-rustls", "macros"], version = "1.1.0" }, serde = { features = ["derive"], version = "1" }, tokio = { default-features = false, features = ["rt-multi-thread"], version = "1.33.0" }, tracing-subscriber = { features = ["env-filter", "json"], version = "0.3.17" }, uuid = { features = ["v4"], version = "1.6.0" }, validator = { version = "0.19" } } diff --git a/snipdoc.yml b/snipdoc.yml index 183145382..a793cb854 100644 --- a/snipdoc.yml +++ b/snipdoc.yml @@ -138,3 +138,9 @@ snippets: cli-middleware-list: content: cargo loco middleware --config path: ./snipdoc.yml + jobs-help-command: + content: cd ./examples/demo && cargo loco jobs --help + path: ./snipdoc.yml + seed-help-command: + content: cd ./examples/demo && cargo loco db seed --help + path: ./snipdoc.yml diff --git a/src/bgworker/mod.rs b/src/bgworker/mod.rs index c5a765eda..be6b43b92 100644 --- a/src/bgworker/mod.rs +++ b/src/bgworker/mod.rs @@ -1,8 +1,15 @@ -use std::sync::Arc; +use std::{ + fs::File, + io::Write, + path::{Path, PathBuf}, + sync::Arc, +}; use async_trait::async_trait; -use serde::Serialize; -use tracing::{debug, error}; +#[cfg(feature = "cli")] +use clap::ValueEnum; +use serde::{Deserialize, Serialize}; +use serde_variant::to_variant_name; #[cfg(feature = "bg_pg")] pub mod pg; #[cfg(feature = "bg_redis")] @@ -19,6 +26,42 @@ use crate::{ Error, Result, }; +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] +#[cfg_attr(feature = "cli", derive(ValueEnum))] +pub enum JobStatus { + #[serde(rename = "queued")] + Queued, + #[serde(rename = "processing")] + Processing, + #[serde(rename = "completed")] + Completed, + #[serde(rename = "failed")] + Failed, + #[serde(rename = "cancelled")] + Cancelled, +} + +impl std::str::FromStr for JobStatus { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "queued" => Ok(Self::Queued), + "processing" => Ok(Self::Processing), + "completed" => Ok(Self::Completed), + "failed" => Ok(Self::Failed), + "cancelled" => Ok(Self::Cancelled), + _ => Err(format!("Invalid status: {s}")), + } + } +} + +impl std::fmt::Display for JobStatus { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + to_variant_name(self).expect("only enum supported").fmt(f) + } +} + // Queue struct now holds both a QueueProvider and QueueRegistrar pub enum Queue { #[cfg(feature = "bg_redis")] @@ -30,13 +73,13 @@ pub enum Queue { #[cfg(feature = "bg_pg")] Postgres( pg::PgPool, - std::sync::Arc>, + std::sync::Arc>, pg::RunOpts, ), #[cfg(feature = "bg_sqlt")] Sqlite( sqlt::SqlitePool, - std::sync::Arc>, + std::sync::Arc>, sqlt::RunOpts, ), None, @@ -55,7 +98,7 @@ impl Queue { queue: Option, args: A, ) -> Result<()> { - debug!(worker = class, "job enqueue"); + tracing::debug!(worker = class, "job enqueue"); match self { #[cfg(feature = "bg_redis")] Self::Redis(pool, _, _) => { @@ -103,7 +146,7 @@ impl Queue { &self, worker: W, 
) -> Result<()> { - debug!(worker = W::class_name(), "register worker"); + tracing::debug!(worker = W::class_name(), "register worker"); match self { #[cfg(feature = "bg_redis")] Self::Redis(_, p, _) => { @@ -131,7 +174,7 @@ impl Queue { /// /// This function will return an error if fails pub async fn run(&self) -> Result<()> { - debug!("running background jobs"); + tracing::debug!("running background jobs"); match self { #[cfg(feature = "bg_redis")] Self::Redis(_, p, _) => { @@ -154,7 +197,7 @@ impl Queue { } } _ => { - error!( + tracing::error!( "no queue provider is configured: compile with at least one queue provider \ feature" ); @@ -169,7 +212,7 @@ impl Queue { /// /// This function will return an error if fails pub async fn setup(&self) -> Result<()> { - debug!("workers setup"); + tracing::debug!("workers setup"); match self { #[cfg(feature = "bg_redis")] Self::Redis(_, _, _) => {} @@ -192,7 +235,7 @@ impl Queue { /// /// This function will return an error if fails pub async fn clear(&self) -> Result<()> { - debug!("clearing job queues"); + tracing::debug!("clearing job"); match self { #[cfg(feature = "bg_redis")] Self::Redis(pool, _, _) => { @@ -217,7 +260,7 @@ impl Queue { /// /// This function will return an error if fails pub async fn ping(&self) -> Result<()> { - debug!("job queue ping requested"); + tracing::debug!("job queue ping requested"); match self { #[cfg(feature = "bg_redis")] Self::Redis(pool, _, _) => { @@ -254,7 +297,7 @@ impl Queue { /// Does not currently return an error, but the postgres or other future /// queue implementations might, so using Result here as return type. pub fn shutdown(&self) -> Result<()> { - println!("waiting for running jobs to finish..."); + tracing::debug!("waiting for running jobs to finish..."); match self { #[cfg(feature = "bg_redis")] Self::Redis(_, _, cancellation_token) => cancellation_token.cancel(), @@ -263,6 +306,229 @@ impl Queue { Ok(()) } + + async fn get_jobs( + &self, + status: Option<&Vec>, + age_days: Option, + ) -> Result { + tracing::debug!(status = ?status, age_days = ?age_days, "getting jobs"); + let jobs = match self { + #[cfg(feature = "bg_pg")] + Self::Postgres(pool, _, _) => { + let jobs = pg::get_jobs(pool, status, age_days) + .await + .map_err(Box::from)?; + serde_json::to_value(jobs)? + } + #[cfg(feature = "bg_sqlt")] + Self::Sqlite(pool, _, _) => { + let jobs = sqlt::get_jobs(pool, status, age_days) + .await + .map_err(Box::from)?; + + serde_json::to_value(jobs)? + } + #[cfg(feature = "bg_redis")] + Self::Redis(_, _, _) => { + tracing::error!("getting jobs for redis provider not implemented"); + return Err(Error::string( + "getting jobs not supported for redis provider", + )); + } + Self::None => { + tracing::error!( + "no queue provider is configured: compile with at least one queue provider \ + feature" + ); + return Err(Error::string("provider not configure")); + } + }; + + Ok(jobs) + } + + /// Cancels jobs based on the given job name for the configured queue provider. + /// + /// # Errors + /// - If no queue provider is configured, it will return an error indicating the lack of configuration. + /// - If the Redis provider is selected, it will return an error stating that cancellation is not supported. + /// - Any error in the underlying provider's cancellation logic will propagate from the respective function. 
+ /// + pub async fn cancel_jobs(&self, job_name: &str) -> Result<()> { + tracing::debug!(job_name = ?job_name, "cancel jobs"); + + match self { + #[cfg(feature = "bg_pg")] + Self::Postgres(pool, _, _) => pg::cancel_jobs_by_name(pool, job_name).await, + #[cfg(feature = "bg_sqlt")] + Self::Sqlite(pool, _, _) => sqlt::cancel_jobs_by_name(pool, job_name).await, + #[cfg(feature = "bg_redis")] + Self::Redis(_, _, _) => { + tracing::error!("canceling jobs for redis provider not implemented"); + Err(Error::string( + "canceling jobs not supported for redis provider", + )) + } + Self::None => { + tracing::error!( + "no queue provider is configured: compile with at least one queue provider \ + feature" + ); + Err(Error::string("provider not configure")) + } + } + } + + /// Clears jobs older than a specified number of days for the configured queue provider. + /// + /// # Errors + /// - If no queue provider is configured, it will return an error indicating the lack of configuration. + /// - If the Redis provider is selected, it will return an error stating that clearing jobs is not supported. + /// - Any error in the underlying provider's job clearing logic will propagate from the respective function. + /// + pub async fn clear_jobs_older_than( + &self, + age_days: i64, + status: &Vec, + ) -> Result<()> { + tracing::debug!(age_days = age_days, status = ?status, "cancel jobs with age"); + + match self { + #[cfg(feature = "bg_pg")] + Self::Postgres(pool, _, _) => { + pg::clear_jobs_older_than(pool, age_days, Some(status)).await + } + #[cfg(feature = "bg_sqlt")] + Self::Sqlite(pool, _, _) => { + sqlt::clear_jobs_older_than(pool, age_days, Some(status)).await + } + #[cfg(feature = "bg_redis")] + Self::Redis(_, _, _) => { + tracing::error!("clear jobs for redis provider not implemented"); + Err(Error::string("clear jobs not supported for redis provider")) + } + Self::None => { + tracing::error!( + "no queue provider is configured: compile with at least one queue provider \ + feature" + ); + Err(Error::string("provider not configure")) + } + } + } + + /// Clears jobs based on their status for the configured queue provider. + /// + /// # Errors + /// - If no queue provider is configured, it will return an error indicating the lack of configuration. + /// - If the Redis provider is selected, it will return an error stating that clearing jobs is not supported. + /// - Any error in the underlying provider's job clearing logic will propagate from the respective function. + pub async fn clear_by_status(&self, status: Vec) -> Result<()> { + tracing::debug!(status = ?status, "clear jobs by status"); + match self { + #[cfg(feature = "bg_pg")] + Self::Postgres(pool, _, _) => pg::clear_by_status(pool, status).await, + #[cfg(feature = "bg_sqlt")] + Self::Sqlite(pool, _, _) => sqlt::clear_by_status(pool, status).await, + #[cfg(feature = "bg_redis")] + Self::Redis(_, _, _) => { + tracing::error!("clear jobs for redis provider not implemented"); + Err(Error::string("clear jobs not supported for redis provider")) + } + Self::None => { + tracing::error!( + "no queue provider is configured: compile with at least one queue provider \ + feature" + ); + Err(Error::string("provider not configure")) + } + } + } + + /// Dumps the list of jobs to a YAML file at the specified path. + /// + /// This function retrieves jobs from the queue, optionally filtered by their status, and + /// writes the job data to a YAML file. + /// + /// # Errors + /// - If the specified path cannot be created, an error will be returned. 
+ /// - If the job retrieval or YAML serialization fails, an error will be returned. + /// - If there is an issue creating the dump file, an error will be returned + pub async fn dump( + &self, + path: &Path, + status: Option<&Vec>, + age_days: Option, + ) -> Result { + tracing::debug!(path = %path.display(), status = ?status, age_days = ?age_days, "dumping jobs"); + + if !path.exists() { + tracing::debug!(path = %path.display(), "folder not exists, creating..."); + std::fs::create_dir_all(path)?; + } + + let dump_file = path.join(format!( + "loco-dump-jobs-{}.yaml", + chrono::Utc::now().format("%Y-%m-%d-%H-%M-%S") + )); + + let jobs = self.get_jobs(status, age_days).await?; + + let data = serde_yaml::to_string(&jobs)?; + let mut file = File::create(&dump_file)?; + file.write_all(data.as_bytes())?; + + Ok(dump_file) + } + + /// Imports jobs from a YAML file into the configured queue provider. + /// + /// This function reads job data from a YAML file located at the specified `path` and imports + /// the jobs into the queue. + /// + /// # Errors + /// - If there is an issue opening or reading the YAML file, an error will be returned. + /// - If the queue provider is Redis or none, an error will be returned indicating the lack of support. + /// - If any issues occur while enqueuing the jobs, the function will return an error. + /// + pub async fn import(&self, path: &Path) -> Result<()> { + tracing::debug!(path = %path.display(), "import jobs"); + + match &self { + #[cfg(feature = "bg_pg")] + Self::Postgres(_, _, _) => { + let jobs: Vec = serde_yaml::from_reader(File::open(path)?)?; + for job in jobs { + self.enqueue(job.name.to_string(), None, job.data).await?; + } + + Ok(()) + } + #[cfg(feature = "bg_sqlt")] + Self::Sqlite(_, _, _) => { + let jobs: Vec = serde_yaml::from_reader(File::open(path)?)?; + for job in jobs { + self.enqueue(job.name.to_string(), None, job.data).await?; + } + Ok(()) + } + #[cfg(feature = "bg_redis")] + Self::Redis(_, _, _) => { + tracing::error!("import jobs for redis provider not implemented"); + Err(Error::string( + "getting jobs not supported for redis provider", + )) + } + Self::None => { + tracing::error!( + "no queue provider is configured: compile with at least one queue provider \ + feature" + ); + Err(Error::string("provider not configure")) + } + } + } } #[async_trait] @@ -294,7 +560,7 @@ pub trait BackgroundWorker: Send + if let Some(p) = &ctx.queue_provider { p.enqueue(Self::class_name(), Self::queue(), args).await?; } else { - error!( + tracing::error!( "perform_later: background queue is selected, but queue was not populated \ in context" ); @@ -307,7 +573,7 @@ pub trait BackgroundWorker: Send + let dx = ctx.clone(); tokio::spawn(async move { if let Err(err) = Self::build(&dx).perform(args).await { - error!(err = err.to_string(), "worker failed to perform job"); + tracing::error!(err = err.to_string(), "worker failed to perform job"); } }); } @@ -399,3 +665,104 @@ pub async fn create_queue_provider(config: &Config) -> Result> Ok(None) } } + +#[cfg(test)] +mod tests { + + use std::path::Path; + + use insta::assert_debug_snapshot; + + use super::*; + use crate::tests_cfg; + + fn sqlite_config(db_path: &Path) -> SqliteQueueConfig { + SqliteQueueConfig { + uri: format!( + "sqlite://{}?mode=rwc", + db_path.join("sample.sqlite").display() + ), + dangerously_flush: false, + enable_logging: false, + max_connections: 1, + min_connections: 1, + connect_timeout: 500, + idle_timeout: 500, + poll_interval_sec: 1, + num_workers: 1, + } + } + + #[tokio::test] + async 
fn can_dump_jobs() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let qcfg = sqlite_config(tree_fs.root.as_path()); + let queue = sqlt::create_provider(&qcfg) + .await + .expect("create sqlite queue"); + + let pool = sqlx::SqlitePool::connect(&qcfg.uri) + .await + .expect("connect to sqlite db"); + + queue.setup().await.expect("setup sqlite db"); + tests_cfg::queue::sqlite_seed_data(&pool).await; + + let dump_file = queue + .dump( + tree_fs.root.as_path(), + Some(&vec![JobStatus::Failed, JobStatus::Cancelled]), + None, + ) + .await + .expect("dump jobs"); + + assert_debug_snapshot!(std::fs::read_to_string(dump_file)); + } + + #[tokio::test] + async fn cat_import_jobs_form_file() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let qcfg = sqlite_config(tree_fs.root.as_path()); + let queue = sqlt::create_provider(&qcfg) + .await + .expect("create sqlite queue"); + + let pool = sqlx::SqlitePool::connect(&qcfg.uri) + .await + .expect("connect to sqlite db"); + + queue.setup().await.expect("setup sqlite db"); + + let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM sqlt_loco_queue") + .fetch_one(&pool) + .await + .unwrap(); + + assert_eq!(count, 0); + + queue + .import( + PathBuf::from("tests") + .join("fixtures") + .join("queue") + .join("jobs.yaml") + .as_path(), + ) + .await + .expect("dump import"); + + let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM sqlt_loco_queue") + .fetch_one(&pool) + .await + .unwrap(); + + assert_eq!(count, 14); + } +} diff --git a/src/bgworker/pg.rs b/src/bgworker/pg.rs index 786acbd90..f2dd888a2 100644 --- a/src/bgworker/pg.rs +++ b/src/bgworker/pg.rs @@ -13,38 +13,39 @@ use tokio::{task::JoinHandle, time::sleep}; use tracing::{debug, error, trace}; use ulid::Ulid; -use super::{BackgroundWorker, Queue}; +use super::{BackgroundWorker, JobStatus, Queue}; use crate::{config::PostgresQueueConfig, Error, Result}; -type TaskId = String; -type TaskData = JsonValue; -type TaskStatus = String; +type JobId = String; +type JobData = JsonValue; -type TaskHandler = Box< +type JobHandler = Box< dyn Fn( - TaskId, - TaskData, + JobId, + JobData, ) -> Pin> + Send>> + Send + Sync, >; -#[derive(Debug, Deserialize, Serialize)] -struct Task { - pub id: TaskId, +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct Job { + pub id: JobId, pub name: String, - #[allow(clippy::struct_field_names)] - pub task_data: TaskData, - pub status: TaskStatus, + #[serde(rename = "task_data")] + pub data: JobData, + pub status: JobStatus, pub run_at: DateTime, pub interval: Option, + pub created_at: Option>, + pub updated_at: Option>, } -pub struct TaskRegistry { - handlers: Arc>, +pub struct JobRegistry { + handlers: Arc>, } -impl TaskRegistry { - /// Creates a new `TaskRegistry`. +impl JobRegistry { + /// Creates a new `JobRegistry`. #[must_use] pub fn new() -> Self { Self { @@ -52,7 +53,7 @@ impl TaskRegistry { } } - /// Registers a task handler with the provided name. + /// Registers a job handler with the provided name. 
/// # Errors /// Fails if cannot register worker pub fn register_worker(&mut self, name: String, worker: W) -> Result<()> @@ -62,11 +63,11 @@ impl TaskRegistry { for<'de> Args: Deserialize<'de>, { let worker = Arc::new(worker); - let wrapped_handler = move |_task_id: String, task_data: TaskData| { + let wrapped_handler = move |_job_id: String, job_data: JobData| { let w = worker.clone(); Box::pin(async move { - let args = serde_json::from_value::(task_data); + let args = serde_json::from_value::(job_data); match args { Ok(args) => w.perform(args).await, Err(err) => Err(err.into()), @@ -80,30 +81,30 @@ impl TaskRegistry { Ok(()) } - /// Returns a reference to the task handlers. + /// Returns a reference to the job handlers. #[must_use] - pub fn handlers(&self) -> &Arc> { + pub fn handlers(&self) -> &Arc> { &self.handlers } - /// Runs the task handlers with the provided number of workers. + /// Runs the job handlers with the provided number of workers. #[must_use] pub fn run(&self, pool: &PgPool, opts: &RunOpts) -> Vec> { - let mut tasks = Vec::new(); + let mut jobs = Vec::new(); let interval = opts.poll_interval_sec; for idx in 0..opts.num_workers { let handlers = self.handlers.clone(); let pool = pool.clone(); - let task = tokio::spawn(async move { + let job = tokio::spawn(async move { loop { trace!( pool_conns = pool.num_idle(), worker_num = idx, "pg workers stats" ); - let task_opt = match dequeue(&pool).await { + let job_opt = match dequeue(&pool).await { Ok(t) => t, Err(err) => { error!(err = err.to_string(), "cannot fetch from queue"); @@ -111,33 +112,33 @@ impl TaskRegistry { } }; - if let Some(task) = task_opt { - debug!(task_id = task.id, name = task.name, "working on task"); - if let Some(handler) = handlers.get(&task.name) { - match handler(task.id.clone(), task.task_data.clone()).await { + if let Some(job) = job_opt { + debug!(job_id = job.id, name = job.name, "working on job"); + if let Some(handler) = handlers.get(&job.name) { + match handler(job.id.clone(), job.data.clone()).await { Ok(()) => { if let Err(err) = - complete_task(&pool, &task.id, task.interval).await + complete_job(&pool, &job.id, job.interval).await { error!( err = err.to_string(), - task = ?task, - "cannot complete task" + job = ?job, + "cannot complete job" ); } } Err(err) => { - if let Err(err) = fail_task(&pool, &task.id, &err).await { + if let Err(err) = fail_job(&pool, &job.id, &err).await { error!( err = err.to_string(), - task = ?task, - "cannot fail task" + job = ?job, + "cannot fail job" ); } } } } else { - error!(task = task.name, "no handler found for task"); + error!(job = job.name, "no handler found for job"); } } else { sleep(Duration::from_secs(interval.into())).await; @@ -145,14 +146,14 @@ impl TaskRegistry { } }); - tasks.push(task); + jobs.push(job); } - tasks + jobs } } -impl Default for TaskRegistry { +impl Default for JobRegistry { fn default() -> Self { Self::new() } @@ -173,33 +174,34 @@ async fn connect(cfg: &PostgresQueueConfig) -> Result { Ok(pool) } -/// Initialize task tables +/// Initialize job tables /// /// # Errors /// /// This function will return an error if it fails pub async fn initialize_database(pool: &PgPool) -> Result<()> { debug!("pg worker: initialize database"); - sqlx::raw_sql( + sqlx::raw_sql(&format!( r" CREATE TABLE IF NOT EXISTS pg_loco_queue ( id VARCHAR NOT NULL, name VARCHAR NOT NULL, task_data JSONB NOT NULL, - status VARCHAR NOT NULL DEFAULT 'queued', + status VARCHAR NOT NULL DEFAULT '{}', run_at TIMESTAMPTZ NOT NULL, interval BIGINT, created_at 
TIMESTAMPTZ NOT NULL DEFAULT NOW(), updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() ); ", - ) + JobStatus::Queued + )) .execute(pool) .await?; Ok(()) } -/// Add a task +/// Add a job /// /// # Errors /// @@ -207,11 +209,11 @@ pub async fn initialize_database(pool: &PgPool) -> Result<()> { pub async fn enqueue( pool: &PgPool, name: &str, - task_data: TaskData, + data: JobData, run_at: DateTime, interval: Option, -) -> Result { - let task_data_json = serde_json::to_value(task_data)?; +) -> Result { + let data_json = serde_json::to_value(data)?; #[allow(clippy::cast_possible_truncation)] let interval_ms: Option = interval.map(|i| i.as_millis() as i64); @@ -222,7 +224,7 @@ pub async fn enqueue( $4, $5)", ) .bind(id.clone()) - .bind(task_data_json) + .bind(data_json) .bind(name) .bind(run_at) .bind(interval_ms) @@ -231,90 +233,164 @@ pub async fn enqueue( Ok(id) } -async fn dequeue(client: &PgPool) -> Result> { +async fn dequeue(client: &PgPool) -> Result> { let mut tx = client.begin().await?; let row = sqlx::query( "SELECT id, name, task_data, status, run_at, interval FROM pg_loco_queue WHERE status = \ - 'queued' AND run_at <= NOW() ORDER BY run_at LIMIT 1 FOR UPDATE SKIP LOCKED", + $1 AND run_at <= NOW() ORDER BY run_at LIMIT 1 FOR UPDATE SKIP LOCKED", ) - // avoid using FromRow because it requires the 'macros' feature, which nothing - // in our dep tree uses, so it'll create smaller, faster builds if we do this manually - .map(|row: PgRow| Task { - id: row.get("id"), - name: row.get("name"), - task_data: row.get("task_data"), - status: row.get("status"), - run_at: row.get("run_at"), - interval: row.get("interval"), - }) + .bind(JobStatus::Queued.to_string()) + .map(|row: PgRow| to_job(&row).ok()) .fetch_optional(&mut *tx) - .await?; + .await? + .flatten(); - if let Some(task) = row { - sqlx::query( - "UPDATE pg_loco_queue SET status = 'processing', updated_at = NOW() WHERE id = $1", - ) - .bind(&task.id) - .execute(&mut *tx) - .await?; + if let Some(job) = row { + sqlx::query("UPDATE pg_loco_queue SET status = $1, updated_at = NOW() WHERE id = $2") + .bind(JobStatus::Processing.to_string()) + .bind(&job.id) + .execute(&mut *tx) + .await?; tx.commit().await?; - Ok(Some(task)) + Ok(Some(job)) } else { Ok(None) } } -async fn complete_task(pool: &PgPool, task_id: &TaskId, interval_ms: Option) -> Result<()> { - if let Some(interval_ms) = interval_ms { - let next_run_at = Utc::now() + chrono::Duration::milliseconds(interval_ms); - sqlx::query( - "UPDATE pg_loco_queue SET status = 'queued', updated_at = NOW(), run_at = $1 WHERE id \ - = $2", - ) - .bind(next_run_at) - .bind(task_id) - .execute(pool) - .await?; - } else { - sqlx::query( - "UPDATE pg_loco_queue SET status = 'completed', updated_at = NOW() WHERE id = $1", - ) - .bind(task_id) - .execute(pool) - .await?; - } +async fn complete_job(pool: &PgPool, id: &JobId, interval_ms: Option) -> Result<()> { + let (status, run_at) = interval_ms.map_or_else( + || (JobStatus::Completed.to_string(), Utc::now()), + |interval_ms| { + ( + JobStatus::Queued.to_string(), + Utc::now() + chrono::Duration::milliseconds(interval_ms), + ) + }, + ); + + sqlx::query( + "UPDATE pg_loco_queue SET status = $1, updated_at = NOW(), run_at = $2 WHERE id = $3", + ) + .bind(status) + .bind(run_at) + .bind(id) + .execute(pool) + .await?; + Ok(()) } -async fn fail_task(pool: &PgPool, task_id: &TaskId, error: &crate::Error) -> Result<()> { +async fn fail_job(pool: &PgPool, id: &JobId, error: &crate::Error) -> Result<()> { let msg = error.to_string(); - error!(err = msg, 
"failed task"); + error!(err = msg, "failed job"); let error_json = serde_json::json!({ "error": msg }); sqlx::query( - "UPDATE pg_loco_queue SET status = 'failed', updated_at = NOW(), task_data = task_data || \ - $1::jsonb WHERE id = $2", + "UPDATE pg_loco_queue SET status = $1, updated_at = NOW(), task_data = task_data || \ + $2::jsonb WHERE id = $3", ) + .bind(JobStatus::Failed.to_string()) .bind(error_json) - .bind(task_id) + .bind(id) .execute(pool) .await?; Ok(()) } -/// Clear all tasks +/// Cancels jobs in the `pg_loco_queue` table by their name. +/// +/// This function updates the status of all jobs with the given `name` and a status of +/// [`JobStatus::Queued`] to [`JobStatus::Cancelled`]. The update also sets the `updated_at` timestamp to the +/// current time. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn cancel_jobs_by_name(pool: &PgPool, name: &str) -> Result<()> { + sqlx::query( + "UPDATE pg_loco_queue SET status = $1, updated_at = NOW() WHERE name = $2 AND status = $3", + ) + .bind(JobStatus::Cancelled.to_string()) + .bind(name) + .bind(JobStatus::Queued.to_string()) + .execute(pool) + .await?; + Ok(()) +} + +/// Clear all jobs /// /// # Errors /// /// This function will return an error if it fails pub async fn clear(pool: &PgPool) -> Result<()> { - sqlx::query("DELETE from pg_loco_queue") + sqlx::query("DELETE FROM pg_loco_queue") + .execute(pool) + .await?; + Ok(()) +} + +/// Deletes jobs from the `pg_loco_queue` table based on their status. +/// +/// This function removes all jobs with a status that matches any of the statuses provided +/// in the `status` argument. The statuses are checked against the `status` column in the +/// database, and any matching rows are deleted. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn clear_by_status(pool: &PgPool, status: Vec) -> Result<()> { + let status_in = status + .iter() + .map(std::string::ToString::to_string) + .collect::>(); + + sqlx::query("DELETE FROM pg_loco_queue WHERE status = ANY($1)") + .bind(status_in) .execute(pool) .await?; Ok(()) } +/// Deletes jobs from the `pg_loco_queue` table that are older than a specified number of days. +/// +/// This function removes jobs that have a `created_at` timestamp older than the provided +/// number of days. Additionally, if a `status` is provided, only jobs with a status matching +/// one of the provided values will be deleted. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn clear_jobs_older_than( + pool: &PgPool, + age_days: i64, + status: Option<&Vec>, +) -> Result<()> { + let mut query_builder = sqlx::query_builder::QueryBuilder::::new( + "DELETE FROM pg_loco_queue WHERE created_at < NOW() - INTERVAL '1 day' * ", + ); + + query_builder.push_bind(age_days); + + if let Some(status_list) = status { + if !status_list.is_empty() { + let status_in = status_list + .iter() + .map(|s| format!("'{s}'")) + .collect::>() + .join(","); + + query_builder.push(format!(" AND status IN ({status_in})")); + } + } + + query_builder.build().execute(pool).await?; + + Ok(()) +} + /// Ping system /// /// # Errors @@ -327,6 +403,68 @@ pub async fn ping(pool: &PgPool) -> Result<()> { Ok(()) } +/// Retrieves a list of jobs from the `pg_loco_queue` table in the database. +/// +/// This function queries the database for jobs, optionally filtering by their +/// `status`. If a status is provided, only jobs with statuses included in the +/// provided list will be fetched. 
If no status is provided, all jobs will be +/// returned. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn get_jobs( + pool: &PgPool, + status: Option<&Vec>, + age_days: Option, +) -> Result, sqlx::Error> { + let mut query = String::from("SELECT * FROM pg_loco_queue where true"); + + if let Some(status) = status { + let status_in = status + .iter() + .map(|s| format!("'{s}'")) + .collect::>() + .join(","); + query.push_str(&format!(" AND status in ({status_in})")); + } + + if let Some(age_days) = age_days { + query.push_str(&format!( + "AND created_at <= NOW() - INTERVAL '1 day' * {age_days}" + )); + } + + let rows = sqlx::query(&query).fetch_all(pool).await?; + Ok(rows.iter().filter_map(|row| to_job(row).ok()).collect()) +} + +/// Converts a row from the database into a [`Job`] object. +/// +/// This function takes a row from the `Postgres` database and manually extracts the necessary +/// fields to populate a [`Job`] object. +/// +/// **Note:** This function manually extracts values from the database row instead of using +/// the `FromRow` trait, which would require enabling the 'macros' feature in the dependencies. +/// The decision to avoid `FromRow` is made to keep the build smaller and faster, as the 'macros' +/// feature is unnecessary in the current dependency tree. +fn to_job(row: &PgRow) -> Result { + Ok(Job { + id: row.get("id"), + name: row.get("name"), + data: row.get("task_data"), + status: row.get::("status").parse().map_err(|err| { + let status: String = row.get("status"); + tracing::error!(status, err, "job status is unsupported"); + Error::string("invalid job status") + })?, + run_at: row.get("run_at"), + interval: row.get("interval"), + created_at: row.try_get("created_at").unwrap_or_default(), + updated_at: row.try_get("updated_at").unwrap_or_default(), + }) +} + #[derive(Debug)] pub struct RunOpts { pub num_workers: u32, @@ -340,7 +478,7 @@ pub struct RunOpts { /// This function will return an error if it fails pub async fn create_provider(qcfg: &PostgresQueueConfig) -> Result { let pool = connect(qcfg).await.map_err(Box::from)?; - let registry = TaskRegistry::new(); + let registry = JobRegistry::new(); Ok(Queue::Postgres( pool, Arc::new(tokio::sync::Mutex::new(registry)), @@ -350,3 +488,469 @@ pub async fn create_provider(qcfg: &PostgresQueueConfig) -> Result { }, )) } + +#[cfg(all(test, feature = "integration_test"))] +use serial_test::serial; +#[cfg(all(test, feature = "integration_test"))] +mod tests { + + use chrono::{NaiveDate, NaiveTime, TimeZone}; + use insta::{assert_debug_snapshot, with_settings}; + use sqlx::{query_as, FromRow}; + + use super::*; + use crate::tests_cfg; + + fn reduction() -> &'static [(&'static str, &'static str)] { + &[ + ("[A-Z0-9]{26}", ""), + ( + r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z", + "", + ), + ] + } + + #[derive(Debug, Serialize, FromRow)] + pub struct TableInfo { + pub table_schema: Option, + pub column_name: Option, + pub column_default: Option, + pub is_nullable: Option, + pub data_type: Option, + pub is_updatable: Option, + } + + async fn init() -> PgPool { + let qcfg = PostgresQueueConfig { + uri: std::env::var("DATABASE_URL") + .expect("environment variable should be exists 'DATABASE_URL'"), + dangerously_flush: false, + enable_logging: false, + max_connections: 1, + min_connections: 1, + connect_timeout: 500, + idle_timeout: 500, + poll_interval_sec: 1, + num_workers: 1, + }; + + let pool = connect(&qcfg).await.unwrap(); + sqlx::raw_sql("DROP TABLE IF EXISTS pg_loco_queue;") 
+ .execute(&pool) + .await + .expect("drop table if exists"); + + pool + } + + async fn get_all_jobs(pool: &PgPool) -> Vec { + sqlx::query("select * from pg_loco_queue") + .fetch_all(pool) + .await + .expect("get jobs") + .iter() + .filter_map(|row| to_job(row).ok()) + .collect() + } + + async fn get_job(pool: &PgPool, id: &str) -> Job { + sqlx::query(&format!("select * from pg_loco_queue where id = '{id}'")) + .fetch_all(pool) + .await + .expect("get jobs") + .first() + .and_then(|row| to_job(row).ok()) + .expect("job not found") + } + + #[tokio::test] + #[serial] + async fn can_initialize_database() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + + let table_info: Vec = query_as::<_, TableInfo>( + "SELECT * FROM information_schema.columns WHERE table_name = + 'pg_loco_queue'", + ) + .fetch_all(&pool) + .await + .unwrap(); + + assert_debug_snapshot!(table_info); + } + + #[tokio::test] + #[serial] + async fn can_enqueue() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 0); + + let run_at = Utc.from_utc_datetime( + &NaiveDate::from_ymd_opt(2023, 1, 15) + .unwrap() + .and_time(NaiveTime::from_hms_opt(12, 30, 0).unwrap()), + ); + + let job_data: JobData = serde_json::json!({"user_id": 1}); + assert!( + enqueue(&pool, "PasswordChangeNotification", job_data, run_at, None) + .await + .is_ok() + ); + + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 1); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| + (pattern, replacement)), }, { + assert_debug_snapshot!(jobs); + }); + } + + #[tokio::test] + #[serial] + async fn can_dequeue() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + + let run_at = Utc.from_utc_datetime( + &NaiveDate::from_ymd_opt(2023, 1, 15) + .unwrap() + .and_time(NaiveTime::from_hms_opt(12, 30, 0).unwrap()), + ); + + let job_data: JobData = serde_json::json!({"user_id": 1}); + assert!( + enqueue(&pool, "PasswordChangeNotification", job_data, run_at, None) + .await + .is_ok() + ); + + let job_before_dequeue = get_all_jobs(&pool) + .await + .first() + .cloned() + .expect("gets first job"); + + assert_eq!(job_before_dequeue.status, JobStatus::Queued); + + std::thread::sleep(std::time::Duration::from_secs(1)); + + assert!(dequeue(&pool).await.is_ok()); + + let job_after_dequeue = get_all_jobs(&pool) + .await + .first() + .cloned() + .expect("gets first job"); + + assert_ne!(job_after_dequeue.updated_at, job_before_dequeue.updated_at); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| + (pattern, replacement)), }, { + assert_debug_snapshot!(job_after_dequeue); + }); + } + + #[tokio::test] + #[serial] + async fn can_complete_job_without_interval() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::postgres_seed_data(&pool).await; + + let job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA99").await; + + assert_eq!(job.status, JobStatus::Queued); + assert!(complete_job(&pool, &job.id, None).await.is_ok()); + + let job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA99").await; + + assert_eq!(job.status, JobStatus::Completed); + } + + #[tokio::test] + #[serial] + async fn can_complete_job_with_interval() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::postgres_seed_data(&pool).await; + + let before_complete_job = get_job(&pool, 
"01JDM0X8EVAM823JZBGKYNBA98").await; + + assert_eq!(before_complete_job.status, JobStatus::Completed); + + std::thread::sleep(std::time::Duration::from_secs(1)); + + assert!(complete_job(&pool, &before_complete_job.id, Some(10)) + .await + .is_ok()); + + let after_complete_job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA98").await; + + assert_ne!( + after_complete_job.updated_at, + before_complete_job.updated_at + ); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| (pattern, + replacement)), }, { + assert_debug_snapshot!(after_complete_job); + }); + } + + #[tokio::test] + #[serial] + async fn can_fail_job() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::postgres_seed_data(&pool).await; + + let before_fail_job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA97").await; + + std::thread::sleep(std::time::Duration::from_secs(1)); + + assert!(fail_job( + &pool, + &before_fail_job.id, + &crate::Error::string("some error") + ) + .await + .is_ok()); + + let after_fail_job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA97").await; + + assert_ne!(after_fail_job.updated_at, before_fail_job.updated_at); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| (pattern, + replacement)), }, { + assert_debug_snapshot!(after_fail_job); + }); + } + + #[tokio::test] + #[serial] + async fn can_cancel_job_by_name() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::postgres_seed_data(&pool).await; + + let count_cancelled_jobs = get_all_jobs(&pool) + .await + .iter() + .filter(|j| j.status == JobStatus::Cancelled) + .count(); + + assert_eq!(count_cancelled_jobs, 1); + + assert!(cancel_jobs_by_name(&pool, "UserAccountActivation") + .await + .is_ok()); + + let count_cancelled_jobs = get_all_jobs(&pool) + .await + .iter() + .filter(|j| j.status == JobStatus::Cancelled) + .count(); + + assert_eq!(count_cancelled_jobs, 2); + } + + #[tokio::test] + #[serial] + async fn can_clear() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::postgres_seed_data(&pool).await; + + let job_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM pg_loco_queue") + .fetch_one(&pool) + .await + .unwrap(); + + assert_ne!(job_count, 0); + + assert!(clear(&pool).await.is_ok()); + let job_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM pg_loco_queue") + .fetch_one(&pool) + .await + .unwrap(); + + assert_eq!(job_count, 0); + } + + #[tokio::test] + #[serial] + async fn can_clear_by_status() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::postgres_seed_data(&pool).await; + + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 14); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Completed) + .count(), + 3 + ); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Failed) + .count(), + 2 + ); + + assert!( + clear_by_status(&pool, vec![JobStatus::Completed, JobStatus::Failed]) + .await + .is_ok() + ); + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 9); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Completed) + .count(), + 0 + ); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Failed) + .count(), + 0 + ); + } + + #[tokio::test] + #[serial] + async fn can_clear_jobs_older_than() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + + sqlx::query( + r"INSERT 
INTO pg_loco_queue (id, name, task_data, status, run_at,created_at, updated_at) VALUES + ('job1', 'Test Job 1', '{}', 'queued', NOW(), NOW() - INTERVAL '15days', NOW()), + ('job2', 'Test Job 2', '{}', 'queued', NOW(),NOW() - INTERVAL '5 days', NOW()), + ('job3', 'Test Job 3', '{}','queued', NOW(), NOW(), NOW())" + ) + .execute(&pool) + .await + .unwrap(); + + assert_eq!(get_all_jobs(&pool).await.len(), 3); + assert!(clear_jobs_older_than(&pool, 10, None).await.is_ok()); + assert_eq!(get_all_jobs(&pool).await.len(), 2); + } + + #[tokio::test] + #[serial] + async fn can_clear_jobs_older_than_with_status() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + + sqlx::query( + r"INSERT INTO pg_loco_queue (id, name, task_data, status, run_at,created_at, updated_at) VALUES + ('job1', 'Test Job 1', '{}', 'completed', NOW(), NOW() - INTERVAL '20days', NOW()), + ('job2', 'Test Job 2', '{}', 'failed', NOW(),NOW() - INTERVAL '15 days', NOW()), + ('job3', 'Test Job 3', '{}', 'completed', NOW(),NOW() - INTERVAL '5 days', NOW()), + ('job4', 'Test Job 3', '{}','cancelled', NOW(), NOW(), NOW())" + ) + .execute(&pool) + .await + .unwrap(); + + assert_eq!(get_all_jobs(&pool).await.len(), 4); + assert!(clear_jobs_older_than( + &pool, + 10, + Some(&vec![JobStatus::Cancelled, JobStatus::Completed]) + ) + .await + .is_ok()); + + assert_eq!(get_all_jobs(&pool).await.len(), 3); + } + + #[tokio::test] + #[serial] + async fn can_get_jobs() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::postgres_seed_data(&pool).await; + + assert_eq!( + get_jobs(&pool, Some(&vec![JobStatus::Failed]), None) + .await + .expect("get jobs") + .len(), + 2 + ); + assert_eq!( + get_jobs( + &pool, + Some(&vec![JobStatus::Failed, JobStatus::Completed]), + None + ) + .await + .expect("get jobs") + .len(), + 5 + ); + assert_eq!( + get_jobs(&pool, None, None).await.expect("get jobs").len(), + 14 + ); + } + + #[tokio::test] + #[serial] + async fn can_get_jobs_with_age() { + let pool = init().await; + + assert!(initialize_database(&pool).await.is_ok()); + + sqlx::query( + r"INSERT INTO pg_loco_queue (id, name, task_data, status, run_at,created_at, updated_at) VALUES + ('job1', 'Test Job 1', '{}', 'completed', NOW(), NOW() - INTERVAL '20days', NOW()), + ('job2', 'Test Job 2', '{}', 'failed', NOW(),NOW() - INTERVAL '15 days', NOW()), + ('job3', 'Test Job 3', '{}', 'completed', NOW(),NOW() - INTERVAL '5 days', NOW()), + ('job4', 'Test Job 3', '{}','cancelled', NOW(), NOW(), NOW())" + ) + .execute(&pool) + .await + .unwrap(); + assert_eq!( + get_jobs( + &pool, + Some(&vec![JobStatus::Failed, JobStatus::Completed]), + Some(11) + ) + .await + .expect("get jobs") + .len(), + 2 + ); + } +} diff --git a/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_complete_job_with_interval.snap b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_complete_job_with_interval.snap new file mode 100644 index 000000000..80ae35fd3 --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_complete_job_with_interval.snap @@ -0,0 +1,22 @@ +--- +source: src/bgworker/pg.rs +expression: after_complete_job +--- +Job { + id: "", + name: "PasswordChangeNotification", + data: Object { + "change_time": String(""), + "email": String("user12@example.com"), + "user_id": Number(134), + }, + status: Queued, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), +} diff --git 
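The Postgres job-management helpers introduced above (`get_jobs`, `cancel_jobs_by_name`, `clear_by_status`, `clear_jobs_older_than`) compose into a small maintenance pass, much like the tests exercise them. The sketch below is illustrative only and not part of this PR: the `tidy_queue` wrapper and the example job name are assumptions, and the code is written as if it lived alongside the helpers in `src/bgworker/pg.rs`, so the calls resolve without extra imports. The signatures match the functions in this diff.

```rust
// Illustrative sketch, not part of the diff: assumes it sits in the same module
// as the helpers above (src/bgworker/pg.rs), so `get_jobs`, `cancel_jobs_by_name`
// and `clear_jobs_older_than` are in scope, as are `PgPool` and `JobStatus`.
async fn tidy_queue(pool: &PgPool) -> std::result::Result<(), Box<dyn std::error::Error>> {
    // Inspect failures first: get_jobs(pool, status_filter, age_days).
    let failed = get_jobs(pool, Some(&vec![JobStatus::Failed]), None).await?;
    tracing::info!(failed = failed.len(), "failed jobs currently in pg_loco_queue");

    // Cancel every still-queued job registered under a given worker name
    // ("PasswordChangeNotification" is just an example name).
    cancel_jobs_by_name(pool, "PasswordChangeNotification").await?;

    // Prune completed/cancelled rows older than 30 days.
    clear_jobs_older_than(
        pool,
        30,
        Some(&vec![JobStatus::Completed, JobStatus::Cancelled]),
    )
    .await?;

    Ok(())
}
```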
a/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_dequeue.snap b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_dequeue.snap new file mode 100644 index 000000000..a59fe073f --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_dequeue.snap @@ -0,0 +1,20 @@ +--- +source: src/bgworker/pg.rs +expression: job_after_dequeue +--- +Job { + id: "", + name: "PasswordChangeNotification", + data: Object { + "user_id": Number(1), + }, + status: Processing, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), +} diff --git a/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_enqueue.snap b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_enqueue.snap new file mode 100644 index 000000000..20dc978dd --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_enqueue.snap @@ -0,0 +1,22 @@ +--- +source: src/bgworker/pg.rs +expression: jobs +--- +[ + Job { + id: "", + name: "PasswordChangeNotification", + data: Object { + "user_id": Number(1), + }, + status: Queued, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), + }, +] diff --git a/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_fail_job.snap b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_fail_job.snap new file mode 100644 index 000000000..851f0f894 --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_fail_job.snap @@ -0,0 +1,23 @@ +--- +source: src/bgworker/pg.rs +expression: after_fail_job +--- +Job { + id: "", + name: "SendInvoice", + data: Object { + "email": String("user13@example.com"), + "error": String("some error"), + "invoice_id": String("INV-2024-01"), + "user_id": Number(135), + }, + status: Failed, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), +} diff --git a/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_initialize_database.snap b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_initialize_database.snap new file mode 100644 index 000000000..3cbca818b --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__pg__tests__can_initialize_database.snap @@ -0,0 +1,156 @@ +--- +source: src/bgworker/pg.rs +expression: table_info +--- +[ + TableInfo { + table_schema: Some( + "public", + ), + column_name: Some( + "updated_at", + ), + column_default: Some( + "now()", + ), + is_nullable: Some( + "NO", + ), + data_type: Some( + "timestamp with time zone", + ), + is_updatable: Some( + "YES", + ), + }, + TableInfo { + table_schema: Some( + "public", + ), + column_name: Some( + "run_at", + ), + column_default: None, + is_nullable: Some( + "NO", + ), + data_type: Some( + "timestamp with time zone", + ), + is_updatable: Some( + "YES", + ), + }, + TableInfo { + table_schema: Some( + "public", + ), + column_name: Some( + "interval", + ), + column_default: None, + is_nullable: Some( + "YES", + ), + data_type: Some( + "bigint", + ), + is_updatable: Some( + "YES", + ), + }, + TableInfo { + table_schema: Some( + "public", + ), + column_name: Some( + "created_at", + ), + column_default: Some( + "now()", + ), + is_nullable: Some( + "NO", + ), + data_type: Some( + "timestamp with time zone", + ), + is_updatable: Some( + "YES", + ), + }, + TableInfo { + table_schema: Some( + "public", + ), + column_name: Some( + "task_data", + ), + column_default: None, + is_nullable: Some( + "NO", + ), + data_type: Some( + "jsonb", + ), + is_updatable: Some( + "YES", + ), + }, + TableInfo { + table_schema: Some( + "public", + ), + 
column_name: Some( + "name", + ), + column_default: None, + is_nullable: Some( + "NO", + ), + data_type: Some( + "character varying", + ), + is_updatable: Some( + "YES", + ), + }, + TableInfo { + table_schema: Some( + "public", + ), + column_name: Some( + "id", + ), + column_default: None, + is_nullable: Some( + "NO", + ), + data_type: Some( + "character varying", + ), + is_updatable: Some( + "YES", + ), + }, + TableInfo { + table_schema: Some( + "public", + ), + column_name: Some( + "status", + ), + column_default: Some( + "'queued'::character varying", + ), + is_nullable: Some( + "NO", + ), + data_type: Some( + "character varying", + ), + is_updatable: Some( + "YES", + ), + }, +] diff --git a/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_complete_job_with_interval.snap b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_complete_job_with_interval.snap new file mode 100644 index 000000000..c99cae444 --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_complete_job_with_interval.snap @@ -0,0 +1,22 @@ +--- +source: src/bgworker/sqlt.rs +expression: after_complete_job +--- +Job { + id: "", + name: "PasswordChangeNotification", + data: Object { + "change_time": String(""), + "email": String("user12@example.com"), + "user_id": Number(134), + }, + status: Queued, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), +} diff --git a/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_dequeue.snap b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_dequeue.snap new file mode 100644 index 000000000..193d25071 --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_dequeue.snap @@ -0,0 +1,20 @@ +--- +source: src/bgworker/sqlt.rs +expression: job_after_dequeue +--- +Job { + id: "", + name: "PasswordChangeNotification", + data: Object { + "user_id": Number(1), + }, + status: Processing, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), +} diff --git a/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_enqueue.snap b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_enqueue.snap new file mode 100644 index 000000000..80f38428d --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_enqueue.snap @@ -0,0 +1,22 @@ +--- +source: src/bgworker/sqlt.rs +expression: jobs +--- +[ + Job { + id: "", + name: "PasswordChangeNotification", + data: Object { + "user_id": Number(1), + }, + status: Queued, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), + }, +] diff --git a/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_fail_job.snap b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_fail_job.snap new file mode 100644 index 000000000..ec76a9fea --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__can_fail_job.snap @@ -0,0 +1,23 @@ +--- +source: src/bgworker/sqlt.rs +expression: after_fail_job +--- +Job { + id: "", + name: "SendInvoice", + data: Object { + "email": String("user13@example.com"), + "error": String("some error"), + "invoice_id": String("INV-2024-01"), + "user_id": Number(135), + }, + status: Failed, + run_at: , + interval: None, + created_at: Some( + , + ), + updated_at: Some( + , + ), +} diff --git a/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__sqlt_loco_queue.snap b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__sqlt_loco_queue.snap new file mode 100644 index 000000000..030d9b986 --- /dev/null +++ 
b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__sqlt_loco_queue.snap @@ -0,0 +1,76 @@ +--- +source: src/bgworker/sqlt.rs +expression: table_info +--- +[ + TableInfo { + cid: 0, + name: "id", + _type: "TEXT", + notnull: true, + dflt_value: None, + pk: false, + }, + TableInfo { + cid: 1, + name: "name", + _type: "TEXT", + notnull: true, + dflt_value: None, + pk: false, + }, + TableInfo { + cid: 2, + name: "task_data", + _type: "JSON", + notnull: true, + dflt_value: None, + pk: false, + }, + TableInfo { + cid: 3, + name: "status", + _type: "TEXT", + notnull: true, + dflt_value: Some( + "'queued'", + ), + pk: false, + }, + TableInfo { + cid: 4, + name: "run_at", + _type: "TIMESTAMP", + notnull: true, + dflt_value: None, + pk: false, + }, + TableInfo { + cid: 5, + name: "interval", + _type: "INTEGER", + notnull: false, + dflt_value: None, + pk: false, + }, + TableInfo { + cid: 6, + name: "created_at", + _type: "TIMESTAMP", + notnull: true, + dflt_value: Some( + "CURRENT_TIMESTAMP", + ), + pk: false, + }, + TableInfo { + cid: 7, + name: "updated_at", + _type: "TIMESTAMP", + notnull: true, + dflt_value: Some( + "CURRENT_TIMESTAMP", + ), + pk: false, + }, +] diff --git a/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__sqlt_loco_queue_lock.snap b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__sqlt_loco_queue_lock.snap new file mode 100644 index 000000000..0b24e715f --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__sqlt__tests__sqlt_loco_queue_lock.snap @@ -0,0 +1,32 @@ +--- +source: src/bgworker/sqlt.rs +expression: table_info +--- +[ + TableInfo { + cid: 0, + name: "id", + _type: "INTEGER", + notnull: false, + dflt_value: None, + pk: true, + }, + TableInfo { + cid: 1, + name: "is_locked", + _type: "BOOLEAN", + notnull: true, + dflt_value: Some( + "FALSE", + ), + pk: false, + }, + TableInfo { + cid: 2, + name: "locked_at", + _type: "TIMESTAMP", + notnull: false, + dflt_value: None, + pk: false, + }, +] diff --git a/src/bgworker/snapshots/loco_rs__bgworker__tests__can_dump_jobs.snap b/src/bgworker/snapshots/loco_rs__bgworker__tests__can_dump_jobs.snap new file mode 100644 index 000000000..44f9ac2ba --- /dev/null +++ b/src/bgworker/snapshots/loco_rs__bgworker__tests__can_dump_jobs.snap @@ -0,0 +1,7 @@ +--- +source: src/bgworker/mod.rs +expression: "std::fs::read_to_string(dump_file)" +--- +Ok( + "- created_at: 2024-11-28T08:03:25Z\n id: 01JDM0X8EVAM823JZBGKYNBA94\n interval: null\n name: DataBackup\n run_at: 2024-11-28T08:04:25Z\n status: cancelled\n task_data:\n backup_id: backup-12345\n email: user16@example.com\n user_id: 138\n updated_at: 2024-11-28T08:03:25Z\n- created_at: 2024-11-28T08:03:25Z\n id: 01JDM0X8EVAM823JZBGKYNBA96\n interval: null\n name: UserDeactivation\n run_at: 2024-11-28T08:04:25Z\n status: failed\n task_data:\n deactivation_reason: user requested\n email: user14@example.com\n user_id: 136\n updated_at: 2024-11-28T08:03:25Z\n- created_at: 2024-11-28T08:03:25Z\n id: 01JDM0X8EVAM823JZBGKYNBA87\n interval: null\n name: UserDeactivation\n run_at: 2024-11-28T08:04:25Z\n status: failed\n task_data:\n deactivation_reason: account inactive\n email: user24@example.com\n user_id: 146\n updated_at: 2024-11-28T08:03:25Z\n", +) diff --git a/src/bgworker/sqlt.rs b/src/bgworker/sqlt.rs index f1edf40ce..4d9e99dc0 100644 --- a/src/bgworker/sqlt.rs +++ b/src/bgworker/sqlt.rs @@ -7,44 +7,45 @@ use serde_json::Value as JsonValue; pub use sqlx::SqlitePool; use sqlx::{ sqlite::{SqliteConnectOptions, SqlitePoolOptions, SqliteRow}, - ConnectOptions, Row, + 
ConnectOptions, QueryBuilder, Row, }; use tokio::{task::JoinHandle, time::sleep}; use tracing::{debug, error, trace}; use ulid::Ulid; -use super::{BackgroundWorker, Queue}; +use super::{BackgroundWorker, JobStatus, Queue}; use crate::{config::SqliteQueueConfig, Error, Result}; -type TaskId = String; -type TaskData = JsonValue; -type TaskStatus = String; +type JobId = String; +type JobData = JsonValue; -type TaskHandler = Box< +type JobHandler = Box< dyn Fn( - TaskId, - TaskData, + JobId, + JobData, ) -> Pin> + Send>> + Send + Sync, >; -#[derive(Debug, Deserialize, Serialize)] -struct Task { - pub id: TaskId, +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct Job { + pub id: JobId, pub name: String, - #[allow(clippy::struct_field_names)] - pub task_data: TaskData, - pub status: TaskStatus, + #[serde(rename = "task_data")] + pub data: JobData, + pub status: JobStatus, pub run_at: DateTime, pub interval: Option, + pub created_at: Option>, + pub updated_at: Option>, } -pub struct TaskRegistry { - handlers: Arc>, +pub struct JobRegistry { + handlers: Arc>, } -impl TaskRegistry { - /// Creates a new `TaskRegistry`. +impl JobRegistry { + /// Creates a new `JobRegistry`. #[must_use] pub fn new() -> Self { Self { @@ -52,7 +53,7 @@ impl TaskRegistry { } } - /// Registers a task handler with the provided name. + /// Registers a job handler with the provided name. /// # Errors /// Fails if cannot register worker pub fn register_worker(&mut self, name: String, worker: W) -> Result<()> @@ -62,11 +63,11 @@ impl TaskRegistry { for<'de> Args: Deserialize<'de>, { let worker = Arc::new(worker); - let wrapped_handler = move |_task_id: String, task_data: TaskData| { + let wrapped_handler = move |_job_id: String, job_data: JobData| { let w = worker.clone(); Box::pin(async move { - let args = serde_json::from_value::(task_data); + let args = serde_json::from_value::(job_data); match args { Ok(args) => w.perform(args).await, Err(err) => Err(err.into()), @@ -80,30 +81,30 @@ impl TaskRegistry { Ok(()) } - /// Returns a reference to the task handlers. + /// Returns a reference to the job handlers. #[must_use] - pub fn handlers(&self) -> &Arc> { + pub fn handlers(&self) -> &Arc> { &self.handlers } - /// Runs the task handlers with the provided number of workers. + /// Runs the job handlers with the provided number of workers. 
#[must_use] pub fn run(&self, pool: &SqlitePool, opts: &RunOpts) -> Vec> { - let mut tasks = Vec::new(); + let mut jobs = Vec::new(); let interval = opts.poll_interval_sec; for idx in 0..opts.num_workers { let handlers = self.handlers.clone(); let pool = pool.clone(); - let task = tokio::spawn(async move { + let job: JoinHandle<()> = tokio::spawn(async move { loop { trace!( pool_conns = pool.num_idle(), worker_num = idx, "sqlite workers stats" ); - let task_opt = match dequeue(&pool).await { + let job_opt = match dequeue(&pool).await { Ok(t) => t, Err(err) => { error!(err = err.to_string(), "cannot fetch from queue"); @@ -111,33 +112,33 @@ impl TaskRegistry { } }; - if let Some(task) = task_opt { - debug!(task_id = task.id, name = task.name, "working on task"); - if let Some(handler) = handlers.get(&task.name) { - match handler(task.id.clone(), task.task_data.clone()).await { + if let Some(job) = job_opt { + debug!(job_id = job.id, name = job.name, "working on job"); + if let Some(handler) = handlers.get(&job.name) { + match handler(job.id.clone(), job.data.clone()).await { Ok(()) => { if let Err(err) = - complete_task(&pool, &task.id, task.interval).await + complete_job(&pool, &job.id, job.interval).await { error!( err = err.to_string(), - task = ?task, - "cannot complete task" + job = ?job, + "cannot complete job" ); } } Err(err) => { - if let Err(err) = fail_task(&pool, &task.id, &err).await { + if let Err(err) = fail_job(&pool, &job.id, &err).await { error!( err = err.to_string(), - task = ?task, - "cannot fail task" + job = ?job, + "cannot fail job" ); } } } } else { - error!(task = task.name, "no handler found for task"); + error!(job_name = job.name, "no handler found for job"); } } else { sleep(Duration::from_secs(interval.into())).await; @@ -145,14 +146,14 @@ impl TaskRegistry { } }); - tasks.push(task); + jobs.push(job); } - tasks + jobs } } -impl Default for TaskRegistry { +impl Default for JobRegistry { fn default() -> Self { Self::new() } @@ -173,7 +174,7 @@ async fn connect(cfg: &SqliteQueueConfig) -> Result { Ok(pool) } -/// Initialize task tables +/// Initialize job tables /// /// # Errors /// @@ -181,12 +182,12 @@ async fn connect(cfg: &SqliteQueueConfig) -> Result { pub async fn initialize_database(pool: &SqlitePool) -> Result<()> { debug!("sqlite worker: initialize database"); sqlx::query( - r" + &format!(r" CREATE TABLE IF NOT EXISTS sqlt_loco_queue ( id TEXT NOT NULL, name TEXT NOT NULL, task_data JSON NOT NULL, - status TEXT NOT NULL DEFAULT 'queued', + status TEXT NOT NULL DEFAULT '{}', run_at TIMESTAMP NOT NULL, interval INTEGER, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -202,14 +203,14 @@ pub async fn initialize_database(pool: &SqlitePool) -> Result<()> { INSERT OR IGNORE INTO sqlt_loco_queue_lock (id, is_locked) VALUES (1, FALSE); CREATE INDEX IF NOT EXISTS idx_sqlt_queue_status_run_at ON sqlt_loco_queue(status, run_at); - ", + ", JobStatus::Queued), ) .execute(pool) .await?; Ok(()) } -/// Add a task +/// Add a job /// /// # Errors /// @@ -217,11 +218,11 @@ pub async fn initialize_database(pool: &SqlitePool) -> Result<()> { pub async fn enqueue( pool: &SqlitePool, name: &str, - task_data: TaskData, + data: JobData, run_at: DateTime, interval: Option, -) -> Result { - let task_data_json = serde_json::to_value(task_data)?; +) -> Result { + let data = serde_json::to_value(data)?; #[allow(clippy::cast_possible_truncation)] let interval_ms: Option = interval.map(|i| i.as_millis() as i64); @@ -232,7 +233,7 @@ pub async fn enqueue( DATETIME($4), $5)", ) 
.bind(id.clone()) - .bind(task_data_json) + .bind(data) .bind(name) .bind(run_at) .bind(interval_ms) @@ -241,7 +242,7 @@ pub async fn enqueue( Ok(id) } -async fn dequeue(client: &SqlitePool) -> Result> { +async fn dequeue(client: &SqlitePool) -> Result> { let mut tx = client.begin().await?; let acquired_write_lock = sqlx::query( @@ -263,29 +264,22 @@ async fn dequeue(client: &SqlitePool) -> Result> { "SELECT id, name, task_data, status, run_at, interval FROM sqlt_loco_queue WHERE - status = 'queued' AND + status = ? AND run_at <= CURRENT_TIMESTAMP ORDER BY run_at LIMIT 1", ) - // avoid using FromRow because it requires the 'macros' feature, which nothing - // in our dep tree uses, so it'll create smaller, faster builds if we do this manually - .map(|row: SqliteRow| Task { - id: row.get("id"), - name: row.get("name"), - task_data: row.get("task_data"), - status: row.get("status"), - run_at: row.get("run_at"), - interval: row.get("interval"), - }) + .bind(JobStatus::Queued.to_string()) + .map(|row: SqliteRow| to_job(&row).ok()) .fetch_optional(&mut *tx) - .await?; + .await? + .flatten(); - if let Some(task) = row { + if let Some(job) = row { sqlx::query( - "UPDATE sqlt_loco_queue SET status = 'processing', updated_at = CURRENT_TIMESTAMP \ - WHERE id = $1", + "UPDATE sqlt_loco_queue SET status = $1, updated_at = CURRENT_TIMESTAMP WHERE id = $2", ) - .bind(&task.id) + .bind(JobStatus::Processing.to_string()) + .bind(&job.id) .execute(&mut *tx) .await?; @@ -301,9 +295,9 @@ async fn dequeue(client: &SqlitePool) -> Result> { tx.commit().await?; - Ok(Some(task)) + Ok(Some(job)) } else { - // Release the write lock, no task found + // Release the write lock, no job found sqlx::query( "UPDATE sqlt_loco_queue_lock SET is_locked = FALSE, @@ -318,57 +312,146 @@ async fn dequeue(client: &SqlitePool) -> Result> { } } -async fn complete_task( - pool: &SqlitePool, - task_id: &TaskId, - interval_ms: Option, -) -> Result<()> { +async fn complete_job(pool: &SqlitePool, id: &JobId, interval_ms: Option) -> Result<()> { if let Some(interval_ms) = interval_ms { let next_run_at = Utc::now() + chrono::Duration::milliseconds(interval_ms); sqlx::query( - "UPDATE sqlt_loco_queue SET status = 'queued', updated_at = CURRENT_TIMESTAMP, run_at \ - = DATETIME($1) WHERE id = $2", + "UPDATE sqlt_loco_queue SET status = $1, updated_at = CURRENT_TIMESTAMP, run_at = \ + DATETIME($2) WHERE id = $3", ) + .bind(JobStatus::Queued.to_string()) .bind(next_run_at) - .bind(task_id) + .bind(id) .execute(pool) .await?; } else { sqlx::query( - "UPDATE sqlt_loco_queue SET status = 'completed', updated_at = CURRENT_TIMESTAMP \ - WHERE id = $1", + "UPDATE sqlt_loco_queue SET status = $1, updated_at = CURRENT_TIMESTAMP WHERE id = $2", ) - .bind(task_id) + .bind(JobStatus::Completed.to_string()) + .bind(id) .execute(pool) .await?; } Ok(()) } -async fn fail_task(pool: &SqlitePool, task_id: &TaskId, error: &crate::Error) -> Result<()> { +async fn fail_job(pool: &SqlitePool, id: &JobId, error: &crate::Error) -> Result<()> { let msg = error.to_string(); - error!(err = msg, "failed task"); + error!(err = msg, "failed job"); let error_json = serde_json::json!({ "error": msg }); sqlx::query( - "UPDATE sqlt_loco_queue SET status = 'failed', updated_at = CURRENT_TIMESTAMP, task_data \ - = json_patch(task_data, $1) WHERE id = $2", + "UPDATE sqlt_loco_queue SET status = $1, updated_at = CURRENT_TIMESTAMP, task_data = \ + json_patch(task_data, $2) WHERE id = $3", ) + .bind(JobStatus::Failed.to_string()) .bind(error_json) - .bind(task_id) + .bind(id) 
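+    // json_patch (SQLite's JSON1 merge function) folds the bound {"error": msg} object
+    // into the stored task_data, so the failure reason stays attached to the job row.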
.execute(pool) .await?; Ok(()) } -/// Clear all tasks +/// Cancels jobs in the `sqlt_loco_queue` table by their name. +/// +/// This function updates the status of all jobs with the given `name` and a status of +/// [`JobStatus::Queued`] to [`JobStatus::Cancelled`]. The update also sets the `updated_at` timestamp to the +/// current time. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn cancel_jobs_by_name(pool: &SqlitePool, name: &str) -> Result<()> { + sqlx::query( + "UPDATE sqlt_loco_queue SET status = $1, updated_at = CURRENT_TIMESTAMP WHERE name = $2 \ + AND status = $3", + ) + .bind(JobStatus::Cancelled.to_string()) + .bind(name) + .bind(JobStatus::Queued.to_string()) + .execute(pool) + .await?; + Ok(()) +} + +/// Clear all jobs /// /// # Errors /// /// This function will return an error if it fails pub async fn clear(pool: &SqlitePool) -> Result<()> { - sqlx::query("DELETE from sqlt_loco_queue") - .execute(pool) - .await?; + // Clear all rows in the relevant tables + sqlx::query( + " + DELETE FROM sqlt_loco_queue; + DELETE FROM sqlt_loco_queue_lock; + ", + ) + .execute(pool) + .await?; + + Ok(()) +} + +/// Deletes jobs from the `sqlt_loco_queue` table based on their status. +/// +/// This function removes all jobs with a status that matches any of the statuses provided +/// in the `status` argument. The statuses are checked against the `status` column in the +/// database, and any matching rows are deleted. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn clear_by_status(pool: &SqlitePool, status: Vec) -> Result<()> { + let status_in = status + .iter() + .map(|s| format!("'{s}'")) + .collect::>() + .join(","); + + sqlx::query(&format!( + "DELETE FROM sqlt_loco_queue WHERE status IN ({status_in})" + )) + .execute(pool) + .await?; + + Ok(()) +} + +/// Deletes jobs from the `sqlt_loco_queue` table that are older than a specified number of days. +/// +/// This function removes jobs that have a `created_at` timestamp older than the provided +/// number of days. Additionally, if a `status` is provided, only jobs with a status matching +/// one of the provided values will be deleted. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn clear_jobs_older_than( + pool: &SqlitePool, + age_days: i64, + status: Option<&Vec>, +) -> Result<()> { + let cutoff_date = Utc::now() - chrono::Duration::days(age_days); + let threshold_date = cutoff_date.format("%+").to_string(); + + let mut query_builder = + QueryBuilder::::new("DELETE FROM sqlt_loco_queue WHERE created_at <= "); + query_builder.push_bind(threshold_date); + + if let Some(status_list) = status { + if !status_list.is_empty() { + let status_in = status_list + .iter() + .map(|s| format!("'{s}'")) + .collect::>() + .join(","); + + query_builder.push(format!(" AND status IN ({status_in})")); + } + } + + query_builder.build().execute(pool).await?; Ok(()) } @@ -397,7 +480,7 @@ pub struct RunOpts { /// This function will return an error if it fails pub async fn create_provider(qcfg: &SqliteQueueConfig) -> Result { let pool = connect(qcfg).await.map_err(Box::from)?; - let registry = TaskRegistry::new(); + let registry = JobRegistry::new(); Ok(Queue::Sqlite( pool, Arc::new(tokio::sync::Mutex::new(registry)), @@ -407,3 +490,599 @@ pub async fn create_provider(qcfg: &SqliteQueueConfig) -> Result { }, )) } + +/// Retrieves a list of jobs from the `sqlt_loco_queue` table in the database. 
+/// +/// This function queries the database for jobs, optionally filtering by their +/// `status`. If a status is provided, only jobs with statuses included in the +/// provided list will be fetched. If no status is provided, all jobs will be +/// returned. +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn get_jobs( + pool: &SqlitePool, + status: Option<&Vec<JobStatus>>, + age_days: Option<i64>, +) -> Result<Vec<Job>> { + let mut query = String::from("SELECT * FROM sqlt_loco_queue WHERE 1 = 1 "); + + if let Some(status) = status { + let status_in = status + .iter() + .map(|s| format!("'{s}'")) + .collect::<Vec<String>>() + .join(","); + query.push_str(&format!("AND status IN ({status_in}) ")); + } + + if let Some(age_days) = age_days { + let cutoff_date = Utc::now() - chrono::Duration::days(age_days); + let threshold_date = cutoff_date.format("%+").to_string(); + query.push_str(&format!("AND created_at <= '{threshold_date}' ")); + } + + let rows = sqlx::query(&query).fetch_all(pool).await?; + Ok(rows.iter().filter_map(|row| to_job(row).ok()).collect()) +} + +/// Converts a row from the database into a [`Job`] object. +/// +/// This function takes a row from the `SQLite` database and manually extracts the necessary +/// fields to populate a [`Job`] object. +/// +/// **Note:** This function manually extracts values from the database row instead of using +/// the `FromRow` trait, which would require enabling the 'macros' feature in the dependencies. +/// The decision to avoid `FromRow` is made to keep the build smaller and faster, as the 'macros' +/// feature is unnecessary in the current dependency tree. +fn to_job(row: &SqliteRow) -> Result<Job> { + Ok(Job { + id: row.get("id"), + name: row.get("name"), + data: row.get("task_data"), + status: row.get::<String, _>("status").parse().map_err(|err| { + let status: String = row.get("status"); + tracing::error!(status, err, "job status is unsupported"); + Error::string("invalid job status") + })?, + run_at: row.get("run_at"), + interval: row.get("interval"), + created_at: row.try_get("created_at").unwrap_or_default(), + updated_at: row.try_get("updated_at").unwrap_or_default(), + }) +} + +#[cfg(test)] +mod tests { + + use std::path::Path; + + use chrono::{NaiveDate, NaiveTime, TimeZone}; + use insta::{assert_debug_snapshot, with_settings}; + use sqlx::{query_as, FromRow, Pool, Sqlite}; + + use super::*; + use crate::tests_cfg; + + #[derive(Debug, Serialize, FromRow)] + pub struct TableInfo { + cid: i32, + name: String, + #[sqlx(rename = "type")] + _type: String, + notnull: bool, + dflt_value: Option<String>, + pk: bool, + } + + #[derive(Debug, Serialize, FromRow)] + struct JobQueueLock { + id: i32, + is_locked: bool, + locked_at: Option<DateTime<Utc>>, + } + + fn reduction() -> &'static [(&'static str, &'static str)] { + &[ + ("[A-Z0-9]{26}", ""), + (r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z", ""), + ] + } + + async fn init(db_path: &Path) -> Pool<Sqlite> { + let qcfg = SqliteQueueConfig { + uri: format!( + "sqlite://{}?mode=rwc", + db_path.join("sample.sqlite").display() + ), + dangerously_flush: false, + enable_logging: false, + max_connections: 1, + min_connections: 1, + connect_timeout: 500, + idle_timeout: 500, + poll_interval_sec: 1, + num_workers: 1, + }; + + let pool = connect(&qcfg).await.unwrap(); + sqlx::raw_sql( + r" + DROP TABLE IF EXISTS sqlt_loco_queue; + DROP TABLE IF EXISTS sqlt_loco_queue_lock; + ", + ) + .execute(&pool) + .await + .expect("drop table if exists"); + + pool + } + + async fn get_all_jobs(pool: &SqlitePool) -> Vec<Job> { + sqlx::query("select * from
sqlt_loco_queue") + .fetch_all(pool) + .await + .expect("get jobs") + .iter() + .filter_map(|row| to_job(row).ok()) + .collect() + } + + async fn get_job(pool: &SqlitePool, id: &str) -> Job { + sqlx::query(&format!("select * from sqlt_loco_queue where id = '{id}'")) + .fetch_all(pool) + .await + .expect("get jobs") + .first() + .and_then(|row| to_job(row).ok()) + .expect("job not found") + } + + #[tokio::test] + async fn can_initialize_database() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + + for table in ["sqlt_loco_queue", "sqlt_loco_queue_lock"] { + let table_info: Vec = + query_as::<_, TableInfo>(&format!("PRAGMA table_info({table})")) + .fetch_all(&pool) + .await + .unwrap(); + + assert_debug_snapshot!(table, table_info); + } + } + + #[tokio::test] + async fn can_enqueue() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 0); + + let run_at = Utc.from_utc_datetime( + &NaiveDate::from_ymd_opt(2023, 1, 15) + .unwrap() + .and_time(NaiveTime::from_hms_opt(12, 30, 0).unwrap()), + ); + + let job_data = serde_json::json!({"user_id": 1}); + assert!( + enqueue(&pool, "PasswordChangeNotification", job_data, run_at, None) + .await + .is_ok() + ); + + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 1); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| (pattern, replacement)), + }, { + assert_debug_snapshot!(jobs); + }); + + // validate lock status + let job_lock: JobQueueLock = + query_as::<_, JobQueueLock>("select * from sqlt_loco_queue_lock") + .fetch_one(&pool) + .await + .unwrap(); + + assert!(!job_lock.is_locked); + } + + #[tokio::test] + async fn can_dequeue() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 0); + + let run_at = Utc.from_utc_datetime( + &NaiveDate::from_ymd_opt(2023, 1, 15) + .unwrap() + .and_time(NaiveTime::from_hms_opt(12, 30, 0).unwrap()), + ); + + let job_data = serde_json::json!({"user_id": 1}); + assert!( + enqueue(&pool, "PasswordChangeNotification", job_data, run_at, None) + .await + .is_ok() + ); + + let job_before_dequeue = get_all_jobs(&pool) + .await + .first() + .cloned() + .expect("gets first job"); + assert_eq!(job_before_dequeue.status, JobStatus::Queued); + + std::thread::sleep(std::time::Duration::from_secs(1)); + + assert!(dequeue(&pool).await.is_ok()); + + let job_after_dequeue = get_all_jobs(&pool) + .await + .first() + .cloned() + .expect("gets first job"); + + assert_ne!(job_after_dequeue.updated_at, job_before_dequeue.updated_at); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| (pattern, replacement)), + }, { + assert_debug_snapshot!(job_after_dequeue); + }); + } + + #[tokio::test] + async fn can_complete_job_without_interval() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + 
tests_cfg::queue::sqlite_seed_data(&pool).await; + + let job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA99").await; + + assert_eq!(job.status, JobStatus::Queued); + assert!(complete_job(&pool, &job.id, None).await.is_ok()); + + let job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA99").await; + + assert_eq!(job.status, JobStatus::Completed); + } + + #[tokio::test] + async fn can_complete_job_with_interval() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::sqlite_seed_data(&pool).await; + + let before_complete_job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA98").await; + assert_eq!(before_complete_job.status, JobStatus::Completed); + + std::thread::sleep(std::time::Duration::from_secs(1)); + + assert!(complete_job(&pool, &before_complete_job.id, Some(10)) + .await + .is_ok()); + + let after_complete_job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA98").await; + + assert_ne!( + after_complete_job.updated_at, + before_complete_job.updated_at + ); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| (pattern, replacement)), + }, { + assert_debug_snapshot!(after_complete_job); + }); + } + + #[tokio::test] + async fn can_fail_job() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::sqlite_seed_data(&pool).await; + + let before_fail_job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA97").await; + + std::thread::sleep(std::time::Duration::from_secs(1)); + + assert!(fail_job( + &pool, + &before_fail_job.id, + &crate::Error::string("some error") + ) + .await + .is_ok()); + + let after_fail_job = get_job(&pool, "01JDM0X8EVAM823JZBGKYNBA97").await; + + assert_ne!(after_fail_job.updated_at, before_fail_job.updated_at); + with_settings!({ + filters => reduction().iter().map(|&(pattern, replacement)| (pattern, replacement)), + }, { + assert_debug_snapshot!(after_fail_job); + }); + } + + #[tokio::test] + async fn can_cancel_job_by_name() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::sqlite_seed_data(&pool).await; + + let count_cancelled_jobs = get_all_jobs(&pool) + .await + .iter() + .filter(|j| j.status == JobStatus::Cancelled) + .count(); + + assert_eq!(count_cancelled_jobs, 1); + + assert!(cancel_jobs_by_name(&pool, "UserAccountActivation") + .await + .is_ok()); + + let count_cancelled_jobs = get_all_jobs(&pool) + .await + .iter() + .filter(|j| j.status == JobStatus::Cancelled) + .count(); + + assert_eq!(count_cancelled_jobs, 2); + } + + #[tokio::test] + async fn can_clear() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::sqlite_seed_data(&pool).await; + + let job_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM sqlt_loco_queue") + .fetch_one(&pool) + .await + .unwrap(); + let lock_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM sqlt_loco_queue_lock") + .fetch_one(&pool) + .await + .unwrap(); + assert_ne!(job_count, 0); + assert_ne!(lock_count, 0); + + assert!(clear(&pool).await.is_ok()); + 
let job_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM sqlt_loco_queue") + .fetch_one(&pool) + .await + .unwrap(); + let lock_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM sqlt_loco_queue_lock") + .fetch_one(&pool) + .await + .unwrap(); + assert_eq!(job_count, 0); + assert_eq!(lock_count, 0); + } + + #[tokio::test] + async fn can_clear_by_status() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + tests_cfg::queue::sqlite_seed_data(&pool).await; + + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 14); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Completed) + .count(), + 3 + ); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Failed) + .count(), + 2 + ); + + assert!( + clear_by_status(&pool, vec![JobStatus::Completed, JobStatus::Failed]) + .await + .is_ok() + ); + let jobs = get_all_jobs(&pool).await; + + assert_eq!(jobs.len(), 9); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Completed) + .count(), + 0 + ); + assert_eq!( + jobs.iter() + .filter(|j| j.status == JobStatus::Failed) + .count(), + 0 + ); + } + + #[tokio::test] + async fn can_clear_jobs_older_than() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + + sqlx::query( + r"INSERT INTO sqlt_loco_queue (id, name, task_data, status,run_at, created_at, updated_at) VALUES + ('job1', 'Test Job 1', '{}', 'queued', CURRENT_TIMESTAMP,DATETIME('now', '-15 days'), CURRENT_TIMESTAMP), + ('job2', 'Test Job 2', '{}', 'queued', CURRENT_TIMESTAMP, DATETIME('now', '-5 days'), CURRENT_TIMESTAMP), + ('job3', 'Test Job 3', '{}', 'queued', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)", + ) + .execute(&pool) + .await + .unwrap(); + + assert_eq!(get_all_jobs(&pool).await.len(), 3); + assert!(clear_jobs_older_than(&pool, 10, None).await.is_ok()); + assert_eq!(get_all_jobs(&pool).await.len(), 2); + } + + #[tokio::test] + async fn can_clear_jobs_older_than_with_status() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + + assert!(initialize_database(&pool).await.is_ok()); + + sqlx::query( + r"INSERT INTO sqlt_loco_queue (id, name, task_data, status,run_at, created_at, updated_at) VALUES + ('job1', 'Test Job 1', '{}', 'completed', CURRENT_TIMESTAMP,DATETIME('now', '-20 days'), CURRENT_TIMESTAMP), + ('job2', 'Test Job 2', '{}', 'failed', CURRENT_TIMESTAMP,DATETIME('now', '-15 days'), CURRENT_TIMESTAMP), + ('job3', 'Test Job 3', '{}', 'completed', CURRENT_TIMESTAMP, DATETIME('now', '-5 days'), CURRENT_TIMESTAMP), + ('job4', 'Test Job 4', '{}', 'cancelled', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)", + ) + .execute(&pool) + .await + .unwrap(); + + assert_eq!(get_all_jobs(&pool).await.len(), 4); + assert!(clear_jobs_older_than( + &pool, + 10, + Some(&vec![JobStatus::Cancelled, JobStatus::Completed]) + ) + .await + .is_ok()); + + assert_eq!(get_all_jobs(&pool).await.len(), 3); + } + + #[tokio::test] + async fn can_get_jobs() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + assert!(initialize_database(&pool).await.is_ok()); + 
tests_cfg::queue::sqlite_seed_data(&pool).await; + + assert_eq!( + get_jobs(&pool, Some(&vec![JobStatus::Failed]), None) + .await + .expect("get jobs") + .len(), + 2 + ); + assert_eq!( + get_jobs( + &pool, + Some(&vec![JobStatus::Failed, JobStatus::Completed]), + None + ) + .await + .expect("get jobs") + .len(), + 5 + ); + assert_eq!( + get_jobs(&pool, None, None).await.expect("get jobs").len(), + 14 + ); + } + + #[tokio::test] + async fn can_get_jobs_with_age() { + let tree_fs = tree_fs::TreeBuilder::default() + .drop(true) + .create() + .expect("create temp folder"); + let pool = init(&tree_fs.root).await; + assert!(initialize_database(&pool).await.is_ok()); + + sqlx::query( + r"INSERT INTO sqlt_loco_queue (id, name, task_data, status,run_at, created_at, updated_at) VALUES + ('job1', 'Test Job 1', '{}', 'completed', CURRENT_TIMESTAMP,DATETIME('now', '-20 days'), CURRENT_TIMESTAMP), + ('job2', 'Test Job 2', '{}', 'failed', CURRENT_TIMESTAMP,DATETIME('now', '-15 days'), CURRENT_TIMESTAMP), + ('job3', 'Test Job 3', '{}', 'completed', CURRENT_TIMESTAMP, DATETIME('now', '-5 days'), CURRENT_TIMESTAMP), + ('job4', 'Test Job 4', '{}', 'cancelled', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)", + ) + .execute(&pool) + .await + .unwrap(); + assert_eq!( + get_jobs( + &pool, + Some(&vec![JobStatus::Failed, JobStatus::Completed]), + Some(10) + ) + .await + .expect("get jobs") + .len(), + 2 + ); + } +} diff --git a/src/cli.rs b/src/cli.rs index bbcfa30ea..a179bc7c1 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -24,14 +24,16 @@ cfg_if::cfg_if! { } else {} } -use std::path::PathBuf; +use std::{collections::BTreeMap, path::PathBuf}; use clap::{ArgAction, Parser, Subcommand}; +use colored::Colorize; use duct::cmd; use loco_gen::{Component, ScaffoldKind}; use crate::{ app::{AppContext, Hooks}, + bgworker::JobStatus, boot::{ create_app, create_context, list_endpoints, list_middlewares, run_scheduler, run_task, start, RunDbCommand, ServeParams, StartMode, @@ -105,6 +107,12 @@ enum Commands { #[clap(value_parser = parse_key_val::)] params: Vec<(String, String)>, }, + #[cfg(any(feature = "bg_redis", feature = "bg_pg", feature = "bg_sqlt"))] + /// Managing jobs queue. + Jobs { + #[command(subcommand)] + command: JobsCommands, + }, /// Run the scheduler Scheduler { /// Run a specific job by its name. @@ -418,6 +426,49 @@ impl From for RunDbCommand { } } +#[cfg(any(feature = "bg_redis", feature = "bg_pg", feature = "bg_sqlt"))] +#[derive(Subcommand)] +enum JobsCommands { + /// Cancels jobs with the specified names, setting their status to + /// `cancelled`. + Cancel { + /// Names of jobs to cancel. + #[arg(long)] + name: String, + }, + /// Deletes jobs that are either completed or cancelled. + Tidy {}, + /// Deletes jobs based on their age in days. + Purge { + /// Deletes jobs with errors or cancelled, older than the specified maximum age in days. + #[arg(long, default_value_t = 90)] + max_age: i64, + /// Limits the jobs being saved to those with specific criteria like + /// completed or queued. + #[arg(long, use_value_delimiter = true)] + status: Option>, + /// Saves the details of jobs into a file before deleting them. + #[arg(long)] + dump: Option, + }, + /// Saves the details of all jobs to files in the specified folder. + Dump { + /// Limits the jobs being saved to those with specific criteria like + /// completed or queued. + #[arg(long, use_value_delimiter = true)] + status: Option>, + /// Folder to save the job files (default: current directory). 
+ #[arg(short, long, default_value = ".")] + folder: PathBuf, + }, + /// Imports jobs from a file. + Import { + /// Path to the file containing job details to import. + #[arg(short, long)] + file: PathBuf, + }, +} + /// Parse a single key-value pair fn parse_key_val( s: &str, @@ -444,6 +495,12 @@ pub async fn playground() -> crate::Result { let cli = Playground::parse(); let environment: Environment = cli.environment.unwrap_or_else(resolve_from_env).into(); + let config = environment.load()?; + + if !H::init_logger(&config, &environment)? { + logger::init::(&config.logger)?; + } + let app_context = create_context::(&environment).await?; Ok(app_context) } @@ -485,7 +542,7 @@ pub async fn main() -> crate::Result<()> { let config = environment.load()?; if !H::init_logger(&config, &environment)? { - logger::init::(&config.logger); + logger::init::(&config.logger)?; } let task_span = create_root_span(&environment); @@ -524,6 +581,8 @@ pub async fn main() -> crate::Result<()> { run_db::(&app_context, command.into()).await?; } } + #[cfg(any(feature = "bg_redis", feature = "bg_pg", feature = "bg_sqlt"))] + Commands::Jobs { command } => handle_job_command::(command, &environment).await?, Commands::Routes {} => { let app_context = create_context::(&environment).await?; show_list_endpoints::(&app_context); @@ -629,7 +688,7 @@ pub async fn main() -> crate::Result<()> { let config = environment.load()?; if !H::init_logger(&config, &environment)? { - logger::init::(&config.logger); + logger::init::(&config.logger)?; } let task_span = create_root_span(&environment); @@ -689,6 +748,8 @@ pub async fn main() -> crate::Result<()> { let app_context = create_context::(&environment).await?; run_task::(&app_context, name.as_ref(), &vars).await?; } + #[cfg(any(feature = "bg_redis", feature = "bg_pg", feature = "bg_sqlt"))] + Commands::Jobs { command } => handle_job_command::(command, &environment).await?, Commands::Scheduler { name, config, @@ -736,12 +797,162 @@ pub async fn main() -> crate::Result<()> { fn show_list_endpoints(ctx: &AppContext) { let mut routes = list_endpoints::(ctx); - routes.sort_by(|a, b| a.uri.cmp(&b.uri)); + + // Sort first by path, then ensure HTTP methods are in a consistent order + routes.sort_by(|a, b| { + let method_priority = |actions: &[_]| match actions + .first() + .map(ToString::to_string) + .unwrap_or_default() + .as_str() + { + "GET" => 0, + "POST" => 1, + "PUT" => 2, + "PATCH" => 3, + "DELETE" => 4, + _ => 5, + }; + + let a_priority = method_priority(&a.actions); + let b_priority = method_priority(&b.actions); + + a.uri.cmp(&b.uri).then(a_priority.cmp(&b_priority)) + }); + + // Group routes by their first path segment and full path + let mut path_groups: BTreeMap>> = BTreeMap::new(); + for router in routes { - println!("{router}"); + let path = router.uri.trim_start_matches('/'); + let segments: Vec<&str> = path.split('/').collect(); + let root = (*segments.first().unwrap_or(&"")).to_string(); + + let actions_str = router + .actions + .iter() + .map(ToString::to_string) + .collect::>() + .join(","); + + path_groups + .entry(root) + .or_default() + .entry(router.uri.to_string()) + .or_default() + .push(actions_str); + } + + // Print tree structure + for (root, paths) in path_groups { + println!("/{}", root.bold()); + let paths_count = paths.len(); + let mut path_idx = 0; + + for (path, methods) in paths { + path_idx += 1; + let is_last_path = path_idx == paths_count; + let is_group = methods.len() > 1; + + // Print first method + let prefix = if is_last_path && !is_group { + " 
└─ " + } else { + " ├─ " + }; + let colored_method = color_method(&methods[0]); + println!("{prefix}{colored_method}\t{path}"); + + // Print additional methods in group + if is_group { + for (i, method) in methods[1..].iter().enumerate() { + let is_last_in_group = i == methods.len() - 2; + let group_prefix = if is_last_path && is_last_in_group { + " └─ " + } else { + " │ " + }; + let colored_method = color_method(method); + println!("{group_prefix}{colored_method}\t{path}"); + } + + // Add spacing between groups if not the last path + if !is_last_path { + println!(" │"); + } + } + } + } +} + +fn color_method(method: &str) -> String { + match method { + "GET" => method.green().to_string(), + "POST" => method.blue().to_string(), + "PUT" => method.yellow().to_string(), + "PATCH" => method.magenta().to_string(), + "DELETE" => method.red().to_string(), + _ => method.to_string(), } } fn create_root_span(environment: &Environment) -> tracing::Span { tracing::span!(tracing::Level::DEBUG, "app", environment = %environment) } + +#[cfg(any(feature = "bg_redis", feature = "bg_pg", feature = "bg_sqlt"))] +async fn handle_job_command( + command: JobsCommands, + environment: &Environment, +) -> crate::Result<()> { + let app_context = create_context::(environment).await?; + let queue = app_context.queue_provider.map_or_else( + || { + println!("queue not configured"); + exit(1); + }, + |queue_provider| queue_provider, + ); + + match &command { + JobsCommands::Cancel { name } => queue.cancel_jobs(name).await, + JobsCommands::Tidy {} => { + queue + .clear_by_status(vec![JobStatus::Completed, JobStatus::Cancelled]) + .await + } + JobsCommands::Purge { + max_age, + status, + dump, + } => { + let status = status.as_ref().map_or_else( + || { + vec![ + JobStatus::Failed, + JobStatus::Cancelled, + JobStatus::Queued, + JobStatus::Completed, + ] + }, + std::clone::Clone::clone, + ); + + if let Some(path) = dump { + let dump_path = queue + .dump(path.as_path(), Some(&status), Some(*max_age)) + .await?; + + println!("Jobs successfully dumped to: {}", dump_path.display()); + } + + queue.clear_jobs_older_than(*max_age, &status).await + } + JobsCommands::Dump { status, folder } => { + let dump_path = queue.dump(folder.as_path(), status.as_ref(), None).await?; + println!("Jobs successfully dumped to: {}", dump_path.display()); + Ok(()) + } + JobsCommands::Import { file } => queue.import(file.as_path()).await, + } +} diff --git a/src/config.rs b/src/config.rs index 31eeccb1a..69d1a73e0 100644 --- a/src/config.rs +++ b/src/config.rs @@ -500,6 +500,8 @@ pub struct SmtpMailer { pub secure: bool, /// Auth SMTP server pub auth: Option, + /// Optional EHLO client ID instead of hostname + pub hello_name: Option, } /// Authentication details for the mailer @@ -563,10 +565,12 @@ impl Config { path.join(format!("{env}.yaml")), ]; - let selected_path = files - .iter() - .find(|p| p.exists()) - .ok_or_else(|| Error::Message("no configuration file found".to_string()))?; + let selected_path = files.iter().find(|p| p.exists()).ok_or_else(|| { + Error::Message(format!( + "no configuration file found in folder: {}", + path.display() + )) + })?; info!(selected_path =? 
selected_path, "loading environment from"); diff --git a/src/db.rs b/src/db.rs index 749cf5662..3c7359582 100644 --- a/src/db.rs +++ b/src/db.rs @@ -19,7 +19,7 @@ use tracing::info; use super::Result as AppResult; use crate::{ app::{AppContext, Hooks}, - config, doctor, + config, doctor, env_vars, errors::Error, }; @@ -277,6 +277,111 @@ where Ok(()) } +/// Checks if the specified table has an 'id' column. +/// +/// This function checks if the specified table has an 'id' column, which is a +/// common primary key column. It supports `Postgres`, `SQLite`, and `MySQL` database +/// backends. +/// +/// # Arguments +/// +/// - `db`: A reference to the `DatabaseConnection`. +/// - `db_backend`: A reference to the `DatabaseBackend`. +/// - `table_name`: The name of the table to check. +/// +/// # Returns +/// +/// A `Result` containing a `bool` indicating whether the table has an 'id' +/// column. +async fn has_id_column( + db: &DatabaseConnection, + db_backend: &DatabaseBackend, + table_name: &str, +) -> crate::Result { + // First check if 'id' column exists + let result = match db_backend { + DatabaseBackend::Postgres => { + let query = format!( + "SELECT EXISTS ( + SELECT 1 + FROM information_schema.columns + WHERE table_name = '{table_name}' + AND column_name = 'id' + )" + ); + let result = db + .query_one(Statement::from_string(DatabaseBackend::Postgres, query)) + .await?; + result.map_or(false, |row| { + row.try_get::("", "exists").unwrap_or(false) + }) + } + DatabaseBackend::Sqlite => { + let query = format!( + "SELECT COUNT(*) as count + FROM pragma_table_info('{table_name}') + WHERE name = 'id'" + ); + let result = db + .query_one(Statement::from_string(DatabaseBackend::Sqlite, query)) + .await?; + result.map_or(false, |row| { + row.try_get::("", "count").unwrap_or(0) > 0 + }) + } + DatabaseBackend::MySql => { + return Err(Error::Message( + "Unsupported database backend: MySQL".to_string(), + )) + } + }; + + Ok(result) +} + +/// Checks whether the specified table has an auto-increment 'id' column. +/// +/// # Returns +/// +/// A `Result` containing a `bool` indicating whether the table has an +/// auto-increment 'id' column. 
+async fn is_auto_increment( + db: &DatabaseConnection, + db_backend: &DatabaseBackend, + table_name: &str, +) -> crate::Result { + let result = match db_backend { + DatabaseBackend::Postgres => { + let query = format!( + "SELECT pg_get_serial_sequence('{table_name}', 'id') IS NOT NULL as is_serial" + ); + let result = db + .query_one(Statement::from_string(DatabaseBackend::Postgres, query)) + .await?; + result.map_or(false, |row| { + row.try_get::("", "is_serial").unwrap_or(false) + }) + } + DatabaseBackend::Sqlite => { + let query = + format!("SELECT sql FROM sqlite_master WHERE type='table' AND name='{table_name}'"); + let result = db + .query_one(Statement::from_string(DatabaseBackend::Sqlite, query)) + .await?; + result.map_or(false, |row| { + row.try_get::("", "sql") + .map_or(false, |sql| sql.to_lowercase().contains("autoincrement")) + }) + } + DatabaseBackend::MySql => { + return Err(Error::Message( + "Unsupported database backend: MySQL".to_string(), + )) + } + }; + Ok(result) +} + /// Function to reset auto-increment /// # Errors /// Returns error if it fails @@ -285,6 +390,17 @@ pub async fn reset_autoincrement( table_name: &str, db: &DatabaseConnection, ) -> crate::Result<()> { + // Check if 'id' column exists + let has_id_column = has_id_column(db, &db_backend, table_name).await?; + if !has_id_column { + return Ok(()); + } + // Check if 'id' column is auto-increment + let is_auto_increment = is_auto_increment(db, &db_backend, table_name).await?; + if !is_auto_increment { + return Ok(()); + } + match db_backend { DatabaseBackend::Postgres => { let query_str = format!( @@ -400,13 +516,11 @@ fn fix_entities() -> AppResult<()> { &new_file, format!( r"use sea_orm::entity::prelude::*; -use super::_entities::{module}::{{ActiveModel, Entity}}; +pub use super::_entities::{module}::{{ActiveModel, Model, Entity}}; pub type {module_pascal} = Entity; #[async_trait::async_trait] impl ActiveModelBehavior for ActiveModel {{ - // extend activemodel below (keep comment for generators) - async fn before_save(self, _db: &C, insert: bool) -> std::result::Result where C: ConnectionTrait, @@ -420,6 +534,15 @@ impl ActiveModelBehavior for ActiveModel {{ }} }} }} + +// implement your read-oriented logic here +impl Model {{}} + +// implement your write-oriented logic here +impl ActiveModel {{}} + +// implement your custom finders, selectors oriented logic here +impl Entity {{}} " ), )?; @@ -479,8 +602,7 @@ async fn create_postgres_database( db_name: &str, db: &DatabaseConnection, ) -> Result<(), sea_orm::DbErr> { - let with_options = - std::env::var("LOCO_POSTGRES_DB_OPTIONS").unwrap_or_else(|_| "ENCODING='UTF8'".to_string()); + let with_options = env_vars::get_or_default(env_vars::POSTGRES_DB_OPTIONS, "ENCODING='UTF8'"); let query = format!("CREATE DATABASE {db_name} WITH {with_options}"); tracing::info!(query, "creating postgres database"); diff --git a/src/doctor.rs b/src/doctor.rs index a25452a53..1b12cb2cf 100644 --- a/src/doctor.rs +++ b/src/doctor.rs @@ -7,6 +7,7 @@ use std::{ use colored::Colorize; use regex::Regex; use semver::Version; +use serde::Deserialize; use crate::{ bgworker, @@ -32,13 +33,58 @@ fn get_min_dep_versions() -> &'static HashMap<&'static str, &'static str> { min_vers.insert("tokio", "1.33.0"); min_vers.insert("sea-orm", "1.1.0"); - min_vers.insert("validator", "0.18.0"); + min_vers.insert("validator", "0.19.0"); min_vers.insert("axum", "0.7.5"); min_vers }) } +#[derive(Deserialize)] +struct CrateResponse { + #[serde(rename = "crate")] + krate: CrateInfo, +} + 
+#[derive(Deserialize)] +struct CrateInfo { + max_version: String, +} + +/// .Check latest crate version in crates.io +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn check_cratesio_version( + crate_name: &str, + current_version: &str, +) -> Result> { + // Construct the URL for the crates.io API + let url = format!("https://crates.io/api/v1/crates/{crate_name}"); + + let client = reqwest::Client::new(); + // Fetch crate information + let response = client + .get(&url) + .header("User-Agent", "Loco-Version-Check/1.0") + .send() + .await? + .json::() + .await?; + + // Parse versions + let current = Version::parse(current_version)?; + let latest = Version::parse(&response.krate.max_version)?; + + // Compare versions + if latest > current { + Ok(Some(response.krate.max_version)) + } else { + Ok(None) + } +} + /// Represents different resources that can be checked. #[derive(PartialOrd, PartialEq, Eq, Ord, Debug)] pub enum Resource { @@ -46,6 +92,7 @@ pub enum Resource { Database, Queue, Deps, + PublishedLocoVersion, } /// Represents the status of a resource check. @@ -129,6 +176,10 @@ pub async fn run_all(config: &Config, production: bool) -> Result Result { let crate_statuses = depcheck::check_crate_versions(&cargolock, get_min_dep_versions().clone())?; let mut report = String::new(); - report.push_str("Dependencies\n"); + report.push_str("Dependencies"); let mut all_ok = true; for status in &crate_statuses { @@ -155,7 +206,7 @@ pub fn check_deps() -> Result { } = &status.status { report.push_str(&format!( - " {}: version {} does not meet minimum version {}\n", + "\n {}: version {} does not meet minimum version {}", status.crate_name.yellow(), version.red(), min_version.green() @@ -281,3 +332,29 @@ pub fn check_seaorm_cli() -> Result { }), } } + +/// Check for the latest Loco version +/// +/// # Errors +/// +/// This function will return an error if it fails +pub async fn check_published_loco_version() -> Result { + let compiled_version = env!("CARGO_PKG_VERSION"); + match check_cratesio_version("loco-rs", compiled_version).await { + Ok(Some(v)) => Ok(Check { + status: CheckStatus::NotOk, + message: format!("Loco version: `{compiled_version}`, latest version: `{v}`"), + description: Some("It is recommended to upgrade your main Loco version.".to_string()), + }), + Ok(None) => Ok(Check { + status: CheckStatus::Ok, + message: "Loco version: latest".to_string(), + description: None, + }), + Err(e) => Ok(Check { + status: CheckStatus::NotOk, + message: format!("Checking Loco version failed: {e}"), + description: None, + }), + } +} diff --git a/src/env_vars.rs b/src/env_vars.rs new file mode 100644 index 000000000..83a6e8587 --- /dev/null +++ b/src/env_vars.rs @@ -0,0 +1,25 @@ +//! This module contains utility functions and constants for working with +//! environment variables in the application. It centralizes the logic for +//! fetching environment variables, ensuring that keys are easily accessible +//! from a single location in the codebase. + +/// The key for `PostgreSQL` database options environment variable. +pub const POSTGRES_DB_OPTIONS: &str = "LOCO_POSTGRES_DB_OPTIONS"; +/// The key for the application's environment (e.g., development, production). +pub const LOCO_ENV: &str = "LOCO_ENV"; +/// The key for the application's environment (e.g., development, production). +pub const RAILS_ENV: &str = "RAILS_ENV"; +/// The key for the application's environment (e.g., development, production). 
+pub const NODE_ENV: &str = "NODE_ENV"; +// The key for the application environment configuration +pub const CONFIG_FOLDER: &str = "LOCO_CONFIG_FOLDER"; + +/// Fetches the value of the given environment variable. +pub fn get(key: &str) -> Result { + std::env::var(key) +} + +/// Retrieves the value of the given environment variable, or returns a default value if the variable is not set. +pub fn get_or_default(key: &str, default: &str) -> String { + get(key).unwrap_or_else(|_| default.to_string()) +} diff --git a/src/environment.rs b/src/environment.rs index 4beb8fd08..52ea42b0b 100644 --- a/src/environment.rs +++ b/src/environment.rs @@ -12,13 +12,11 @@ //! let config = environment.load().expect("failed to load environment"); //! } //! ``` -use std::{path::Path, str::FromStr}; - +use super::config::Config; +use crate::{env_vars, Result}; use serde::{Deserialize, Serialize}; use serde_variant::to_variant_name; - -use super::config::Config; -use crate::Result; +use std::{path::Path, str::FromStr}; pub const DEFAULT_ENVIRONMENT: &str = "development"; pub const LOCO_ENV: &str = "LOCO_ENV"; @@ -33,9 +31,9 @@ impl From for Environment { #[must_use] pub fn resolve_from_env() -> String { - std::env::var("LOCO_ENV") - .or_else(|_| std::env::var("RAILS_ENV")) - .or_else(|_| std::env::var("NODE_ENV")) + env_vars::get(env_vars::LOCO_ENV) + .or_else(|_| env_vars::get(env_vars::RAILS_ENV)) + .or_else(|_| env_vars::get(env_vars::NODE_ENV)) .unwrap_or_else(|_| DEFAULT_ENVIRONMENT.to_string()) } @@ -59,7 +57,10 @@ impl Environment { /// Returns error if an error occurs during loading /// configuration file an parse into [`Config`] struct. pub fn load(&self) -> Result { - Config::new(self) + env_vars::get(env_vars::CONFIG_FOLDER).map_or_else( + |_| Config::new(self), + |config_folder| self.load_from_folder(Path::new(&config_folder)), + ) } /// Load environment variables from the given config path diff --git a/src/errors.rs b/src/errors.rs index a70aa02c2..d5aef1c94 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -132,7 +132,7 @@ pub enum Error { #[error(transparent)] Redis(#[from] sidekiq::redis_rs::RedisError), - #[cfg(feature = "bg_pg")] + #[cfg(any(feature = "bg_pg", feature = "bg_sqlt"))] #[error(transparent)] Sqlx(#[from] sqlx::Error), @@ -148,6 +148,12 @@ pub enum Error { #[error(transparent)] VersionCheck(#[from] depcheck::VersionCheckError), + #[error(transparent)] + RequestError(#[from] reqwest::Error), + + #[error(transparent)] + SemVer(#[from] semver::Error), + #[error(transparent)] Any(#[from] Box), } diff --git a/src/lib.rs b/src/lib.rs index 90b46515b..fb29c0c91 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -21,18 +21,18 @@ pub mod schema; mod tera; pub mod app; -#[cfg(feature = "cli")] -pub mod cli; - pub mod auth; pub mod boot; pub mod cache; +#[cfg(feature = "cli")] +pub mod cli; pub mod config; pub mod controller; +mod env_vars; pub mod environment; pub mod errors; pub mod hash; -mod logger; +pub mod logger; pub mod mailer; pub mod scheduler; pub mod task; diff --git a/src/logger.rs b/src/logger.rs index cd791836e..8bcd53c0f 100644 --- a/src/logger.rs +++ b/src/logger.rs @@ -9,7 +9,7 @@ use tracing_subscriber::{ fmt, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer, Registry, }; -use crate::{app::Hooks, config}; +use crate::{app::Hooks, config, Error, Result}; // Define an enumeration for log levels #[derive(Debug, Default, Clone, Deserialize, Serialize)] @@ -75,6 +75,7 @@ const MODULE_WHITELIST: &[&str] = &[ "tower_http", "sqlx::query", "sidekiq", + 
"playground", ]; // Keep nonblocking file appender work guard @@ -96,7 +97,11 @@ static NONBLOCKING_WORK_GUARD_KEEP: OnceLock = OnceLock::new(); /// use via PR) /// 3. regardless of (1) and (2) operators in production, or elsewhere can /// always use `RUST_LOG` to quickly diagnose a service -pub fn init(config: &config::Logger) { +/// +/// # Errors +/// Fails if cannot initialize logger or set up an appender (in case the option +/// is enabled) +pub fn init(config: &config::Logger) -> Result<()> { let mut layers: Vec + Sync + Send>> = Vec::new(); if let Some(file_appender_config) = config.file_appender.as_ref() { @@ -138,12 +143,14 @@ pub fn init(config: &config::Logger) { .map_or_else(String::new, ToString::to_string), ) .build(dir) - .expect("logger file appender initialization failed"); + .map_err(Error::msg)?; let file_appender_layer = if file_appender_config.non_blocking { let (non_blocking_file_appender, work_guard) = tracing_appender::non_blocking(file_appender); - NONBLOCKING_WORK_GUARD_KEEP.set(work_guard).unwrap(); + NONBLOCKING_WORK_GUARD_KEEP + .set(work_guard) + .map_err(|_| Error::string("cannot lock for appender"))?; init_layer( non_blocking_file_appender, &file_appender_config.format, @@ -168,6 +175,7 @@ pub fn init(config: &config::Logger) { .with(env_filter) .init(); } + Ok(()) } fn init_env_filter(override_filter: Option<&String>, level: &LogLevel) -> EnvFilter { diff --git a/src/mailer/email_sender.rs b/src/mailer/email_sender.rs index e392365b5..3cb2b86ca 100644 --- a/src/mailer/email_sender.rs +++ b/src/mailer/email_sender.rs @@ -3,8 +3,9 @@ //! sending emails with options like sender, recipient, subject, and content. use lettre::{ - message::MultiPart, transport::smtp::authentication::Credentials, AsyncTransport, Message, - Tokio1Executor, Transport, + message::MultiPart, + transport::smtp::{authentication::Credentials, extension::ClientId}, + AsyncTransport, Message, Tokio1Executor, Transport, }; use tracing::error; @@ -60,6 +61,10 @@ impl EmailSender { .credentials(Credentials::new(auth.user.clone(), auth.password.clone())); } + if let Some(hello_name) = config.hello_name.as_ref() { + email_builder = email_builder.hello_name(ClientId::Domain(hello_name.clone())); + } + Ok(Self { transport: EmailTransport::Smtp(email_builder.build()), }) diff --git a/src/model/mod.rs b/src/model/mod.rs index 749b9ea8c..d454e4848 100644 --- a/src/model/mod.rs +++ b/src/model/mod.rs @@ -35,11 +35,30 @@ pub enum ModelError { #[error(transparent)] Any(#[from] Box), + + #[error("{0}")] + Message(String), } #[allow(clippy::module_name_repetitions)] pub type ModelResult = std::result::Result; +impl ModelError { + #[must_use] + pub fn wrap(err: impl std::error::Error + Send + Sync + 'static) -> Self { + Self::Any(Box::new(err)) + } + + #[must_use] + pub fn to_msg(err: impl std::error::Error + Send + Sync + 'static) -> Self { + Self::Message(err.to_string()) + } + + #[must_use] + pub fn msg(s: &str) -> Self { + Self::Message(s.to_string()) + } +} #[async_trait] pub trait Authenticable: Clone { async fn find_by_api_key(db: &DatabaseConnection, api_key: &str) -> ModelResult; diff --git a/src/prelude.rs b/src/prelude.rs index 3f247cd0d..03ae9cf8c 100644 --- a/src/prelude.rs +++ b/src/prelude.rs @@ -48,3 +48,5 @@ pub use crate::{ pub mod model { pub use crate::model::query; } +#[cfg(feature = "testing")] +pub use crate::testing::prelude::*; diff --git a/src/testing.rs b/src/testing.rs deleted file mode 100644 index 6ca5fda10..000000000 --- a/src/testing.rs +++ /dev/null @@ -1,233 +0,0 @@ -//! 
# Test Utilities Module -//! -//! This module provides utility functions and constants for easy testing -//! purposes, including cleaning up data patterns and bootstrapping the -//! application for testing. - -use std::{net::SocketAddr, sync::OnceLock}; - -use axum_test::{TestServer, TestServerConfig}; -#[cfg(feature = "with-db")] -use sea_orm::DatabaseConnection; - -use crate::{ - app::{AppContext, Hooks}, - boot::{self, BootResult}, - environment::Environment, - Result, -}; - -static CLEANUP_USER_MODEL: OnceLock> = OnceLock::new(); -static CLEANUP_DATE: OnceLock> = OnceLock::new(); -static CLEANUP_MODEL: OnceLock> = OnceLock::new(); -static CLEANUP_MAIL: OnceLock> = OnceLock::new(); - -pub fn get_cleanup_user_model() -> &'static Vec<(&'static str, &'static str)> { - CLEANUP_USER_MODEL.get_or_init(|| { - vec![ - ( - r"([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})", - "PID", - ), - (r"password: (.*{60}),", "password: \"PASSWORD\","), - (r"([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)", "TOKEN"), - ] - }) -} - -pub fn get_cleanup_date() -> &'static Vec<(&'static str, &'static str)> { - CLEANUP_DATE.get_or_init(|| { - vec![ - ( - r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?\+\d{2}:\d{2}", - "DATE", - ), // with tz - (r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+", "DATE"), - (r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})", "DATE"), - ] - }) -} - -pub fn get_cleanup_model() -> &'static Vec<(&'static str, &'static str)> { - CLEANUP_MODEL.get_or_init(|| vec![(r"id: \d+,", "id: ID")]) -} - -pub fn get_cleanup_mail() -> &'static Vec<(&'static str, &'static str)> { - CLEANUP_MAIL.get_or_init(|| { - vec![ - (r"[0-9A-Za-z]+{40}", "IDENTIFIER"), - ( - r"\w+, \d{1,2} \w+ \d{4} \d{2}:\d{2}:\d{2} [+-]\d{4}", - "DATE", - ), - ( - r"([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})", - "RANDOM_ID", - ), - ( - r"([0-9a-fA-F]{8}-[0-9a-fA-F]{4})-[0-9a-fA-F]{4}-.*[0-9a-fA-F]{2}", - "RANDOM_ID", - ), - ] - }) -} - -/// Combines cleanup filters from various categories (user model, date, and -/// model) into one list. This is used for data cleaning and pattern -/// replacement. -/// -/// # Example -/// -/// The provided example demonstrates how to efficiently clean up a user model. -/// This process is particularly valuable when you need to capture a snapshot of -/// user model data that includes dynamic elements such as incrementing IDs, -/// automatically generated PIDs, creation/update timestamps, and similar -/// attributes. -/// -/// ```rust,ignore -/// use myapp::app::App; -/// use loco_rs::testing; -/// use migration::Migrator; -/// -/// #[tokio::test] -/// async fn test_create_user() { -/// let boot = testing::boot_test::().await; -/// -/// // Create a user and save into the database. -/// -/// // capture the snapshot and cleanup the data. 
-/// with_settings!({ -/// filters => testing::cleanup_user_model() -/// }, { -/// assert_debug_snapshot!(saved_user); -/// }); -/// } -/// ``` -#[must_use] -pub fn cleanup_user_model() -> Vec<(&'static str, &'static str)> { - let mut combined_filters = get_cleanup_user_model().clone(); - combined_filters.extend(get_cleanup_date().iter().copied()); - combined_filters.extend(get_cleanup_model().iter().copied()); - combined_filters -} - -/// Combines cleanup filters from emails that can be dynamic -#[must_use] -pub fn cleanup_email() -> Vec<(&'static str, &'static str)> { - let mut combined_filters = get_cleanup_mail().clone(); - combined_filters.extend(get_cleanup_date().iter().copied()); - combined_filters -} - -/// Bootstraps test application with test environment hard coded. -/// -/// # Errors -/// when could not bootstrap the test environment -/// -/// # Example -/// -/// The provided example demonstrates how to boot the test case with the -/// application context. -/// -/// ```rust,ignore -/// use myapp::app::App; -/// use loco_rs::testing; -/// use migration::Migrator; -/// -/// #[tokio::test] -/// async fn test_create_user() { -/// let boot = testing::boot_test::().await; -/// -/// /// ..... -/// assert!(false) -/// } -/// ``` -pub async fn boot_test() -> Result { - H::boot(boot::StartMode::ServerOnly, &Environment::Test).await -} - -#[cfg(feature = "with-db")] -/// Seeds data into the database. -/// -/// -/// # Errors -/// When seed fails -/// -/// # Example -/// -/// The provided example demonstrates how to boot the test case and run seed -/// data. -/// -/// ```rust,ignore -/// use myapp::app::App; -/// use loco_rs::testing; -/// use migration::Migrator; -/// -/// #[tokio::test] -/// async fn test_create_user() { -/// let boot = testing::boot_test::().await; -/// testing::seed::(&boot.app_context.db).await.unwrap(); -/// -/// /// ..... -/// assert!(false) -/// } -/// ``` -pub async fn seed(db: &DatabaseConnection) -> Result<()> { - let path = std::path::Path::new("src/fixtures"); - H::seed(db, path).await -} - -#[allow(clippy::future_not_send)] -/// Initiates a test request with a provided callback. 
-/// -/// -/// # Panics -/// When could not initialize the test request.this errors can be when could not -/// initialize the test app -/// -/// # Example -/// -/// The provided example demonstrates how to create a test that check -/// application HTTP endpoints -/// -/// ```rust,ignore -/// use myapp::app::App; -/// use loco_rs::testing; -/// -/// #[tokio::test] -/// #[serial] -/// async fn can_register() { -/// testing::request::(|request, ctx| async move { -/// let response = request.post("/auth/register").json(&serde_json::json!({})).await; -/// -/// with_settings!({ -/// filters => testing::cleanup_user_model() -/// }, { -/// assert_debug_snapshot!(response); -/// }); -/// }) -/// .await; -/// } -/// ``` -#[allow(clippy::future_not_send)] -pub async fn request(callback: F) -where - F: FnOnce(TestServer, AppContext) -> Fut, - Fut: std::future::Future, -{ - let boot = boot_test::().await.unwrap(); - - let config = TestServerConfig { - default_content_type: Some("application/json".to_string()), - ..Default::default() - }; - let server = TestServer::new_with_config( - boot.router - .unwrap() - .into_make_service_with_connect_info::(), - config, - ) - .unwrap(); - - callback(server, boot.app_context.clone()).await; -} diff --git a/src/testing/db.rs b/src/testing/db.rs new file mode 100644 index 000000000..6ea2889fc --- /dev/null +++ b/src/testing/db.rs @@ -0,0 +1,33 @@ +use sea_orm::DatabaseConnection; + +use crate::{app::Hooks, Result}; + +/// Seeds data into the database. +/// +/// +/// # Errors +/// When seed fails +/// +/// # Example +/// +/// The provided example demonstrates how to boot the test case and run seed +/// data. +/// +/// ```rust,ignore +/// use myapp::app::App; +/// use loco_rs::testing::prelude::*; +/// use migration::Migrator; +/// +/// #[tokio::test] +/// async fn test_create_user() { +/// let boot = boot_test::().await; +/// seed::(&boot.app_context.db).await.unwrap(); +/// +/// /// ..... 
+/// assert!(false) +/// } +/// ``` +pub async fn seed(db: &DatabaseConnection) -> Result<()> { + let path = std::path::Path::new("src/fixtures"); + H::seed(db, path).await +} diff --git a/src/testing/mod.rs b/src/testing/mod.rs new file mode 100644 index 000000000..2eefa8bd2 --- /dev/null +++ b/src/testing/mod.rs @@ -0,0 +1,6 @@ +#[cfg(feature = "with-db")] +pub mod db; +pub mod prelude; +pub mod redaction; +pub mod request; +pub mod selector; diff --git a/src/testing/prelude.rs b/src/testing/prelude.rs new file mode 100644 index 000000000..fc5e22aac --- /dev/null +++ b/src/testing/prelude.rs @@ -0,0 +1,3 @@ +#[cfg(feature = "with-db")] +pub use crate::testing::db::*; +pub use crate::testing::{redaction::*, request::*, selector::*}; diff --git a/src/testing/redaction.rs b/src/testing/redaction.rs new file mode 100644 index 000000000..0555f2f76 --- /dev/null +++ b/src/testing/redaction.rs @@ -0,0 +1,103 @@ +use std::sync::OnceLock; + +static CLEANUP_USER_MODEL: OnceLock> = OnceLock::new(); +static CLEANUP_DATE: OnceLock> = OnceLock::new(); +static CLEANUP_MODEL: OnceLock> = OnceLock::new(); +static CLEANUP_MAIL: OnceLock> = OnceLock::new(); + +pub fn get_cleanup_user_model() -> &'static Vec<(&'static str, &'static str)> { + CLEANUP_USER_MODEL.get_or_init(|| { + vec![ + ( + r"([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})", + "PID", + ), + (r"password: (.*{60}),", "password: \"PASSWORD\","), + (r"([A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*)", "TOKEN"), + ] + }) +} + +pub fn get_cleanup_date() -> &'static Vec<(&'static str, &'static str)> { + CLEANUP_DATE.get_or_init(|| { + vec![ + ( + r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?\+\d{2}:\d{2}", + "DATE", + ), // with tz + (r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+", "DATE"), + (r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})", "DATE"), + ] + }) +} + +pub fn get_cleanup_model() -> &'static Vec<(&'static str, &'static str)> { + CLEANUP_MODEL.get_or_init(|| vec![(r"id: \d+,", "id: ID")]) +} + +pub fn get_cleanup_mail() -> &'static Vec<(&'static str, &'static str)> { + CLEANUP_MAIL.get_or_init(|| { + vec![ + (r"[0-9A-Za-z]+{40}", "IDENTIFIER"), + ( + r"\w+, \d{1,2} \w+ \d{4} \d{2}:\d{2}:\d{2} [+-]\d{4}", + "DATE", + ), + ( + r"([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})", + "RANDOM_ID", + ), + ( + r"([0-9a-fA-F]{8}-[0-9a-fA-F]{4})-[0-9a-fA-F]{4}-.*[0-9a-fA-F]{2}", + "RANDOM_ID", + ), + ] + }) +} + +/// Combines cleanup filters from various categories (user model, date, and +/// model) into one list. This is used for data cleaning and pattern +/// replacement. +/// +/// # Example +/// +/// The provided example demonstrates how to efficiently clean up a user model. +/// This process is particularly valuable when you need to capture a snapshot of +/// user model data that includes dynamic elements such as incrementing IDs, +/// automatically generated PIDs, creation/update timestamps, and similar +/// attributes. +/// +/// ```rust,ignore +/// use myapp::app::App; +/// use loco_rs::testing::prelude::*; +/// use migration::Migrator; +/// +/// #[tokio::test] +/// async fn test_create_user() { +/// let boot = boot_test::().await; +/// +/// // Create a user and save into the database. +/// +/// // capture the snapshot and cleanup the data. 
+/// with_settings!({ +/// filters => cleanup_user_model() +/// }, { +/// assert_debug_snapshot!(saved_user); +/// }); +/// } +/// ``` +#[must_use] +pub fn cleanup_user_model() -> Vec<(&'static str, &'static str)> { + let mut combined_filters = get_cleanup_user_model().clone(); + combined_filters.extend(get_cleanup_date().iter().copied()); + combined_filters.extend(get_cleanup_model().iter().copied()); + combined_filters +} + +/// Combines cleanup filters from emails that can be dynamic +#[must_use] +pub fn cleanup_email() -> Vec<(&'static str, &'static str)> { + let mut combined_filters = get_cleanup_mail().clone(); + combined_filters.extend(get_cleanup_date().iter().copied()); + combined_filters +} diff --git a/src/testing/request.rs b/src/testing/request.rs new file mode 100644 index 000000000..1aec50ff0 --- /dev/null +++ b/src/testing/request.rs @@ -0,0 +1,92 @@ +use std::net::SocketAddr; + +use axum_test::{TestServer, TestServerConfig}; + +use crate::{ + app::{AppContext, Hooks}, + boot::{self, BootResult}, + environment::Environment, + Result, +}; + +/// Bootstraps test application with test environment hard coded. +/// +/// # Errors +/// when could not bootstrap the test environment +/// +/// # Example +/// +/// The provided example demonstrates how to boot the test case with the +/// application context. +/// +/// ```rust,ignore +/// use myapp::app::App; +/// use loco_rs::testing::prelude::*; +/// use migration::Migrator; +/// +/// #[tokio::test] +/// async fn test_create_user() { +/// let boot = boot_test::().await; +/// +/// /// ..... +/// assert!(false) +/// } +/// ``` +pub async fn boot_test() -> Result { + H::boot(boot::StartMode::ServerOnly, &Environment::Test).await +} + +#[allow(clippy::future_not_send)] +/// Initiates a test request with a provided callback. +/// +/// +/// # Panics +/// When could not initialize the test request.this errors can be when could not +/// initialize the test app +/// +/// # Example +/// +/// The provided example demonstrates how to create a test that check +/// application HTTP endpoints +/// +/// ```rust,ignore +/// use myapp::app::App; +/// use loco_rs::testing::prelude::*; +/// +/// #[tokio::test] +/// #[serial] +/// async fn can_register() { +/// request::(|request, ctx| async move { +/// let response = request.post("/auth/register").json(&serde_json::json!({})).await; +/// +/// with_settings!({ +/// filters => cleanup_user_model() +/// }, { +/// assert_debug_snapshot!(response); +/// }); +/// }) +/// .await; +/// } +/// ``` +#[allow(clippy::future_not_send)] +pub async fn request(callback: F) +where + F: FnOnce(TestServer, AppContext) -> Fut, + Fut: std::future::Future, +{ + let boot = boot_test::().await.unwrap(); + + let config = TestServerConfig { + default_content_type: Some("application/json".to_string()), + ..Default::default() + }; + let server = TestServer::new_with_config( + boot.router + .unwrap() + .into_make_service_with_connect_info::(), + config, + ) + .unwrap(); + + callback(server, boot.app_context.clone()).await; +} diff --git a/src/testing/selector.rs b/src/testing/selector.rs new file mode 100644 index 000000000..aa1a82350 --- /dev/null +++ b/src/testing/selector.rs @@ -0,0 +1,568 @@ +use scraper::{Html, Selector}; + +/// Asserts that an element matching the given CSS selector exists in the +/// provided HTML. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +///
+///     <div class="some-class">Some content here</div>
+/// +/// "#; +/// assert_css_exists(html, ".some-class"); +/// ``` +/// +/// # Panics +/// +/// This function will panic if no element matching the selector is found in the +/// HTML. +pub fn assert_css_exists(html: &str, selector: &str) { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + assert!( + document.select(&parsed_selector).count() > 0, + "Element matching selector '{selector:?}' not found" + ); +} + +/// Asserts that an element matching the given CSS selector does **not** exist +/// in the provided HTML. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +///
+///     <div class="some-class">Some content here</div>
+/// +/// "#; +/// assert_css_not_exists(html, ".nonexistent-class"); +/// ``` +/// +/// # Panics +/// +/// This function will panic if an element matching the selector is found in the +/// HTML. +pub fn assert_css_not_exists(html: &str, selector: &str) { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + assert!( + document.select(&parsed_selector).count() == 0, + "Element matching selector '{selector:?}' should not exist" + ); +} + +/// Asserts that the text content of an element matching the given CSS selector +/// exactly matches the expected text. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +///
+///     <div>
+///         <h1 class="title">Welcome to Loco</h1>
+///     </div>
+/// +/// "#; +/// assert_css_eq(html, "h1.title", "Welcome to Loco"); +/// ``` +/// +/// # Panics +/// +/// This function will panic if the text of the found element does not match the +/// expected text. +pub fn assert_css_eq(html: &str, selector: &str, expected_text: &str) { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + let mut found = false; + + for element in document.select(&parsed_selector) { + let text = element.text().collect::>().join(""); + if text == expected_text { + found = true; + break; + } + } + + assert!( + found, + "Text does not match: Expected '{expected_text:?}' but found a different value or no \ + match for selector '{selector:?}'" + ); +} + +/// Asserts that an `` element matching the given CSS selector has the `href` +/// attribute with the specified value. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +/// Link +/// +/// "#; +/// assert_link(html, "a", "https://loco.rs"); +/// ``` +/// +/// # Panics +/// +/// This function will panic if no `` element matching the selector is found, +/// if the element does not have the `href` attribute, or if the `href` +/// attribute's value does not match the expected value. +pub fn assert_link(html: &str, selector: &str, expected_href: &str) { + // Use `assert_attribute_eq` to check that the `href` attribute exists and + // matches the expected value + assert_attribute_eq(html, selector, "href", expected_href); +} + +/// Asserts that an element matching the given CSS selector has the specified +/// attribute. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +/// +/// Link +/// +/// "#; +/// assert_attribute_exists(html, "button", "onclick"); +/// assert_attribute_exists(html, "a", "href"); +/// ``` +/// +/// # Panics +/// +/// This function will panic if no element matching the selector is found, or if +/// the element does not have the specified attribute. +pub fn assert_attribute_exists(html: &str, selector: &str, attribute: &str) { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + + let mut found = false; + + for element in document.select(&parsed_selector) { + if element.value().attr(attribute).is_some() { + found = true; + break; + } + } + + assert!( + found, + "Element matching selector '{selector:?}' does not have the attribute '{attribute}'" + ); +} + +/// Asserts that the specified attribute of an element matching the given CSS +/// selector matches the expected value. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +/// +/// Link +/// +/// "#; +/// assert_attribute_exists(html, "button", "onclick"); +/// assert_attribute_exists(html, "a", "href"); +/// ``` +/// +/// # Panics +/// +/// This function will panic if no element matching the selector is found, if +/// the element does not have the specified attribute, or if the attribute's +/// value does not match the expected value. 
+pub fn assert_attribute_eq(html: &str, selector: &str, attribute: &str, expected_value: &str) { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + + let mut found = false; + + for element in document.select(&parsed_selector) { + if let Some(attr_value) = element.value().attr(attribute) { + if attr_value == expected_value { + found = true; + break; + } + } + } + + assert!( + found, + "Expected attribute '{attribute}' with value '{expected_value}' for selector \ + '{selector:?}', but found a different value or no value." + ); +} + +/// Asserts that the number of elements matching the given CSS selector in the +/// provided HTML is exactly the expected count. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +///
+///     <ul id="posts">
+///         <li>Post 1</li>
+///         <li>Post 2</li>
+///         <li>Post 3</li>
+///     </ul>
+/// +/// "#; +/// assert_count(html, "ul#posts li", 3); +/// ``` +/// +/// # Panics +/// +/// This function will panic if the number of elements matching the selector is +/// not equal to the expected count. +pub fn assert_count(html: &str, selector: &str, expected_count: usize) { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + + let count = document.select(&parsed_selector).count(); + + assert!( + count == expected_count, + "Expected {expected_count} elements matching selector '{selector:?}', but found {count} \ + elements." + ); +} + +/// Collects the text content of all elements matching the given CSS selector +/// and asserts that they match the expected text. +/// +/// # Example +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +///
+///     <ul id="posts">
+///         <li>Post 1</li>
+///         <li>Post 2</li>
+///         <li>Post 3</li>
+///     </ul>
+/// +/// "#; +/// assert_css_eq_list(html, "ul#posts li", &["Post 1", "Post 2", "Post 3"]); +/// ``` +/// +/// # Panics +/// +/// This function will panic if the text content of the elements does not match +/// the expected values. +pub fn assert_css_eq_list(html: &str, selector: &str, expected_texts: &[&str]) { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + + let collected_texts: Vec = document + .select(&parsed_selector) + .map(|element| element.text().collect::>().concat()) + .collect(); + + assert_eq!( + collected_texts, expected_texts, + "Expected texts {expected_texts:?}, but found {collected_texts:?}." + ); +} + +/// Parses the given HTML string and selects the elements matching the specified CSS selector. +/// +/// # Examples +/// +/// ```rust +/// use loco_rs::testing::prelude::*; +/// +/// let html = r#" +/// +/// +///
+///     <div class="item">Item 1</div>
+///     <div class="item">Item 2</div>
+///     <div class="item">Item 3</div>
+/// "#;
+/// let items = select(html, ".item");
+/// assert_eq!(items, vec!["<div class=\"item\">Item 1</div>", "<div class=\"item\">Item 2</div>", "<div class=\"item\">Item 3</div>
"]); +/// ``` +/// +/// # Panics +/// +/// This function will panic when could not pase the selector +#[must_use] +pub fn select(html: &str, selector: &str) -> Vec { + let document = Html::parse_document(html); + let parsed_selector = Selector::parse(selector).unwrap(); + document + .select(&parsed_selector) + .map(|element| element.html()) + .collect() +} + +// Test cases +#[cfg(test)] +mod tests { + use super::*; + + fn setup_test_html() -> &'static str { + r#" + + +
+    <div class="some-class">Some content here</div>
+    <div class="another-class">Another content here</div>
+    <div>
+      <h1 class="title">Welcome to Loco</h1>
+    </div>
+    <button onclick="alert('clicked')">Click me</button>
+    <a href="https://loco.rs">Link</a>
+    <ul id="posts">
+      <li>Post 1</li>
+      <li>Post 2</li>
+      <li>Post 3</li>
+    </ul>
+    <table>
+      <tr>
+        <td>Post 1</td>
+        <td>Author 1</td>
+      </tr>
+      <tr>
+        <td>Post 2</td>
+        <td>Author 2</td>
+      </tr>
+      <tr>
+        <td>Post 3</td>
+        <td>Author 3</td>
+      </tr>
+    </table>
+ + + + "# + } + + #[test] + fn test_assert_css_exists() { + let html = setup_test_html(); + + assert_css_exists(html, ".some-class"); + + let result = std::panic::catch_unwind(|| { + assert_css_exists(html, ".nonexistent-class"); + }); + assert!(result.is_err(), "Expected panic for non-existent selector"); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"Element matching selector '\".nonexistent-class\"' not found" + ); + } + } + + #[test] + fn test_assert_css_not_exists() { + let html = setup_test_html(); + + assert_css_not_exists(html, ".nonexistent-class"); + + let result = std::panic::catch_unwind(|| { + assert_css_not_exists(html, ".some-class"); + }); + assert!(result.is_err(), "Expected panic for non-existent selector"); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"Element matching selector '\".some-class\"' should not exist" + ); + } + } + + #[test] + fn test_assert_css_eq() { + let html = setup_test_html(); + + assert_css_eq(html, "h1.title", "Welcome to Loco"); + + let result = std::panic::catch_unwind(|| { + assert_css_eq(html, "h1.title", "Wrong text"); + }); + assert!(result.is_err(), "Expected panic for mismatched text"); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"Text does not match: Expected '\"Wrong text\"' but found a different value or \ + no match for selector '\"h1.title\"'" + ); + } + } + + #[test] + fn test_assert_link() { + let html = setup_test_html(); + + assert_link(html, "a", "https://loco.rs"); + + let result = std::panic::catch_unwind(|| { + assert_link(html, "a", "https://nonexistent.com"); + }); + + assert!(result.is_err()); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"Expected attribute 'href' with value 'https://nonexistent.com' for selector \ + '\"a\"', but found a different value or no value." + ); + } + } + + #[test] + fn test_assert_attribute_exists() { + let html = setup_test_html(); + + assert_attribute_exists(html, "button", "onclick"); + assert_attribute_exists(html, "a", "href"); + + let result = std::panic::catch_unwind(|| { + assert_attribute_exists(html, "button", "href"); + }); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"Element matching selector '\"button\"' does not have the attribute 'href'" + ); + } + } + + #[test] + fn test_assert_attribute_eq() { + let html = setup_test_html(); + assert_attribute_eq(html, "button", "onclick", "alert('clicked')"); + assert_attribute_eq(html, "a", "href", "https://loco.rs"); + + let result = std::panic::catch_unwind(|| { + assert_attribute_eq(html, "button", "onclick", "alert('wrong')"); + }); + + assert!(result.is_err()); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"Expected attribute 'onclick' with value 'alert('wrong')' for selector \ + '\"button\"', but found a different value or no value." 
+ ); + } + } + + #[test] + fn test_assert_count() { + let html = setup_test_html(); + assert_count(html, "ul#posts li", 3); + + let result = std::panic::catch_unwind(|| { + assert_count(html, "ul#posts li", 1); + }); + + assert!(result.is_err()); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"Expected 1 elements matching selector '\"ul#posts li\"', but found 3 elements." + ); + } + } + + #[test] + fn test_assert_css_eq_list() { + let html = setup_test_html(); + assert_css_eq_list(html, "ul#posts li", &["Post 1", "Post 2", "Post 3"]); + + let result = std::panic::catch_unwind(|| { + assert_css_eq_list(html, "ul#posts li", &["Post 1", "Post 2", "Wrong Post"]); + }); + + assert!(result.is_err()); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"assertion `left == right` failed: Expected texts [\"Post 1\", \"Post 2\", \ + \"Wrong Post\"], but found [\"Post 1\", \"Post 2\", \"Post 3\"].\n left: \ + [\"Post 1\", \"Post 2\", \"Post 3\"]\n right: [\"Post 1\", \"Post 2\", \"Wrong \ + Post\"]" + ); + } + } + + #[test] + fn test_assert_css_eq_list_table() { + let html = setup_test_html(); + assert_css_eq_list( + html, + "table tr td", + &[ + "Post 1", "Author 1", "Post 2", "Author 2", "Post 3", "Author 3", + ], + ); + + let result = std::panic::catch_unwind(|| { + assert_css_eq_list(html, "table#posts_t tr td", &["Post 1", "Post 2", "Post 3"]); + }); + + assert!(result.is_err()); + if let Err(panic_message) = result { + let panic_message = panic_message.downcast_ref::().unwrap(); + assert_eq!( + panic_message, + &"assertion `left == right` failed: Expected texts [\"Post 1\", \"Post 2\", \ + \"Post 3\"], but found [].\n left: []\n right: [\"Post 1\", \"Post 2\", \"Post \ + 3\"]" + ); + } + } + + #[test] + fn test_select() { + let html = setup_test_html(); + assert_eq!( + select(html, ".some-class"), + vec!["
<div class=\"some-class\">Some content here</div>"]
+        );
+        assert_eq!(select(html, "ul"), vec!["<ul id=\"posts\">\n      <li>Post 1</li>\n      <li>Post 2</li>\n      <li>Post 3</li>\n    </ul>
"]); + } +} diff --git a/src/tests_cfg/mod.rs b/src/tests_cfg/mod.rs index de88ab4e0..f32873b37 100644 --- a/src/tests_cfg/mod.rs +++ b/src/tests_cfg/mod.rs @@ -1,6 +1,7 @@ -#[cfg(feature = "with-db")] -pub mod db; - pub mod app; pub mod config; +#[cfg(feature = "with-db")] +pub mod db; +#[cfg(any(feature = "bg_pg", feature = "bg_sqlt"))] +pub mod queue; pub mod task; diff --git a/src/tests_cfg/queue.rs b/src/tests_cfg/queue.rs new file mode 100644 index 000000000..8d59105fc --- /dev/null +++ b/src/tests_cfg/queue.rs @@ -0,0 +1,85 @@ +#[cfg(any(feature = "bg_pg", feature = "bg_sqlt"))] +use crate::bgworker; +use std::path::PathBuf; + +#[cfg(feature = "bg_pg")] +/// # Panics +/// +/// This function will panic if it fails to prepare or insert the seed data, causing the tests to fail quickly +/// and preventing further test execution with incomplete setup. +pub async fn postgres_seed_data(pool: &sqlx::PgPool) { + let yaml_tasks = std::fs::read_to_string( + PathBuf::from("tests") + .join("fixtures") + .join("queue") + .join("jobs.yaml"), + ) + .expect("Failed to read YAML file"); + + let tasks: Vec = + serde_yaml::from_str(&yaml_tasks).expect("Failed to parse YAML"); + for task in tasks { + sqlx::query( + r" + INSERT INTO pg_loco_queue (id, name, task_data, status, run_at, interval, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, NULL, $6, $7) + ", + ) + .bind(task.id) + .bind(task.name) + .bind(task.data) + .bind(task.status.to_string()) + .bind(task.run_at) + .bind(task.created_at) + .bind(task.updated_at) + .execute(pool) + .await.expect("execute insert query"); + } +} + +#[cfg(feature = "bg_sqlt")] +/// # Panics +/// +/// This function will panic if it fails to prepare or insert the seed data, causing the tests to fail quickly +/// and preventing further test execution with incomplete setup. +pub async fn sqlite_seed_data(pool: &sqlx::Pool) { + let yaml_tasks = std::fs::read_to_string( + PathBuf::from("tests") + .join("fixtures") + .join("queue") + .join("jobs.yaml"), + ) + .expect("Failed to read YAML file"); + + let tasks: Vec = + serde_yaml::from_str(&yaml_tasks).expect("Failed to parse YAML"); + for task in tasks { + sqlx::query( + r" + INSERT INTO sqlt_loco_queue (id, name, task_data, status, run_at, interval, created_at, updated_at) + VALUES (?, ?, ?, ?, ?, NULL, ?, ?) 
+ " + ) + .bind(task.id) + .bind(task.name) + .bind(task.data.to_string()) + .bind(task.status.to_string()) + .bind(task.run_at) + .bind(task.created_at) + .bind(task.updated_at) + .execute(pool) + .await.expect("create row"); + } + + sqlx::query( + r" + INSERT INTO sqlt_loco_queue_lock (id, is_locked, locked_at) + VALUES (1, FALSE, NULL) + ON CONFLICT (id) DO NOTHING; + + ", + ) + .execute(pool) + .await + .expect("execute insert query"); +} diff --git a/tests/fixtures/queue/jobs.yaml b/tests/fixtures/queue/jobs.yaml new file mode 100644 index 000000000..a0eb461bd --- /dev/null +++ b/tests/fixtures/queue/jobs.yaml @@ -0,0 +1,153 @@ +- id: "01JDM0X8EVAM823JZBGKYNBA99" + name: "UserAccountActivation" + task_data: + user_id: 133 + email: "user11@example.com" + activation_token: "abcdef123456" + status: "queued" + run_at: "2024-11-28T08:19:08Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA98" + name: "PasswordChangeNotification" + task_data: + user_id: 134 + email: "user12@example.com" + change_time: "2024-11-27T12:30:00Z" + status: "completed" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA97" + name: "SendInvoice" + task_data: + user_id: 135 + email: "user13@example.com" + invoice_id: "INV-2024-01" + status: "processing" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA96" + name: "UserDeactivation" + task_data: + user_id: 136 + email: "user14@example.com" + deactivation_reason: "user requested" + status: "failed" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA95" + name: "SubscriptionReminder" + task_data: + user_id: 137 + email: "user15@example.com" + renewal_date: "2024-12-01" + status: "queued" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA94" + name: "DataBackup" + task_data: + backup_id: "backup-12345" + user_id: 138 + email: "user16@example.com" + status: "cancelled" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA93" + name: "SecurityAlert" + task_data: + user_id: 139 + email: "user17@example.com" + alert_type: "login attempt from new device" + status: "queued" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA92" + name: "WeeklyReportEmail" + task_data: + user_id: 140 + email: "user18@example.com" + report_period: "2024-11-20 to 2024-11-27" + status: "processing" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA91" + name: "AccountDeletion" + task_data: + user_id: 142 + email: "user20@example.com" + deletion_request_time: "2024-11-27T14:00:00Z" + status: "queued" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA90" + name: "UserAccountActivation" + task_data: + user_id: 143 + email: "user21@example.com" + activation_token: "xyz987654" + status: "completed" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: 
"01JDM0X8EVAM823JZBGKYNBA89" + name: "PasswordChangeNotification" + task_data: + user_id: 144 + email: "user22@example.com" + change_time: "2024-11-27T15:00:00Z" + status: "completed" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA88" + name: "SendInvoice" + task_data: + user_id: 145 + email: "user23@example.com" + invoice_id: "INV-2024-02" + status: "processing" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA87" + name: "UserDeactivation" + task_data: + user_id: 146 + email: "user24@example.com" + deactivation_reason: "account inactive" + status: "failed" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" + +- id: "01JDM0X8EVAM823JZBGKYNBA86" + name: "SubscriptionReminder" + task_data: + user_id: 147 + email: "user25@example.com" + renewal_date: "2024-12-05" + status: "queued" + run_at: "2024-11-28T08:04:25Z" + created_at: "2024-11-28T08:03:25Z" + updated_at: "2024-11-28T08:03:25Z" \ No newline at end of file