From 31853f914a9ae098d18753f347a19389ea34582a Mon Sep 17 00:00:00 2001 From: yamelsenih Date: Mon, 17 Jun 2024 16:05:11 -0400 Subject: [PATCH] Add tree support --- .env | 4 +- docker-compose/.env | 27 +-- docker-compose/docker-compose-dev.yml | 120 ++++++++++ docker-compose/docker-compose.yml | 6 +- src/bin/server.rs | 36 ++- src/controller/kafka.rs | 2 + src/models/menu_item.rs | 327 ++++++++++++++++++++++++++ src/models/menu_tree.rs | 297 +++++++++++++++++++++++ src/models/mod.rs | 2 + 9 files changed, 800 insertions(+), 21 deletions(-) create mode 100644 docker-compose/docker-compose-dev.yml create mode 100644 src/models/menu_item.rs create mode 100644 src/models/menu_tree.rs diff --git a/.env b/.env index f6054b1..b326ce4 100644 --- a/.env +++ b/.env @@ -2,8 +2,8 @@ RUST_LOG=info PORT=7878 ALLOWED_ORIGIN="*" KAFKA_ENABLED="Y" -KAFKA_QUEUES="menu browser form process window" -KAFKA_HOST="0.0.0.0:29092" +KAFKA_QUEUES="menu browser form process window menu_item menu_tree" +KAFKA_HOST="localhost:29092" KAFKA_GROUP="default" OPENSEARCH_URL="http://localhost:9200" VERSION="1.0.0-dev" diff --git a/docker-compose/.env b/docker-compose/.env index 549874d..39e88c4 100644 --- a/docker-compose/.env +++ b/docker-compose/.env @@ -6,18 +6,18 @@ GENERIC_RESTART="on-failure" # OpenSearch https://opensearch.org/ OPENSEARCH_IMAGE="opensearchproject/opensearch:2.13.0" -OPENSEARCH_HOST="default-stack.opensearch-service" -OPENSEARCH_VOLUME="default-stack.volume_opensearch" +OPENSEARCH_HOST="${COMPOSE_PROJECT_NAME}.opensearch-service" +OPENSEARCH_VOLUME="${COMPOSE_PROJECT_NAME}.volume_opensearch" OPENSEARCH_PORT="9200" OPENSEARCH_PERFORMANCE_PORT="9600" # OpenSearch restore db -OPENSEARCH_SETUP_HOST="default-stack.opensearch-setup" +OPENSEARCH_SETUP_HOST="${COMPOSE_PROJECT_NAME}.opensearch-setup" OPENSEARCH_SETUP_NODE_HOSTNAME="http://${OPENSEARCH_HOST}:${OPENSEARCH_PORT}" # OpenSearch Dashboards UI OPENSEARCH_DASHBOARDS_IMAGE="opensearchproject/opensearch-dashboards:2.13.0" 
-OPENSEARCH_DASHBOARDS_HOST="default-stack.opensearch-dashboards" +OPENSEARCH_DASHBOARDS_HOST="${COMPOSE_PROJECT_NAME}.opensearch-dashboards" OPENSEARCH_DASHBOARDS_PORT=5601 # Do not change, not parameterizable OPENSEARCH_DASHBOARDS_EXTERNAL_PORT=5601 OPENSEARCH_DASHBOARDS_OPENSEARCH_HOSTS="[\"http://${OPENSEARCH_HOST}:${OPENSEARCH_PORT}\"]" @@ -25,34 +25,31 @@ OPENSEARCH_DASHBOARDS_OPENSEARCH_HOSTS="[\"http://${OPENSEARCH_HOST}:${OPENSEARC # Zookeeper to manage kafka brokers ZOOKEEPER_IMAGE="confluentinc/cp-zookeeper:7.6.1" -ZOOKEEPER_HOST="default-stack.zookeeper" +ZOOKEEPER_HOST="${COMPOSE_PROJECT_NAME}.zookeeper" ZOOKEEPER_PORT=2181 ZOOKEEPER_TICK_TIME=2000 # Kafka https://www.confluent.io/home/ KAFKA_IMAGE="confluentinc/cp-kafka:latest" -KAFKA_HOST="default-stack.kafka" -KAFKA_PORT=9092 -KAFKA_EXTERNAL_PORT=9092 -KAFKA_BROKERCONNECT="kafka:${KAFKA_PORT}" -KAFKA_BROKER_PORT=29092 -KAFKA_BROKER_EXTERNAL_PORT=29092 -KAFKA_EXTERNAL_BROKERCONNECT="${KAFKA_HOST}:${KAFKA_BROKER_EXTERNAL_PORT}" +KAFKA_HOST="${COMPOSE_PROJECT_NAME}.kafka" +KAFKA_EXTERNAL_PORT=29092 +KAFKA_INTERNAL_HOST="kafka:9092" +KAFKA_BROKER_HOST="localhost:${KAFKA_EXTERNAL_PORT}" # Kafdrop Kafka Cluster Overview KAFDROP_IMAGE="obsidiandynamics/kafdrop:4.0.1" -KAFDROP_HOST="default-stack.kafdrop" +KAFDROP_HOST="${COMPOSE_PROJECT_NAME}.kafdrop" KAFDROP_PORT=9000 KAFDROP_EXTERNAL_PORT=19000 -KAFDROP_KAFKA_HOST="${KAFKA_BROKERCONNECT}" +KAFDROP_KAFKA_HOST="${KAFKA_INTERNAL_HOST}" # Dictionary gateway with OpenSearch -DICTIONARY_RS_HOST="default-stack.dictionary-rs" +DICTIONARY_RS_HOST="${COMPOSE_PROJECT_NAME}.dictionary-rs" DICTIONARY_RS_IMAGE="openls/dictionary-rs:1.2.0" DICTIONARY_RS_PORT=7878 # Networks -DEFAULT_NETWORK="default-stack.adempiere_network" +DEFAULT_NETWORK="${COMPOSE_PROJECT_NAME}.adempiere_network" diff --git a/docker-compose/docker-compose-dev.yml b/docker-compose/docker-compose-dev.yml new file mode 100644 index 0000000..5ee7fc1 --- /dev/null +++ b/docker-compose/docker-compose-dev.yml @@ -0,0 +1,120 @@ +services: + + 
zookeeper: + image: $ZOOKEEPER_IMAGE + container_name: $ZOOKEEPER_HOST + restart: $GENERIC_RESTART + healthcheck: + test: "bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/$ZOOKEEPER_PORT; exit $?;'" + interval: 10s + retries: 60 + start_period: 20s + timeout: 10s + environment: + ZOOKEEPER_CLIENT_PORT: ${ZOOKEEPER_PORT} + ZOOKEEPER_TICK_TIME: ${ZOOKEEPER_TICK_TIME} + ports: + - ${ZOOKEEPER_PORT}:2181 + networks: + - shared_network + + kafka: + image: ${KAFKA_IMAGE} + container_name: ${KAFKA_HOST} + restart: ${GENERIC_RESTART} + depends_on: + zookeeper: + condition: service_healthy + healthcheck: + test: "bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/9092; exit $?;'" + interval: 10s + retries: 60 + start_period: 20s + timeout: 10s + ports: + - ${KAFKA_EXTERNAL_PORT}:${KAFKA_EXTERNAL_PORT} + environment: + KAFKA_BROKER_ID: 1 + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://${KAFKA_BROKER_HOST} + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + networks: + - shared_network + + kafdrop: + image: ${KAFDROP_IMAGE} + container_name: ${KAFDROP_HOST} + hostname: ${KAFDROP_HOST} + restart: ${GENERIC_RESTART} + depends_on: + kafka: + condition: service_healthy + environment: + KAFKA_BROKERCONNECT: ${KAFDROP_KAFKA_HOST} + ports: + - ${KAFDROP_EXTERNAL_PORT}:${KAFDROP_PORT} + networks: + - shared_network + + opensearch-service: + image: ${OPENSEARCH_IMAGE} + container_name: ${OPENSEARCH_HOST} + restart: ${GENERIC_RESTART} + healthcheck: + test: "bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/9200; exit $?;'" + interval: 10s + retries: 60 + start_period: 20s + timeout: 10s + environment: + - node.name=opensearch-service + - discovery.type=single-node + - "DISABLE_INSTALL_DEMO_CONFIG=true" # Prevents execution of bundled demo script which installs 
demo certificates and security configurations to OpenSearch + - "DISABLE_SECURITY_PLUGIN=true" # Disables Security plugin + ulimits: + memlock: + soft: -1 + hard: -1 + nofile: + soft: 65536 # maximum number of open files for the OpenSearch user, set to at least 65536 on modern systems + hard: 65536 + volumes: + - volume_opensearch:/usr/share/opensearch/data + - ./opensearch/snapshots:/mnt/snapshots + - ./opensearch/opensearch.yml:/usr/share/opensearch/config/opensearch.yml + ports: + - ${OPENSEARCH_PORT}:9200 + - ${OPENSEARCH_PERFORMANCE_PORT}:9600 # required for Performance Analyzer + networks: + - shared_network + + opensearch-dashboards: + image: ${OPENSEARCH_DASHBOARDS_IMAGE} + container_name: ${OPENSEARCH_DASHBOARDS_HOST} + hostname: ${OPENSEARCH_DASHBOARDS_HOST} + healthcheck: + test: "bash -c 'printf \"GET / HTTP/1.1\n\n\" > /dev/tcp/127.0.0.1/5601; exit $?;'" + interval: 10s + retries: 60 + start_period: 20s + timeout: 10s + depends_on: + opensearch-service: + condition: service_healthy + environment: + OPENSEARCH_HOSTS: ${OPENSEARCH_DASHBOARDS_OPENSEARCH_HOSTS} + DISABLE_SECURITY_DASHBOARDS_PLUGIN: true + ports: + - ${OPENSEARCH_DASHBOARDS_EXTERNAL_PORT}:5601 # Map host port 5601 to container port 5601 + networks: + - shared_network + +networks: + shared_network: + name: ${DEFAULT_NETWORK} + +volumes: + volume_opensearch: + name: ${OPENSEARCH_VOLUME} diff --git a/docker-compose/docker-compose.yml b/docker-compose/docker-compose.yml index 2d3e4bc..a1abe6d 100644 --- a/docker-compose/docker-compose.yml +++ b/docker-compose/docker-compose.yml @@ -11,7 +11,7 @@ services: start_period: 20s timeout: 10s environment: - ZOOKEEPER_CLIENT_PORT: $ZOOKEEPER_PORT + ZOOKEEPER_CLIENT_PORT: ${ZOOKEEPER_PORT} ZOOKEEPER_TICK_TIME: ${ZOOKEEPER_TICK_TIME} # ports: # - ${ZOOKEEPER_PORT}:2181 @@ -32,11 +32,11 @@ services: start_period: 20s timeout: 10s ports: - - ${KAFKA_BROKER_EXTERNAL_PORT}:${KAFKA_BROKER_PORT} + - ${KAFKA_EXTERNAL_PORT}:${KAFKA_EXTERNAL_PORT} environment: 
KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://${KAFKA_BROKERCONNECT},PLAINTEXT_HOST://${KAFKA_EXTERNAL_BROKERCONNECT} + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://${KAFKA_BROKER_HOST} KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 diff --git a/src/bin/server.rs b/src/bin/server.rs index 0422a2d..515f170 100644 --- a/src/bin/server.rs +++ b/src/bin/server.rs @@ -1,5 +1,5 @@ use std::env; -use dictionary_rs::{controller::{kafka::create_consumer, opensearch::{create, delete, IndexDocument}}, models::{browser::{browser_from_id, browsers, BrowserDocument}, form::{form_from_id, forms, FormDocument}, menu::{menu_from_id, menus, MenuDocument}, process::{process_from_id, processes, ProcessDocument}, window::{window_from_id, windows, WindowDocument}}}; +use dictionary_rs::{controller::{kafka::create_consumer, opensearch::{create, delete, IndexDocument}}, models::{browser::{browser_from_id, browsers, BrowserDocument}, form::{form_from_id, forms, FormDocument}, menu::{menu_from_id, menus, MenuDocument}, menu_item::MenuItemDocument, menu_tree::MenuTreeDocument, process::{process_from_id, processes, ProcessDocument}, window::{window_from_id, windows, WindowDocument}}}; use dotenv::dotenv; use rdkafka::{Message, consumer::{CommitMode, Consumer}}; use salvo::{conn::tcp::TcpAcceptor, cors::Cors, http::header, hyper::Method, prelude::*}; @@ -471,6 +471,40 @@ async fn consume_queue() { Err(error) => log::warn!("{}", error) } } + } else if topic == "menu_item" { + let _document = match serde_json::from_str(payload) { + Ok(value) => value, + Err(error) => { + log::warn!("{}", error); + MenuItemDocument { + document: None + } + }, + }; + if _document.document.is_some() { + let _menu_document: &dyn IndexDocument = &(_document.document.unwrap()); + match process_index(event_type, 
_menu_document).await { + Ok(_) => consumer.commit_message(&message, CommitMode::Async).unwrap(), + Err(error) => log::warn!("{}", error) + } + } + } else if topic == "menu_tree" { + let _document = match serde_json::from_str(payload) { + Ok(value) => value, + Err(error) => { + log::warn!("{}", error); + MenuTreeDocument { + document: None + } + }, + }; + if _document.document.is_some() { + let _menu_document: &dyn IndexDocument = &(_document.document.unwrap()); + match process_index(event_type, _menu_document).await { + Ok(_) => consumer.commit_message(&message, CommitMode::Async).unwrap(), + Err(error) => log::warn!("{}", error) + } + } } else if topic == "process" { let _document = match serde_json::from_str(payload) { Ok(value) => value, diff --git a/src/controller/kafka.rs b/src/controller/kafka.rs index 2d6fbc5..a3bd6f9 100644 --- a/src/controller/kafka.rs +++ b/src/controller/kafka.rs @@ -45,6 +45,7 @@ pub fn create_consumer(brokers: &str, group_id: &str, topics: &[&str]) -> Result .set("queued.max.messages.kbytes", "2097151") .set("fetch.message.max.bytes", "1000000000") .set("max.partition.fetch.bytes", "1000000000") + .set("max.poll.interval.ms", "86400000") .set("fetch.max.bytes", "2147483135") .set("auto.offset.reset", "earliest") .set_log_level(RDKafkaLogLevel::Debug) diff --git a/src/models/menu_item.rs b/src/models/menu_item.rs new file mode 100644 index 0000000..a625442 --- /dev/null +++ b/src/models/menu_item.rs @@ -0,0 +1,327 @@ +use serde::{Deserialize, Serialize}; +use salvo::prelude::*; +use serde_json::json; +use std::{io::ErrorKind, io::Error}; + +use crate::{controller::opensearch::{IndexDocument, 
get_by_id, find, exists_index}, models::{user_index, role_index}}; + +#[derive(Deserialize, Extractible, Debug, Clone)] +#[salvo(extract(default_source(from = "body")))] +pub struct MenuItemDocument { + pub document: Option +} + +#[derive(Serialize, Debug, Clone)] +pub struct MenuItemResponse { + pub menu: Option +} + +#[derive(Serialize, Debug, Clone)] +pub struct MenuItemListResponse { + pub menus: Option> +} + +impl Default for MenuItemResponse { + fn default() -> Self { + MenuItemResponse { + menu: None + } + } +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct MenuItem { + pub uuid: Option, + pub id: Option, + pub parent_id: Option, + pub sequence: Option, + pub name: Option, + pub description: Option, + pub is_summary: Option, + pub is_sales_transaction: Option, + pub is_read_only: Option, + // index + pub index_value: Option, + pub language: Option, + pub client_id: Option, + pub role_id: Option, + pub user_id: Option, + // Supported References + pub action: Option, + pub action_id: Option, + pub action_uuid: Option, + pub window: Option, + pub process: Option, + pub form: Option
, + pub browser: Option, + pub workflow: Option, + // Tree menu childs + pub children: Option> +} + +impl Default for MenuItem { + fn default() -> Self { + Self { + uuid: None, + id: None, + parent_id: None, + sequence: None, + name: None, + description: None, + is_summary: None, + is_sales_transaction: None, + is_read_only: None, + // index + index_value: None, + language: None, + client_id: None, + role_id: None, + user_id: None, + // Supported References + action: None, + action_id: None, + action_uuid: None, + window: None, + process: None, + form: None, + browser: None, + workflow: None, + // Tree menu childs + children: None + } + } +} + +impl MenuItem { + pub fn from_id(_id: Option) -> Self { + let mut menu = MenuItem::default(); + menu.id = _id; + menu + } +} + +impl IndexDocument for MenuItem { + fn mapping(self: &Self) -> serde_json::Value { + json!({ + "mappings" : { + "properties" : { + "uuid" : { "type" : "text" }, + "id" : { "type" : "integer" }, + "parent_id" : { "type" : "integer" }, + "sequence" : { "type" : "integer" }, + "name" : { "type" : "text" }, + "description" : { "type" : "text" } + } + } + }) + } + + fn data(self: &Self) -> serde_json::Value { + json!(self) + } + + fn id(self: &Self) -> String { + self.id.unwrap().to_string() + } + + fn index_name(self: &Self) -> String { + match &self.index_value { + Some(value) => value.to_string(), + None => "menu".to_string(), + } + } + + fn find(self: &Self, _search_value: String) -> serde_json::Value { + let mut query = "*".to_owned(); + query.push_str(&_search_value.to_owned()); + query.push_str(&"*".to_owned()); + + json!({ + "query": { + "query_string": { + "query": query + } + } + }) + } +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Window { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Process { + pub uuid: Option, + pub id: 
Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Form { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Browser { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Workflow { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +pub async fn menu_from_id(_id: Option, _language: Option<&String>, _client_id: Option<&String>, _role_id: Option<&String>, _user_id: Option<&String>) -> Result { + if _id.is_none() || _id.map(|id| id <= 0).unwrap_or(false) { + return Err(Error::new(ErrorKind::InvalidData.into(), "MenuItem Identifier is Mandatory").to_string()); + } + let mut _document = MenuItem::from_id(_id); + + let _index_name = match get_index_name(_language, _client_id, _role_id, _user_id).await { + Ok(index_name) => index_name, + Err(error) => { + log::error!("Index name error: {:?}", error.to_string()); + return Err(error.to_string()) + } + }; + log::info!("Index to search {:}", _index_name); + + _document.index_value = Some(_index_name); + let _menu_document: &dyn IndexDocument = &_document; + match get_by_id(_menu_document).await { + Ok(value) => { + let mut menu: MenuItem = serde_json::from_value(value).unwrap(); + log::info!("Finded Value: {:?}", menu.id); + + // sort menu children nodes by sequence + if let Some(ref mut children) = menu.children { + children.sort_by_key(|child| child.sequence.clone().unwrap_or(0)); + } + + Ok( + menu + ) + }, + Err(error) => { + log::error!("{}", error); + Err(error) + }, + } +} + +async fn get_index_name(_language: Option<&String>, _client_id: Option<&String>, _role_id: Option<&String>, _user_id: 
Option<&String>) -> Result { + // Validate + if _language.is_none() { + return Err(Error::new(ErrorKind::InvalidData.into(), "Language is Mandatory")); + } + if _client_id.is_none() { + return Err(Error::new(ErrorKind::InvalidData.into(), "Client is Mandatory")); + } + if _role_id.is_none() { + return Err(Error::new(ErrorKind::InvalidData.into(), "Role is Mandatory")); + } + + let _index: String = "menu".to_string(); + + let _user_index = user_index(_index.to_owned(), _language, _client_id, _role_id, _user_id); + let _role_index = role_index(_index.to_owned(), _language, _client_id, _role_id); + + // Find index + match exists_index(_user_index.to_owned()).await { + Ok(_) => { + log::info!("Find with user index `{:}`", _user_index); + Ok(_user_index) + }, + Err(_) => { + log::warn!("No user index `{:}`", _user_index); + match exists_index(_role_index.to_owned()).await { + Ok(_) => { + log::info!("Find with role index `{:}`", _role_index); + Ok(_role_index) + }, + Err(error) => { + log::error!("No role index `{:}`", _role_index); + return Err(Error::new(ErrorKind::InvalidData.into(), error)) + } + } + } + } +} + +pub async fn menus( + _language: Option<&String>, _client_id: Option<&String>, _role_id: Option<&String>, _user_id: Option<&String>, + _search_value: Option<&String>, _page_number: Option<&String>, _page_size: Option<&String> +) -> Result { + let _search_value = match _search_value { + Some(value) => value.clone(), + None => "".to_owned() + }; + + // Find index + let _index_name = match get_index_name(_language, _client_id, _role_id, _user_id).await { + Ok(index_name) => index_name, + Err(error) => { + log::error!("Index name error: {:?}", error.to_string()); + return Err(Error::new(ErrorKind::InvalidData.into(), error)) + } + }; + log::info!("Index to search {:}", _index_name); + + let mut _document = MenuItem::default(); + _document.index_value = Some(_index_name); + let _menu_document: &dyn IndexDocument = &_document; + + // pagination + let page_number: 
i64 = match _page_number { + Some(value) => value.clone().parse::().to_owned(), + None => "0".parse::().to_owned() + }.unwrap_or(0); + let page_size: i64 = match _page_size { + Some(value) => value.clone().parse::().to_owned(), + None => "100".parse::().to_owned() + }.unwrap_or(100); + + match find(_menu_document, _search_value, page_number, page_size).await { + Ok(values) => { + let mut menus_list: Vec = vec![]; + for value in values { + let mut menu: MenuItem = serde_json::from_value(value).unwrap(); + // sort menu children nodes by sequence + if let Some(ref mut children) = menu.children { + children.sort_by_key(|child| child.sequence.clone().unwrap_or(0)); + } + menus_list.push(menu.to_owned()); + } + + // sort root menu nodes by sequence + menus_list.sort_by_key(|menu| menu.sequence.clone().unwrap_or(0)); + + Ok(MenuItemListResponse { + menus: Some(menus_list) + }) + }, + Err(error) => { + Err(Error::new(ErrorKind::InvalidData.into(), error)) + } + } +} diff --git a/src/models/menu_tree.rs b/src/models/menu_tree.rs new file mode 100644 index 0000000..491f538 --- /dev/null +++ b/src/models/menu_tree.rs @@ -0,0 +1,297 @@ +use serde::{Deserialize, Serialize}; +use salvo::prelude::*; +use serde_json::json; +use std::{io::ErrorKind, io::Error}; + +use crate::{controller::opensearch::{IndexDocument, get_by_id, find, exists_index}, models::{user_index, role_index}}; + +#[derive(Deserialize, Extractible, Debug, Clone)] +#[salvo(extract(default_source(from = "body")))] +pub struct MenuTreeDocument { + pub document: Option +} + +#[derive(Serialize, Debug, Clone)] +pub struct MenuTreeResponse { + pub menu: Option +} + +#[derive(Serialize, Debug, Clone)] +pub struct MenuTreeListResponse { + pub menus: Option> +} + +impl Default for MenuTreeResponse { + fn default() -> Self { + MenuTreeResponse { + menu: None + } + } +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct MenuTree { + pub id: Option, + pub parent_id: Option, + pub sequence: Option, + // index 
+ pub index_value: Option, + pub language: Option, + pub client_id: Option, + pub role_id: Option, + pub user_id: Option, + // Tree menu childs + pub children: Option> +} + +impl Default for MenuTree { + fn default() -> Self { + Self { + id: None, + parent_id: None, + sequence: None, + // index + index_value: None, + language: None, + client_id: None, + role_id: None, + user_id: None, + // Tree menu childs + children: None + } + } +} + +impl MenuTree { + pub fn from_id(_id: Option) -> Self { + let mut menu = MenuTree::default(); + menu.id = _id; + menu + } +} + +impl IndexDocument for MenuTree { + fn mapping(self: &Self) -> serde_json::Value { + json!({ + "mappings" : { + "properties" : { + "uuid" : { "type" : "text" }, + "id" : { "type" : "integer" }, + "parent_id" : { "type" : "integer" }, + "sequence" : { "type" : "integer" }, + "name" : { "type" : "text" }, + "description" : { "type" : "text" } + } + } + }) + } + + fn data(self: &Self) -> serde_json::Value { + json!(self) + } + + fn id(self: &Self) -> String { + self.id.unwrap().to_string() + } + + fn index_name(self: &Self) -> String { + match &self.index_value { + Some(value) => value.to_string(), + None => "menu".to_string(), + } + } + + fn find(self: &Self, _search_value: String) -> serde_json::Value { + let mut query = "*".to_owned(); + query.push_str(&_search_value.to_owned()); + query.push_str(&"*".to_owned()); + + json!({ + "query": { + "query_string": { + "query": query + } + } + }) + } +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Window { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Process { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Form { + pub uuid: Option, + pub id: Option, + pub name: 
Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Browser { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +#[derive(Deserialize, Serialize, Extractible, Debug, Clone)] +pub struct Workflow { + pub uuid: Option, + pub id: Option, + pub name: Option, + pub description: Option, + pub help: Option, +} + +pub async fn menu_from_id(_id: Option, _language: Option<&String>, _client_id: Option<&String>, _role_id: Option<&String>, _user_id: Option<&String>) -> Result { + if _id.is_none() || _id.map(|id| id <= 0).unwrap_or(false) { + return Err(Error::new(ErrorKind::InvalidData.into(), "MenuTree Identifier is Mandatory").to_string()); + } + let mut _document = MenuTree::from_id(_id); + + let _index_name = match get_index_name(_language, _client_id, _role_id, _user_id).await { + Ok(index_name) => index_name, + Err(error) => { + log::error!("Index name error: {:?}", error.to_string()); + return Err(error.to_string()) + } + }; + log::info!("Index to search {:}", _index_name); + + _document.index_value = Some(_index_name); + let _menu_document: &dyn IndexDocument = &_document; + match get_by_id(_menu_document).await { + Ok(value) => { + let mut menu: MenuTree = serde_json::from_value(value).unwrap(); + log::info!("Finded Value: {:?}", menu.id); + + // sort menu children nodes by sequence + if let Some(ref mut children) = menu.children { + children.sort_by_key(|child| child.sequence.clone().unwrap_or(0)); + } + + Ok( + menu + ) + }, + Err(error) => { + log::error!("{}", error); + Err(error) + }, + } +} + +async fn get_index_name(_language: Option<&String>, _client_id: Option<&String>, _role_id: Option<&String>, _user_id: Option<&String>) -> Result { + // Validate + if _language.is_none() { + return Err(Error::new(ErrorKind::InvalidData.into(), "Language is Mandatory")); + } + if _client_id.is_none() { + return 
Err(Error::new(ErrorKind::InvalidData.into(), "Client is Mandatory")); + } + if _role_id.is_none() { + return Err(Error::new(ErrorKind::InvalidData.into(), "Role is Mandatory")); + } + + let _index: String = "menu".to_string(); + + let _user_index = user_index(_index.to_owned(), _language, _client_id, _role_id, _user_id); + let _role_index = role_index(_index.to_owned(), _language, _client_id, _role_id); + + // Find index + match exists_index(_user_index.to_owned()).await { + Ok(_) => { + log::info!("Find with user index `{:}`", _user_index); + Ok(_user_index) + }, + Err(_) => { + log::warn!("No user index `{:}`", _user_index); + match exists_index(_role_index.to_owned()).await { + Ok(_) => { + log::info!("Find with role index `{:}`", _role_index); + Ok(_role_index) + }, + Err(error) => { + log::error!("No role index `{:}`", _role_index); + return Err(Error::new(ErrorKind::InvalidData.into(), error)) + } + } + } + } +} + +pub async fn menus( + _language: Option<&String>, _client_id: Option<&String>, _role_id: Option<&String>, _user_id: Option<&String>, + _search_value: Option<&String>, _page_number: Option<&String>, _page_size: Option<&String> +) -> Result { + let _search_value = match _search_value { + Some(value) => value.clone(), + None => "".to_owned() + }; + + // Find index + let _index_name = match get_index_name(_language, _client_id, _role_id, _user_id).await { + Ok(index_name) => index_name, + Err(error) => { + log::error!("Index name error: {:?}", error.to_string()); + return Err(Error::new(ErrorKind::InvalidData.into(), error)) + } + }; + log::info!("Index to search {:}", _index_name); + + let mut _document = MenuTree::default(); + _document.index_value = Some(_index_name); + let _menu_document: &dyn IndexDocument = &_document; + + // pagination + let page_number: i64 = match _page_number { + Some(value) => value.clone().parse::().to_owned(), + None => "0".parse::().to_owned() + }.unwrap_or(0); + let page_size: i64 = match _page_size { + Some(value) => 
value.clone().parse::().to_owned(), + None => "100".parse::().to_owned() + }.unwrap_or(100); + + match find(_menu_document, _search_value, page_number, page_size).await { + Ok(values) => { + let mut menus_list: Vec = vec![]; + for value in values { + let mut menu: MenuTree = serde_json::from_value(value).unwrap(); + // sort menu children nodes by sequence + if let Some(ref mut children) = menu.children { + children.sort_by_key(|child| child.sequence.clone().unwrap_or(0)); + } + menus_list.push(menu.to_owned()); + } + + // sort root menu nodes by sequence + menus_list.sort_by_key(|menu| menu.sequence.clone().unwrap_or(0)); + + Ok(MenuTreeListResponse { + menus: Some(menus_list) + }) + }, + Err(error) => { + Err(Error::new(ErrorKind::InvalidData.into(), error)) + } + } +} diff --git a/src/models/mod.rs b/src/models/mod.rs index 7b8caf1..cfa4326 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -4,6 +4,8 @@ pub mod menu; pub mod process; pub mod window; pub mod generic; +pub mod menu_item; +pub mod menu_tree; use serde::{Deserialize, Serialize}; use salvo::prelude::*;