From d4513305fec81e858a9e79b7f11c6415e77686a6 Mon Sep 17 00:00:00 2001 From: Mohamed Habib Date: Sun, 21 Jul 2024 19:20:30 +0100 Subject: [PATCH] next features part 2: repo added connections and validation on callback (#1630) * generation models and repo add functionality --- .github/workflows/next_deploy.yml | 5 +- go.work | 4 +- go.work.sum | 23 +- next/controllers/github.go | 801 +++++----------- next/controllers/github_after_merge.go | 12 +- next/dbgen/dbgen.go | 47 + next/dbgen/go.mod | 21 + next/dbgen/go.sum | 103 +++ next/go.mod | 10 + next/go.sum | 14 + next/main.go | 9 +- next/middleware/supabase_cookie_auth.go | 74 ++ next/model/account_delete_tokens.gen.go | 18 + next/model/chats.gen.go | 25 + next/model/customers.gen.go | 18 + next/model/digger_batches.gen.go | 30 + next/model/digger_job_parent_links.gen.go | 28 + next/model/digger_job_summaries.gen.go | 29 + next/model/digger_job_tokens.gen.go | 30 + next/model/digger_jobs.gen.go | 37 + next/model/digger_locks.gen.go | 29 + next/model/digger_run_queue_items.gen.go | 28 + next/model/digger_run_stages.gen.go | 27 + next/model/digger_runs.gen.go | 40 + .../github_app_installation_links.gen.go | 29 + next/model/github_app_installations.gen.go | 32 + next/model/github_apps.gen.go | 29 + next/model/internal_blog_author_posts.gen.go | 18 + .../internal_blog_author_profiles.gen.go | 31 + next/model/internal_blog_post_tags.gen.go | 20 + ...nternal_blog_post_tags_relationship.gen.go | 18 + next/model/internal_blog_posts.gen.go | 32 + next/model/internal_changelog.gen.go | 27 + next/model/internal_feedback_comments.gen.go | 26 + next/model/internal_feedback_threads.gen.go | 32 + next/model/organization_credits.gen.go | 18 + .../organization_join_invitations.gen.go | 28 + next/model/organization_members.gen.go | 25 + next/model/organizations.gen.go | 24 + next/model/organizations_private_info.gen.go | 19 + next/model/prices.gen.go | 27 + next/model/products.gen.go | 22 + next/model/project_comments.gen.go | 26 + 
next/model/projects.gen.go | 40 + next/model/repos.gen.go | 33 + next/model/subscriptions.gen.go | 35 + next/model/user_api_keys.gen.go | 26 + next/model/user_notifications.gen.go | 27 + next/model/user_onboarding.gen.go | 23 + next/model/user_private_info.gen.go | 23 + next/model/user_profiles.gen.go | 24 + next/model/user_roles.gen.go | 19 + next/models/github.go | 28 - next/models/locking.go | 11 - next/models/orgs.go | 126 --- next/models/policies.go | 23 - next/models/runs.go | 112 --- next/models/scheduler.go | 126 --- next/models/scheduler_test.go | 133 --- next/models/setup.go | 18 +- next/models/storage.go | 857 +++++++++--------- next/models/storage_test.go | 167 ---- next/models/user.go | 8 - .../account_delete_tokens.gen.go | 384 ++++++++ next/models_generated/chats.gen.go | 396 ++++++++ next/models_generated/customers.gen.go | 384 ++++++++ next/models_generated/digger_batches.gen.go | 432 +++++++++ .../digger_job_parent_links.gen.go | 400 ++++++++ .../digger_job_summaries.gen.go | 404 +++++++++ .../models_generated/digger_job_tokens.gen.go | 408 +++++++++ next/models_generated/digger_jobs.gen.go | 436 +++++++++ next/models_generated/digger_locks.gen.go | 404 +++++++++ .../digger_run_queue_items.gen.go | 400 ++++++++ .../models_generated/digger_run_stages.gen.go | 396 ++++++++ next/models_generated/digger_runs.gen.go | 448 +++++++++ next/models_generated/gen.go | 415 +++++++++ .../github_app_installation_links.gen.go | 404 +++++++++ .../github_app_installations.gen.go | 416 +++++++++ next/models_generated/github_apps.gen.go | 404 +++++++++ .../internal_blog_author_posts.gen.go | 384 ++++++++ .../internal_blog_author_profiles.gen.go | 420 +++++++++ .../internal_blog_post_tags.gen.go | 392 ++++++++ ...nternal_blog_post_tags_relationship.gen.go | 384 ++++++++ .../internal_blog_posts.gen.go | 424 +++++++++ .../internal_changelog.gen.go | 404 +++++++++ .../internal_feedback_comments.gen.go | 400 ++++++++ .../internal_feedback_threads.gen.go | 424 +++++++++ 
.../organization_credits.gen.go | 384 ++++++++ .../organization_join_invitations.gen.go | 408 +++++++++ .../organization_members.gen.go | 396 ++++++++ next/models_generated/organizations.gen.go | 392 ++++++++ .../organizations_private_info.gen.go | 388 ++++++++ next/models_generated/prices.gen.go | 420 +++++++++ next/models_generated/products.gen.go | 400 ++++++++ next/models_generated/project_comments.gen.go | 400 ++++++++ next/models_generated/projects.gen.go | 448 +++++++++ next/models_generated/repos.gen.go | 420 +++++++++ next/models_generated/subscriptions.gen.go | 436 +++++++++ next/models_generated/user_api_keys.gen.go | 400 ++++++++ .../user_notifications.gen.go | 404 +++++++++ next/models_generated/user_onboarding.gen.go | 388 ++++++++ .../models_generated/user_private_info.gen.go | 388 ++++++++ next/models_generated/user_profiles.gen.go | 392 ++++++++ next/models_generated/user_roles.gen.go | 388 ++++++++ next/supa/supa.go | 21 + 105 files changed, 18663 insertions(+), 1777 deletions(-) create mode 100644 next/dbgen/dbgen.go create mode 100644 next/dbgen/go.mod create mode 100644 next/dbgen/go.sum create mode 100644 next/go.sum create mode 100644 next/middleware/supabase_cookie_auth.go create mode 100644 next/model/account_delete_tokens.gen.go create mode 100644 next/model/chats.gen.go create mode 100644 next/model/customers.gen.go create mode 100644 next/model/digger_batches.gen.go create mode 100644 next/model/digger_job_parent_links.gen.go create mode 100644 next/model/digger_job_summaries.gen.go create mode 100644 next/model/digger_job_tokens.gen.go create mode 100644 next/model/digger_jobs.gen.go create mode 100644 next/model/digger_locks.gen.go create mode 100644 next/model/digger_run_queue_items.gen.go create mode 100644 next/model/digger_run_stages.gen.go create mode 100644 next/model/digger_runs.gen.go create mode 100644 next/model/github_app_installation_links.gen.go create mode 100644 next/model/github_app_installations.gen.go create mode 
100644 next/model/github_apps.gen.go create mode 100644 next/model/internal_blog_author_posts.gen.go create mode 100644 next/model/internal_blog_author_profiles.gen.go create mode 100644 next/model/internal_blog_post_tags.gen.go create mode 100644 next/model/internal_blog_post_tags_relationship.gen.go create mode 100644 next/model/internal_blog_posts.gen.go create mode 100644 next/model/internal_changelog.gen.go create mode 100644 next/model/internal_feedback_comments.gen.go create mode 100644 next/model/internal_feedback_threads.gen.go create mode 100644 next/model/organization_credits.gen.go create mode 100644 next/model/organization_join_invitations.gen.go create mode 100644 next/model/organization_members.gen.go create mode 100644 next/model/organizations.gen.go create mode 100644 next/model/organizations_private_info.gen.go create mode 100644 next/model/prices.gen.go create mode 100644 next/model/products.gen.go create mode 100644 next/model/project_comments.gen.go create mode 100644 next/model/projects.gen.go create mode 100644 next/model/repos.gen.go create mode 100644 next/model/subscriptions.gen.go create mode 100644 next/model/user_api_keys.gen.go create mode 100644 next/model/user_notifications.gen.go create mode 100644 next/model/user_onboarding.gen.go create mode 100644 next/model/user_private_info.gen.go create mode 100644 next/model/user_profiles.gen.go create mode 100644 next/model/user_roles.gen.go delete mode 100644 next/models/locking.go delete mode 100644 next/models/policies.go delete mode 100644 next/models/scheduler_test.go delete mode 100644 next/models/storage_test.go delete mode 100644 next/models/user.go create mode 100644 next/models_generated/account_delete_tokens.gen.go create mode 100644 next/models_generated/chats.gen.go create mode 100644 next/models_generated/customers.gen.go create mode 100644 next/models_generated/digger_batches.gen.go create mode 100644 next/models_generated/digger_job_parent_links.gen.go create mode 100644 
next/models_generated/digger_job_summaries.gen.go create mode 100644 next/models_generated/digger_job_tokens.gen.go create mode 100644 next/models_generated/digger_jobs.gen.go create mode 100644 next/models_generated/digger_locks.gen.go create mode 100644 next/models_generated/digger_run_queue_items.gen.go create mode 100644 next/models_generated/digger_run_stages.gen.go create mode 100644 next/models_generated/digger_runs.gen.go create mode 100644 next/models_generated/gen.go create mode 100644 next/models_generated/github_app_installation_links.gen.go create mode 100644 next/models_generated/github_app_installations.gen.go create mode 100644 next/models_generated/github_apps.gen.go create mode 100644 next/models_generated/internal_blog_author_posts.gen.go create mode 100644 next/models_generated/internal_blog_author_profiles.gen.go create mode 100644 next/models_generated/internal_blog_post_tags.gen.go create mode 100644 next/models_generated/internal_blog_post_tags_relationship.gen.go create mode 100644 next/models_generated/internal_blog_posts.gen.go create mode 100644 next/models_generated/internal_changelog.gen.go create mode 100644 next/models_generated/internal_feedback_comments.gen.go create mode 100644 next/models_generated/internal_feedback_threads.gen.go create mode 100644 next/models_generated/organization_credits.gen.go create mode 100644 next/models_generated/organization_join_invitations.gen.go create mode 100644 next/models_generated/organization_members.gen.go create mode 100644 next/models_generated/organizations.gen.go create mode 100644 next/models_generated/organizations_private_info.gen.go create mode 100644 next/models_generated/prices.gen.go create mode 100644 next/models_generated/products.gen.go create mode 100644 next/models_generated/project_comments.gen.go create mode 100644 next/models_generated/projects.gen.go create mode 100644 next/models_generated/repos.gen.go create mode 100644 next/models_generated/subscriptions.gen.go create 
mode 100644 next/models_generated/user_api_keys.gen.go create mode 100644 next/models_generated/user_notifications.gen.go create mode 100644 next/models_generated/user_onboarding.gen.go create mode 100644 next/models_generated/user_private_info.gen.go create mode 100644 next/models_generated/user_profiles.gen.go create mode 100644 next/models_generated/user_roles.gen.go create mode 100644 next/supa/supa.go diff --git a/.github/workflows/next_deploy.yml b/.github/workflows/next_deploy.yml index 2e821a918..98fb89a5d 100644 --- a/.github/workflows/next_deploy.yml +++ b/.github/workflows/next_deploy.yml @@ -3,10 +3,7 @@ on: push: branches: - develop # change to main if needed - - feat/nxt - pull_request: - branches: - - feat/next + - feat/next2 jobs: deploy: diff --git a/go.work b/go.work index 6c623d57a..76a84981b 100644 --- a/go.work +++ b/go.work @@ -6,10 +6,12 @@ use ( ./cli_e2e ./dgctl ./next - + ./next/dbgen + ./ee/backend ./ee/cli ./libs + ) diff --git a/go.work.sum b/go.work.sum index 4057dbb4f..3c562539c 100644 --- a/go.work.sum +++ b/go.work.sum @@ -484,8 +484,7 @@ github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3 github.com/CloudyKit/jet/v6 v6.2.0 h1:EpcZ6SR9n28BUGtNJSvlBqf90IpjeFr36Tizxhn/oME= github.com/CloudyKit/jet/v6 v6.2.0/go.mod h1:d3ypHeIRNo2+XyqnGA8s+aphtcVpjP5hPwP/Lzo7Ro4= github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4= -github.com/Joker/jade v1.1.3 h1:Qbeh12Vq6BxURXT1qZBRHsDxeURB8ztcL6f3EXSGeHk= -github.com/Joker/jade v1.1.3/go.mod h1:T+2WLyt7VH6Lp0TRxQrUYEs64nRc83wkMQrfeIQKduM= +github.com/Joker/hpp v1.0.0 h1:65+iuJYdRXv/XyN62C1uEmmOx3432rNG/rKlX6V7Kkc= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= @@ -513,8 +512,6 @@ github.com/alecthomas/chroma 
v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbf github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s= github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY= github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE= -github.com/alecthomas/kong v0.7.1 h1:azoTh0IOfwlAX3qN9sHWTxACE2oV8Bg2gAwBsMwDQY4= -github.com/alecthomas/kong v0.7.1/go.mod h1:n1iCIO2xS46oE8ZfYCNDqdR0b0wZNrXAIAqro/2132U= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc= github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= @@ -677,6 +674,8 @@ github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d h1:1iy2qD6JEhHKKhUOA9IWs7mjco7lnw2qx8FsRI2wirE= github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d/go.mod h1:tmAIfUFEirG/Y8jhZ9M+h36obRZAk/1fcSpXwAVlfqE= +github.com/denisenkom/go-mssqldb v0.12.0 h1:VtrkII767ttSPNRfFekePK3sctr+joXgO58stqQbtUA= +github.com/denisenkom/go-mssqldb v0.12.0/go.mod h1:iiK0YP1ZeepvmBQk/QpLEhhTNJgfzrpArPY/aFvc9yU= github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= @@ -841,17 +840,13 @@ github.com/iris-contrib/httpexpect/v2 v2.12.1/go.mod h1:7+RB6W5oNClX7PTwJgJnsQP3 github.com/iris-contrib/schema v0.0.6 h1:CPSBLyx2e91H2yJzPuhGuifVRnZBBJ3pCOMbOvPZaTw= 
github.com/iris-contrib/schema v0.0.6/go.mod h1:iYszG0IOsuIsfzjymw1kMzTL8YQcCWlm65f3wX8J5iA= github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= -github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= -github.com/jackc/pgconn v1.13.0 h1:3L1XMNV2Zvca/8BYhzcRFS70Lr0WlDg16Di6SFGAbys= -github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5Wi/+Zz7xoE5ALHsRQlOctkOiHc= github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= -github.com/jackc/pgproto3/v2 v2.3.1 h1:nwj7qwf0S+Q7ISFfBndqeLwSwxs+4DPsbRFjECT1Y4Y= -github.com/jackc/pgtype v1.12.0 h1:Dlq8Qvcch7kiehm8wPGIW0W3KsCCHJnRacKW0UM8n5w= -github.com/jackc/pgx/v4 v4.17.2 h1:0Ut0rpeKwvIVbMQ1KbMBU4h6wxehBI535LK6Flheh8E= github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0= github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww= +github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg= github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= github.com/jcmturner/gofork v1.7.6 h1:QH0l3hzAU1tfT3rZCnW5zXl+orbkNMMRGJfdJjHVETg= @@ -860,6 +855,8 @@ github.com/jcmturner/gokrb5/v8 v8.4.4 h1:x1Sv4HaTpepFkXbt2IkL29DXRf8sOfZXo8eRKh6 github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZY= github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a h1:zPPuIq2jAWWPTrGt70eK/BSch+gFAGrNzecsoENgu2o= github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a/go.mod h1:yL958EeXv8Ylng6IfnvG4oflryUi3vgA3xPs9hmII1s= +github.com/joho/godotenv v1.5.1 
h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo= github.com/josephspurrier/goversioninfo v1.4.0 h1:Puhl12NSHUSALHSuzYwPYQkqa2E1+7SrtAPJorKK0C8= github.com/josephspurrier/goversioninfo v1.4.0/go.mod h1:JWzv5rKQr+MmW+LvM412ToT/IkYDZjaclF2pKDss8IY= @@ -1074,8 +1071,6 @@ github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= -github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/jwalterweatherman v0.0.0-20180109140146-7c0cea34c8ec h1:2ZXvIUGghLpdTVHR1UfvfrzoVlZaE/yOWC5LueIHZig= github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= @@ -1103,7 +1098,6 @@ github.com/urfave/cli v1.22.12/go.mod h1:sSBEIC79qR6OvcmsD4U3KABeOTxDqQtdDnaFuUN github.com/urfave/negroni v1.0.0 h1:kIimOitoypq34K7TG7DUaJ9kq/N4Ofuwi1sjz0KipXc= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.40.0 h1:CRq/00MfruPGFLTQKY8b+8SfdK60TxNztjRMnH0t1Yc= github.com/valyala/fasthttp v1.40.0/go.mod h1:t/G+3rLek+CyY9bnIE+YlMRddxVAAGjhxndDB4i4C0I= github.com/valyala/fasthttp v1.52.0 h1:wqBQpxH71XW0e2g+Og4dzQM8pk34aFYlA1Ga8db7gU0= @@ -1201,7 +1195,6 @@ 
go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= -golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= @@ -1214,7 +1207,6 @@ golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhp golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028 h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= @@ -1231,7 +1223,6 @@ golang.org/x/oauth2 v0.19.0/go.mod h1:vYi7skDa1x015PmRRYZ7+s1cWyPgrPiSYRe4rnsexc golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20211117180635-dee7805ff2e1/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/next/controllers/github.go b/next/controllers/github.go index 287b796f2..1fc9b6b1d 100644 --- a/next/controllers/github.go +++ b/next/controllers/github.go @@ -6,15 +6,8 @@ import ( "encoding/json" "fmt" "github.com/diggerhq/digger/backend/ci_backends" - "github.com/diggerhq/digger/backend/locking" - "github.com/diggerhq/digger/backend/segment" - "github.com/diggerhq/digger/backend/services" - "github.com/diggerhq/digger/libs/ci" - "github.com/diggerhq/digger/libs/ci/generic" - comment_updater "github.com/diggerhq/digger/libs/comment_utils/reporting" - dg_locking "github.com/diggerhq/digger/libs/locking" orchestrator_scheduler "github.com/diggerhq/digger/libs/scheduler" - "github.com/google/uuid" + "github.com/diggerhq/digger/next/model" "log" "math/rand" "net/http" @@ -26,10 +19,10 @@ import ( "strings" "github.com/diggerhq/digger/backend/middleware" - "github.com/diggerhq/digger/backend/models" "github.com/diggerhq/digger/backend/utils" dg_github "github.com/diggerhq/digger/libs/ci/github" dg_configuration "github.com/diggerhq/digger/libs/digger_config" + "github.com/diggerhq/digger/next/models" "github.com/dominikbraun/graph" "github.com/gin-gonic/gin" "github.com/google/go-github/v61/github" @@ -251,13 +244,13 @@ func (d DiggerController) GithubSetupExchangeCode(c *gin.Context) { } -func createOrGetDiggerRepoForGithubRepo(ghRepoFullName string, ghRepoOrganisation string, ghRepoName string, ghRepoUrl string, installationId int64) (*models.Repo, *models.Organisation, error) { +func createOrGetDiggerRepoForGithubRepo(ghRepoFullName string, ghRepoOrganisation string, ghRepoName string, ghRepoUrl string, installationId int64) (*model.Repo, *model.Organization, error) { link, err := models.DB.GetGithubInstallationLinkForInstallationId(installationId) if err != nil { log.Printf("Error fetching 
installation link: %v", err) return nil, nil, err } - orgId := link.OrganisationId + orgId := link.OrganizationID org, err := models.DB.GetOrganisationById(orgId) if err != nil { log.Printf("Error fetching organisation by id: %v, error: %v\n", orgId, err) @@ -395,7 +388,7 @@ func handlePushEvent(gh utils.GithubClientProvider, payload *github.PushEvent) e return fmt.Errorf("error getting github app link") } - orgId := link.OrganisationId + orgId := link.OrganizationID diggerRepoName := strings.ReplaceAll(repoFullName, "/", "-") repo, err := models.DB.GetRepo(orgId, diggerRepoName) if err != nil { @@ -426,7 +419,7 @@ func handlePushEvent(gh utils.GithubClientProvider, payload *github.PushEvent) e log.Printf("ERROR load digger.yml: %v", err) return fmt.Errorf("error loading digger.yml %v", err) } - models.DB.UpdateRepoDiggerConfig(link.OrganisationId, *config, repo, isMainBranch) + models.DB.UpdateRepoDiggerConfig(link.OrganizationID, *config, repo, isMainBranch) return nil }) if err != nil { @@ -436,199 +429,199 @@ func handlePushEvent(gh utils.GithubClientProvider, payload *github.PushEvent) e return nil } -func handlePullRequestEvent(gh utils.GithubClientProvider, payload *github.PullRequestEvent, ciBackendProvider ci_backends.CiBackendProvider) error { - installationId := *payload.Installation.ID - repoName := *payload.Repo.Name - repoOwner := *payload.Repo.Owner.Login - repoFullName := *payload.Repo.FullName - cloneURL := *payload.Repo.CloneURL - prNumber := *payload.PullRequest.Number - isDraft := payload.PullRequest.GetDraft() - commitSha := payload.PullRequest.Head.GetSHA() - branch := payload.PullRequest.Head.GetRef() - - link, err := models.DB.GetGithubAppInstallationLink(installationId) - if err != nil { - log.Printf("Error getting GetGithubAppInstallationLink: %v", err) - return fmt.Errorf("error getting github app link") - } - organisationId := link.OrganisationId - - diggerYmlStr, ghService, config, projectsGraph, _, _, err := getDiggerConfigForPR(gh, 
installationId, repoFullName, repoOwner, repoName, cloneURL, prNumber) - if err != nil { - ghService, _, err := utils.GetGithubService(gh, installationId, repoFullName, repoOwner, repoName) - if err != nil { - log.Printf("GetGithubService error: %v", err) - return fmt.Errorf("error getting ghService to post error comment") - } - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Could not load digger config, error: %v", err)) - log.Printf("getDiggerConfigForPR error: %v", err) - return fmt.Errorf("error getting digger config") - } - - impactedProjects, impactedProjectsSourceMapping, _, err := dg_github.ProcessGitHubPullRequestEvent(payload, config, projectsGraph, ghService) - if err != nil { - log.Printf("Error processing event: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Error processing event: %v", err)) - return fmt.Errorf("error processing event") - } - - jobsForImpactedProjects, _, err := dg_github.ConvertGithubPullRequestEventToJobs(payload, impactedProjects, nil, *config) - if err != nil { - log.Printf("Error converting event to jobsForImpactedProjects: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Error converting event to jobsForImpactedProjects: %v", err)) - return fmt.Errorf("error converting event to jobsForImpactedProjects") - } - - if len(jobsForImpactedProjects) == 0 { - // do not report if no projects are impacted to minimise noise in the PR thread - // TODO use status checks instead: https://github.com/diggerhq/digger/issues/1135 - log.Printf("No projects impacted; not starting any jobs") - // This one is for aggregate reporting - err = utils.SetPRStatusForJobs(ghService, prNumber, jobsForImpactedProjects) - return nil - } - - diggerCommand, err := orchestrator_scheduler.GetCommandFromJob(jobsForImpactedProjects[0]) - if err != nil { - log.Printf("could not determine digger command from job: %v", jobsForImpactedProjects[0].Commands) - utils.InitCommentReporter(ghService, 
prNumber, fmt.Sprintf(":x: could not determine digger command from job: %v", err)) - return fmt.Errorf("unkown digger command in comment %v", err) - } - - if *diggerCommand == orchestrator_scheduler.DiggerCommandNoop { - log.Printf("job is of type noop, no actions top perform") - return nil - } - - // perform locking/unlocking in backend - if config.PrLocks { - for _, project := range impactedProjects { - prLock := dg_locking.PullRequestLock{ - InternalLock: locking.BackendDBLock{ - OrgId: organisationId, - }, - CIService: ghService, - Reporter: comment_updater.NoopReporter{}, - ProjectName: project.Name, - ProjectNamespace: repoFullName, - PrNumber: prNumber, - } - err = dg_locking.PerformLockingActionFromCommand(prLock, *diggerCommand) - if err != nil { - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Failed perform lock action on project: %v %v", project.Name, err)) - return fmt.Errorf("failed to perform lock action on project: %v, %v", project.Name, err) - } - } - } - - // if commands are locking or unlocking we don't need to trigger any jobs - if *diggerCommand == orchestrator_scheduler.DiggerCommandUnlock || - *diggerCommand == orchestrator_scheduler.DiggerCommandLock { - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":white_check_mark: Command %v completed successfully", *diggerCommand)) - return nil - } - - if !config.AllowDraftPRs && isDraft { - log.Printf("Draft PRs are disabled, skipping PR: %v", prNumber) - return nil - } - - commentReporter, err := utils.InitCommentReporter(ghService, prNumber, ":construction_worker: Digger starting...") - if err != nil { - log.Printf("Error initializing comment reporter: %v", err) - return fmt.Errorf("error initializing comment reporter") - } - - err = utils.ReportInitialJobsStatus(commentReporter, jobsForImpactedProjects) - if err != nil { - log.Printf("Failed to comment initial status for jobs: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Failed to comment 
initial status for jobs: %v", err)) - return fmt.Errorf("failed to comment initial status for jobs") - } - - err = utils.SetPRStatusForJobs(ghService, prNumber, jobsForImpactedProjects) - if err != nil { - log.Printf("error setting status for PR: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: error setting status for PR: %v", err)) - fmt.Errorf("error setting status for PR: %v", err) - } - - impactedProjectsMap := make(map[string]dg_configuration.Project) - for _, p := range impactedProjects { - impactedProjectsMap[p.Name] = p - } - - impactedJobsMap := make(map[string]orchestrator_scheduler.Job) - for _, j := range jobsForImpactedProjects { - impactedJobsMap[j.ProjectName] = j - } - - commentId, err := strconv.ParseInt(commentReporter.CommentId, 10, 64) - if err != nil { - log.Printf("strconv.ParseInt error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: could not handle commentId: %v", err)) - } - batchId, _, err := utils.ConvertJobsToDiggerJobs(*diggerCommand, models.DiggerVCSGithub, organisationId, impactedJobsMap, impactedProjectsMap, projectsGraph, installationId, branch, prNumber, repoOwner, repoName, repoFullName, commitSha, commentId, diggerYmlStr, 0) - if err != nil { - log.Printf("ConvertJobsToDiggerJobs error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: ConvertJobsToDiggerJobs error: %v", err)) - return fmt.Errorf("error converting jobs") - } - - if config.CommentRenderMode == dg_configuration.CommentRenderModeGroupByModule { - sourceDetails, err := comment_updater.PostInitialSourceComments(ghService, prNumber, impactedProjectsSourceMapping) - if err != nil { - log.Printf("PostInitialSourceComments error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: PostInitialSourceComments error: %v", err)) - return fmt.Errorf("error posting initial comments") - } - batch, err := models.DB.GetDiggerBatch(batchId) - if err != nil { - 
log.Printf("GetDiggerBatch error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: PostInitialSourceComments error: %v", err)) - return fmt.Errorf("error getting digger batch") - } - batch.SourceDetails, err = json.Marshal(sourceDetails) - if err != nil { - log.Printf("sourceDetails, json Marshal error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: json Marshal error: %v", err)) - return fmt.Errorf("error marshalling sourceDetails") - } - err = models.DB.UpdateDiggerBatch(batch) - if err != nil { - log.Printf("UpdateDiggerBatch error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: UpdateDiggerBatch error: %v", err)) - return fmt.Errorf("error updating digger batch") - } - } - - segment.Track(strconv.Itoa(int(organisationId)), "backend_trigger_job") - - ciBackend, err := ciBackendProvider.GetCiBackend( - ci_backends.CiBackendOptions{ - GithubClientProvider: gh, - GithubInstallationId: installationId, - RepoName: repoName, - RepoOwner: repoOwner, - RepoFullName: repoFullName, - }, - ) - if err != nil { - log.Printf("GetCiBackend error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: GetCiBackend error: %v", err)) - return fmt.Errorf("error fetching ci backed %v", err) - } - - err = TriggerDiggerJobs(ciBackend, repoFullName, repoOwner, repoName, batchId, prNumber, ghService, gh) - if err != nil { - log.Printf("TriggerDiggerJobs error: %v", err) - utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: TriggerDiggerJobs error: %v", err)) - return fmt.Errorf("error triggerring Digger Jobs") - } - - return nil -} +//func handlePullRequestEvent(gh utils.GithubClientProvider, payload *github.PullRequestEvent, ciBackendProvider ci_backends.CiBackendProvider) error { +// installationId := *payload.Installation.ID +// repoName := *payload.Repo.Name +// repoOwner := *payload.Repo.Owner.Login +// repoFullName := *payload.Repo.FullName +// cloneURL := 
*payload.Repo.CloneURL +// prNumber := *payload.PullRequest.Number +// isDraft := payload.PullRequest.GetDraft() +// commitSha := payload.PullRequest.Head.GetSHA() +// branch := payload.PullRequest.Head.GetRef() +// +// link, err := models.DB.GetGithubAppInstallationLink(installationId) +// if err != nil { +// log.Printf("Error getting GetGithubAppInstallationLink: %v", err) +// return fmt.Errorf("error getting github app link") +// } +// organisationId := link.OrganizationID +// +// diggerYmlStr, ghService, config, projectsGraph, _, _, err := getDiggerConfigForPR(gh, installationId, repoFullName, repoOwner, repoName, cloneURL, prNumber) +// if err != nil { +// ghService, _, err := utils.GetGithubService(gh, installationId, repoFullName, repoOwner, repoName) +// if err != nil { +// log.Printf("GetGithubService error: %v", err) +// return fmt.Errorf("error getting ghService to post error comment") +// } +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Could not load digger config, error: %v", err)) +// log.Printf("getDiggerConfigForPR error: %v", err) +// return fmt.Errorf("error getting digger config") +// } +// +// impactedProjects, impactedProjectsSourceMapping, _, err := dg_github.ProcessGitHubPullRequestEvent(payload, config, projectsGraph, ghService) +// if err != nil { +// log.Printf("Error processing event: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Error processing event: %v", err)) +// return fmt.Errorf("error processing event") +// } +// +// jobsForImpactedProjects, _, err := dg_github.ConvertGithubPullRequestEventToJobs(payload, impactedProjects, nil, *config) +// if err != nil { +// log.Printf("Error converting event to jobsForImpactedProjects: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Error converting event to jobsForImpactedProjects: %v", err)) +// return fmt.Errorf("error converting event to jobsForImpactedProjects") +// } +// +// if len(jobsForImpactedProjects) 
== 0 { +// // do not report if no projects are impacted to minimise noise in the PR thread +// // TODO use status checks instead: https://github.com/diggerhq/digger/issues/1135 +// log.Printf("No projects impacted; not starting any jobs") +// // This one is for aggregate reporting +// err = utils.SetPRStatusForJobs(ghService, prNumber, jobsForImpactedProjects) +// return nil +// } +// +// diggerCommand, err := orchestrator_scheduler.GetCommandFromJob(jobsForImpactedProjects[0]) +// if err != nil { +// log.Printf("could not determine digger command from job: %v", jobsForImpactedProjects[0].Commands) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: could not determine digger command from job: %v", err)) +// return fmt.Errorf("unkown digger command in comment %v", err) +// } +// +// if *diggerCommand == orchestrator_scheduler.DiggerCommandNoop { +// log.Printf("job is of type noop, no actions top perform") +// return nil +// } +// +// // perform locking/unlocking in backend +// //if config.PrLocks { +// // for _, project := range impactedProjects { +// // prLock := dg_locking.PullRequestLock{ +// // InternalLock: locking.BackendDBLock{ +// // OrgId: organisationId, +// // }, +// // CIService: ghService, +// // Reporter: comment_updater.NoopReporter{}, +// // ProjectName: project.Name, +// // ProjectNamespace: repoFullName, +// // PrNumber: prNumber, +// // } +// // err = dg_locking.PerformLockingActionFromCommand(prLock, *diggerCommand) +// // if err != nil { +// // utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Failed perform lock action on project: %v %v", project.Name, err)) +// // return fmt.Errorf("failed to perform lock action on project: %v, %v", project.Name, err) +// // } +// // } +// //} +// +// // if commands are locking or unlocking we don't need to trigger any jobs +// if *diggerCommand == orchestrator_scheduler.DiggerCommandUnlock || +// *diggerCommand == orchestrator_scheduler.DiggerCommandLock { +// 
utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":white_check_mark: Command %v completed successfully", *diggerCommand)) +// return nil +// } +// +// if !config.AllowDraftPRs && isDraft { +// log.Printf("Draft PRs are disabled, skipping PR: %v", prNumber) +// return nil +// } +// +// commentReporter, err := utils.InitCommentReporter(ghService, prNumber, ":construction_worker: Digger starting...") +// if err != nil { +// log.Printf("Error initializing comment reporter: %v", err) +// return fmt.Errorf("error initializing comment reporter") +// } +// +// err = utils.ReportInitialJobsStatus(commentReporter, jobsForImpactedProjects) +// if err != nil { +// log.Printf("Failed to comment initial status for jobs: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: Failed to comment initial status for jobs: %v", err)) +// return fmt.Errorf("failed to comment initial status for jobs") +// } +// +// err = utils.SetPRStatusForJobs(ghService, prNumber, jobsForImpactedProjects) +// if err != nil { +// log.Printf("error setting status for PR: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: error setting status for PR: %v", err)) +// fmt.Errorf("error setting status for PR: %v", err) +// } +// +// impactedProjectsMap := make(map[string]dg_configuration.Project) +// for _, p := range impactedProjects { +// impactedProjectsMap[p.Name] = p +// } +// +// impactedJobsMap := make(map[string]orchestrator_scheduler.Job) +// for _, j := range jobsForImpactedProjects { +// impactedJobsMap[j.ProjectName] = j +// } +// +// commentId, err := strconv.ParseInt(commentReporter.CommentId, 10, 64) +// if err != nil { +// log.Printf("strconv.ParseInt error: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: could not handle commentId: %v", err)) +// } +// batchId, _, err := utils.ConvertJobsToDiggerJobs(*diggerCommand, models2.DiggerVCSGithub, organisationId, impactedJobsMap, impactedProjectsMap, 
projectsGraph, installationId, branch, prNumber, repoOwner, repoName, repoFullName, commitSha, commentId, diggerYmlStr, 0) +// if err != nil { +// log.Printf("ConvertJobsToDiggerJobs error: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: ConvertJobsToDiggerJobs error: %v", err)) +// return fmt.Errorf("error converting jobs") +// } +// +// if config.CommentRenderMode == dg_configuration.CommentRenderModeGroupByModule { +// sourceDetails, err := comment_updater.PostInitialSourceComments(ghService, prNumber, impactedProjectsSourceMapping) +// if err != nil { +// log.Printf("PostInitialSourceComments error: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: PostInitialSourceComments error: %v", err)) +// return fmt.Errorf("error posting initial comments") +// } +// batch, err := models.DB.GetDiggerBatch(batchId) +// if err != nil { +// log.Printf("GetDiggerBatch error: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: PostInitialSourceComments error: %v", err)) +// return fmt.Errorf("error getting digger batch") +// } +// batch.SourceDetails, err = json.Marshal(sourceDetails) +// if err != nil { +// log.Printf("sourceDetails, json Marshal error: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: json Marshal error: %v", err)) +// return fmt.Errorf("error marshalling sourceDetails") +// } +// err = models.DB.UpdateDiggerBatch(batch) +// if err != nil { +// log.Printf("UpdateDiggerBatch error: %v", err) +// utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: UpdateDiggerBatch error: %v", err)) +// return fmt.Errorf("error updating digger batch") +// } +// } +// +// segment.Track(strconv.Itoa(int(organisationId)), "backend_trigger_job") +// +// //ciBackend, err := ciBackendProvider.GetCiBackend( +// // ci_backends.CiBackendOptions{ +// // GithubClientProvider: gh, +// // GithubInstallationId: installationId, +// // RepoName: repoName, +// // 
RepoOwner: repoOwner, +// // RepoFullName: repoFullName, +// // }, +// //) +// //if err != nil { +// // log.Printf("GetCiBackend error: %v", err) +// // utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: GetCiBackend error: %v", err)) +// // return fmt.Errorf("error fetching ci backed %v", err) +// //} +// // +// //err = TriggerDiggerJobs(ciBackend, repoFullName, repoOwner, repoName, batchId, prNumber, ghService, gh) +// //if err != nil { +// // log.Printf("TriggerDiggerJobs error: %v", err) +// // utils.InitCommentReporter(ghService, prNumber, fmt.Sprintf(":x: TriggerDiggerJobs error: %v", err)) +// // return fmt.Errorf("error triggerring Digger Jobs") +// //} +// +// return nil +//} func getDiggerConfigForBranch(gh utils.GithubClientProvider, installationId int64, repoFullName string, repoOwner string, repoName string, cloneUrl string, branch string, prNumber int) (string, *dg_github.GithubService, *dg_configuration.DiggerConfig, graph.Graph[string, dg_configuration.Project], error) { ghService, token, err := utils.GetGithubService(gh, installationId, repoFullName, repoOwner, repoName) @@ -690,7 +683,7 @@ func getDiggerConfigForPR(gh utils.GithubClientProvider, installationId int64, r return diggerYmlStr, ghService, config, dependencyGraph, &prBranch, &prCommitSha, nil } -func GetRepoByInstllationId(installationId int64, repoOwner string, repoName string) (*models.Repo, error) { +func GetRepoByInstllationId(installationId int64, repoOwner string, repoName string) (*model.Repo, error) { link, err := models.DB.GetGithubAppInstallationLink(installationId) if err != nil { log.Printf("Error getting GetGithubAppInstallationLink: %v", err) @@ -703,7 +696,7 @@ func GetRepoByInstllationId(installationId int64, repoOwner string, repoName str } diggerRepoName := repoOwner + "-" + repoName - repo, err := models.DB.GetRepo(link.Organisation.ID, diggerRepoName) + repo, err := models.DB.GetRepo(link.OrganizationID, diggerRepoName) return repo, nil } @@ -718,362 
+711,6 @@ func getBatchType(jobs []orchestrator_scheduler.Job) orchestrator_scheduler.Digg } } -func handleIssueCommentEvent(gh utils.GithubClientProvider, payload *github.IssueCommentEvent, ciBackendProvider ci_backends.CiBackendProvider) error { - installationId := *payload.Installation.ID - repoName := *payload.Repo.Name - repoOwner := *payload.Repo.Owner.Login - repoFullName := *payload.Repo.FullName - cloneURL := *payload.Repo.CloneURL - issueNumber := *payload.Issue.Number - isDraft := payload.Issue.GetDraft() - userCommentId := *payload.GetComment().ID - actor := *payload.Sender.Login - commentBody := *payload.Comment.Body - defaultBranch := *payload.Repo.DefaultBranch - - link, err := models.DB.GetGithubAppInstallationLink(installationId) - if err != nil { - log.Printf("Error getting GetGithubAppInstallationLink: %v", err) - return fmt.Errorf("error getting github app link") - } - orgId := link.OrganisationId - - if *payload.Action != "created" { - log.Printf("comment is not of type 'created', ignoring") - return nil - } - - if !strings.HasPrefix(*payload.Comment.Body, "digger") { - log.Printf("comment is not a Digger command, ignoring") - return nil - } - - diggerYmlStr, ghService, config, projectsGraph, branch, commitSha, err := getDiggerConfigForPR(gh, installationId, repoFullName, repoOwner, repoName, cloneURL, issueNumber) - if err != nil { - ghService, _, gherr := utils.GetGithubService(gh, installationId, repoFullName, repoOwner, repoName) - if gherr != nil { - log.Printf("GetGithubService error: %v", gherr) - return fmt.Errorf("error getting ghService to post error comment") - } - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: Could not load digger config, error: %v", err)) - log.Printf("getDiggerConfigForPR error: %v", err) - return fmt.Errorf("error getting digger config") - } - - commentIdStr := strconv.FormatInt(userCommentId, 10) - err = ghService.CreateCommentReaction(commentIdStr, 
string(dg_github.GithubCommentEyesReaction)) - if err != nil { - log.Printf("CreateCommentReaction error: %v", err) - } - - if !config.AllowDraftPRs && isDraft { - log.Printf("AllowDraftPRs is disabled, skipping PR: %v", issueNumber) - return nil - } - - commentReporter, err := utils.InitCommentReporter(ghService, issueNumber, ":construction_worker: Digger starting....") - if err != nil { - log.Printf("Error initializing comment reporter: %v", err) - return fmt.Errorf("error initializing comment reporter") - } - - diggerCommand, err := orchestrator_scheduler.GetCommandFromComment(*payload.Comment.Body) - if err != nil { - log.Printf("unkown digger command in comment: %v", *payload.Comment.Body) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: Could not recognise comment, error: %v", err)) - return fmt.Errorf("unkown digger command in comment %v", err) - } - - prBranchName, _, err := ghService.GetBranchName(issueNumber) - if err != nil { - log.Printf("GetBranchName error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: GetBranchName error: %v", err)) - return fmt.Errorf("error while fetching branch name") - } - - impactedProjects, impactedProjectsSourceMapping, requestedProject, _, err := generic.ProcessIssueCommentEvent(issueNumber, *payload.Comment.Body, config, projectsGraph, ghService) - if err != nil { - log.Printf("Error processing event: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: Error processing event: %v", err)) - return fmt.Errorf("error processing event") - } - log.Printf("GitHub IssueComment event processed successfully\n") - - // perform unlocking in backend - if config.PrLocks { - for _, project := range impactedProjects { - prLock := dg_locking.PullRequestLock{ - InternalLock: locking.BackendDBLock{ - OrgId: orgId, - }, - CIService: ghService, - Reporter: comment_updater.NoopReporter{}, - ProjectName: project.Name, - ProjectNamespace: repoFullName, - PrNumber: 
issueNumber, - } - err = dg_locking.PerformLockingActionFromCommand(prLock, *diggerCommand) - if err != nil { - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: Failed perform lock action on project: %v %v", project.Name, err)) - return fmt.Errorf("failed perform lock action on project: %v %v", project.Name, err) - } - } - } - - // if commands are locking or unlocking we don't need to trigger any jobs - if *diggerCommand == orchestrator_scheduler.DiggerCommandUnlock || - *diggerCommand == orchestrator_scheduler.DiggerCommandLock { - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":white_check_mark: Command %v completed successfully", *diggerCommand)) - return nil - } - - jobs, _, err := generic.ConvertIssueCommentEventToJobs(repoFullName, actor, issueNumber, commentBody, impactedProjects, requestedProject, config.Workflows, prBranchName, defaultBranch) - if err != nil { - log.Printf("Error converting event to jobs: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: Error converting event to jobs: %v", err)) - return fmt.Errorf("error converting event to jobs") - } - log.Printf("GitHub IssueComment event converted to Jobs successfully\n") - - err = utils.ReportInitialJobsStatus(commentReporter, jobs) - if err != nil { - log.Printf("Failed to comment initial status for jobs: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: Failed to comment initial status for jobs: %v", err)) - return fmt.Errorf("failed to comment initial status for jobs") - } - - if len(jobs) == 0 { - log.Printf("no projects impacated, succeeding") - // This one is for aggregate reporting - err = utils.SetPRStatusForJobs(ghService, issueNumber, jobs) - return nil - } - - err = utils.SetPRStatusForJobs(ghService, issueNumber, jobs) - if err != nil { - log.Printf("error setting status for PR: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: error setting status for PR: %v", err)) - 
fmt.Errorf("error setting status for PR: %v", err) - } - - impactedProjectsMap := make(map[string]dg_configuration.Project) - for _, p := range impactedProjects { - impactedProjectsMap[p.Name] = p - } - - impactedProjectsJobMap := make(map[string]orchestrator_scheduler.Job) - for _, j := range jobs { - impactedProjectsJobMap[j.ProjectName] = j - } - - reporterCommentId, err := strconv.ParseInt(commentReporter.CommentId, 10, 64) - if err != nil { - log.Printf("strconv.ParseInt error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: could not handle commentId: %v", err)) - } - - batchId, _, err := utils.ConvertJobsToDiggerJobs(*diggerCommand, "github", orgId, impactedProjectsJobMap, impactedProjectsMap, projectsGraph, installationId, *branch, issueNumber, repoOwner, repoName, repoFullName, *commitSha, reporterCommentId, diggerYmlStr, 0) - if err != nil { - log.Printf("ConvertJobsToDiggerJobs error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: ConvertJobsToDiggerJobs error: %v", err)) - return fmt.Errorf("error convertingjobs") - } - - if config.CommentRenderMode == dg_configuration.CommentRenderModeGroupByModule && - (*diggerCommand == orchestrator_scheduler.DiggerCommandPlan || *diggerCommand == orchestrator_scheduler.DiggerCommandApply) { - - sourceDetails, err := comment_updater.PostInitialSourceComments(ghService, issueNumber, impactedProjectsSourceMapping) - if err != nil { - log.Printf("PostInitialSourceComments error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: PostInitialSourceComments error: %v", err)) - return fmt.Errorf("error posting initial comments") - } - batch, err := models.DB.GetDiggerBatch(batchId) - if err != nil { - log.Printf("GetDiggerBatch error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: PostInitialSourceComments error: %v", err)) - return fmt.Errorf("error getting digger batch") - } - - batch.SourceDetails, 
err = json.Marshal(sourceDetails) - if err != nil { - log.Printf("sourceDetails, json Marshal error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: json Marshal error: %v", err)) - return fmt.Errorf("error marshalling sourceDetails") - } - err = models.DB.UpdateDiggerBatch(batch) - if err != nil { - log.Printf("UpdateDiggerBatch error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: UpdateDiggerBatch error: %v", err)) - return fmt.Errorf("error updating digger batch") - } - } - - segment.Track(strconv.Itoa(int(orgId)), "backend_trigger_job") - - ciBackend, err := ciBackendProvider.GetCiBackend( - ci_backends.CiBackendOptions{ - GithubClientProvider: gh, - GithubInstallationId: installationId, - RepoName: repoName, - RepoOwner: repoOwner, - RepoFullName: repoFullName, - }, - ) - if err != nil { - log.Printf("GetCiBackend error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: GetCiBackend error: %v", err)) - return fmt.Errorf("error fetching ci backed %v", err) - } - err = TriggerDiggerJobs(ciBackend, repoFullName, repoOwner, repoName, batchId, issueNumber, ghService, gh) - if err != nil { - log.Printf("TriggerDiggerJobs error: %v", err) - utils.InitCommentReporter(ghService, issueNumber, fmt.Sprintf(":x: TriggerDiggerJobs error: %v", err)) - return fmt.Errorf("error triggerring Digger Jobs") - } - return nil -} - -func TriggerDiggerJobs(ciBackend ci_backends.CiBackend, repoFullName string, repoOwner string, repoName string, batchId *uuid.UUID, prNumber int, prService ci.PullRequestService, gh utils.GithubClientProvider) error { - _, err := models.DB.GetDiggerBatch(batchId) - if err != nil { - log.Printf("failed to get digger batch, %v\n", err) - return fmt.Errorf("failed to get digger batch, %v\n", err) - } - diggerJobs, err := models.DB.GetPendingParentDiggerJobs(batchId) - - if err != nil { - log.Printf("failed to get pending digger jobs, %v\n", err) - return 
fmt.Errorf("failed to get pending digger jobs, %v\n", err) - } - - log.Printf("number of diggerJobs:%v\n", len(diggerJobs)) - - for _, job := range diggerJobs { - if job.SerializedJobSpec == nil { - return fmt.Errorf("GitHub job can't be nil") - } - jobString := string(job.SerializedJobSpec) - log.Printf("jobString: %v \n", jobString) - - // TODO: make workflow file name configurable - err = services.ScheduleJob(ciBackend, repoFullName, repoOwner, repoName, batchId, &job, gh) - if err != nil { - log.Printf("failed to trigger CI workflow, %v\n", err) - return fmt.Errorf("failed to trigger CI workflow, %v\n", err) - } - } - return nil -} - -// CreateDiggerWorkflowWithPullRequest for specified repo it will create a new branch 'digger/configure' and a pull request to default branch -// in the pull request it will try to add .github/workflows/digger_workflow.yml file with workflow for digger -func CreateDiggerWorkflowWithPullRequest(org *models.Organisation, client *github.Client, githubRepo string) error { - ctx := context.Background() - if strings.Index(githubRepo, "/") == -1 { - return fmt.Errorf("githubRepo is in a wrong format: %v", githubRepo) - } - githubRepoSplit := strings.Split(githubRepo, "/") - if len(githubRepoSplit) != 2 { - return fmt.Errorf("githubRepo is in a wrong format: %v", githubRepo) - } - repoOwner := githubRepoSplit[0] - repoName := githubRepoSplit[1] - - // check if workflow file exist already in default branch, if it does, do nothing - // else try to create a branch and PR - - workflowFilePath := ".github/workflows/digger_workflow.yml" - repo, _, _ := client.Repositories.Get(ctx, repoOwner, repoName) - defaultBranch := *repo.DefaultBranch - - defaultBranchRef, _, _ := client.Git.GetRef(ctx, repoOwner, repoName, "refs/heads/"+defaultBranch) // or "refs/heads/main" - branch := "digger/configure" - refName := fmt.Sprintf("refs/heads/%s", branch) - branchRef := &github.Reference{ - Ref: &refName, - Object: &github.GitObject{ - SHA: 
defaultBranchRef.Object.SHA, - }, - } - - opts := &github.RepositoryContentGetOptions{Ref: *defaultBranchRef.Ref} - contents, _, _, err := client.Repositories.GetContents(ctx, repoOwner, repoName, workflowFilePath, opts) - if err != nil { - if !strings.Contains(err.Error(), "Not Found") { - log.Printf("failed to get contents of the file %v", err) - return fmt.Errorf("failed to get contents of the file %v", workflowFilePath) - } - } - - // workflow file doesn't already exist, we can create it - if contents == nil { - // trying to create a new branch - _, _, err := client.Git.CreateRef(ctx, repoOwner, repoName, branchRef) - if err != nil { - // if branch already exist, do nothing - if strings.Contains(err.Error(), "Reference already exists") { - log.Printf("Branch %v already exist, do nothing\n", branchRef) - return nil - } - return fmt.Errorf("failed to create a branch, %w", err) - } - - // TODO: move to a separate config - jobName := "Digger Workflow" - setupAws := false - disableLocking := false - diggerHostname := os.Getenv("DIGGER_CLOUD_HOSTNAME") - diggerOrg := org.Name - - workflowFileContents := fmt.Sprintf(`on: - workflow_dispatch: - inputs: - job: - required: true - id: - description: 'run identifier' - required: false -jobs: - build: - name: %v - runs-on: ubuntu-latest - steps: - - name: digger run - uses: diggerhq/digger@develop - with: - setup-aws: %v - disable-locking: %v - digger-token: ${{ secrets.DIGGER_TOKEN }} - digger-hostname: '%v' - digger-organisation: '%v' - env: - GITHUB_CONTEXT: ${{ toJson(github) }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -`, jobName, setupAws, disableLocking, diggerHostname, diggerOrg) - - commitMessage := "Configure Digger workflow" - var req github.RepositoryContentFileOptions - req.Content = []byte(workflowFileContents) - req.Message = &commitMessage - req.Branch = &branch - - _, _, err = client.Repositories.CreateFile(ctx, repoOwner, repoName, workflowFilePath, &req) - if err != nil { - return fmt.Errorf("failed to 
create digger workflow file, %w", err) - } - - prTitle := "Configure Digger" - pullRequest := &github.NewPullRequest{Title: &prTitle, - Head: &branch, Base: &defaultBranch} - _, _, err = client.PullRequests.Create(ctx, repoOwner, repoName, pullRequest) - if err != nil { - return fmt.Errorf("failed to create a pull request for digger/configure, %w", err) - } - } - return nil -} - func (d DiggerController) GithubAppCallbackPage(c *gin.Context) { installationId := c.Request.URL.Query()["installation_id"][0] //setupAction := c.Request.URL.Query()["setup_action"][0] @@ -1081,12 +718,6 @@ func (d DiggerController) GithubAppCallbackPage(c *gin.Context) { clientId := os.Getenv("GITHUB_APP_CLIENT_ID") clientSecret := os.Getenv("GITHUB_APP_CLIENT_SECRET") - //orgId, exists := c.Get(middleware.ORGANISATION_ID_KEY) - //if !exists { - // c.String(http.StatusForbidden, "Not allowed to access this resource") - // return - //} - installationId64, err := strconv.ParseInt(installationId, 10, 64) if err != nil { log.Printf("err: %v", err) @@ -1101,19 +732,21 @@ func (d DiggerController) GithubAppCallbackPage(c *gin.Context) { return } - //org, err := models.DB.GetOrganisationById(orgId) - //if err != nil { - // log.Printf("Error fetching organisation: %v", err) - // c.JSON(http.StatusInternalServerError, gin.H{"error": "Error fetching organisation"}) - // return - //} + orgId := c.GetString(middleware.ORGANISATION_ID_KEY) + org, err := models.DB.GetOrganisationById(orgId) + if err != nil { + log.Printf("Error fetching organisation: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Error fetching organisation"}) + return + } + + _, err = models.DB.CreateGithubInstallationLink(org, installationId64) + if err != nil { + log.Printf("Error saving CreateGithubInstallationLink to database: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Error updating GitHub installation"}) + return + } - //_, err = models.DB.CreateGithubInstallationLink(org, 
installationId64) - //if err != nil { - // log.Printf("Error saving CreateGithubInstallationLink to database: %v", err) - // c.JSON(http.StatusInternalServerError, gin.H{"error": "Error updating GitHub installation"}) - // return - //} c.HTML(http.StatusOK, "github_success.tmpl", gin.H{}) } @@ -1132,7 +765,7 @@ func (d DiggerController) GithubReposPage(c *gin.Context) { return } - installations, err := models.DB.GetGithubAppInstallations(link.GithubInstallationId) + installations, err := models.DB.GetGithubAppInstallations(link.GithubInstallationID) if err != nil { log.Printf("GetGithubAppInstallations error: %v\n", err) c.String(http.StatusForbidden, "Failed to find any GitHub installations for this org") @@ -1145,7 +778,7 @@ func (d DiggerController) GithubReposPage(c *gin.Context) { } gh := d.GithubClientProvider - client, _, err := gh.Get(installations[0].GithubAppId, installations[0].GithubInstallationId) + client, _, err := gh.Get(installations[0].GithubAppID, installations[0].GithubInstallationID) if err != nil { log.Printf("failed to create github client, %v", err) c.JSON(http.StatusInternalServerError, gin.H{"error": "Error creating GitHub client"}) diff --git a/next/controllers/github_after_merge.go b/next/controllers/github_after_merge.go index 4917585fa..ee2d45e3d 100644 --- a/next/controllers/github_after_merge.go +++ b/next/controllers/github_after_merge.go @@ -87,12 +87,12 @@ func (d DiggerController) GithubAppWebHookAfterMerge(c *gin.Context) { // } case *github.PullRequestEvent: log.Printf("Got pull request event for %d IN APPLY AFTER MERGE", *event.PullRequest.ID) - err := handlePullRequestEvent(gh, event, nil) - if err != nil { - log.Printf("handlePullRequestEvent error: %v", err) - c.String(http.StatusInternalServerError, err.Error()) - return - } + //err := handlePullRequestEvent(gh, event, nil) + //if err != nil { + // log.Printf("handlePullRequestEvent error: %v", err) + // c.String(http.StatusInternalServerError, err.Error()) + // return + 
//} case *github.PushEvent: log.Printf("Got push event for %d", event.Repo.URL) err := handlePushEventApplyAfterMerge(gh, event) diff --git a/next/dbgen/dbgen.go b/next/dbgen/dbgen.go new file mode 100644 index 000000000..b089b1e90 --- /dev/null +++ b/next/dbgen/dbgen.go @@ -0,0 +1,47 @@ +package main + +import ( + "gorm.io/driver/postgres" + "gorm.io/gen" + "gorm.io/gorm" + "os" +) + +// Dynamic SQL +type Querier interface { + // SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}} + FilterWithNameAndRole(name, role string) ([]gen.T, error) +} + +func main() { + g := gen.NewGenerator(gen.Config{ + OutPath: "../models_generated", + Mode: gen.WithoutContext | gen.WithDefaultQuery | gen.WithQueryInterface, // generate mode + }) + + dburl := os.Getenv("DB_URL") + if dburl == "" { + dburl = "postgresql://postgres:postgres@127.0.0.1:54322/postgres" + } + gormdb, _ := gorm.Open(postgres.Open(dburl)) + g.UseDB(gormdb) // reuse your gorm db + + // Generate basic type-safe DAO API for struct `model.User` following conventions + + //g.ApplyBasic( + // // Generate struct `User` based on table `users` + // g.GenerateModel("users"), + // g.GenerateModel("organizations"), + // g.GenerateModel("digger_jobs"), + // + // // Generate struct `Customer` based on table `customer` and generating options + // // customer table may have a tags column, it can be JSON type, gorm/gen tool can generate for your JSON data type + // g.GenerateModel("customers", gen.FieldType("tags", "datatypes.JSON")), + //) + g.ApplyBasic( + // Generate structs from all tables of current database + g.GenerateAllTable()..., + ) + // Generate the code + g.Execute() +} diff --git a/next/dbgen/go.mod b/next/dbgen/go.mod new file mode 100644 index 000000000..3f7583367 --- /dev/null +++ b/next/dbgen/go.mod @@ -0,0 +1,21 @@ +module dbgen + +go 1.22.4 + +require ( + github.com/Joker/jade v1.1.3 + gorm.io/gen v0.3.26 +) + +require ( + github.com/go-sql-driver/mysql v1.7.0 // indirect + 
github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + golang.org/x/mod v0.14.0 // indirect + golang.org/x/tools v0.17.0 // indirect + gorm.io/datatypes v1.1.1-0.20230130040222-c43177d3cf8c // indirect + gorm.io/driver/mysql v1.4.4 // indirect + gorm.io/gorm v1.25.9 // indirect + gorm.io/hints v1.1.0 // indirect + gorm.io/plugin/dbresolver v1.5.0 // indirect +) diff --git a/next/dbgen/go.sum b/next/dbgen/go.sum new file mode 100644 index 000000000..92c6d443f --- /dev/null +++ b/next/dbgen/go.sum @@ -0,0 +1,103 @@ +github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= +github.com/Joker/jade v1.1.3 h1:Qbeh12Vq6BxURXT1qZBRHsDxeURB8ztcL6f3EXSGeHk= +github.com/Joker/jade v1.1.3/go.mod h1:T+2WLyt7VH6Lp0TRxQrUYEs64nRc83wkMQrfeIQKduM= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc= +github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= +github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= +github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A= +github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v1.13.0 h1:3L1XMNV2Zvca/8BYhzcRFS70Lr0WlDg16Di6SFGAbys= +github.com/jackc/pgconn v1.13.0/go.mod 
h1:AnowpAqO4CMIIJNZl2VJp+KrkAZciAkhEl0W0JIobpI= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3/v2 v2.3.1 h1:nwj7qwf0S+Q7ISFfBndqeLwSwxs+4DPsbRFjECT1Y4Y= +github.com/jackc/pgproto3/v2 v2.3.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v1.12.0 h1:Dlq8Qvcch7kiehm8wPGIW0W3KsCCHJnRacKW0UM8n5w= +github.com/jackc/pgtype v1.12.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.17.2 h1:0Ut0rpeKwvIVbMQ1KbMBU4h6wxehBI535LK6Flheh8E= +github.com/jackc/pgx/v4 v4.17.2/go.mod h1:lcxIZN44yMIrWI78a5CpucdD14hX0SBDbNRvjDBItsw= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.2/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/mattn/go-sqlite3 v1.14.8/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= +github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/microsoft/go-mssqldb v0.17.0 
h1:Fto83dMZPnYv1Zwx5vHHxpNraeEaUlQ/hhHLgZiaenE= +github.com/microsoft/go-mssqldb v0.17.0/go.mod h1:OkoNGhGEs8EZqchVTtochlXruEhEOaO4S0d2sB5aeGQ= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= +golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= +golang.org/x/sync v0.6.0/go.mod 
h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= +golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= +golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/datatypes v1.1.1-0.20230130040222-c43177d3cf8c h1:jWdr7cHgl8c/ua5vYbR2WhSp+NQmzhsj0xoY3foTzW8= +gorm.io/datatypes v1.1.1-0.20230130040222-c43177d3cf8c/go.mod h1:SH2K9R+2RMjuX1CkCONrPwoe9JzVv2hkQvEu4bXGojE= +gorm.io/driver/mysql v1.4.3/go.mod h1:sSIebwZAVPiT+27jK9HIwvsqOGKx3YMPmrA3mBJR10c= +gorm.io/driver/mysql v1.4.4 h1:MX0K9Qvy0Na4o7qSC/YI7XxqUw5KDw01umqgID+svdQ= +gorm.io/driver/mysql v1.4.4/go.mod h1:BCg8cKI+R0j/rZRQxeKis/forqRwRSYOR8OM3Wo6hOM= +gorm.io/driver/postgres v1.4.5 h1:mTeXTTtHAgnS9PgmhN2YeUbazYpLhUI1doLnw42XUZc= +gorm.io/driver/postgres v1.4.5/go.mod h1:GKNQYSJ14qvWkvPwXljMGehpKrhlDNsqYRr5HnYGncg= +gorm.io/driver/sqlite v1.1.6/go.mod h1:W8LmC/6UvVbHKah0+QOC7Ja66EaZXHwUTjgXY8YNWX8= +gorm.io/driver/sqlite v1.4.3 h1:HBBcZSDnWi5BW3B3rwvVTc510KGkBkexlOg0QrmLUuU= +gorm.io/driver/sqlite v1.4.3/go.mod h1:0Aq3iPO+v9ZKbcdiz8gLWRw5VOPcBOPUQJFLq5e2ecI= +gorm.io/driver/sqlserver v1.4.1 h1:t4r4r6Jam5E6ejqP7N82qAJIJAht27EGT41HyPfXRw0= +gorm.io/driver/sqlserver v1.4.1/go.mod h1:DJ4P+MeZbc5rvY58PnmN1Lnyvb5gw5NPzGshHDnJLig= +gorm.io/gen v0.3.26 h1:sFf1j7vNStimPRRAtH4zz5NiHM+1dr6eA9aaRdplyhY= +gorm.io/gen v0.3.26/go.mod h1:a5lq5y3w4g5LMxBcw0wnO6tYUCdNutWODq5LrIt75LE= +gorm.io/gorm v1.21.15/go.mod h1:F+OptMscr0P2F2qU97WT1WimdH9GaQPoDW7AYd5i2Y0= +gorm.io/gorm v1.22.2/go.mod h1:F+OptMscr0P2F2qU97WT1WimdH9GaQPoDW7AYd5i2Y0= +gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= +gorm.io/gorm v1.25.2/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= +gorm.io/gorm v1.25.9 h1:wct0gxZIELDk8+ZqF/MVnHLkA1rvYlBWUMv2EdsK1g8= +gorm.io/gorm v1.25.9/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= +gorm.io/hints v1.1.0 h1:Lp4z3rxREufSdxn4qmkK3TLDltrM10FLTHiuqwDPvXw= +gorm.io/hints v1.1.0/go.mod h1:lKQ0JjySsPBj3uslFzY3JhYDtqEwzm+G1hv8rWujB6Y= +gorm.io/plugin/dbresolver v1.5.0 h1:XVHLxh775eP0CqVh3vcfJtYqja3uFl5Wr3cKlY8jgDY= +gorm.io/plugin/dbresolver v1.5.0/go.mod 
h1:l4Cn87EHLEYuqUncpEeTC2tTJQkjngPSD+lo8hIvcT0= diff --git a/next/go.mod b/next/go.mod index 17ce7e741..d5033a154 100644 --- a/next/go.mod +++ b/next/go.mod @@ -3,5 +3,15 @@ module github.com/diggerhq/digger/next go 1.22.4 replace github.com/diggerhq/digger/backend => ../backend + replace github.com/diggerhq/digger/libs => ../libs +require ( + github.com/google/uuid v1.6.0 // indirect + github.com/supabase-community/functions-go v0.0.0-20220927045802-22373e6cb51d // indirect + github.com/supabase-community/gotrue-go v1.2.0 // indirect + github.com/supabase-community/postgrest-go v0.0.11 // indirect + github.com/supabase-community/storage-go v0.7.0 // indirect + github.com/supabase-community/supabase-go v0.0.4 // indirect + github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 // indirect +) diff --git a/next/go.sum b/next/go.sum new file mode 100644 index 000000000..a09a0a6e7 --- /dev/null +++ b/next/go.sum @@ -0,0 +1,14 @@ +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/supabase-community/functions-go v0.0.0-20220927045802-22373e6cb51d h1:LOrsumaZy615ai37h9RjUIygpSubX+F+6rDct1LIag0= +github.com/supabase-community/functions-go v0.0.0-20220927045802-22373e6cb51d/go.mod h1:nnIju6x3+OZSojtGQCQzu0h3kv4HdIZk+UWCnNxtSak= +github.com/supabase-community/gotrue-go v1.2.0 h1:Zm7T5q3qbuwPgC6xyomOBKrSb7X5dvmjDZEmNST7MoE= +github.com/supabase-community/gotrue-go v1.2.0/go.mod h1:86DXBiAUNcbCfgbeOPEh0PQxScLfowUbYgakETSFQOw= +github.com/supabase-community/postgrest-go v0.0.11 h1:717GTUMfLJxSBuAeEQG2MuW5Q62Id+YrDjvjprTSErg= +github.com/supabase-community/postgrest-go v0.0.11/go.mod h1:cw6LfzMyK42AOSBA1bQ/HZ381trIJyuui2GWhraW7Cc= +github.com/supabase-community/storage-go v0.7.0 h1:cJ8HLbbnL54H5rHPtHfiwtpRwcbDfA3in9HL/ucHnqA= +github.com/supabase-community/storage-go v0.7.0/go.mod h1:oBKcJf5rcUXy3Uj9eS5wR6mvpwbmvkjOtAA+4tGcdvQ= 
+github.com/supabase-community/supabase-go v0.0.4 h1:sxMenbq6N8a3z9ihNpN3lC2FL3E1YuTQsjX09VPRp+U= +github.com/supabase-community/supabase-go v0.0.4/go.mod h1:SSHsXoOlc+sq8XeXaf0D3gE2pwrq5bcUfzm0+08u/o8= +github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y= +github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE= diff --git a/next/main.go b/next/main.go index 0eb582f6f..353fbd959 100644 --- a/next/main.go +++ b/next/main.go @@ -7,6 +7,8 @@ import ( "github.com/diggerhq/digger/backend/config" "github.com/diggerhq/digger/backend/utils" controllers "github.com/diggerhq/digger/next/controllers" + "github.com/diggerhq/digger/next/middleware" + "github.com/diggerhq/digger/next/models" "github.com/getsentry/sentry-go" "github.com/gin-gonic/gin" "io/fs" @@ -41,6 +43,9 @@ func main() { log.Printf("Sentry initialization failed: %v\n", err) } + // initialize the database + models.ConnectDatabase() + r := gin.Default() if _, err := os.Stat("templates"); err != nil { @@ -61,7 +66,9 @@ func main() { r.GET("/", controllers.Home) - r.GET("/github/callback", diggerController.GithubAppCallbackPage) + r.GET("/github/callback", middleware.SupabaseCookieAuth(), diggerController.GithubAppCallbackPage) + r.POST("/github-app-webhook", diggerController.GithubAppWebHook) port := config.GetPort() r.Run(fmt.Sprintf(":%d", port)) + } diff --git a/next/middleware/supabase_cookie_auth.go b/next/middleware/supabase_cookie_auth.go new file mode 100644 index 000000000..911d0399d --- /dev/null +++ b/next/middleware/supabase_cookie_auth.go @@ -0,0 +1,74 @@ +package middleware + +import ( + "encoding/json" + "fmt" + "github.com/diggerhq/digger/next/model" + "github.com/diggerhq/digger/next/models" + "github.com/diggerhq/digger/next/supa" + "github.com/gin-gonic/gin" + "log" + "net/http" + "os" +) + +func SupabaseCookieAuth() gin.HandlerFunc { + return func(c *gin.Context) { + 
client, err := supa.GetClient() + if err != nil { + log.Printf("could not create client: %v", err) + c.String(http.StatusBadRequest, "error checking auth") + c.Abort() + return + } + supbaseProjectId := os.Getenv("DIGGER_SUPABASE_PROJECT_REF") + authTokenCookie, err := c.Cookie(fmt.Sprintf("sb-%v-auth-token", supbaseProjectId)) + var authTokenCookieItems []string + err = json.Unmarshal([]byte(authTokenCookie), &authTokenCookieItems) + if err != nil { + log.Printf("could not find supabase auth cookie: %v", err) + c.String(http.StatusBadRequest, "error checking cookie") + c.Abort() + return + } + if len(authTokenCookieItems) == 0 { + log.Printf("could not find supabase auth cookie token: %v", err) + c.String(http.StatusBadRequest, "error checking cookie") + c.Abort() + return + } + authToken := authTokenCookieItems[0] + authenticatedClient := client.Auth.WithToken(authToken) + user, err := authenticatedClient.GetUser() + if err != nil { + log.Printf("err: %v", err) // NOTE(review): error is only logged; user may be nil at the deref below — confirm GetUser contract and abort here if it can fail + } + userId := user.ID.String() + + // TODO: We will have an additional cookie representing the orgId of the user, and we will just query + // for membership to verify + var orgsForUser []model.OrganizationMember + + _, err = client.From("organization_members").Select("*", "exact", false).Eq("member_id", userId).ExecuteTo(&orgsForUser) + if err != nil { + log.Printf("could not get org members: %v", err) + } + + if len(orgsForUser) == 0 { + log.Printf("could not find any orgs for user: %v", userId) + c.String(http.StatusBadRequest, "User does not belong to any orgs") + c.Abort() + return + } + + selectedOrg, err := models.DB.GetUserOrganizationsFirstMatch(userId) + if err != nil { + log.Printf("error while finding organisation: %v", err) + c.String(http.StatusBadRequest, "User does not belong to any orgs") + c.Abort() + return + } + c.Set(ORGANISATION_ID_KEY, selectedOrg.ID) + c.Next() + } +} diff --git a/next/model/account_delete_tokens.gen.go b/next/model/account_delete_tokens.gen.go new file mode 100644
index 000000000..36ef84351 --- /dev/null +++ b/next/model/account_delete_tokens.gen.go @@ -0,0 +1,18 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +const TableNameAccountDeleteToken = "account_delete_tokens" + +// AccountDeleteToken mapped from table +type AccountDeleteToken struct { + Token string `gorm:"column:token;not null;default:uuid_generate_v4()" json:"token"` + UserID string `gorm:"column:user_id;primaryKey" json:"user_id"` +} + +// TableName AccountDeleteToken's table name +func (*AccountDeleteToken) TableName() string { + return TableNameAccountDeleteToken +} diff --git a/next/model/chats.gen.go b/next/model/chats.gen.go new file mode 100644 index 000000000..03b17ef0f --- /dev/null +++ b/next/model/chats.gen.go @@ -0,0 +1,25 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameChat = "chats" + +// Chat mapped from table +type Chat struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + UserID string `gorm:"column:user_id" json:"user_id"` + Payload string `gorm:"column:payload" json:"payload"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:timezone('utc" json:"created_at"` + ProjectID string `gorm:"column:project_id;not null" json:"project_id"` +} + +// TableName Chat's table name +func (*Chat) TableName() string { + return TableNameChat +} diff --git a/next/model/customers.gen.go b/next/model/customers.gen.go new file mode 100644 index 000000000..f6adb9cfc --- /dev/null +++ b/next/model/customers.gen.go @@ -0,0 +1,18 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +const TableNameCustomer = "customers" + +// Customer mapped from table +type Customer struct { + StripeCustomerID string `gorm:"column:stripe_customer_id;primaryKey" json:"stripe_customer_id"` + OrganizationID string `gorm:"column:organization_id;primaryKey" json:"organization_id"` +} + +// TableName Customer's table name +func (*Customer) TableName() string { + return TableNameCustomer +} diff --git a/next/model/digger_batches.gen.go b/next/model/digger_batches.gen.go new file mode 100644 index 000000000..5fef19d2e --- /dev/null +++ b/next/model/digger_batches.gen.go @@ -0,0 +1,30 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +const TableNameDiggerBatch = "digger_batches" + +// DiggerBatch mapped from table +type DiggerBatch struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + PrNumber int64 `gorm:"column:pr_number" json:"pr_number"` + Status int16 `gorm:"column:status;not null" json:"status"` + BranchName string `gorm:"column:branch_name;not null" json:"branch_name"` + DiggerConfig string `gorm:"column:digger_config" json:"digger_config"` + GithubInstallationID int64 `gorm:"column:github_installation_id" json:"github_installation_id"` + RepoFullName string `gorm:"column:repo_full_name;not null" json:"repo_full_name"` + RepoOwner string `gorm:"column:repo_owner;not null" json:"repo_owner"` + RepoName string `gorm:"column:repo_name;not null" json:"repo_name"` + BatchType string `gorm:"column:batch_type;not null" json:"batch_type"` + CommentID int64 `gorm:"column:comment_id" json:"comment_id"` + SourceDetails []uint8 `gorm:"column:source_details" json:"source_details"` + Vcs string `gorm:"column:vcs" json:"vcs"` + GitlabProjectID int64 `gorm:"column:gitlab_project_id" json:"gitlab_project_id"` +} + +// TableName DiggerBatch's table name +func (*DiggerBatch) TableName() string { + return 
TableNameDiggerBatch +} diff --git a/next/model/digger_job_parent_links.gen.go b/next/model/digger_job_parent_links.gen.go new file mode 100644 index 000000000..f324c90d0 --- /dev/null +++ b/next/model/digger_job_parent_links.gen.go @@ -0,0 +1,28 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerJobParentLink = "digger_job_parent_links" + +// DiggerJobParentLink mapped from table +type DiggerJobParentLink struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + DiggerJobID string `gorm:"column:digger_job_id" json:"digger_job_id"` + ParentDiggerJobID string `gorm:"column:parent_digger_job_id" json:"parent_digger_job_id"` +} + +// TableName DiggerJobParentLink's table name +func (*DiggerJobParentLink) TableName() string { + return TableNameDiggerJobParentLink +} diff --git a/next/model/digger_job_summaries.gen.go b/next/model/digger_job_summaries.gen.go new file mode 100644 index 000000000..89dd40ba3 --- /dev/null +++ b/next/model/digger_job_summaries.gen.go @@ -0,0 +1,29 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerJobSummary = "digger_job_summaries" + +// DiggerJobSummary mapped from table +type DiggerJobSummary struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + ResourcesCreated int64 `gorm:"column:resources_created;not null" json:"resources_created"` + ResourcesDeleted int64 `gorm:"column:resources_deleted;not null" json:"resources_deleted"` + ResourcesUpdated int64 `gorm:"column:resources_updated;not null" json:"resources_updated"` +} + +// TableName DiggerJobSummary's table name +func (*DiggerJobSummary) TableName() string { + return TableNameDiggerJobSummary +} diff --git a/next/model/digger_job_tokens.gen.go b/next/model/digger_job_tokens.gen.go new file mode 100644 index 000000000..524495adf --- /dev/null +++ b/next/model/digger_job_tokens.gen.go @@ -0,0 +1,30 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerJobToken = "digger_job_tokens" + +// DiggerJobToken mapped from table +type DiggerJobToken struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + Value string `gorm:"column:value" json:"value"` + Expiry time.Time `gorm:"column:expiry" json:"expiry"` + OrganisationID int64 `gorm:"column:organisation_id" json:"organisation_id"` + Type string `gorm:"column:type" json:"type"` +} + +// TableName DiggerJobToken's table name +func (*DiggerJobToken) TableName() string { + return TableNameDiggerJobToken +} diff --git a/next/model/digger_jobs.gen.go b/next/model/digger_jobs.gen.go new file mode 100644 index 000000000..f7de72088 --- /dev/null +++ b/next/model/digger_jobs.gen.go @@ -0,0 +1,37 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerJob = "digger_jobs" + +// DiggerJob mapped from table +type DiggerJob struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + DiggerJobID string `gorm:"column:digger_job_id;not null" json:"digger_job_id"` + Status int16 `gorm:"column:status;not null" json:"status"` + BatchID string `gorm:"column:batch_id;not null" json:"batch_id"` + StatusUpdatedAt time.Time `gorm:"column:status_updated_at" json:"status_updated_at"` + DiggerJobSummaryID string `gorm:"column:digger_job_summary_id" json:"digger_job_summary_id"` + WorkflowFile string `gorm:"column:workflow_file" json:"workflow_file"` + WorkflowRunURL string `gorm:"column:workflow_run_url" json:"workflow_run_url"` + PlanFootprint []uint8 `gorm:"column:plan_footprint" json:"plan_footprint"` + PrCommentURL string `gorm:"column:pr_comment_url" json:"pr_comment_url"` + TerraformOutput string `gorm:"column:terraform_output" json:"terraform_output"` + JobSpec []uint8 `gorm:"column:job_spec" json:"job_spec"` +} + +// TableName DiggerJob's table name +func (*DiggerJob) TableName() string { + return TableNameDiggerJob +} diff --git a/next/model/digger_locks.gen.go b/next/model/digger_locks.gen.go new file mode 100644 index 000000000..55a6e7d7f --- /dev/null +++ b/next/model/digger_locks.gen.go @@ -0,0 +1,29 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerLock = "digger_locks" + +// DiggerLock mapped from table +type DiggerLock struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + Resource string `gorm:"column:resource;not null" json:"resource"` + LockID int64 `gorm:"column:lock_id;not null" json:"lock_id"` + OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` +} + +// TableName DiggerLock's table name +func (*DiggerLock) TableName() string { + return TableNameDiggerLock +} diff --git a/next/model/digger_run_queue_items.gen.go b/next/model/digger_run_queue_items.gen.go new file mode 100644 index 000000000..8c9c1bcd5 --- /dev/null +++ b/next/model/digger_run_queue_items.gen.go @@ -0,0 +1,28 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerRunQueueItem = "digger_run_queue_items" + +// DiggerRunQueueItem mapped from table +type DiggerRunQueueItem struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + CreatedAt time.Time `gorm:"column:created_at" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + DiggerRunID int64 `gorm:"column:digger_run_id" json:"digger_run_id"` + ProjectID int64 `gorm:"column:project_id" json:"project_id"` +} + +// TableName DiggerRunQueueItem's table name +func (*DiggerRunQueueItem) TableName() string { + return TableNameDiggerRunQueueItem +} diff --git a/next/model/digger_run_stages.gen.go b/next/model/digger_run_stages.gen.go new file mode 100644 index 000000000..5bc407fb9 --- /dev/null +++ b/next/model/digger_run_stages.gen.go @@ -0,0 +1,27 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerRunStage = "digger_run_stages" + +// DiggerRunStage mapped from table +type DiggerRunStage struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + BatchID string `gorm:"column:batch_id;not null" json:"batch_id"` +} + +// TableName DiggerRunStage's table name +func (*DiggerRunStage) TableName() string { + return TableNameDiggerRunStage +} diff --git a/next/model/digger_runs.gen.go b/next/model/digger_runs.gen.go new file mode 100644 index 000000000..03cda5975 --- /dev/null +++ b/next/model/digger_runs.gen.go @@ -0,0 +1,40 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameDiggerRun = "digger_runs" + +// DiggerRun mapped from table +type DiggerRun struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + Triggertype string `gorm:"column:triggertype;not null" json:"triggertype"` + PrNumber int64 `gorm:"column:pr_number" json:"pr_number"` + Status string `gorm:"column:status;not null" json:"status"` + CommitID string `gorm:"column:commit_id;not null" json:"commit_id"` + DiggerConfig string `gorm:"column:digger_config" json:"digger_config"` + GithubInstallationID int64 `gorm:"column:github_installation_id" json:"github_installation_id"` + RepoID int64 `gorm:"column:repo_id;not null" json:"repo_id"` + RunType string `gorm:"column:run_type;not null" json:"run_type"` + PlanStageID string `gorm:"column:plan_stage_id" json:"plan_stage_id"` + ApplyStageID string `gorm:"column:apply_stage_id" json:"apply_stage_id"` + ProjectName string `gorm:"column:project_name" json:"project_name"` + IsApproved bool `gorm:"column:is_approved" json:"is_approved"` + ApprovalAuthor string `gorm:"column:approval_author" json:"approval_author"` + ApprovalDate time.Time `gorm:"column:approval_date" json:"approval_date"` +} + +// TableName DiggerRun's table name +func (*DiggerRun) TableName() string { + return TableNameDiggerRun +} diff --git a/next/model/github_app_installation_links.gen.go b/next/model/github_app_installation_links.gen.go new file mode 100644 index 000000000..ffed28d7f --- /dev/null +++ b/next/model/github_app_installation_links.gen.go @@ -0,0 +1,29 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameGithubAppInstallationLink = "github_app_installation_links" + +// GithubAppInstallationLink mapped from table +type GithubAppInstallationLink struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + GithubInstallationID int64 `gorm:"column:github_installation_id;not null" json:"github_installation_id"` + OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` + Status int16 `gorm:"column:status;not null" json:"status"` +} + +// TableName GithubAppInstallationLink's table name +func (*GithubAppInstallationLink) TableName() string { + return TableNameGithubAppInstallationLink +} diff --git a/next/model/github_app_installations.gen.go b/next/model/github_app_installations.gen.go new file mode 100644 index 000000000..95f3c81d7 --- /dev/null +++ b/next/model/github_app_installations.gen.go @@ -0,0 +1,32 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameGithubAppInstallation = "github_app_installations" + +// GithubAppInstallation mapped from table +type GithubAppInstallation struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + GithubInstallationID int64 `gorm:"column:github_installation_id;not null" json:"github_installation_id"` + GithubAppID int64 `gorm:"column:github_app_id;not null" json:"github_app_id"` + AccountID int64 `gorm:"column:account_id;not null" json:"account_id"` + Login string `gorm:"column:login;not null" json:"login"` + Repo string `gorm:"column:repo" json:"repo"` + Status int64 `gorm:"column:status;not null" json:"status"` +} + +// TableName GithubAppInstallation's table name +func (*GithubAppInstallation) TableName() string { + return TableNameGithubAppInstallation +} diff --git a/next/model/github_apps.gen.go b/next/model/github_apps.gen.go new file mode 100644 index 000000000..2a2f07849 --- /dev/null +++ b/next/model/github_apps.gen.go @@ -0,0 +1,29 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameGithubApp = "github_apps" + +// GithubApp mapped from table +type GithubApp struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + GithubID int64 `gorm:"column:github_id;not null" json:"github_id"` + Name string `gorm:"column:name;not null" json:"name"` + GithubAppURL string `gorm:"column:github_app_url;not null" json:"github_app_url"` +} + +// TableName GithubApp's table name +func (*GithubApp) TableName() string { + return TableNameGithubApp +} diff --git a/next/model/internal_blog_author_posts.gen.go b/next/model/internal_blog_author_posts.gen.go new file mode 100644 index 000000000..9843951de --- /dev/null +++ b/next/model/internal_blog_author_posts.gen.go @@ -0,0 +1,18 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +const TableNameInternalBlogAuthorPost = "internal_blog_author_posts" + +// InternalBlogAuthorPost mapped from table +type InternalBlogAuthorPost struct { + AuthorID string `gorm:"column:author_id;primaryKey" json:"author_id"` + PostID string `gorm:"column:post_id;primaryKey" json:"post_id"` +} + +// TableName InternalBlogAuthorPost's table name +func (*InternalBlogAuthorPost) TableName() string { + return TableNameInternalBlogAuthorPost +} diff --git a/next/model/internal_blog_author_profiles.gen.go b/next/model/internal_blog_author_profiles.gen.go new file mode 100644 index 000000000..cc9411a16 --- /dev/null +++ b/next/model/internal_blog_author_profiles.gen.go @@ -0,0 +1,31 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. 
+// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameInternalBlogAuthorProfile = "internal_blog_author_profiles" + +// InternalBlogAuthorProfile mapped from table +type InternalBlogAuthorProfile struct { + UserID string `gorm:"column:user_id;primaryKey" json:"user_id"` + DisplayName string `gorm:"column:display_name;not null" json:"display_name"` + Bio string `gorm:"column:bio;not null" json:"bio"` + AvatarURL string `gorm:"column:avatar_url;not null" json:"avatar_url"` + WebsiteURL string `gorm:"column:website_url" json:"website_url"` + TwitterHandle string `gorm:"column:twitter_handle" json:"twitter_handle"` + FacebookHandle string `gorm:"column:facebook_handle" json:"facebook_handle"` + LinkedinHandle string `gorm:"column:linkedin_handle" json:"linkedin_handle"` + InstagramHandle string `gorm:"column:instagram_handle" json:"instagram_handle"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` +} + +// TableName InternalBlogAuthorProfile's table name +func (*InternalBlogAuthorProfile) TableName() string { + return TableNameInternalBlogAuthorProfile +} diff --git a/next/model/internal_blog_post_tags.gen.go b/next/model/internal_blog_post_tags.gen.go new file mode 100644 index 000000000..1a6842511 --- /dev/null +++ b/next/model/internal_blog_post_tags.gen.go @@ -0,0 +1,20 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +const TableNameInternalBlogPostTag = "internal_blog_post_tags" + +// InternalBlogPostTag mapped from table +type InternalBlogPostTag struct { + ID int32 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + Slug string `gorm:"column:slug;not null" json:"slug"` + Name string `gorm:"column:name;not null" json:"name"` + Description string `gorm:"column:description" json:"description"` +} + +// TableName InternalBlogPostTag's table name +func (*InternalBlogPostTag) TableName() string { + return TableNameInternalBlogPostTag +} diff --git a/next/model/internal_blog_post_tags_relationship.gen.go b/next/model/internal_blog_post_tags_relationship.gen.go new file mode 100644 index 000000000..8d37bbb53 --- /dev/null +++ b/next/model/internal_blog_post_tags_relationship.gen.go @@ -0,0 +1,18 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +const TableNameInternalBlogPostTagsRelationship = "internal_blog_post_tags_relationship" + +// InternalBlogPostTagsRelationship mapped from table +type InternalBlogPostTagsRelationship struct { + BlogPostID string `gorm:"column:blog_post_id;primaryKey" json:"blog_post_id"` + TagID int32 `gorm:"column:tag_id;primaryKey" json:"tag_id"` +} + +// TableName InternalBlogPostTagsRelationship's table name +func (*InternalBlogPostTagsRelationship) TableName() string { + return TableNameInternalBlogPostTagsRelationship +} diff --git a/next/model/internal_blog_posts.gen.go b/next/model/internal_blog_posts.gen.go new file mode 100644 index 000000000..a71d28864 --- /dev/null +++ b/next/model/internal_blog_posts.gen.go @@ -0,0 +1,32 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameInternalBlogPost = "internal_blog_posts" + +// InternalBlogPost mapped from table +type InternalBlogPost struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + Slug string `gorm:"column:slug;not null" json:"slug"` + Title string `gorm:"column:title;not null" json:"title"` + Summary string `gorm:"column:summary;not null" json:"summary"` + Content string `gorm:"column:content;not null" json:"content"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + IsFeatured bool `gorm:"column:is_featured;not null" json:"is_featured"` + Status string `gorm:"column:status;not null;default:draft" json:"status"` + CoverImage string `gorm:"column:cover_image" json:"cover_image"` + SeoData string `gorm:"column:seo_data" json:"seo_data"` + JSONContent string `gorm:"column:json_content;not null;default:{}" json:"json_content"` +} + +// TableName InternalBlogPost's table name +func (*InternalBlogPost) TableName() string { + return TableNameInternalBlogPost +} diff --git a/next/model/internal_changelog.gen.go b/next/model/internal_changelog.gen.go new file mode 100644 index 000000000..4d4e2e28e --- /dev/null +++ b/next/model/internal_changelog.gen.go @@ -0,0 +1,27 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameInternalChangelog = "internal_changelog" + +// InternalChangelog mapped from table +type InternalChangelog struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + Title string `gorm:"column:title;not null" json:"title"` + Changes string `gorm:"column:changes;not null" json:"changes"` + UserID string `gorm:"column:user_id" json:"user_id"` + CreatedAt time.Time `gorm:"column:created_at;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;default:CURRENT_TIMESTAMP" json:"updated_at"` + CoverImage string `gorm:"column:cover_image" json:"cover_image"` +} + +// TableName InternalChangelog's table name +func (*InternalChangelog) TableName() string { + return TableNameInternalChangelog +} diff --git a/next/model/internal_feedback_comments.gen.go b/next/model/internal_feedback_comments.gen.go new file mode 100644 index 000000000..4d722351b --- /dev/null +++ b/next/model/internal_feedback_comments.gen.go @@ -0,0 +1,26 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameInternalFeedbackComment = "internal_feedback_comments" + +// InternalFeedbackComment mapped from table +type InternalFeedbackComment struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + UserID string `gorm:"column:user_id;not null" json:"user_id"` + ThreadID string `gorm:"column:thread_id;not null" json:"thread_id"` + Content string `gorm:"column:content;not null" json:"content"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` +} + +// TableName InternalFeedbackComment's table name +func (*InternalFeedbackComment) TableName() string { + return TableNameInternalFeedbackComment +} diff --git a/next/model/internal_feedback_threads.gen.go b/next/model/internal_feedback_threads.gen.go new file mode 100644 index 000000000..2e37568ad --- /dev/null +++ b/next/model/internal_feedback_threads.gen.go @@ -0,0 +1,32 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameInternalFeedbackThread = "internal_feedback_threads" + +// InternalFeedbackThread mapped from table +type InternalFeedbackThread struct { + ID string `gorm:"column:id;primaryKey;default:gen_random_uuid()" json:"id"` + Title string `gorm:"column:title;not null" json:"title"` + Content string `gorm:"column:content;not null" json:"content"` + UserID string `gorm:"column:user_id;not null" json:"user_id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + Priority string `gorm:"column:priority;not null;default:low" json:"priority"` + Type string `gorm:"column:type;not null;default:general" json:"type"` + Status string `gorm:"column:status;not null;default:open" json:"status"` + AddedToRoadmap bool `gorm:"column:added_to_roadmap;not null" json:"added_to_roadmap"` + OpenForPublicDiscussion bool `gorm:"column:open_for_public_discussion;not null" json:"open_for_public_discussion"` + IsPubliclyVisible bool `gorm:"column:is_publicly_visible;not null" json:"is_publicly_visible"` +} + +// TableName InternalFeedbackThread's table name +func (*InternalFeedbackThread) TableName() string { + return TableNameInternalFeedbackThread +} diff --git a/next/model/organization_credits.gen.go b/next/model/organization_credits.gen.go new file mode 100644 index 000000000..e7460bad3 --- /dev/null +++ b/next/model/organization_credits.gen.go @@ -0,0 +1,18 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +const TableNameOrganizationCredit = "organization_credits" + +// OrganizationCredit mapped from table +type OrganizationCredit struct { + OrganizationID string `gorm:"column:organization_id;primaryKey" json:"organization_id"` + Credits int64 `gorm:"column:credits;not null;default:12" json:"credits"` +} + +// TableName OrganizationCredit's table name +func (*OrganizationCredit) TableName() string { + return TableNameOrganizationCredit +} diff --git a/next/model/organization_join_invitations.gen.go b/next/model/organization_join_invitations.gen.go new file mode 100644 index 000000000..586ea5f0b --- /dev/null +++ b/next/model/organization_join_invitations.gen.go @@ -0,0 +1,28 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameOrganizationJoinInvitation = "organization_join_invitations" + +// OrganizationJoinInvitation mapped from table +type OrganizationJoinInvitation struct { + CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` + InviterUserID string `gorm:"column:inviter_user_id;not null" json:"inviter_user_id"` + Status string `gorm:"column:status;not null;default:active" json:"status"` + ID string `gorm:"column:id;primaryKey;default:uuid_generate_v4()" json:"id"` + InviteeUserEmail string `gorm:"column:invitee_user_email;not null" json:"invitee_user_email"` + OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` + InviteeOrganizationRole string `gorm:"column:invitee_organization_role;not null;default:member" json:"invitee_organization_role"` + InviteeUserID string `gorm:"column:invitee_user_id" json:"invitee_user_id"` +} + +// TableName OrganizationJoinInvitation's table name +func (*OrganizationJoinInvitation) TableName() string { + return TableNameOrganizationJoinInvitation +} diff --git 
a/next/model/organization_members.gen.go b/next/model/organization_members.gen.go new file mode 100644 index 000000000..e3ca74808 --- /dev/null +++ b/next/model/organization_members.gen.go @@ -0,0 +1,25 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameOrganizationMember = "organization_members" + +// OrganizationMember mapped from table +type OrganizationMember struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` + MemberID string `gorm:"column:member_id;not null" json:"member_id"` + MemberRole string `gorm:"column:member_role;not null" json:"member_role"` + OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` +} + +// TableName OrganizationMember's table name +func (*OrganizationMember) TableName() string { + return TableNameOrganizationMember +} diff --git a/next/model/organizations.gen.go b/next/model/organizations.gen.go new file mode 100644 index 000000000..6c804913d --- /dev/null +++ b/next/model/organizations.gen.go @@ -0,0 +1,24 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameOrganization = "organizations" + +// Organization mapped from table +type Organization struct { + CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` + ID string `gorm:"column:id;primaryKey;default:uuid_generate_v4()" json:"id"` + Title string `gorm:"column:title;not null;default:Test Organization" json:"title"` + Slug string `gorm:"column:slug;not null;default:(gen_random_uuid())" json:"slug"` +} + +// TableName Organization's table name +func (*Organization) TableName() string { + return TableNameOrganization +} diff --git a/next/model/organizations_private_info.gen.go b/next/model/organizations_private_info.gen.go new file mode 100644 index 000000000..30e9baa2a --- /dev/null +++ b/next/model/organizations_private_info.gen.go @@ -0,0 +1,19 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +const TableNameOrganizationsPrivateInfo = "organizations_private_info" + +// OrganizationsPrivateInfo mapped from table +type OrganizationsPrivateInfo struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + BillingAddress string `gorm:"column:billing_address" json:"billing_address"` + PaymentMethod string `gorm:"column:payment_method" json:"payment_method"` +} + +// TableName OrganizationsPrivateInfo's table name +func (*OrganizationsPrivateInfo) TableName() string { + return TableNameOrganizationsPrivateInfo +} diff --git a/next/model/prices.gen.go b/next/model/prices.gen.go new file mode 100644 index 000000000..fa4b7d85e --- /dev/null +++ b/next/model/prices.gen.go @@ -0,0 +1,27 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +const TableNamePrice = "prices" + +// Price mapped from table +type Price struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + ProductID string `gorm:"column:product_id" json:"product_id"` + Active bool `gorm:"column:active" json:"active"` + Description string `gorm:"column:description" json:"description"` + UnitAmount int64 `gorm:"column:unit_amount" json:"unit_amount"` + Currency string `gorm:"column:currency" json:"currency"` + Type string `gorm:"column:type" json:"type"` + Interval string `gorm:"column:interval" json:"interval"` + IntervalCount int64 `gorm:"column:interval_count" json:"interval_count"` + TrialPeriodDays int64 `gorm:"column:trial_period_days" json:"trial_period_days"` + Metadata string `gorm:"column:metadata" json:"metadata"` +} + +// TableName Price's table name +func (*Price) TableName() string { + return TableNamePrice +} diff --git a/next/model/products.gen.go b/next/model/products.gen.go new file mode 100644 index 000000000..a34c223a8 --- /dev/null +++ b/next/model/products.gen.go @@ -0,0 +1,22 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +const TableNameProduct = "products" + +// Product mapped from table +type Product struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + Active bool `gorm:"column:active" json:"active"` + Name string `gorm:"column:name" json:"name"` + Description string `gorm:"column:description" json:"description"` + Image string `gorm:"column:image" json:"image"` + Metadata string `gorm:"column:metadata" json:"metadata"` +} + +// TableName Product's table name +func (*Product) TableName() string { + return TableNameProduct +} diff --git a/next/model/project_comments.gen.go b/next/model/project_comments.gen.go new file mode 100644 index 000000000..3ad12d211 --- /dev/null +++ b/next/model/project_comments.gen.go @@ -0,0 +1,26 @@ +// Code generated by gorm.io/gen. 
DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameProjectComment = "project_comments" + +// ProjectComment mapped from table +type ProjectComment struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;default:now()" json:"created_at"` + Text string `gorm:"column:text;not null" json:"text"` + UserID string `gorm:"column:user_id;not null" json:"user_id"` + InReplyTo int64 `gorm:"column:in_reply_to" json:"in_reply_to"` + ProjectID string `gorm:"column:project_id;not null" json:"project_id"` +} + +// TableName ProjectComment's table name +func (*ProjectComment) TableName() string { + return TableNameProjectComment +} diff --git a/next/model/projects.gen.go b/next/model/projects.gen.go new file mode 100644 index 000000000..fac8a712c --- /dev/null +++ b/next/model/projects.gen.go @@ -0,0 +1,40 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameProject = "projects" + +// Project mapped from table +type Project struct { + ID string `gorm:"column:id;primaryKey;default:uuid_generate_v4()" json:"id"` + Name string `gorm:"column:name;not null" json:"name"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` + OrganizationID string `gorm:"column:organization_id;not null" json:"organization_id"` + TeamID int64 `gorm:"column:team_id" json:"team_id"` + ProjectStatus string `gorm:"column:project_status;not null;default:draft" json:"project_status"` + Slug string `gorm:"column:slug;not null;default:(gen_random_uuid())" json:"slug"` + LatestActionOn string `gorm:"column:latest_action_on" json:"latest_action_on"` + RepoID int64 `gorm:"column:repo_id;not null" json:"repo_id"` + ConfigurationYaml string `gorm:"column:configuration_yaml" json:"configuration_yaml"` + Status string `gorm:"column:status" json:"status"` + IsGenerated bool `gorm:"column:is_generated" json:"is_generated"` + IsInMainBranch bool `gorm:"column:is_in_main_branch" json:"is_in_main_branch"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + TerraformWorkingDir string `gorm:"column:terraform_working_dir" json:"terraform_working_dir"` + IsManagingState bool `gorm:"column:is_managing_state" json:"is_managing_state"` + Labels string `gorm:"column:labels" json:"labels"` +} + +// TableName Project's table name +func (*Project) TableName() string { + return TableNameProject +} diff --git a/next/model/repos.gen.go b/next/model/repos.gen.go new file mode 100644 index 000000000..42ad490fb --- /dev/null +++ b/next/model/repos.gen.go @@ -0,0 +1,33 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" + + "gorm.io/gorm" +) + +const TableNameRepo = "repos" + +// Repo mapped from table +type Repo struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;default:now()" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"` + DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"deleted_at"` + Name string `gorm:"column:name;not null" json:"name"` + OrganizationID string `gorm:"column:organization_id" json:"organization_id"` + DiggerConfig string `gorm:"column:digger_config" json:"digger_config"` + RepoName string `gorm:"column:repo_name" json:"repo_name"` + RepoFullName string `gorm:"column:repo_full_name" json:"repo_full_name"` + RepoOrganisation string `gorm:"column:repo_organisation" json:"repo_organisation"` + RepoURL string `gorm:"column:repo_url" json:"repo_url"` +} + +// TableName Repo's table name +func (*Repo) TableName() string { + return TableNameRepo +} diff --git a/next/model/subscriptions.gen.go b/next/model/subscriptions.gen.go new file mode 100644 index 000000000..15c93db09 --- /dev/null +++ b/next/model/subscriptions.gen.go @@ -0,0 +1,35 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameSubscription = "subscriptions" + +// Subscription mapped from table +type Subscription struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + Status string `gorm:"column:status" json:"status"` + Metadata string `gorm:"column:metadata" json:"metadata"` + PriceID string `gorm:"column:price_id" json:"price_id"` + Quantity int64 `gorm:"column:quantity" json:"quantity"` + CancelAtPeriodEnd bool `gorm:"column:cancel_at_period_end" json:"cancel_at_period_end"` + Created time.Time `gorm:"column:created;not null" json:"created"` + CurrentPeriodStart time.Time `gorm:"column:current_period_start;not null" json:"current_period_start"` + CurrentPeriodEnd time.Time `gorm:"column:current_period_end;not null" json:"current_period_end"` + EndedAt time.Time `gorm:"column:ended_at" json:"ended_at"` + CancelAt time.Time `gorm:"column:cancel_at" json:"cancel_at"` + CanceledAt time.Time `gorm:"column:canceled_at" json:"canceled_at"` + TrialStart time.Time `gorm:"column:trial_start" json:"trial_start"` + TrialEnd time.Time `gorm:"column:trial_end" json:"trial_end"` + OrganizationID string `gorm:"column:organization_id" json:"organization_id"` +} + +// TableName Subscription's table name +func (*Subscription) TableName() string { + return TableNameSubscription +} diff --git a/next/model/user_api_keys.gen.go b/next/model/user_api_keys.gen.go new file mode 100644 index 000000000..8c38d5e6c --- /dev/null +++ b/next/model/user_api_keys.gen.go @@ -0,0 +1,26 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameUserAPIKey = "user_api_keys" + +// UserAPIKey mapped from table +type UserAPIKey struct { + KeyID string `gorm:"column:key_id;primaryKey" json:"key_id"` + MaskedKey string `gorm:"column:masked_key;not null" json:"masked_key"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` + UserID string `gorm:"column:user_id;not null" json:"user_id"` + ExpiresAt time.Time `gorm:"column:expires_at" json:"expires_at"` + IsRevoked bool `gorm:"column:is_revoked;not null" json:"is_revoked"` +} + +// TableName UserAPIKey's table name +func (*UserAPIKey) TableName() string { + return TableNameUserAPIKey +} diff --git a/next/model/user_notifications.gen.go b/next/model/user_notifications.gen.go new file mode 100644 index 000000000..70014efd3 --- /dev/null +++ b/next/model/user_notifications.gen.go @@ -0,0 +1,27 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +import ( + "time" +) + +const TableNameUserNotification = "user_notifications" + +// UserNotification mapped from table +type UserNotification struct { + ID string `gorm:"column:id;primaryKey;default:uuid_generate_v4()" json:"id"` + UserID string `gorm:"column:user_id" json:"user_id"` + IsRead bool `gorm:"column:is_read;not null" json:"is_read"` + IsSeen bool `gorm:"column:is_seen;not null" json:"is_seen"` + Payload string `gorm:"column:payload;not null;default:{}" json:"payload"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:CURRENT_TIMESTAMP" json:"created_at"` + UpdatedAt time.Time `gorm:"column:updated_at;not null;default:CURRENT_TIMESTAMP" json:"updated_at"` +} + +// TableName UserNotification's table name +func (*UserNotification) TableName() string { + return TableNameUserNotification +} diff --git a/next/model/user_onboarding.gen.go b/next/model/user_onboarding.gen.go new file mode 100644 index 000000000..6430f4673 --- /dev/null +++ b/next/model/user_onboarding.gen.go @@ -0,0 +1,23 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameUserOnboarding = "user_onboarding" + +// UserOnboarding mapped from table +type UserOnboarding struct { + UserID string `gorm:"column:user_id;primaryKey" json:"user_id"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` + AcceptedTerms bool `gorm:"column:accepted_terms;not null" json:"accepted_terms"` +} + +// TableName UserOnboarding's table name +func (*UserOnboarding) TableName() string { + return TableNameUserOnboarding +} diff --git a/next/model/user_private_info.gen.go b/next/model/user_private_info.gen.go new file mode 100644 index 000000000..a78e28243 --- /dev/null +++ b/next/model/user_private_info.gen.go @@ -0,0 +1,23 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. 
+// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameUserPrivateInfo = "user_private_info" + +// UserPrivateInfo mapped from table +type UserPrivateInfo struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + CreatedAt time.Time `gorm:"column:created_at;default:now()" json:"created_at"` + DefaultOrganization string `gorm:"column:default_organization" json:"default_organization"` +} + +// TableName UserPrivateInfo's table name +func (*UserPrivateInfo) TableName() string { + return TableNameUserPrivateInfo +} diff --git a/next/model/user_profiles.gen.go b/next/model/user_profiles.gen.go new file mode 100644 index 000000000..f8fcb1d87 --- /dev/null +++ b/next/model/user_profiles.gen.go @@ -0,0 +1,24 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package model + +import ( + "time" +) + +const TableNameUserProfile = "user_profiles" + +// UserProfile mapped from table +type UserProfile struct { + ID string `gorm:"column:id;primaryKey" json:"id"` + FullName string `gorm:"column:full_name" json:"full_name"` + AvatarURL string `gorm:"column:avatar_url" json:"avatar_url"` + CreatedAt time.Time `gorm:"column:created_at;not null;default:now()" json:"created_at"` +} + +// TableName UserProfile's table name +func (*UserProfile) TableName() string { + return TableNameUserProfile +} diff --git a/next/model/user_roles.gen.go b/next/model/user_roles.gen.go new file mode 100644 index 000000000..882b69b4d --- /dev/null +++ b/next/model/user_roles.gen.go @@ -0,0 +1,19 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. 
+ +package model + +const TableNameUserRole = "user_roles" + +// UserRole mapped from table +type UserRole struct { + ID int64 `gorm:"column:id;primaryKey;autoIncrement:true" json:"id"` + UserID string `gorm:"column:user_id;not null" json:"user_id"` + Role string `gorm:"column:role;not null" json:"role"` +} + +// TableName UserRole's table name +func (*UserRole) TableName() string { + return TableNameUserRole +} diff --git a/next/models/github.go b/next/models/github.go index 94b7efc52..425662fe2 100644 --- a/next/models/github.go +++ b/next/models/github.go @@ -1,14 +1,5 @@ package models -import "gorm.io/gorm" - -type GithubApp struct { - gorm.Model - GithubId int64 - Name string - GithubAppUrl string -} - type GithubAppInstallStatus int const ( @@ -16,28 +7,9 @@ const ( GithubAppInstallDeleted GithubAppInstallStatus = 2 ) -type GithubAppInstallation struct { - gorm.Model - GithubInstallationId int64 - GithubAppId int64 - AccountId int - Login string - Repo string - Status GithubAppInstallStatus -} - type GithubAppInstallationLinkStatus int8 const ( GithubAppInstallationLinkActive GithubAppInstallationLinkStatus = 1 GithubAppInstallationLinkInactive GithubAppInstallationLinkStatus = 2 ) - -// GithubAppInstallationLink links GitHub App installation Id to Digger's organisation Id -type GithubAppInstallationLink struct { - gorm.Model - GithubInstallationId int64 `gorm:"index:idx_github_installation_org"` - OrganisationId uint `gorm:"index:idx_github_installation_org"` - Organisation *Organisation - Status GithubAppInstallationLinkStatus -} diff --git a/next/models/locking.go b/next/models/locking.go deleted file mode 100644 index 2753592e6..000000000 --- a/next/models/locking.go +++ /dev/null @@ -1,11 +0,0 @@ -package models - -import "gorm.io/gorm" - -type DiggerLock struct { - gorm.Model - Resource string `gorm:"index:idx_digger_locked_resource"` - LockId int - Organisation *Organisation - OrganisationID uint -} diff --git a/next/models/orgs.go 
b/next/models/orgs.go index c58718ad4..97bdf55fa 100644 --- a/next/models/orgs.go +++ b/next/models/orgs.go @@ -1,69 +1,5 @@ package models -import ( - "gorm.io/gorm" - "time" -) - -type Organisation struct { - gorm.Model - Name string `gorm:"uniqueIndex:idx_organisation"` - ExternalSource string `gorm:"uniqueIndex:idx_external_source"` - ExternalId string `gorm:"uniqueIndex:idx_external_source"` -} - -type Repo struct { - gorm.Model - Name string `gorm:"uniqueIndex:idx_org_repo"` - RepoFullName string - RepoOrganisation string - RepoName string - RepoUrl string - OrganisationID uint `gorm:"uniqueIndex:idx_org_repo"` - Organisation *Organisation - DiggerConfig string -} - -type ProjectRun struct { - gorm.Model - ProjectID uint - Project *Project - StartedAt int64 - EndedAt int64 - Status string - Command string - Output string - ActorUsername string -} - -func (p *ProjectRun) MapToJsonStruct() interface{} { - return struct { - Id uint `json:"id"` - ProjectID uint `json:"project_id"` - ProjectName string `json:"project_name"` - RepoFullName string `json:"repo_full_name"` - RepoUrl string `json:"repo_url"` - ActorUsername string `json:"actor_username"` - StartedAt time.Time `json:"started_at"` - EndedAt time.Time `json:"ended_at"` - Status string `json:"status"` - Command string `json:"command"` - Output string `json:"output"` - }{ - Id: p.ID, - ProjectID: p.ProjectID, - ProjectName: p.Project.Name, - RepoUrl: p.Project.Repo.RepoUrl, - RepoFullName: p.Project.Repo.RepoFullName, - StartedAt: time.UnixMilli(p.StartedAt), - EndedAt: time.UnixMilli(p.EndedAt), - Status: p.Status, - Command: p.Command, - Output: p.Output, - ActorUsername: p.ActorUsername, - } -} - type ProjectStatus int const ( @@ -71,68 +7,6 @@ const ( ProjectInactive ProjectStatus = 2 ) -type Project struct { - gorm.Model - Name string `gorm:"uniqueIndex:idx_project"` - OrganisationID uint `gorm:"uniqueIndex:idx_project"` - Organisation *Organisation - RepoID uint `gorm:"uniqueIndex:idx_project"` - Repo 
*Repo - ConfigurationYaml string // TODO: probably needs to be deleted - Status ProjectStatus - IsGenerated bool - IsInMainBranch bool -} - -func (p *Project) MapToJsonStruct() interface{} { - lastRun, _ := DB.GetLastDiggerRunForProject(p.Name) - status := RunSucceeded - if lastRun != nil { - status = lastRun.Status - } - return struct { - Id uint `json:"id"` - Name string `json:"name"` - Directory string `json:"directory"` - OrganisationID uint `json:"organisation_id"` - OrganisationName string `json:"organisation_name"` - RepoID uint `json:"repo_id"` - RepoFullName string `json:"repo_full_name"` - RepoName string `json:"repo_name"` - RepoOrg string `json:"repo_org"` - RepoUrl string `json:"repo_url"` - IsInMainBranch bool `json:"is_in_main_branch"` - IsGenerated bool `json:"is_generated"` - LastActivityTimestamp string `json:"last_activity_timestamp"` - LastActivityAuthor string `json:"last_activity_author"` - LastActivityStatus string `json:"last_activity_status"` - }{ - Id: p.ID, - Name: p.Name, - OrganisationID: p.OrganisationID, - RepoID: p.RepoID, - OrganisationName: p.Organisation.Name, - RepoFullName: p.Repo.RepoFullName, - RepoName: p.Repo.RepoName, - RepoOrg: p.Repo.RepoOrganisation, - RepoUrl: p.Repo.RepoUrl, - LastActivityTimestamp: p.UpdatedAt.String(), - LastActivityAuthor: "unknown", - LastActivityStatus: string(status), - IsGenerated: p.IsGenerated, - IsInMainBranch: p.IsInMainBranch, - } - -} - -type Token struct { - gorm.Model - Value string `gorm:"uniqueIndex:idx_token"` - OrganisationID uint - Organisation *Organisation - Type string -} - const ( AccessPolicyType = "access" AdminPolicyType = "admin" diff --git a/next/models/policies.go b/next/models/policies.go deleted file mode 100644 index b68f0c8ed..000000000 --- a/next/models/policies.go +++ /dev/null @@ -1,23 +0,0 @@ -package models - -import "gorm.io/gorm" - -const ( - POLICY_TYPE_ACCESS = "access" - POLICY_TYPE_PLAN = "plan" - POLICY_TYPE_DRIFT = "drift" -) - -type Policy struct { - 
gorm.Model - Project *Project - ProjectID *uint - Policy string - Type string - CreatedBy *User - CreatedByID *uint - Organisation *Organisation - OrganisationID uint - Repo *Repo - RepoID *uint -} diff --git a/next/models/runs.go b/next/models/runs.go index 8512580d7..fca1361b3 100644 --- a/next/models/runs.go +++ b/next/models/runs.go @@ -1,12 +1,5 @@ package models -import ( - orchestrator_scheduler "github.com/diggerhq/digger/libs/scheduler" - "gorm.io/gorm" - "log" - "time" -) - type DiggerRunStatus string const ( @@ -27,108 +20,3 @@ const ( PlanAndApply RunType = "Plan and Apply" PlanOnly RunType = "Plan Only" ) - -type DiggerRunQueueItem struct { - gorm.Model - DiggerRunId uint `gorm:"index:idx_digger_run_queue_run_id"` - DiggerRun DiggerRun - ProjectId uint - Project *Project - time time.Time -} - -type DiggerRun struct { - gorm.Model - Triggertype string // pr_merge, manual_invocation, push_to_trunk - PrNumber *int - Status DiggerRunStatus - CommitId string - DiggerConfig string - GithubInstallationId int64 - RepoId uint - Repo *Repo - ProjectName string - RunType RunType - PlanStage DiggerRunStage - PlanStageId *uint - ApplyStage DiggerRunStage - ApplyStageId *uint - IsApproved bool - ApprovalAuthor string - ApprovalDate time.Time -} - -type DiggerRunStage struct { - gorm.Model - Batch *DiggerBatch - BatchID *string `gorm:"index:idx_digger_run_batch_id"` -} - -type SerializedRunStage struct { - //DiggerRunId uint `json:"digger_run_id"` - DiggerJobId string `json:"digger_job_id"` - Status orchestrator_scheduler.DiggerJobStatus `json:"status"` - ProjectName string `json:"project_name"` - WorkflowRunUrl *string `json:"workflow_run_url"` - ResourcesCreated uint `json:"resources_created"` - ResourcesDeleted uint `json:"resources_deleted"` - ResourcesUpdated uint `json:"resources_updated"` - LastActivityTimeStamp string `json:"last_activity_timestamp"` -} - -func (r *DiggerRun) MapToJsonStruct() (interface{}, error) { - planStage, err := 
r.PlanStage.MapToJsonStruct() - if err != nil { - log.Printf("error serializing run: %v", err) - return nil, err - } - - applyStage, err := r.ApplyStage.MapToJsonStruct() - if err != nil { - log.Printf("error serializing run: %v", err) - return nil, err - } - - x := struct { - Id uint `json:"id"` - Status string `json:"status"` - Type string `json:"type"` - ApprovalAuthor string `json:"approval_author"` - ApprovalDate string `json:"approval_date"` - LastActivityTimeStamp string `json:"last_activity_time_stamp"` - PlanStage SerializedRunStage `json:"plan_stage"` - ApplyStage SerializedRunStage `json:"apply_stage"` - IsApproved bool `json:"is_approved"` - }{ - Id: r.ID, - Status: string(r.Status), - Type: string(r.RunType), - LastActivityTimeStamp: r.UpdatedAt.String(), - PlanStage: *planStage, - ApplyStage: *applyStage, - IsApproved: r.IsApproved, - ApprovalAuthor: r.ApprovalAuthor, - ApprovalDate: r.ApprovalDate.String(), - } - - return x, nil -} - -func (r DiggerRunStage) MapToJsonStruct() (*SerializedRunStage, error) { - job, err := DB.GetDiggerJobFromRunStage(r) - if err != nil { - log.Printf("Could not retrive job from run") - return nil, err - } - - return &SerializedRunStage{ - DiggerJobId: job.DiggerJobID, - Status: job.Status, - //ProjectName: r.Run.ProjectName, - WorkflowRunUrl: job.WorkflowRunUrl, - ResourcesCreated: job.DiggerJobSummary.ResourcesCreated, - ResourcesUpdated: job.DiggerJobSummary.ResourcesUpdated, - ResourcesDeleted: job.DiggerJobSummary.ResourcesDeleted, - LastActivityTimeStamp: r.UpdatedAt.String(), - }, nil -} diff --git a/next/models/scheduler.go b/next/models/scheduler.go index 0d0943b01..a7667ce8a 100644 --- a/next/models/scheduler.go +++ b/next/models/scheduler.go @@ -1,139 +1,13 @@ package models -import ( - "encoding/json" - "fmt" - orchestrator_scheduler "github.com/diggerhq/digger/libs/scheduler" - "github.com/google/uuid" - "gorm.io/gorm" - "log" - "time" -) - -type DiggerJobParentLink struct { - gorm.Model - DiggerJobId string 
`gorm:"size:50,index:idx_digger_job_id"` - ParentDiggerJobId string `gorm:"size:50,index:idx_parent_digger_job_id"` -} - type DiggerVCSType string const DiggerVCSGithub DiggerVCSType = "github" const DiggerVCSGitlab DiggerVCSType = "gitlab" -type DiggerBatch struct { - ID uuid.UUID `gorm:"primary_key"` - VCS DiggerVCSType - PrNumber int - CommentId *int64 - Status orchestrator_scheduler.DiggerBatchStatus - BranchName string - DiggerConfig string - GithubInstallationId int64 - GitlabProjectId int - RepoFullName string - RepoOwner string - RepoName string - BatchType orchestrator_scheduler.DiggerCommand - // used for module source grouping comments - SourceDetails []byte -} - -type DiggerJob struct { - gorm.Model - DiggerJobID string `gorm:"size:50,index:idx_digger_job_id"` - Status orchestrator_scheduler.DiggerJobStatus - Batch *DiggerBatch - BatchID *string `gorm:"index:idx_digger_job_id"` - PRCommentUrl string - DiggerJobSummary DiggerJobSummary - DiggerJobSummaryID uint - SerializedJobSpec []byte - TerraformOutput string - // represents a footprint of terraform plan json for similarity checks - PlanFootprint []byte - WorkflowFile string - WorkflowRunUrl *string - StatusUpdatedAt time.Time -} - -type DiggerJobSummary struct { - gorm.Model - ResourcesCreated uint - ResourcesDeleted uint - ResourcesUpdated uint -} - -// These tokens will be pre -type JobToken struct { - gorm.Model - Value string `gorm:"uniqueJobTokenIndex:idx_token"` - Expiry time.Time - OrganisationID uint - Organisation Organisation - Type string // AccessTokenType starts with j: -} - type DiggerJobLinkStatus int8 const ( DiggerJobLinkCreated DiggerJobLinkStatus = 1 DiggerJobLinkSucceeded DiggerJobLinkStatus = 2 ) - -// GithubDiggerJobLink links GitHub Workflow Job id to Digger's Job Id -type GithubDiggerJobLink struct { - gorm.Model - DiggerJobId string `gorm:"size:50,index:idx_digger_job_id"` - RepoFullName string - GithubJobId int64 `gorm:"index:idx_github_job_id"` - GithubWorkflowRunId int64 - 
Status DiggerJobLinkStatus -} - -func (j *DiggerJob) MapToJsonStruct() (orchestrator_scheduler.SerializedJob, error) { - var job orchestrator_scheduler.JobJson - err := json.Unmarshal(j.SerializedJobSpec, &job) - if err != nil { - log.Printf("Failed to convert unmarshall Serialized job, %v", err) - } - return orchestrator_scheduler.SerializedJob{ - DiggerJobId: j.DiggerJobID, - Status: j.Status, - JobString: j.SerializedJobSpec, - PlanFootprint: j.PlanFootprint, - ProjectName: job.ProjectName, - WorkflowRunUrl: j.WorkflowRunUrl, - PRCommentUrl: j.PRCommentUrl, - ResourcesCreated: j.DiggerJobSummary.ResourcesCreated, - ResourcesUpdated: j.DiggerJobSummary.ResourcesUpdated, - ResourcesDeleted: j.DiggerJobSummary.ResourcesDeleted, - }, nil -} -func (b *DiggerBatch) MapToJsonStruct() (orchestrator_scheduler.SerializedBatch, error) { - res := orchestrator_scheduler.SerializedBatch{ - ID: b.ID.String(), - PrNumber: b.PrNumber, - Status: b.Status, - BranchName: b.BranchName, - RepoFullName: b.RepoFullName, - RepoOwner: b.RepoOwner, - RepoName: b.RepoName, - BatchType: b.BatchType, - } - - serializedJobs := make([]orchestrator_scheduler.SerializedJob, 0) - jobs, err := DB.GetDiggerJobsForBatch(b.ID) - if err != nil { - return res, fmt.Errorf("could not unmarshall digger batch: %v", err) - } - for _, job := range jobs { - jobJson, err := job.MapToJsonStruct() - if err != nil { - return res, fmt.Errorf("error mapping job to struct (ID: %v); %v", job.ID, err) - } - serializedJobs = append(serializedJobs, jobJson) - } - res.Jobs = serializedJobs - return res, nil -} diff --git a/next/models/scheduler_test.go b/next/models/scheduler_test.go deleted file mode 100644 index 84ebbe5f9..000000000 --- a/next/models/scheduler_test.go +++ /dev/null @@ -1,133 +0,0 @@ -package models - -import ( - "github.com/google/uuid" - "github.com/stretchr/testify/assert" - "gorm.io/driver/sqlite" - "gorm.io/gorm" - "log" - "os" - "strings" - "testing" -) - -func init() { - log.SetOutput(os.Stdout) 
- log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile) -} - -func setupSuiteScheduler(tb testing.TB) (func(tb testing.TB), *Database) { - log.Println("setup suite") - - // database file name - dbName := "database_test.db" - - // remove old database - e := os.Remove(dbName) - if e != nil { - if !strings.Contains(e.Error(), "no such file or directory") { - log.Fatal(e) - } - } - - // open and create a new database - gdb, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{}) - if err != nil { - log.Fatal(err) - } - - // migrate tables - err = gdb.AutoMigrate(&Policy{}, &Organisation{}, &Repo{}, &Project{}, &Token{}, - &User{}, &ProjectRun{}, &GithubAppInstallation{}, &GithubApp{}, &GithubAppInstallationLink{}, - &GithubDiggerJobLink{}, &DiggerJob{}, &DiggerJobParentLink{}) - if err != nil { - log.Fatal(err) - } - - database := &Database{GormDB: gdb} - - orgTenantId := "11111111-1111-1111-1111-111111111111" - externalSource := "test" - orgName := "testOrg" - org, err := database.CreateOrganisation(orgName, externalSource, orgTenantId) - if err != nil { - log.Fatal(err) - } - - repoName := "test repo" - repo, err := database.CreateRepo(repoName, "", "", "", "", org, "") - if err != nil { - log.Fatal(err) - } - - projectName := "test project" - _, err = database.CreateProject(projectName, org, repo, false, false) - if err != nil { - log.Fatal(err) - } - - // Return a function to teardown the test - return func(tb testing.TB) { - log.Println("teardown suite") - e := os.Remove(dbName) - if e != nil { - if !strings.Contains(e.Error(), "no such file or directory") { - log.Fatal(e) - } - } - }, database -} - -func TestCreateDiggerJob(t *testing.T) { - teardownSuite, database := setupSuiteScheduler(t) - defer teardownSuite(t) - - batchId, _ := uuid.NewUUID() - job, err := database.CreateDiggerJob(batchId, []byte{100}, "digger_workflow.yml") - - assert.NoError(t, err) - assert.NotNil(t, job) - assert.NotZero(t, job.ID) -} - -func TestCreateSingleJob(t *testing.T) { - 
teardownSuite, database := setupSuiteScheduler(t) - defer teardownSuite(t) - - batchId, _ := uuid.NewUUID() - job, err := database.CreateDiggerJob(batchId, []byte{100}, "digger_workflow.yml") - - assert.NoError(t, err) - assert.NotNil(t, job) - assert.NotZero(t, job.ID) -} - -func TestFindDiggerJobsByParentJobId(t *testing.T) { - teardownSuite, database := setupSuiteScheduler(t) - defer teardownSuite(t) - - batchId, _ := uuid.NewUUID() - job, err := database.CreateDiggerJob(batchId, []byte{100}, "digger_workflow.yml") - parentJobId := job.DiggerJobID - assert.NoError(t, err) - assert.NotNil(t, job) - assert.NotZero(t, job.ID) - - job, err = database.CreateDiggerJob(batchId, []byte{100}, "digger_workflow.yml") - assert.NoError(t, err) - assert.NotNil(t, job) - assert.NotZero(t, job.ID) - err = database.CreateDiggerJobParentLink(parentJobId, job.DiggerJobID) - assert.Nil(t, err) - - job, err = database.CreateDiggerJob(batchId, []byte{100}, "digger_workflow.yml") - assert.NoError(t, err) - assert.NotNil(t, job) - err = database.CreateDiggerJobParentLink(parentJobId, job.DiggerJobID) - assert.Nil(t, err) - assert.NotZero(t, job.ID) - - jobs, err := database.GetDiggerJobParentLinksByParentId(&parentJobId) - assert.NoError(t, err) - assert.Equal(t, 2, len(jobs)) -} diff --git a/next/models/setup.go b/next/models/setup.go index 4a7ceff92..de7b86794 100644 --- a/next/models/setup.go +++ b/next/models/setup.go @@ -1,26 +1,25 @@ package models import ( + "github.com/diggerhq/digger/next/models_generated" "gorm.io/driver/postgres" _ "gorm.io/driver/postgres" "gorm.io/gorm" "gorm.io/gorm/logger" - "log" "os" ) type Database struct { GormDB *gorm.DB + Query *models_generated.Query } -var DEFAULT_ORG_NAME = "digger" - // var DB *gorm.DB var DB *Database func ConnectDatabase() { - database, err := gorm.Open(postgres.Open(os.Getenv("DATABASE_URL")), &gorm.Config{ + database, err := gorm.Open(postgres.Open(os.Getenv("DIGGER_DATABASE_URL")), &gorm.Config{ Logger: 
logger.Default.LogMode(logger.Info), }) @@ -28,13 +27,10 @@ func ConnectDatabase() { panic("Failed to connect to database!") } - DB = &Database{GormDB: database} - - // data and fixtures added - orgNumberOne, err := DB.GetOrganisation(DEFAULT_ORG_NAME) - if orgNumberOne == nil { - log.Print("No default found, creating default organisation") - DB.CreateOrganisation("digger", "", DEFAULT_ORG_NAME) + query := models_generated.Use(database) + DB = &Database{ + Query: query, + GormDB: database, } } diff --git a/next/models/storage.go b/next/models/storage.go index 599718ad3..121d83b53 100644 --- a/next/models/storage.go +++ b/next/models/storage.go @@ -5,17 +5,17 @@ import ( "fmt" "github.com/dchest/uniuri" configuration "github.com/diggerhq/digger/libs/digger_config" - scheduler "github.com/diggerhq/digger/libs/scheduler" + "github.com/diggerhq/digger/libs/scheduler" + "github.com/diggerhq/digger/next/model" "github.com/gin-gonic/gin" "github.com/google/uuid" - "github.com/samber/lo" "gorm.io/gorm" "log" "net/http" "time" ) -func (db *Database) GetProjectsFromContext(c *gin.Context, orgIdKey string) ([]Project, bool) { +func (db *Database) GetProjectsFromContext(c *gin.Context, orgIdKey string) ([]model.Project, bool) { loggedInOrganisationId, exists := c.Get(orgIdKey) log.Printf("getProjectsFromContext, org id: %v\n", loggedInOrganisationId) @@ -25,12 +25,12 @@ func (db *Database) GetProjectsFromContext(c *gin.Context, orgIdKey string) ([]P return nil, false } - var projects []Project + var projects []model.Project err := db.GormDB.Preload("Organisation").Preload("Repo"). Joins("INNER JOIN repos ON projects.repo_id = repos.id"). - Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", loggedInOrganisationId).Find(&projects).Error + Joins("INNER JOIN organizations ON projects.organization_id = organizations.id"). 
+ Where("projects.organization_id = ?", loggedInOrganisationId).Find(&projects).Error if err != nil { log.Printf("Unknown error occurred while fetching database, %v\n", err) @@ -41,7 +41,7 @@ func (db *Database) GetProjectsFromContext(c *gin.Context, orgIdKey string) ([]P return projects, true } -func (db *Database) GetReposFromContext(c *gin.Context, orgIdKey string) ([]Repo, bool) { +func (db *Database) GetReposFromContext(c *gin.Context, orgIdKey string) ([]model.Repo, bool) { loggedInOrganisationId, exists := c.Get(orgIdKey) log.Printf("GetReposFromContext, org id: %v\n", loggedInOrganisationId) @@ -51,11 +51,11 @@ func (db *Database) GetReposFromContext(c *gin.Context, orgIdKey string) ([]Repo return nil, false } - var repos []Repo + var repos []model.Repo err := db.GormDB.Preload("Organisation"). - Joins("INNER JOIN organisations ON repos.organisation_id = organisations.id"). - Where("repos.organisation_id = ?", loggedInOrganisationId).Find(&repos).Error + Joins("INNER JOIN organizations ON repos.organization_id = organizations.id"). + Where("repos.organization_id = ?", loggedInOrganisationId).Find(&repos).Error if err != nil { log.Printf("Unknown error occurred while fetching database, %v\n", err) @@ -66,95 +66,95 @@ func (db *Database) GetReposFromContext(c *gin.Context, orgIdKey string) ([]Repo return repos, true } -func (db *Database) GetPoliciesFromContext(c *gin.Context, orgIdKey string) ([]Policy, bool) { - loggedInOrganisationId, exists := c.Get(orgIdKey) - - log.Printf("getPoliciesFromContext, org id: %v\n", loggedInOrganisationId) - - if !exists { - c.String(http.StatusForbidden, "Not allowed to access this resource") - return nil, false - } - - var policies []Policy - - err := db.GormDB.Preload("Organisation").Preload("Repo").Preload("Project"). - Joins("LEFT JOIN projects ON projects.id = policies.project_id"). - Joins("LEFT JOIN repos ON projects.repo_id = repos.id"). 
- Joins("LEFT JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", loggedInOrganisationId).Find(&policies).Error - - if err != nil { - log.Printf("Unknown error occurred while fetching database, %v\n", err) - return nil, false - } - - log.Printf("getPoliciesFromContext, number of policies:%d\n", len(policies)) - return policies, true -} - -func (db *Database) GetProjectRunsForOrg(orgId int) ([]ProjectRun, error) { - var runs []ProjectRun - - err := db.GormDB.Preload("Project").Preload("Project.Organisation").Preload("Project.Repo"). - Joins("INNER JOIN projects ON projects.id = project_runs.project_id"). - Joins("INNER JOIN repos ON projects.repo_id = repos.id"). - Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", orgId).Order("created_at desc").Limit(100).Find(&runs).Error - - if err != nil { - log.Printf("Unknown error occurred while fetching database, %v\n", err) - return nil, fmt.Errorf("unknown error occurred while fetching database, %v\n", err) - } - - log.Printf("getProjectRunsFromContext, number of runs:%d\n", len(runs)) - return runs, nil -} - -func (db *Database) GetProjectRunsFromContext(c *gin.Context, orgIdKey string) ([]ProjectRun, bool) { - loggedInOrganisationId, exists := c.Get(orgIdKey) - - log.Printf("getProjectRunsFromContext, org id: %v\n", loggedInOrganisationId) - - if !exists { - c.String(http.StatusForbidden, "Not allowed to access this resource") - return nil, false - } - - runs, err := db.GetProjectRunsForOrg(loggedInOrganisationId.(int)) - if err != nil { - return nil, false - } - return runs, true - -} - -func (db *Database) GetProjectByRunId(c *gin.Context, runId uint, orgIdKey string) (*ProjectRun, bool) { - loggedInOrganisationId, exists := c.Get(orgIdKey) - if !exists { - c.String(http.StatusForbidden, "Not allowed to access this resource") - return nil, false - } - - log.Printf("GetProjectByRunId, org id: 
%v\n", loggedInOrganisationId) - var projectRun ProjectRun - - err := db.GormDB.Preload("Project").Preload("Project.Organisation").Preload("Project.Repo"). - Joins("INNER JOIN projects ON projects.id = project_runs.project_id"). - Joins("INNER JOIN repos ON projects.repo_id = repos.id"). - Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", loggedInOrganisationId). - Where("project_runs.id = ?", runId).First(&projectRun).Error - - if err != nil { - log.Printf("Unknown error occurred while fetching database, %v\n", err) - return nil, false - } - - return &projectRun, true -} - -func (db *Database) GetProjectByProjectId(c *gin.Context, projectId uint, orgIdKey string) (*Project, bool) { +//func (db *Database) GetPoliciesFromContext(c *gin.Context, orgIdKey string) ([]Policy, bool) { +// loggedInOrganisationId, exists := c.Get(orgIdKey) +// +// log.Printf("getPoliciesFromContext, org id: %v\n", loggedInOrganisationId) +// +// if !exists { +// c.String(http.StatusForbidden, "Not allowed to access this resource") +// return nil, false +// } +// +// var policies []Policy +// +// err := db.GormDB.Preload("Organisation").Preload("Repo").Preload("Project"). +// Joins("LEFT JOIN projects ON projects.id = policies.project_id"). +// Joins("LEFT JOIN repos ON projects.repo_id = repos.id"). +// Joins("LEFT JOIN organisations ON projects.organisation_id = organisations.id"). +// Where("projects.organisation_id = ?", loggedInOrganisationId).Find(&policies).Error +// +// if err != nil { +// log.Printf("Unknown error occurred while fetching database, %v\n", err) +// return nil, false +// } +// +// log.Printf("getPoliciesFromContext, number of policies:%d\n", len(policies)) +// return policies, true +//} + +//func (db *Database) GetProjectRunsForOrg(orgId int) ([]ProjectRun, error) { +// var runs []ProjectRun +// +// err := db.GormDB.Preload("Project").Preload("Project.Organisation").Preload("Project.Repo"). 
+// Joins("INNER JOIN projects ON projects.id = project_runs.project_id"). +// Joins("INNER JOIN repos ON projects.repo_id = repos.id"). +// Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). +// Where("projects.organisation_id = ?", orgId).Order("created_at desc").Limit(100).Find(&runs).Error +// +// if err != nil { +// log.Printf("Unknown error occurred while fetching database, %v\n", err) +// return nil, fmt.Errorf("unknown error occurred while fetching database, %v\n", err) +// } +// +// log.Printf("getProjectRunsFromContext, number of runs:%d\n", len(runs)) +// return runs, nil +//} + +//func (db *Database) GetProjectRunsFromContext(c *gin.Context, orgIdKey string) ([]ProjectRun, bool) { +// loggedInOrganisationId, exists := c.Get(orgIdKey) +// +// log.Printf("getProjectRunsFromContext, org id: %v\n", loggedInOrganisationId) +// +// if !exists { +// c.String(http.StatusForbidden, "Not allowed to access this resource") +// return nil, false +// } +// +// runs, err := db.GetProjectRunsForOrg(loggedInOrganisationId.(int)) +// if err != nil { +// return nil, false +// } +// return runs, true +// +//} + +//func (db *Database) GetProjectByRunId(c *gin.Context, runId uint, orgIdKey string) (*ProjectRun, bool) { +// loggedInOrganisationId, exists := c.Get(orgIdKey) +// if !exists { +// c.String(http.StatusForbidden, "Not allowed to access this resource") +// return nil, false +// } +// +// log.Printf("GetProjectByRunId, org id: %v\n", loggedInOrganisationId) +// var projectRun ProjectRun +// +// err := db.GormDB.Preload("Project").Preload("Project.Organisation").Preload("Project.Repo"). +// Joins("INNER JOIN projects ON projects.id = project_runs.project_id"). +// Joins("INNER JOIN repos ON projects.repo_id = repos.id"). +// Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). +// Where("projects.organisation_id = ?", loggedInOrganisationId). 
+// Where("project_runs.id = ?", runId).First(&projectRun).Error +// +// if err != nil { +// log.Printf("Unknown error occurred while fetching database, %v\n", err) +// return nil, false +// } +// +// return &projectRun, true +//} + +func (db *Database) GetProjectByProjectId(c *gin.Context, projectId uint, orgIdKey string) (*model.Project, bool) { loggedInOrganisationId, exists := c.Get(orgIdKey) if !exists { c.String(http.StatusForbidden, "Not allowed to access this resource") @@ -162,12 +162,12 @@ func (db *Database) GetProjectByProjectId(c *gin.Context, projectId uint, orgIdK } log.Printf("GetProjectByProjectId, org id: %v\n", loggedInOrganisationId) - var project Project + var project model.Project err := db.GormDB.Preload("Organisation").Preload("Repo"). Joins("INNER JOIN repos ON projects.repo_id = repos.id"). - Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", loggedInOrganisationId). + Joins("INNER JOIN organizations ON projects.organization_id = organizations.id"). + Where("projects.organization_id = ?", loggedInOrganisationId). Where("projects.id = ?", projectId).First(&project).Error if err != nil { @@ -178,9 +178,9 @@ func (db *Database) GetProjectByProjectId(c *gin.Context, projectId uint, orgIdK return &project, true } -func (db *Database) GetProject(projectId uint) (*Project, error) { +func (db *Database) GetProject(projectId uint) (*model.Project, error) { log.Printf("GetProject, project id: %v\n", projectId) - var project Project + var project model.Project err := db.GormDB.Preload("Organisation").Preload("Repo"). Where("id = ?", projectId). 
@@ -196,14 +196,14 @@ func (db *Database) GetProject(projectId uint) (*Project, error) { // GetProjectByName return project for specified org and repo // if record doesn't exist return nil -func (db *Database) GetProjectByName(orgId any, repo *Repo, name string) (*Project, error) { +func (db *Database) GetProjectByName(orgId any, repo *model.Repo, name string) (*model.Project, error) { log.Printf("GetProjectByName, org id: %v, project name: %v\n", orgId, name) - var project Project + var project model.Project err := db.GormDB.Preload("Organisation").Preload("Repo"). Joins("INNER JOIN repos ON projects.repo_id = repos.id"). - Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", orgId). + Joins("INNER JOIN organizations ON projects.organization_id = organizations.id"). + Where("projects.organization_id = ?", orgId). Where("repos.id = ?", repo.ID). Where("projects.name = ?", name).First(&project).Error @@ -219,14 +219,14 @@ func (db *Database) GetProjectByName(orgId any, repo *Repo, name string) (*Proje } // GetProjectByRepo return projects for specified org and repo -func (db *Database) GetProjectByRepo(orgId any, repo *Repo) ([]Project, error) { +func (db *Database) GetProjectByRepo(orgId any, repo *model.Repo) ([]model.Project, error) { log.Printf("GetProjectByRepo, org id: %v, repo name: %v\n", orgId, repo.Name) - projects := make([]Project, 0) + projects := make([]model.Project, 0) err := db.GormDB.Preload("Organisation").Preload("Repo"). Joins("INNER JOIN repos ON projects.repo_id = repos.id"). - Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", orgId). + Joins("INNER JOIN organizations ON projects.organization_id = organizations.id"). + Where("projects.organization_id = ?", orgId). 
Where("repos.id = ?", repo.ID).Find(&projects).Error if err != nil { @@ -240,32 +240,32 @@ func (db *Database) GetProjectByRepo(orgId any, repo *Repo) ([]Project, error) { return projects, nil } -func (db *Database) GetPolicyByPolicyId(c *gin.Context, policyId uint, orgIdKey string) (*Policy, bool) { - loggedInOrganisationId, exists := c.Get(orgIdKey) - if !exists { - c.String(http.StatusForbidden, "Not allowed to access this resource") - return nil, false - } - - log.Printf("getPolicyByPolicyId, org id: %v\n", loggedInOrganisationId) - var policy Policy - - err := db.GormDB.Preload("Project").Preload("Project.Organisation").Preload("Project.Repo"). - Joins("INNER JOIN projects ON projects.id = policies.project_id"). - Joins("INNER JOIN repos ON projects.repo_id = repos.id"). - Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). - Where("projects.organisation_id = ?", loggedInOrganisationId). - Where("policies.id = ?", policyId).First(&policy).Error - - if err != nil { - log.Printf("Unknown error occurred while fetching database, %v\n", err) - return nil, false - } - - return &policy, true -} - -func (db *Database) GetDefaultRepo(c *gin.Context, orgIdKey string) (*Repo, bool) { +//func (db *Database) GetPolicyByPolicyId(c *gin.Context, policyId uint, orgIdKey string) (*Policy, bool) { +// loggedInOrganisationId, exists := c.Get(orgIdKey) +// if !exists { +// c.String(http.StatusForbidden, "Not allowed to access this resource") +// return nil, false +// } +// +// log.Printf("getPolicyByPolicyId, org id: %v\n", loggedInOrganisationId) +// var policy Policy +// +// err := db.GormDB.Preload("Project").Preload("Project.Organisation").Preload("Project.Repo"). +// Joins("INNER JOIN projects ON projects.id = policies.project_id"). +// Joins("INNER JOIN repos ON projects.repo_id = repos.id"). +// Joins("INNER JOIN organisations ON projects.organisation_id = organisations.id"). +// Where("projects.organisation_id = ?", loggedInOrganisationId). 
+// Where("policies.id = ?", policyId).First(&policy).Error +// +// if err != nil { +// log.Printf("Unknown error occurred while fetching database, %v\n", err) +// return nil, false +// } +// +// return &policy, true +//} + +func (db *Database) GetDefaultRepo(c *gin.Context, orgIdKey string) (*model.Repo, bool) { loggedInOrganisationId, exists := c.Get(orgIdKey) if !exists { log.Print("Not allowed to access this resource") @@ -273,11 +273,11 @@ func (db *Database) GetDefaultRepo(c *gin.Context, orgIdKey string) (*Repo, bool } log.Printf("getDefaultRepo, org id: %v\n", loggedInOrganisationId) - var repo Repo + var repo model.Repo - err := db.GormDB.Preload("Organisation"). - Joins("INNER JOIN organisations ON repos.organisation_id = organisations.id"). - Where("organisations.id = ?", loggedInOrganisationId).First(&repo).Error + err := db.GormDB.Preload("organizations"). + Joins("INNER JOIN organizations ON repos.organization_id = organizations.id"). + Where("organizations.id = ?", loggedInOrganisationId).First(&repo).Error if err != nil { log.Printf("Unknown error occurred while fetching database, %v\n", err) @@ -289,12 +289,12 @@ func (db *Database) GetDefaultRepo(c *gin.Context, orgIdKey string) (*Repo, bool // GetRepo returns digger repo by organisationId and repo name (diggerhq-digger) // it will return an empty object if record doesn't exist in database -func (db *Database) GetRepo(orgIdKey any, repoName string) (*Repo, error) { - var repo Repo +func (db *Database) GetRepo(orgIdKey any, repoName string) (*model.Repo, error) { + var repo model.Repo err := db.GormDB.Preload("Organisation"). - Joins("INNER JOIN organisations ON repos.organisation_id = organisations.id"). - Where("organisations.id = ? AND repos.name=?", orgIdKey, repoName).First(&repo).Error + Joins("INNER JOIN organizations ON repos.organization_id = organizations.id"). + Where("organizations.id = ? 
AND repos.name=?", orgIdKey, repoName).First(&repo).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { @@ -307,12 +307,12 @@ func (db *Database) GetRepo(orgIdKey any, repoName string) (*Repo, error) { } // GetRepoById returns digger repo by organisationId and repo name (diggerhq-digger) -func (db *Database) GetRepoById(orgIdKey any, repoId any) (*Repo, error) { - var repo Repo +func (db *Database) GetRepoById(orgIdKey any, repoId any) (*model.Repo, error) { + var repo model.Repo - err := db.GormDB.Preload("Organisation"). - Joins("INNER JOIN organisations ON repos.organisation_id = organisations.id"). - Where("organisations.id = ? AND repos.ID=?", orgIdKey, repoId).First(&repo).Error + err := db.GormDB.Preload("organizations"). + Joins("INNER JOIN organizations ON repos.organization_id = organizations.id"). + Where("organizations.id = ? AND repos.ID=?", orgIdKey, repoId).First(&repo).Error if err != nil { log.Printf("Failed to find digger repo for orgId: %v, and repoId: %v, error: %v\n", orgIdKey, repoId, err) @@ -322,10 +322,10 @@ func (db *Database) GetRepoById(orgIdKey any, repoId any) (*Repo, error) { } // GithubRepoAdded handles github drift that github repo has been added to the app installation -func (db *Database) GithubRepoAdded(installationId int64, appId int64, login string, accountId int64, repoFullName string) (*GithubAppInstallation, error) { +func (db *Database) GithubRepoAdded(installationId int64, appId int64, login string, accountId int64, repoFullName string) (*model.GithubAppInstallation, error) { // check if item exist already - item := &GithubAppInstallation{} + item := &model.GithubAppInstallation{} result := db.GormDB.Where("github_installation_id = ? AND repo=? 
AND github_app_id=?", installationId, repoFullName, appId).First(item) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -341,7 +341,7 @@ func (db *Database) GithubRepoAdded(installationId int64, appId int64, login str } } else { log.Printf("Record for installation_id: %d, repo: %s, with status=active exist already.", installationId, repoFullName) - item.Status = GithubAppInstallActive + item.Status = int64(GithubAppInstallActive) item.UpdatedAt = time.Now() err := db.GormDB.Save(item).Error if err != nil { @@ -351,8 +351,8 @@ func (db *Database) GithubRepoAdded(installationId int64, appId int64, login str return item, nil } -func (db *Database) GithubRepoRemoved(installationId int64, appId int64, repoFullName string) (*GithubAppInstallation, error) { - item := &GithubAppInstallation{} +func (db *Database) GithubRepoRemoved(installationId int64, appId int64, repoFullName string) (*model.GithubAppInstallation, error) { + item := &model.GithubAppInstallation{} err := db.GormDB.Where("github_installation_id = ? AND status=? AND github_app_id=? AND repo=?", installationId, GithubAppInstallActive, appId, repoFullName).First(item).Error if err != nil { if errors.Is(err, gorm.ErrRecordNotFound) { @@ -361,7 +361,7 @@ func (db *Database) GithubRepoRemoved(installationId int64, appId int64, repoFul } return nil, fmt.Errorf("failed to find github installation in database. 
%v", err) } - item.Status = GithubAppInstallDeleted + item.Status = int64(GithubAppInstallDeleted) item.UpdatedAt = time.Now() err = db.GormDB.Save(item).Error if err != nil { @@ -370,30 +370,29 @@ func (db *Database) GithubRepoRemoved(installationId int64, appId int64, repoFul return item, nil } -func (db *Database) GetGithubAppInstallationByOrgAndRepo(orgId any, repo string, status GithubAppInstallStatus) (*GithubAppInstallation, error) { +func (db *Database) GetGithubAppInstallationByOrgAndRepo(orgId any, repo string, status GithubAppInstallStatus) (*model.GithubAppInstallation, error) { link, err := db.GetGithubInstallationLinkForOrg(orgId) if err != nil { return nil, err } - installation := GithubAppInstallation{} - result := db.GormDB.Where("github_installation_id = ? AND status=? AND repo=?", link.GithubInstallationId, status, repo).Find(&installation) + installation := model.GithubAppInstallation{} + result := db.GormDB.Where("github_installation_id = ? AND status=? AND repo=?", link.GithubInstallationID, status, repo).Find(&installation) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { return nil, result.Error } } - // If not found, the values will be default values, which means ID will be 0 - if installation.ID == 0 { + if errors.Is(result.Error, gorm.ErrRecordNotFound) { return nil, nil } return &installation, nil } // GetGithubAppInstallationByIdAndRepo repoFullName should be in the following format: org/repo_name, for example "diggerhq/github-job-scheduler" -func (db *Database) GetGithubAppInstallationByIdAndRepo(installationId int64, repoFullName string) (*GithubAppInstallation, error) { - installation := GithubAppInstallation{} +func (db *Database) GetGithubAppInstallationByIdAndRepo(installationId int64, repoFullName string) (*model.GithubAppInstallation, error) { + installation := model.GithubAppInstallation{} result := db.GormDB.Where("github_installation_id = ? AND status=? 
AND repo=?", installationId, GithubAppInstallActive, repoFullName).Find(&installation) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -402,14 +401,14 @@ func (db *Database) GetGithubAppInstallationByIdAndRepo(installationId int64, re } // If not found, the values will be default values, which means ID will be 0 - if installation.Model.ID == 0 { + if errors.Is(result.Error, gorm.ErrRecordNotFound) { return nil, fmt.Errorf("GithubAppInstallation with id=%v doesn't exist", installationId) } return &installation, nil } -func (db *Database) GetGithubAppInstallations(installationId int64) ([]GithubAppInstallation, error) { - var installations []GithubAppInstallation +func (db *Database) GetGithubAppInstallations(installationId int64) ([]model.GithubAppInstallation, error) { + var installations []model.GithubAppInstallation result := db.GormDB.Where("github_installation_id = ? AND status=?", installationId, GithubAppInstallActive).Find(&installations) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -420,8 +419,8 @@ func (db *Database) GetGithubAppInstallations(installationId int64) ([]GithubApp } // GetGithubAppInstallationLink repoFullName should be in the following format: org/repo_name, for example "diggerhq/github-job-scheduler" -func (db *Database) GetGithubAppInstallationLink(installationId int64) (*GithubAppInstallationLink, error) { - var link GithubAppInstallationLink +func (db *Database) GetGithubAppInstallationLink(installationId int64) (*model.GithubAppInstallationLink, error) { + var link model.GithubAppInstallationLink result := db.GormDB.Preload("Organisation").Where("github_installation_id = ? 
AND status=?", installationId, GithubAppInstallationLinkActive).Find(&link) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -430,25 +429,25 @@ func (db *Database) GetGithubAppInstallationLink(installationId int64) (*GithubA } // If not found, the values will be default values, which means ID will be 0 - if link.Model.ID == 0 { + if errors.Is(result.Error, gorm.ErrRecordNotFound) { return nil, nil } return &link, nil } -func (db *Database) CreateGithubApp(name string, githubId int64, url string) (*GithubApp, error) { - app := GithubApp{Name: name, GithubId: githubId, GithubAppUrl: url} +func (db *Database) CreateGithubApp(name string, githubId int64, url string) (*model.GithubApp, error) { + app := model.GithubApp{Name: name, GithubID: githubId, GithubAppURL: url} result := db.GormDB.Save(&app) if result.Error != nil { return nil, result.Error } - log.Printf("CreateGithubApp (name: %v, url: %v) has been created successfully\n", app.Name, app.GithubAppUrl) + log.Printf("CreateGithubApp (name: %v, url: %v) has been created successfully\n", app.Name, app.GithubAppURL) return &app, nil } // GetGithubApp return GithubApp by Id -func (db *Database) GetGithubApp(gitHubAppId any) (*GithubApp, error) { - app := GithubApp{} +func (db *Database) GetGithubApp(gitHubAppId any) (*model.GithubApp, error) { + app := model.GithubApp{} result := db.GormDB.Where("github_id = ?", gitHubAppId).Find(&app) if result.Error != nil { log.Printf("Failed to find GitHub App for id: %v, error: %v\n", gitHubAppId, result.Error) @@ -457,8 +456,8 @@ func (db *Database) GetGithubApp(gitHubAppId any) (*GithubApp, error) { return &app, nil } -func (db *Database) CreateGithubInstallationLink(org *Organisation, installationId int64) (*GithubAppInstallationLink, error) { - l := GithubAppInstallationLink{} +func (db *Database) CreateGithubInstallationLink(org *model.Organization, installationId int64) (*model.GithubAppInstallationLink, error) { + l := 
model.GithubAppInstallationLink{} // check if there is already a link to another org, and throw an error in this case result := db.GormDB.Where("github_installation_id = ? AND status=?", installationId, GithubAppInstallationLinkActive).Find(&l) if result.Error != nil { @@ -467,59 +466,63 @@ func (db *Database) CreateGithubInstallationLink(org *Organisation, installation } } if result.RowsAffected > 0 { - if l.OrganisationId != org.ID { + if l.OrganizationID != org.ID { return nil, fmt.Errorf("GitHub app installation %v already linked to another org ", installationId) } - log.Printf("installation %v has been linked to the org %v already.", installationId, org.Name) + log.Printf("installation %v has been linked to the org %v already.", installationId, org.Slug) // record already exist, do nothing return &l, nil } - var list []GithubAppInstallationLink + var list []model.GithubAppInstallationLink // if there are other installation for this org, we need to make them inactive - result = db.GormDB.Preload("Organisation").Where("github_installation_id <> ? AND organisation_id = ? AND status=?", installationId, org.ID, GithubAppInstallationLinkActive).Find(&list) + //orgstbname := db.Query.Organization.TableName() + result = db.GormDB.Where("github_installation_id <> ? AND organization_id = ? 
AND status=?", installationId, org.ID, GithubAppInstallationLinkActive).Find(&list) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { return nil, result.Error } } for _, item := range list { - item.Status = GithubAppInstallationLinkInactive + item.Status = int16(GithubAppInstallationLinkInactive) db.GormDB.Save(&item) } - link := GithubAppInstallationLink{Organisation: org, GithubInstallationId: installationId, Status: GithubAppInstallationLinkActive} + link := model.GithubAppInstallationLink{OrganizationID: org.ID, GithubInstallationID: installationId, Status: int16(GithubAppInstallationLinkActive)} result = db.GormDB.Save(&link) if result.Error != nil { return nil, result.Error } - log.Printf("GithubAppInstallationLink (org: %v, installationId: %v) has been created successfully\n", org.Name, installationId) + log.Printf("GithubAppInstallationLink (org: %v, installationId: %v) has been created successfully\n", org.Slug, installationId) return &link, nil } -func (db *Database) GetGithubInstallationLinkForOrg(orgId any) (*GithubAppInstallationLink, error) { - l := GithubAppInstallationLink{} - result := db.GormDB.Where("organisation_id = ? AND status=?", orgId, GithubAppInstallationLinkActive).Find(&l) +func (db *Database) GetGithubInstallationLinkForOrg(orgId any) (*model.GithubAppInstallationLink, error) { + l := model.GithubAppInstallationLink{} + result := db.GormDB.Where("organization_id = ? AND status=?", orgId, GithubAppInstallationLinkActive).Find(&l) if result.Error != nil { return nil, result.Error } - if l.ID == 0 { + if errors.Is(result.Error, gorm.ErrRecordNotFound) { return nil, fmt.Errorf("GithubAppInstallationLink not found for orgId: %v\n", orgId) } return &l, nil } -func (db *Database) GetGithubInstallationLinkForInstallationId(installationId any) (*GithubAppInstallationLink, error) { - l := GithubAppInstallationLink{} - result := db.GormDB.Where("github_installation_id = ? 
AND status=?", installationId, GithubAppInstallationLinkActive).Find(&l) - if result.Error != nil { - return nil, result.Error +func (db *Database) GetGithubInstallationLinkForInstallationId(installationId int64) (*model.GithubAppInstallationLink, error) { + l := model.GithubAppInstallationLink{} + err := db.GormDB.Where("github_installation_id = ? AND status=?", installationId, GithubAppInstallationLinkActive).Find(&l).Error + if err != nil { + return nil, err + } + if l.ID == "" { + return nil, fmt.Errorf("github installation link not found") } return &l, nil } -func (db *Database) MakeGithubAppInstallationLinkInactive(link *GithubAppInstallationLink) (*GithubAppInstallationLink, error) { - link.Status = GithubAppInstallationLinkInactive +func (db *Database) MakeGithubAppInstallationLinkInactive(link *model.GithubAppInstallationLink) (*model.GithubAppInstallationLink, error) { + link.Status = int16(GithubAppInstallationLinkInactive) result := db.GormDB.Save(link) if result.Error != nil { log.Printf("Failed to update GithubAppInstallationLink, id: %v, error: %v", link.ID, result.Error) @@ -528,55 +531,65 @@ func (db *Database) MakeGithubAppInstallationLinkInactive(link *GithubAppInstall return link, nil } -func (db *Database) CreateDiggerJobLink(diggerJobId string, repoFullName string) (*GithubDiggerJobLink, error) { - link := GithubDiggerJobLink{Status: DiggerJobLinkCreated, DiggerJobId: diggerJobId, RepoFullName: repoFullName} - result := db.GormDB.Save(&link) - if result.Error != nil { - log.Printf("Failed to create GithubDiggerJobLink, %v, repo: %v \n", diggerJobId, repoFullName) - return nil, result.Error - } - log.Printf("GithubDiggerJobLink %v, (repo: %v) has been created successfully\n", diggerJobId, repoFullName) - return &link, nil -} - -func (db *Database) GetDiggerJobLink(diggerJobId string) (*GithubDiggerJobLink, error) { - link := GithubDiggerJobLink{} - result := db.GormDB.Where("digger_job_id = ?", diggerJobId).Find(&link) - if result.Error != nil 
{ - if !errors.Is(result.Error, gorm.ErrRecordNotFound) { - return nil, nil - } - log.Printf("Failed to get DiggerJobLink, %v", diggerJobId) - return nil, result.Error - } - return &link, nil -} - -func (db *Database) UpdateDiggerJobLink(diggerJobId string, repoFullName string, githubJobId int64) (*GithubDiggerJobLink, error) { - jobLink := GithubDiggerJobLink{} - // check if there is already a link to another org, and throw an error in this case - result := db.GormDB.Where("digger_job_id = ? AND repo_full_name=? ", diggerJobId, repoFullName).Find(&jobLink) - if result.Error != nil { - if !errors.Is(result.Error, gorm.ErrRecordNotFound) { - log.Printf("Failed to update GithubDiggerJobLink, %v, repo: %v \n", diggerJobId, repoFullName) - return nil, result.Error - } - } - if result.RowsAffected == 1 { - jobLink.GithubJobId = githubJobId - result = db.GormDB.Save(&jobLink) - if result.Error != nil { - return nil, result.Error - } - log.Printf("GithubDiggerJobLink %v, (repo: %v) has been updated successfully\n", diggerJobId, repoFullName) - return &jobLink, nil +//func (db *Database) CreateDiggerJobLink(diggerJobId string, repoFullName string) (*GithubDiggerJobLink, error) { +// link := GithubDiggerJobLink{Status: DiggerJobLinkCreated, DiggerJobId: diggerJobId, RepoFullName: repoFullName} +// result := db.GormDB.Save(&link) +// if result.Error != nil { +// log.Printf("Failed to create GithubDiggerJobLink, %v, repo: %v \n", diggerJobId, repoFullName) +// return nil, result.Error +// } +// log.Printf("GithubDiggerJobLink %v, (repo: %v) has been created successfully\n", diggerJobId, repoFullName) +// return &link, nil +//} + +//func (db *Database) GetDiggerJobLink(diggerJobId string) (*GithubDiggerJobLink, error) { +// link := GithubDiggerJobLink{} +// result := db.GormDB.Where("digger_job_id = ?", diggerJobId).Find(&link) +// if result.Error != nil { +// if !errors.Is(result.Error, gorm.ErrRecordNotFound) { +// return nil, nil +// } +// log.Printf("Failed to get 
DiggerJobLink, %v", diggerJobId) +// return nil, result.Error +// } +// return &link, nil +//} + +//func (db *Database) UpdateDiggerJobLink(diggerJobId string, repoFullName string, githubJobId int64) (*GithubDiggerJobLink, error) { +// jobLink := GithubDiggerJobLink{} +// // check if there is already a link to another org, and throw an error in this case +// result := db.GormDB.Where("digger_job_id = ? AND repo_full_name=? ", diggerJobId, repoFullName).Find(&jobLink) +// if result.Error != nil { +// if !errors.Is(result.Error, gorm.ErrRecordNotFound) { +// log.Printf("Failed to update GithubDiggerJobLink, %v, repo: %v \n", diggerJobId, repoFullName) +// return nil, result.Error +// } +// } +// if result.RowsAffected == 1 { +// jobLink.GithubJobId = githubJobId +// result = db.GormDB.Save(&jobLink) +// if result.Error != nil { +// return nil, result.Error +// } +// log.Printf("GithubDiggerJobLink %v, (repo: %v) has been updated successfully\n", diggerJobId, repoFullName) +// return &jobLink, nil +// } +// return &jobLink, nil +//} + +func (db *Database) GetUserOrganizationsFirstMatch(userId string) (*model.Organization, error) { + log.Printf("GetOrganisationById, userId: %v\n", userId) + org := model.Organization{} + err := db.GormDB.Joins("JOIN organization_members AS om ON om.organization_id=organizations.id").Where("om.member_id = ?", userId).First(&org).Error + if err != nil { + return nil, fmt.Errorf("Error fetching organisation: %v\n", err) } - return &jobLink, nil + return &org, nil } -func (db *Database) GetOrganisationById(orgId any) (*Organisation, error) { +func (db *Database) GetOrganisationById(orgId string) (*model.Organization, error) { log.Printf("GetOrganisationById, orgId: %v, type: %T \n", orgId, orgId) - org := Organisation{} + org := model.Organization{} err := db.GormDB.Where("id = ?", orgId).First(&org).Error if err != nil { return nil, fmt.Errorf("Error fetching organisation: %v\n", err) @@ -584,8 +597,8 @@ func (db *Database) 
GetOrganisationById(orgId any) (*Organisation, error) { return &org, nil } -func (db *Database) GetDiggerBatch(batchId *uuid.UUID) (*DiggerBatch, error) { - batch := &DiggerBatch{} +func (db *Database) GetDiggerBatch(batchId *uuid.UUID) (*model.DiggerBatch, error) { + batch := &model.DiggerBatch{} result := db.GormDB.Where("id=? ", batchId).Find(batch) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -595,22 +608,22 @@ func (db *Database) GetDiggerBatch(batchId *uuid.UUID) (*DiggerBatch, error) { return batch, nil } -func (db *Database) CreateDiggerBatch(vcsType DiggerVCSType, githubInstallationId int64, repoOwner string, repoName string, repoFullname string, PRNumber int, diggerConfig string, branchName string, batchType scheduler.DiggerCommand, commentId *int64, gitlabProjectId int) (*DiggerBatch, error) { +func (db *Database) CreateDiggerBatch(vcsType DiggerVCSType, githubInstallationId int64, repoOwner string, repoName string, repoFullname string, PRNumber int, diggerConfig string, branchName string, batchType scheduler.DiggerCommand, commentId *int64, gitlabProjectId int) (*model.DiggerBatch, error) { uid := uuid.New() - batch := &DiggerBatch{ - ID: uid, - VCS: vcsType, - GithubInstallationId: githubInstallationId, + batch := &model.DiggerBatch{ + ID: uid.String(), + Vcs: string(vcsType), + GithubInstallationID: githubInstallationId, RepoOwner: repoOwner, RepoName: repoName, RepoFullName: repoFullname, - PrNumber: PRNumber, - CommentId: commentId, - Status: scheduler.BatchJobCreated, + PrNumber: int64(PRNumber), + CommentID: *commentId, + Status: int16(scheduler.BatchJobCreated), BranchName: branchName, DiggerConfig: diggerConfig, - BatchType: batchType, - GitlabProjectId: gitlabProjectId, + BatchType: string(batchType), + GitlabProjectID: int64(gitlabProjectId), } result := db.GormDB.Save(batch) if result.Error != nil { @@ -621,7 +634,7 @@ func (db *Database) CreateDiggerBatch(vcsType DiggerVCSType, githubInstallationI return 
batch, nil } -func (db *Database) UpdateDiggerBatch(batch *DiggerBatch) error { +func (db *Database) UpdateDiggerBatch(batch *model.DiggerBatch) error { result := db.GormDB.Save(batch) if result.Error != nil { return result.Error @@ -630,12 +643,12 @@ func (db *Database) UpdateDiggerBatch(batch *DiggerBatch) error { return nil } -func (db *Database) UpdateBatchStatus(batch *DiggerBatch) error { - if batch.Status == scheduler.BatchJobInvalidated || batch.Status == scheduler.BatchJobFailed || batch.Status == scheduler.BatchJobSucceeded { +func (db *Database) UpdateBatchStatus(batch *model.DiggerBatch) error { + if batch.Status == int16(scheduler.BatchJobInvalidated) || batch.Status == int16(scheduler.BatchJobFailed) || batch.Status == int16(scheduler.BatchJobSucceeded) { return nil } batchId := batch.ID - var diggerJobs []DiggerJob + var diggerJobs []model.DiggerJob result := db.GormDB.Where("batch_id=?", batchId).Find(&diggerJobs) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -646,33 +659,33 @@ func (db *Database) UpdateBatchStatus(batch *DiggerBatch) error { allJobsSucceeded := true for _, job := range diggerJobs { - if job.Status != scheduler.DiggerJobSucceeded { + if job.Status != int16(scheduler.DiggerJobSucceeded) { allJobsSucceeded = false } } if allJobsSucceeded == true { - batch.Status = scheduler.BatchJobSucceeded + batch.Status = int16(scheduler.BatchJobSucceeded) } return nil } -func (db *Database) CreateDiggerJob(batchId uuid.UUID, serializedJob []byte, workflowFile string) (*DiggerJob, error) { +func (db *Database) CreateDiggerJob(batchId uuid.UUID, serializedJob []byte, workflowFile string) (*model.DiggerJob, error) { if serializedJob == nil || len(serializedJob) == 0 { return nil, fmt.Errorf("serializedJob can't be empty") } jobId := uniuri.New() batchIdStr := batchId.String() - summary := &DiggerJobSummary{} + summary := &model.DiggerJobSummary{} result := db.GormDB.Save(summary) if result.Error != nil { return 
nil, result.Error } workflowUrl := "#" - job := &DiggerJob{DiggerJobID: jobId, Status: scheduler.DiggerJobCreated, - BatchID: &batchIdStr, SerializedJobSpec: serializedJob, DiggerJobSummary: *summary, WorkflowRunUrl: &workflowUrl, WorkflowFile: workflowFile} + job := &model.DiggerJob{DiggerJobID: jobId, Status: int16(scheduler.DiggerJobCreated), + BatchID: batchIdStr, JobSpec: serializedJob, DiggerJobSummaryID: summary.ID, WorkflowRunURL: workflowUrl, WorkflowFile: workflowFile} result = db.GormDB.Save(job) if result.Error != nil { return nil, result.Error @@ -682,8 +695,8 @@ func (db *Database) CreateDiggerJob(batchId uuid.UUID, serializedJob []byte, wor return job, nil } -func (db *Database) ListDiggerRunsForProject(projectName string, repoId uint) ([]DiggerRun, error) { - var runs []DiggerRun +func (db *Database) ListDiggerRunsForProject(projectName string, repoId uint) ([]model.DiggerRun, error) { + var runs []model.DiggerRun err := db.GormDB.Preload("PlanStage").Preload("ApplyStage"). Where("project_name = ? 
AND repo_id= ?", projectName, repoId).Order("created_at desc").Find(&runs).Error @@ -697,19 +710,19 @@ func (db *Database) ListDiggerRunsForProject(projectName string, repoId uint) ([ return runs, nil } -func (db *Database) CreateDiggerRun(Triggertype string, PrNumber int, Status DiggerRunStatus, CommitId string, DiggerConfig string, GithubInstallationId int64, RepoId uint, ProjectName string, RunType RunType, planStageId *uint, applyStageId *uint) (*DiggerRun, error) { - dr := &DiggerRun{ +func (db *Database) CreateDiggerRun(Triggertype string, PrNumber int, Status DiggerRunStatus, CommitId string, DiggerConfig string, GithubInstallationId int64, RepoId uint, ProjectName string, RunType RunType, planStageId string, applyStageId string) (*model.DiggerRun, error) { + dr := &model.DiggerRun{ Triggertype: Triggertype, - PrNumber: &PrNumber, - Status: Status, - CommitId: CommitId, + PrNumber: int64(PrNumber), + Status: string(Status), + CommitID: CommitId, DiggerConfig: DiggerConfig, - GithubInstallationId: GithubInstallationId, - RepoId: RepoId, + GithubInstallationID: GithubInstallationId, + RepoID: int64(RepoId), ProjectName: ProjectName, - RunType: RunType, - PlanStageId: planStageId, - ApplyStageId: applyStageId, + RunType: string(RunType), + PlanStageID: planStageId, + ApplyStageID: applyStageId, IsApproved: false, } result := db.GormDB.Save(dr) @@ -721,9 +734,9 @@ func (db *Database) CreateDiggerRun(Triggertype string, PrNumber int, Status Dig return dr, nil } -func (db *Database) CreateDiggerRunStage(batchId string) (*DiggerRunStage, error) { - drs := &DiggerRunStage{ - BatchID: &batchId, +func (db *Database) CreateDiggerRunStage(batchId string) (*model.DiggerRunStage, error) { + drs := &model.DiggerRunStage{ + BatchID: batchId, } result := db.GormDB.Save(drs) if result.Error != nil { @@ -734,8 +747,8 @@ func (db *Database) CreateDiggerRunStage(batchId string) (*DiggerRunStage, error return drs, nil } -func (db *Database) GetLastDiggerRunForProject(projectName 
string) (*DiggerRun, error) { - diggerRun := &DiggerRun{} +func (db *Database) GetLastDiggerRunForProject(projectName string) (*model.DiggerRun, error) { + diggerRun := &model.DiggerRun{} result := db.GormDB.Where("project_name = ? AND status <> ?", projectName, RunQueued).Order("created_at Desc").First(diggerRun) if result.Error != nil { log.Printf("error while fetching last digger run: %v", result.Error) @@ -744,8 +757,8 @@ func (db *Database) GetLastDiggerRunForProject(projectName string) (*DiggerRun, return diggerRun, nil } -func (db *Database) GetDiggerRun(id uint) (*DiggerRun, error) { - dr := &DiggerRun{} +func (db *Database) GetDiggerRun(id uint) (*model.DiggerRun, error) { + dr := &model.DiggerRun{} result := db.GormDB.Preload("Repo"). Preload("ApplyStage"). Preload("PlanStage"). @@ -756,10 +769,10 @@ func (db *Database) GetDiggerRun(id uint) (*DiggerRun, error) { return dr, nil } -func (db *Database) CreateDiggerRunQueueItem(diggeRrunId uint, projectId uint) (*DiggerRunQueueItem, error) { - drq := &DiggerRunQueueItem{ - DiggerRunId: diggeRrunId, - ProjectId: projectId, +func (db *Database) CreateDiggerRunQueueItem(diggeRrunId int64, projectId int64) (*model.DiggerRunQueueItem, error) { + drq := &model.DiggerRunQueueItem{ + DiggerRunID: diggeRrunId, + ProjectID: projectId, } result := db.GormDB.Save(drq) if result.Error != nil { @@ -770,8 +783,8 @@ func (db *Database) CreateDiggerRunQueueItem(diggeRrunId uint, projectId uint) ( return drq, nil } -func (db *Database) GetDiggerRunQueueItem(id uint) (*DiggerRunQueueItem, error) { - dr := &DiggerRunQueueItem{} +func (db *Database) GetDiggerRunQueueItem(id uint) (*model.DiggerRunQueueItem, error) { + dr := &model.DiggerRunQueueItem{} result := db.GormDB.Preload("DiggerRun").Where("id=? 
", id).Find(dr) if result.Error != nil { return nil, result.Error @@ -779,8 +792,8 @@ func (db *Database) GetDiggerRunQueueItem(id uint) (*DiggerRunQueueItem, error) return dr, nil } -func (db *Database) GetDiggerJobFromRunStage(stage DiggerRunStage) (*DiggerJob, error) { - job := &DiggerJob{} +func (db *Database) GetDiggerJobFromRunStage(stage model.DiggerRunStage) (*model.DiggerJob, error) { + job := &model.DiggerJob{} result := db.GormDB.Preload("Batch").Take(job, "batch_id = ?", stage.BatchID) if result.Error != nil { if errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -792,7 +805,7 @@ func (db *Database) GetDiggerJobFromRunStage(stage DiggerRunStage) (*DiggerJob, return job, nil } -func (db *Database) UpdateDiggerRun(diggerRun *DiggerRun) error { +func (db *Database) UpdateDiggerRun(diggerRun *model.DiggerRun) error { result := db.GormDB.Save(diggerRun) if result.Error != nil { return result.Error @@ -801,7 +814,7 @@ func (db *Database) UpdateDiggerRun(diggerRun *DiggerRun) error { return nil } -func (db *Database) DequeueRunItem(queueItem *DiggerRunQueueItem) error { +func (db *Database) DequeueRunItem(queueItem *model.DiggerRunQueueItem) error { log.Printf("DiggerRunQueueItem Deleting: %v", queueItem.ID) result := db.GormDB.Delete(queueItem) if result.Error != nil { @@ -811,65 +824,69 @@ func (db *Database) DequeueRunItem(queueItem *DiggerRunQueueItem) error { return nil } -func (db *Database) GetFirstRunQueueForEveryProject() ([]DiggerRunQueueItem, error) { - var runqueues []DiggerRunQueueItem - query := `WITH RankedRuns AS ( - SELECT - digger_run_queue_items.digger_run_id, - digger_run_queue_items.project_id, - digger_run_queue_items.created_at, - ROW_NUMBER() OVER (PARTITION BY digger_run_queue_items.project_id ORDER BY digger_run_queue_items.created_at ASC) AS QueuePosition - FROM - digger_run_queue_items -) -SELECT - RankedRuns.digger_run_id , - RankedRuns.project_id , - RankedRuns.created_at -FROM - RankedRuns -WHERE - QueuePosition = 1` - - // 1. 
Fetch the front of the queue for every projectID - tx := db.GormDB. - Raw(query). - Find(&runqueues) - - if tx.Error != nil { - fmt.Printf("%v", tx.Error) - return nil, tx.Error - } - - // 2. Preload Project and DiggerRun for every DiggerrunQueue item (front of queue) - var runqueuesWithData []DiggerRunQueueItem - diggerRunIds := lo.Map(runqueues, func(run DiggerRunQueueItem, index int) uint { - return run.DiggerRunId - }) - - tx = db.GormDB.Preload("DiggerRun").Preload("DiggerRun.Repo"). - Preload("DiggerRun.PlanStage").Preload("DiggerRun.ApplyStage"). - Preload("DiggerRun.PlanStage.Batch").Preload("DiggerRun.ApplyStage.Batch"). - Where("digger_run_queue_items.digger_run_id in ?", diggerRunIds).Find(&runqueuesWithData) - - if tx.Error != nil { - fmt.Printf("%v", tx.Error) - return nil, tx.Error - } - - return runqueuesWithData, nil -} - -func (db *Database) UpdateDiggerJobSummary(diggerJobId string, resourcesCreated uint, resourcesUpdated uint, resourcesDeleted uint) (*DiggerJob, error) { +//func (db *Database) GetFirstRunQueueForEveryProject() ([]DiggerRunQueueItem, error) { +// var runqueues []DiggerRunQueueItem +// query := `WITH RankedRuns AS ( +// SELECT +// digger_run_queue_items.digger_run_id, +// digger_run_queue_items.project_id, +// digger_run_queue_items.created_at, +// ROW_NUMBER() OVER (PARTITION BY digger_run_queue_items.project_id ORDER BY digger_run_queue_items.created_at ASC) AS QueuePosition +// FROM +// digger_run_queue_items +//) +//SELECT +// RankedRuns.digger_run_id , +// RankedRuns.project_id , +// RankedRuns.created_at +//FROM +// RankedRuns +//WHERE +// QueuePosition = 1` +// +// // 1. Fetch the front of the queue for every projectID +// tx := db.GormDB. +// Raw(query). +// Find(&runqueues) +// +// if tx.Error != nil { +// fmt.Printf("%v", tx.Error) +// return nil, tx.Error +// } +// +// // 2. 
Preload Project and DiggerRun for every DiggerrunQueue item (front of queue) +// var runqueuesWithData []DiggerRunQueueItem +// diggerRunIds := lo.Map(runqueues, func(run DiggerRunQueueItem, index int) uint { +// return run.DiggerRunId +// }) +// +// tx = db.GormDB.Preload("DiggerRun").Preload("DiggerRun.Repo"). +// Preload("DiggerRun.PlanStage").Preload("DiggerRun.ApplyStage"). +// Preload("DiggerRun.PlanStage.Batch").Preload("DiggerRun.ApplyStage.Batch"). +// Where("digger_run_queue_items.digger_run_id in ?", diggerRunIds).Find(&runqueuesWithData) +// +// if tx.Error != nil { +// fmt.Printf("%v", tx.Error) +// return nil, tx.Error +// } +// +// return runqueuesWithData, nil +//} + +func (db *Database) UpdateDiggerJobSummary(diggerJobId string, resourcesCreated uint, resourcesUpdated uint, resourcesDeleted uint) (*model.DiggerJob, error) { diggerJob, err := db.GetDiggerJob(diggerJobId) if err != nil { return nil, fmt.Errorf("Could not get digger job") } - var jobSummary *DiggerJobSummary - jobSummary = &diggerJob.DiggerJobSummary - jobSummary.ResourcesCreated = resourcesCreated - jobSummary.ResourcesUpdated = resourcesUpdated - jobSummary.ResourcesDeleted = resourcesDeleted + var jobSummary *model.DiggerJobSummary + + jobSummary, err = db.Query.DiggerJobSummary.Select(db.Query.DiggerJobSummary.ID.Eq(diggerJobId)).First() + if err != nil { + return nil, fmt.Errorf("could not get digger job summary: %v", err) + } + jobSummary.ResourcesCreated = int64(resourcesCreated) + jobSummary.ResourcesUpdated = int64(resourcesUpdated) + jobSummary.ResourcesDeleted = int64(resourcesDeleted) result := db.GormDB.Save(&jobSummary) if result.Error != nil { @@ -880,7 +897,7 @@ func (db *Database) UpdateDiggerJobSummary(diggerJobId string, resourcesCreated return diggerJob, nil } -func (db *Database) UpdateDiggerJob(job *DiggerJob) error { +func (db *Database) UpdateDiggerJob(job *model.DiggerJob) error { result := db.GormDB.Save(job) if result.Error != nil { return result.Error @@ 
-889,8 +906,8 @@ func (db *Database) UpdateDiggerJob(job *DiggerJob) error { return nil } -func (db *Database) GetDiggerJobsForBatch(batchId uuid.UUID) ([]DiggerJob, error) { - jobs := make([]DiggerJob, 0) +func (db *Database) GetDiggerJobsForBatch(batchId uuid.UUID) ([]model.DiggerJob, error) { + jobs := make([]model.DiggerJob, 0) var where *gorm.DB where = db.GormDB.Where("digger_jobs.batch_id = ?", batchId) @@ -904,8 +921,8 @@ func (db *Database) GetDiggerJobsForBatch(batchId uuid.UUID) ([]DiggerJob, error return jobs, nil } -func (db *Database) GetDiggerJobsForBatchWithStatus(batchId uuid.UUID, status []scheduler.DiggerJobStatus) ([]DiggerJob, error) { - jobs := make([]DiggerJob, 0) +func (db *Database) GetDiggerJobsForBatchWithStatus(batchId uuid.UUID, status []scheduler.DiggerJobStatus) ([]model.DiggerJob, error) { + jobs := make([]model.DiggerJob, 0) var where *gorm.DB where = db.GormDB.Where("digger_jobs.batch_id = ?", batchId).Where("status IN ?", status) @@ -919,8 +936,8 @@ func (db *Database) GetDiggerJobsForBatchWithStatus(batchId uuid.UUID, status [] return jobs, nil } -func (db *Database) GetDiggerJobsWithStatus(status scheduler.DiggerJobStatus) ([]DiggerJob, error) { - jobs := make([]DiggerJob, 0) +func (db *Database) GetDiggerJobsWithStatus(status scheduler.DiggerJobStatus) ([]model.DiggerJob, error) { + jobs := make([]model.DiggerJob, 0) var where *gorm.DB where = db.GormDB.Where("status = ?", status) @@ -934,8 +951,8 @@ func (db *Database) GetDiggerJobsWithStatus(status scheduler.DiggerJobStatus) ([ return jobs, nil } -func (db *Database) GetPendingParentDiggerJobs(batchId *uuid.UUID) ([]DiggerJob, error) { - jobs := make([]DiggerJob, 0) +func (db *Database) GetPendingParentDiggerJobs(batchId *uuid.UUID) ([]model.DiggerJob, error) { + jobs := make([]model.DiggerJob, 0) joins := db.GormDB.Joins("LEFT JOIN digger_job_parent_links ON digger_jobs.digger_job_id = digger_job_parent_links.digger_job_id").Preload("Batch") @@ -955,8 +972,8 @@ func (db 
*Database) GetPendingParentDiggerJobs(batchId *uuid.UUID) ([]DiggerJob, return jobs, nil } -func (db *Database) GetDiggerJob(jobId string) (*DiggerJob, error) { - job := &DiggerJob{} +func (db *Database) GetDiggerJob(jobId string) (*model.DiggerJob, error) { + job := &model.DiggerJob{} result := db.GormDB.Preload("Batch").Preload("DiggerJobSummary").Where("digger_job_id=? ", jobId).Find(job) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -966,8 +983,8 @@ func (db *Database) GetDiggerJob(jobId string) (*DiggerJob, error) { return job, nil } -func (db *Database) GetDiggerJobParentLinksByParentId(parentId *string) ([]DiggerJobParentLink, error) { - var jobParentLinks []DiggerJobParentLink +func (db *Database) GetDiggerJobParentLinksByParentId(parentId *string) ([]model.DiggerJobParentLink, error) { + var jobParentLinks []model.DiggerJobParentLink result := db.GormDB.Where("parent_digger_job_id=?", parentId).Find(&jobParentLinks) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -979,7 +996,7 @@ func (db *Database) GetDiggerJobParentLinksByParentId(parentId *string) ([]Digge } func (db *Database) CreateDiggerJobParentLink(parentJobId string, jobId string) error { - jobParentLink := DiggerJobParentLink{ParentDiggerJobId: parentJobId, DiggerJobId: jobId} + jobParentLink := model.DiggerJobParentLink{ParentDiggerJobID: parentJobId, DiggerJobID: jobId} result := db.GormDB.Create(&jobParentLink) if result.Error != nil { return result.Error @@ -987,8 +1004,8 @@ func (db *Database) CreateDiggerJobParentLink(parentJobId string, jobId string) return nil } -func (db *Database) GetDiggerJobParentLinksChildId(childId *string) ([]DiggerJobParentLink, error) { - var jobParentLinks []DiggerJobParentLink +func (db *Database) GetDiggerJobParentLinksChildId(childId *string) ([]model.DiggerJobParentLink, error) { + var jobParentLinks []model.DiggerJobParentLink result := db.GormDB.Where("digger_job_id=?", 
childId).Find(&jobParentLinks) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -999,8 +1016,8 @@ func (db *Database) GetDiggerJobParentLinksChildId(childId *string) ([]DiggerJob return jobParentLinks, nil } -func (db *Database) GetOrganisation(tenantId any) (*Organisation, error) { - org := &Organisation{} +func (db *Database) GetOrganisation(tenantId any) (*model.Organization, error) { + org := &model.Organization{} result := db.GormDB.Take(org, "external_id = ?", tenantId) if result.Error != nil { if errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -1012,23 +1029,23 @@ func (db *Database) GetOrganisation(tenantId any) (*Organisation, error) { return org, nil } -func (db *Database) CreateOrganisation(name string, externalSource string, tenantId string) (*Organisation, error) { - org := &Organisation{Name: name, ExternalSource: externalSource, ExternalId: tenantId} - result := db.GormDB.Save(org) - if result.Error != nil { - log.Printf("Failed to create organisation: %v, error: %v\n", name, result.Error) - return nil, result.Error - } - log.Printf("Organisation %s, (id: %v) has been created successfully\n", name, org.ID) - return org, nil -} +//func (db *Database) CreateOrganisation(name string, externalSource string, tenantId string) (*model.Organization, error) { +// org := &model.Organization{Name: name, ExternalSource: externalSource, ExternalId: tenantId} +// result := db.GormDB.Save(org) +// if result.Error != nil { +// log.Printf("Failed to create organisation: %v, error: %v\n", name, result.Error) +// return nil, result.Error +// } +// log.Printf("Organisation %s, (id: %v) has been created successfully\n", name, org.ID) +// return org, nil +//} -func (db *Database) CreateProject(name string, org *Organisation, repo *Repo, isGenerated bool, isInMainBranch bool) (*Project, error) { - project := &Project{ +func (db *Database) CreateProject(name string, org *model.Organization, repo *model.Repo, isGenerated bool, 
isInMainBranch bool) (*model.Project, error) { + project := &model.Project{ Name: name, - Organisation: org, - Repo: repo, - Status: ProjectActive, + OrganizationID: org.ID, + RepoID: repo.ID, + Status: string(ProjectActive), IsGenerated: isGenerated, IsInMainBranch: isInMainBranch, } @@ -1041,7 +1058,7 @@ func (db *Database) CreateProject(name string, org *Organisation, repo *Repo, is return project, nil } -func (db *Database) UpdateProject(project *Project) error { +func (db *Database) UpdateProject(project *model.Project) error { result := db.GormDB.Save(project) if result.Error != nil { return result.Error @@ -1050,10 +1067,10 @@ func (db *Database) UpdateProject(project *Project) error { return nil } -func (db *Database) CreateRepo(name string, repoFullName string, repoOrganisation string, repoName string, repoUrl string, org *Organisation, diggerConfig string) (*Repo, error) { - var repo Repo +func (db *Database) CreateRepo(name string, repoFullName string, repoOrganisation string, repoName string, repoUrl string, org *model.Organization, diggerConfig string) (*model.Repo, error) { + var repo model.Repo // check if repo exist already, do nothing in this case - result := db.GormDB.Where("name = ? AND organisation_id=?", name, org.ID).Find(&repo) + result := db.GormDB.Where("name = ? 
AND organization_id=?", name, org.ID).Find(&repo) if result.Error != nil { if !errors.Is(result.Error, gorm.ErrRecordNotFound) { return nil, result.Error @@ -1063,14 +1080,14 @@ func (db *Database) CreateRepo(name string, repoFullName string, repoOrganisatio // record already exist, do nothing return &repo, nil } - repo = Repo{ + repo = model.Repo{ Name: name, - Organisation: org, + OrganizationID: org.ID, DiggerConfig: diggerConfig, RepoFullName: repoFullName, RepoOrganisation: repoOrganisation, RepoName: repoName, - RepoUrl: repoUrl, + RepoURL: repoUrl, } result = db.GormDB.Save(&repo) if result.Error != nil { @@ -1081,27 +1098,27 @@ func (db *Database) CreateRepo(name string, repoFullName string, repoOrganisatio return &repo, nil } -func (db *Database) GetToken(tenantId any) (*Token, error) { - token := &Token{} - result := db.GormDB.Take(token, "value = ?", tenantId) - if result.Error != nil { - if errors.Is(result.Error, gorm.ErrRecordNotFound) { - return nil, nil - } else { - return nil, result.Error - } - } - return token, nil -} +//func (db *Database) GetToken(tenantId any) (*Token, error) { +// token := &Token{} +// result := db.GormDB.Take(token, "value = ?", tenantId) +// if result.Error != nil { +// if errors.Is(result.Error, gorm.ErrRecordNotFound) { +// return nil, nil +// } else { +// return nil, result.Error +// } +// } +// return token, nil +//} -func (db *Database) CreateDiggerJobToken(organisationId uint) (*JobToken, error) { +func (db *Database) CreateDiggerJobToken(organisationId uint) (*model.DiggerJobToken, error) { // create a digger job token // prefixing token to make easier to retire this type of tokens later token := "cli:" + uuid.New().String() - jobToken := &JobToken{ + jobToken := &model.DiggerJobToken{ Value: token, - OrganisationID: organisationId, + OrganisationID: int64(organisationId), Type: CliJobAccessType, Expiry: time.Now().Add(time.Hour * 2), // some jobs can take >30 mins (k8s cluster) } @@ -1113,8 +1130,8 @@ func (db 
*Database) CreateDiggerJobToken(organisationId uint) (*JobToken, error) return jobToken, nil } -func (db *Database) GetJobToken(tenantId any) (*JobToken, error) { - token := &JobToken{} +func (db *Database) GetJobToken(tenantId any) (*model.DiggerJobToken, error) { + token := &model.DiggerJobToken{} result := db.GormDB.Take(token, "value = ?", tenantId) if result.Error != nil { if errors.Is(result.Error, gorm.ErrRecordNotFound) { @@ -1126,14 +1143,14 @@ func (db *Database) GetJobToken(tenantId any) (*JobToken, error) { return token, nil } -func (db *Database) CreateGithubAppInstallation(installationId int64, githubAppId int64, login string, accountId int, repoFullName string) (*GithubAppInstallation, error) { - installation := &GithubAppInstallation{ - GithubInstallationId: installationId, - GithubAppId: githubAppId, +func (db *Database) CreateGithubAppInstallation(installationId int64, githubAppId int64, login string, accountId int, repoFullName string) (*model.GithubAppInstallation, error) { + installation := &model.GithubAppInstallation{ + GithubInstallationID: installationId, + GithubAppID: githubAppId, Login: login, - AccountId: accountId, + AccountID: int64(accountId), Repo: repoFullName, - Status: GithubAppInstallActive, + Status: int64(GithubAppInstallActive), } result := db.GormDB.Save(installation) if result.Error != nil { @@ -1152,7 +1169,7 @@ func validateDiggerConfigYaml(configYaml string) (*configuration.DiggerConfig, e return diggerConfig, nil } -func (db *Database) UpdateRepoDiggerConfig(orgId any, config configuration.DiggerConfigYaml, repo *Repo, isMainBranch bool) error { +func (db *Database) UpdateRepoDiggerConfig(orgId string, config configuration.DiggerConfigYaml, repo *model.Repo, isMainBranch bool) error { log.Printf("UpdateRepoDiggerConfig, repo: %v\n", repo) org, err := db.GetOrganisationById(orgId) @@ -1203,23 +1220,23 @@ func (db *Database) UpdateRepoDiggerConfig(orgId any, config configuration.Digge return nil } -func (db *Database) 
CreateDiggerLock(resource string, lockId int, orgId uint) (*DiggerLock, error) { - lock := &DiggerLock{ +func (db *Database) CreateDiggerLock(resource string, lockId int, orgId string) (*model.DiggerLock, error) { + lock := &model.DiggerLock{ Resource: resource, - LockId: lockId, - OrganisationID: orgId, + LockID: int64(lockId), + OrganizationID: orgId, } result := db.GormDB.Save(lock) if result.Error != nil { return nil, result.Error } - log.Printf("CreateDiggerLock (id: %v %v) has been created successfully\n", lock.LockId, lock.Resource) + log.Printf("CreateDiggerLock (id: %v %v) has been created successfully\n", lock.LockID, lock.Resource) return lock, nil } -func (db *Database) GetDiggerLock(resource string) (*DiggerLock, error) { - lock := &DiggerLock{} +func (db *Database) GetDiggerLock(resource string) (*model.DiggerLock, error) { + lock := &model.DiggerLock{} result := db.GormDB.Where("resource=? ", resource).First(lock) if result.Error != nil { return nil, result.Error @@ -1227,12 +1244,12 @@ func (db *Database) GetDiggerLock(resource string) (*DiggerLock, error) { return lock, nil } -func (db *Database) DeleteDiggerLock(lock *DiggerLock) error { - log.Printf("DeleteDiggerLock Deleting: %v, %v", lock.LockId, lock.Resource) +func (db *Database) DeleteDiggerLock(lock *model.DiggerLock) error { + log.Printf("DeleteDiggerLock Deleting: %v, %v", lock.LockID, lock.Resource) result := db.GormDB.Delete(lock) if result.Error != nil { return result.Error } - log.Printf("DeleteDiggerLock %v %v has been deleted successfully\n", lock.LockId, lock.Resource) + log.Printf("DeleteDiggerLock %v %v has been deleted successfully\n", lock.LockID, lock.Resource) return nil } diff --git a/next/models/storage_test.go b/next/models/storage_test.go deleted file mode 100644 index a89666afc..000000000 --- a/next/models/storage_test.go +++ /dev/null @@ -1,167 +0,0 @@ -package models - -import ( - "github.com/diggerhq/digger/libs/scheduler" - "github.com/stretchr/testify/assert" - 
"gorm.io/driver/sqlite" - "gorm.io/gorm" - "gorm.io/gorm/logger" - "log" - "os" - "strings" - "testing" -) - -func setupSuite(tb testing.TB) (func(tb testing.TB), *Database, *Organisation) { - log.Println("setup suite") - - // database file name - dbName := "database_storage_test.db" - - // remove old database - e := os.Remove(dbName) - if e != nil { - if !strings.Contains(e.Error(), "no such file or directory") { - log.Fatal(e) - } - } - - // open and create a new database - gdb, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{ - Logger: logger.Default.LogMode(logger.Silent), - }) - if err != nil { - log.Fatal(err) - } - - // migrate tables - err = gdb.AutoMigrate(&Policy{}, &Organisation{}, &Repo{}, &Project{}, &Token{}, - &User{}, &ProjectRun{}, &GithubAppInstallation{}, &GithubApp{}, &GithubAppInstallationLink{}, - &GithubDiggerJobLink{}, &DiggerJob{}, &DiggerJobParentLink{}) - if err != nil { - log.Fatal(err) - } - - database := &Database{GormDB: gdb} - DB = database - - // create an org - orgTenantId := "11111111-1111-1111-1111-111111111111" - externalSource := "test" - orgName := "testOrg" - org, err := database.CreateOrganisation(orgName, externalSource, orgTenantId) - if err != nil { - log.Fatal(err) - } - - DB = database - // Return a function to teardown the test - return func(tb testing.TB) { - log.Println("teardown suite") - err = os.Remove(dbName) - if err != nil { - log.Fatal(err) - } - }, database, org -} - -func init() { - log.SetOutput(os.Stdout) - log.SetFlags(log.Ldate | log.Ltime | log.Lshortfile) -} - -func TestCreateGithubInstallationLink(t *testing.T) { - teardownSuite, _, org := setupSuite(t) - defer teardownSuite(t) - - installationId := int64(1) - - link, err := DB.CreateGithubInstallationLink(org, installationId) - assert.NoError(t, err) - assert.NotNil(t, link) - - link2, err := DB.CreateGithubInstallationLink(org, installationId) - assert.NoError(t, err) - assert.NotNil(t, link2) - assert.Equal(t, link.ID, link2.ID) -} - -func 
TestGithubRepoAdded(t *testing.T) { - teardownSuite, _, _ := setupSuite(t) - defer teardownSuite(t) - - installationId := int64(1) - appId := int64(1) - accountId := int64(1) - login := "test" - repoFullName := "test/test" - - i, err := DB.GithubRepoAdded(installationId, appId, login, accountId, repoFullName) - assert.NoError(t, err) - assert.NotNil(t, i) - - i2, err := DB.GithubRepoAdded(installationId, appId, login, accountId, repoFullName) - assert.NoError(t, err) - assert.NotNil(t, i) - assert.Equal(t, i.ID, i2.ID) - assert.Equal(t, GithubAppInstallActive, i.Status) -} - -func TestGithubRepoRemoved(t *testing.T) { - teardownSuite, _, _ := setupSuite(t) - defer teardownSuite(t) - - installationId := int64(1) - appId := int64(1) - accountId := int64(1) - login := "test" - repoFullName := "test/test" - - i, err := DB.GithubRepoAdded(installationId, appId, login, accountId, repoFullName) - assert.NoError(t, err) - assert.NotNil(t, i) - - i, err = DB.GithubRepoRemoved(installationId, appId, repoFullName) - assert.NoError(t, err) - assert.NotNil(t, i) - assert.Equal(t, GithubAppInstallDeleted, i.Status) - - i2, err := DB.GithubRepoAdded(installationId, appId, login, accountId, repoFullName) - assert.NoError(t, err) - assert.NotNil(t, i) - assert.Equal(t, i.ID, i2.ID) - assert.Equal(t, GithubAppInstallDeleted, i.Status) -} - -func TestGetDiggerJobsForBatchPreloadsSummary(t *testing.T) { - teardownSuite, _, _ := setupSuite(t) - defer teardownSuite(t) - - prNumber := 123 - repoName := "test" - repoOwner := "test" - repoFullName := "test/test" - diggerconfig := "" - branchName := "main" - batchType := scheduler.DiggerCommandPlan - commentId := int64(123) - jobSpec := "abc" - - resourcesCreated := uint(1) - resourcesUpdated := uint(2) - resourcesDeleted := uint(3) - - batch, err := DB.CreateDiggerBatch(DiggerVCSGithub, 123, repoOwner, repoName, repoFullName, prNumber, diggerconfig, branchName, batchType, &commentId, 0) - assert.NoError(t, err) - - job, err := 
DB.CreateDiggerJob(batch.ID, []byte(jobSpec), "workflow_file.yml") - assert.NoError(t, err) - - job, err = DB.UpdateDiggerJobSummary(job.DiggerJobID, resourcesCreated, resourcesUpdated, resourcesDeleted) - assert.NoError(t, err) - - jobssss, err := DB.GetDiggerJobsForBatch(batch.ID) - assert.Equal(t, jobssss[0].DiggerJobSummary.ResourcesCreated, resourcesCreated) - assert.Equal(t, jobssss[0].DiggerJobSummary.ResourcesUpdated, resourcesUpdated) - assert.Equal(t, jobssss[0].DiggerJobSummary.ResourcesDeleted, resourcesDeleted) -} diff --git a/next/models/user.go b/next/models/user.go deleted file mode 100644 index 5b0361509..000000000 --- a/next/models/user.go +++ /dev/null @@ -1,8 +0,0 @@ -package models - -import "gorm.io/gorm" - -type User struct { - gorm.Model - Username string `gorm:"uniqueIndex:idx_user"` -} diff --git a/next/models_generated/account_delete_tokens.gen.go b/next/models_generated/account_delete_tokens.gen.go new file mode 100644 index 000000000..4a57864fe --- /dev/null +++ b/next/models_generated/account_delete_tokens.gen.go @@ -0,0 +1,384 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newAccountDeleteToken(db *gorm.DB, opts ...gen.DOOption) accountDeleteToken { + _accountDeleteToken := accountDeleteToken{} + + _accountDeleteToken.accountDeleteTokenDo.UseDB(db, opts...) 
+ _accountDeleteToken.accountDeleteTokenDo.UseModel(&model.AccountDeleteToken{}) + + tableName := _accountDeleteToken.accountDeleteTokenDo.TableName() + _accountDeleteToken.ALL = field.NewAsterisk(tableName) + _accountDeleteToken.Token = field.NewString(tableName, "token") + _accountDeleteToken.UserID = field.NewString(tableName, "user_id") + + _accountDeleteToken.fillFieldMap() + + return _accountDeleteToken +} + +type accountDeleteToken struct { + accountDeleteTokenDo + + ALL field.Asterisk + Token field.String + UserID field.String + + fieldMap map[string]field.Expr +} + +func (a accountDeleteToken) Table(newTableName string) *accountDeleteToken { + a.accountDeleteTokenDo.UseTable(newTableName) + return a.updateTableName(newTableName) +} + +func (a accountDeleteToken) As(alias string) *accountDeleteToken { + a.accountDeleteTokenDo.DO = *(a.accountDeleteTokenDo.As(alias).(*gen.DO)) + return a.updateTableName(alias) +} + +func (a *accountDeleteToken) updateTableName(table string) *accountDeleteToken { + a.ALL = field.NewAsterisk(table) + a.Token = field.NewString(table, "token") + a.UserID = field.NewString(table, "user_id") + + a.fillFieldMap() + + return a +} + +func (a *accountDeleteToken) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := a.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (a *accountDeleteToken) fillFieldMap() { + a.fieldMap = make(map[string]field.Expr, 2) + a.fieldMap["token"] = a.Token + a.fieldMap["user_id"] = a.UserID +} + +func (a accountDeleteToken) clone(db *gorm.DB) accountDeleteToken { + a.accountDeleteTokenDo.ReplaceConnPool(db.Statement.ConnPool) + return a +} + +func (a accountDeleteToken) replaceDB(db *gorm.DB) accountDeleteToken { + a.accountDeleteTokenDo.ReplaceDB(db) + return a +} + +type accountDeleteTokenDo struct{ gen.DO } + +type IAccountDeleteTokenDo interface { + gen.SubQuery + Debug() IAccountDeleteTokenDo + 
WithContext(ctx context.Context) IAccountDeleteTokenDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IAccountDeleteTokenDo + WriteDB() IAccountDeleteTokenDo + As(alias string) gen.Dao + Session(config *gorm.Session) IAccountDeleteTokenDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IAccountDeleteTokenDo + Not(conds ...gen.Condition) IAccountDeleteTokenDo + Or(conds ...gen.Condition) IAccountDeleteTokenDo + Select(conds ...field.Expr) IAccountDeleteTokenDo + Where(conds ...gen.Condition) IAccountDeleteTokenDo + Order(conds ...field.Expr) IAccountDeleteTokenDo + Distinct(cols ...field.Expr) IAccountDeleteTokenDo + Omit(cols ...field.Expr) IAccountDeleteTokenDo + Join(table schema.Tabler, on ...field.Expr) IAccountDeleteTokenDo + LeftJoin(table schema.Tabler, on ...field.Expr) IAccountDeleteTokenDo + RightJoin(table schema.Tabler, on ...field.Expr) IAccountDeleteTokenDo + Group(cols ...field.Expr) IAccountDeleteTokenDo + Having(conds ...gen.Condition) IAccountDeleteTokenDo + Limit(limit int) IAccountDeleteTokenDo + Offset(offset int) IAccountDeleteTokenDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IAccountDeleteTokenDo + Unscoped() IAccountDeleteTokenDo + Create(values ...*model.AccountDeleteToken) error + CreateInBatches(values []*model.AccountDeleteToken, batchSize int) error + Save(values ...*model.AccountDeleteToken) error + First() (*model.AccountDeleteToken, error) + Take() (*model.AccountDeleteToken, error) + Last() (*model.AccountDeleteToken, error) + Find() ([]*model.AccountDeleteToken, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.AccountDeleteToken, err error) + FindInBatches(result *[]*model.AccountDeleteToken, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.AccountDeleteToken) (info gen.ResultInfo, err error) + Update(column 
field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IAccountDeleteTokenDo + Assign(attrs ...field.AssignExpr) IAccountDeleteTokenDo + Joins(fields ...field.RelationField) IAccountDeleteTokenDo + Preload(fields ...field.RelationField) IAccountDeleteTokenDo + FirstOrInit() (*model.AccountDeleteToken, error) + FirstOrCreate() (*model.AccountDeleteToken, error) + FindByPage(offset int, limit int) (result []*model.AccountDeleteToken, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IAccountDeleteTokenDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (a accountDeleteTokenDo) Debug() IAccountDeleteTokenDo { + return a.withDO(a.DO.Debug()) +} + +func (a accountDeleteTokenDo) WithContext(ctx context.Context) IAccountDeleteTokenDo { + return a.withDO(a.DO.WithContext(ctx)) +} + +func (a accountDeleteTokenDo) ReadDB() IAccountDeleteTokenDo { + return a.Clauses(dbresolver.Read) +} + +func (a accountDeleteTokenDo) WriteDB() IAccountDeleteTokenDo { + return a.Clauses(dbresolver.Write) +} + +func (a accountDeleteTokenDo) Session(config *gorm.Session) IAccountDeleteTokenDo { + return a.withDO(a.DO.Session(config)) +} + +func (a accountDeleteTokenDo) Clauses(conds ...clause.Expression) IAccountDeleteTokenDo { + return a.withDO(a.DO.Clauses(conds...)) +} + +func (a accountDeleteTokenDo) Returning(value interface{}, columns ...string) IAccountDeleteTokenDo { + return 
a.withDO(a.DO.Returning(value, columns...)) +} + +func (a accountDeleteTokenDo) Not(conds ...gen.Condition) IAccountDeleteTokenDo { + return a.withDO(a.DO.Not(conds...)) +} + +func (a accountDeleteTokenDo) Or(conds ...gen.Condition) IAccountDeleteTokenDo { + return a.withDO(a.DO.Or(conds...)) +} + +func (a accountDeleteTokenDo) Select(conds ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Select(conds...)) +} + +func (a accountDeleteTokenDo) Where(conds ...gen.Condition) IAccountDeleteTokenDo { + return a.withDO(a.DO.Where(conds...)) +} + +func (a accountDeleteTokenDo) Order(conds ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Order(conds...)) +} + +func (a accountDeleteTokenDo) Distinct(cols ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Distinct(cols...)) +} + +func (a accountDeleteTokenDo) Omit(cols ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Omit(cols...)) +} + +func (a accountDeleteTokenDo) Join(table schema.Tabler, on ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Join(table, on...)) +} + +func (a accountDeleteTokenDo) LeftJoin(table schema.Tabler, on ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.LeftJoin(table, on...)) +} + +func (a accountDeleteTokenDo) RightJoin(table schema.Tabler, on ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.RightJoin(table, on...)) +} + +func (a accountDeleteTokenDo) Group(cols ...field.Expr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Group(cols...)) +} + +func (a accountDeleteTokenDo) Having(conds ...gen.Condition) IAccountDeleteTokenDo { + return a.withDO(a.DO.Having(conds...)) +} + +func (a accountDeleteTokenDo) Limit(limit int) IAccountDeleteTokenDo { + return a.withDO(a.DO.Limit(limit)) +} + +func (a accountDeleteTokenDo) Offset(offset int) IAccountDeleteTokenDo { + return a.withDO(a.DO.Offset(offset)) +} + +func (a accountDeleteTokenDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IAccountDeleteTokenDo { + return 
a.withDO(a.DO.Scopes(funcs...)) +} + +func (a accountDeleteTokenDo) Unscoped() IAccountDeleteTokenDo { + return a.withDO(a.DO.Unscoped()) +} + +func (a accountDeleteTokenDo) Create(values ...*model.AccountDeleteToken) error { + if len(values) == 0 { + return nil + } + return a.DO.Create(values) +} + +func (a accountDeleteTokenDo) CreateInBatches(values []*model.AccountDeleteToken, batchSize int) error { + return a.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (a accountDeleteTokenDo) Save(values ...*model.AccountDeleteToken) error { + if len(values) == 0 { + return nil + } + return a.DO.Save(values) +} + +func (a accountDeleteTokenDo) First() (*model.AccountDeleteToken, error) { + if result, err := a.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.AccountDeleteToken), nil + } +} + +func (a accountDeleteTokenDo) Take() (*model.AccountDeleteToken, error) { + if result, err := a.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.AccountDeleteToken), nil + } +} + +func (a accountDeleteTokenDo) Last() (*model.AccountDeleteToken, error) { + if result, err := a.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.AccountDeleteToken), nil + } +} + +func (a accountDeleteTokenDo) Find() ([]*model.AccountDeleteToken, error) { + result, err := a.DO.Find() + return result.([]*model.AccountDeleteToken), err +} + +func (a accountDeleteTokenDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.AccountDeleteToken, err error) { + buf := make([]*model.AccountDeleteToken, 0, batchSize) + err = a.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (a accountDeleteTokenDo) FindInBatches(result *[]*model.AccountDeleteToken, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return a.DO.FindInBatches(result, batchSize, fc) +} + +func (a accountDeleteTokenDo) Attrs(attrs ...field.AssignExpr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Attrs(attrs...)) +} + +func (a accountDeleteTokenDo) Assign(attrs ...field.AssignExpr) IAccountDeleteTokenDo { + return a.withDO(a.DO.Assign(attrs...)) +} + +func (a accountDeleteTokenDo) Joins(fields ...field.RelationField) IAccountDeleteTokenDo { + for _, _f := range fields { + a = *a.withDO(a.DO.Joins(_f)) + } + return &a +} + +func (a accountDeleteTokenDo) Preload(fields ...field.RelationField) IAccountDeleteTokenDo { + for _, _f := range fields { + a = *a.withDO(a.DO.Preload(_f)) + } + return &a +} + +func (a accountDeleteTokenDo) FirstOrInit() (*model.AccountDeleteToken, error) { + if result, err := a.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.AccountDeleteToken), nil + } +} + +func (a accountDeleteTokenDo) FirstOrCreate() (*model.AccountDeleteToken, error) { + if result, err := a.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.AccountDeleteToken), nil + } +} + +func (a accountDeleteTokenDo) FindByPage(offset int, limit int) (result []*model.AccountDeleteToken, count int64, err error) { + result, err = a.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = a.Offset(-1).Limit(-1).Count() + return +} + +func (a accountDeleteTokenDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = a.Count() + if err != nil { + return + } + + err = a.Offset(offset).Limit(limit).Scan(result) + return +} + +func (a accountDeleteTokenDo) Scan(result 
interface{}) (err error) { + return a.DO.Scan(result) +} + +func (a accountDeleteTokenDo) Delete(models ...*model.AccountDeleteToken) (result gen.ResultInfo, err error) { + return a.DO.Delete(models) +} + +func (a *accountDeleteTokenDo) withDO(do gen.Dao) *accountDeleteTokenDo { + a.DO = *do.(*gen.DO) + return a +} diff --git a/next/models_generated/chats.gen.go b/next/models_generated/chats.gen.go new file mode 100644 index 000000000..91c778579 --- /dev/null +++ b/next/models_generated/chats.gen.go @@ -0,0 +1,396 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newChat(db *gorm.DB, opts ...gen.DOOption) chat { + _chat := chat{} + + _chat.chatDo.UseDB(db, opts...) 
+ _chat.chatDo.UseModel(&model.Chat{}) + + tableName := _chat.chatDo.TableName() + _chat.ALL = field.NewAsterisk(tableName) + _chat.ID = field.NewString(tableName, "id") + _chat.UserID = field.NewString(tableName, "user_id") + _chat.Payload = field.NewString(tableName, "payload") + _chat.CreatedAt = field.NewTime(tableName, "created_at") + _chat.ProjectID = field.NewString(tableName, "project_id") + + _chat.fillFieldMap() + + return _chat +} + +type chat struct { + chatDo + + ALL field.Asterisk + ID field.String + UserID field.String + Payload field.String + CreatedAt field.Time + ProjectID field.String + + fieldMap map[string]field.Expr +} + +func (c chat) Table(newTableName string) *chat { + c.chatDo.UseTable(newTableName) + return c.updateTableName(newTableName) +} + +func (c chat) As(alias string) *chat { + c.chatDo.DO = *(c.chatDo.As(alias).(*gen.DO)) + return c.updateTableName(alias) +} + +func (c *chat) updateTableName(table string) *chat { + c.ALL = field.NewAsterisk(table) + c.ID = field.NewString(table, "id") + c.UserID = field.NewString(table, "user_id") + c.Payload = field.NewString(table, "payload") + c.CreatedAt = field.NewTime(table, "created_at") + c.ProjectID = field.NewString(table, "project_id") + + c.fillFieldMap() + + return c +} + +func (c *chat) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := c.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (c *chat) fillFieldMap() { + c.fieldMap = make(map[string]field.Expr, 5) + c.fieldMap["id"] = c.ID + c.fieldMap["user_id"] = c.UserID + c.fieldMap["payload"] = c.Payload + c.fieldMap["created_at"] = c.CreatedAt + c.fieldMap["project_id"] = c.ProjectID +} + +func (c chat) clone(db *gorm.DB) chat { + c.chatDo.ReplaceConnPool(db.Statement.ConnPool) + return c +} + +func (c chat) replaceDB(db *gorm.DB) chat { + c.chatDo.ReplaceDB(db) + return c +} + +type chatDo struct{ gen.DO } + +type IChatDo interface { 
+ gen.SubQuery + Debug() IChatDo + WithContext(ctx context.Context) IChatDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IChatDo + WriteDB() IChatDo + As(alias string) gen.Dao + Session(config *gorm.Session) IChatDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IChatDo + Not(conds ...gen.Condition) IChatDo + Or(conds ...gen.Condition) IChatDo + Select(conds ...field.Expr) IChatDo + Where(conds ...gen.Condition) IChatDo + Order(conds ...field.Expr) IChatDo + Distinct(cols ...field.Expr) IChatDo + Omit(cols ...field.Expr) IChatDo + Join(table schema.Tabler, on ...field.Expr) IChatDo + LeftJoin(table schema.Tabler, on ...field.Expr) IChatDo + RightJoin(table schema.Tabler, on ...field.Expr) IChatDo + Group(cols ...field.Expr) IChatDo + Having(conds ...gen.Condition) IChatDo + Limit(limit int) IChatDo + Offset(offset int) IChatDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IChatDo + Unscoped() IChatDo + Create(values ...*model.Chat) error + CreateInBatches(values []*model.Chat, batchSize int) error + Save(values ...*model.Chat) error + First() (*model.Chat, error) + Take() (*model.Chat, error) + Last() (*model.Chat, error) + Find() ([]*model.Chat, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Chat, err error) + FindInBatches(result *[]*model.Chat, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Chat) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value 
interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IChatDo + Assign(attrs ...field.AssignExpr) IChatDo + Joins(fields ...field.RelationField) IChatDo + Preload(fields ...field.RelationField) IChatDo + FirstOrInit() (*model.Chat, error) + FirstOrCreate() (*model.Chat, error) + FindByPage(offset int, limit int) (result []*model.Chat, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IChatDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (c chatDo) Debug() IChatDo { + return c.withDO(c.DO.Debug()) +} + +func (c chatDo) WithContext(ctx context.Context) IChatDo { + return c.withDO(c.DO.WithContext(ctx)) +} + +func (c chatDo) ReadDB() IChatDo { + return c.Clauses(dbresolver.Read) +} + +func (c chatDo) WriteDB() IChatDo { + return c.Clauses(dbresolver.Write) +} + +func (c chatDo) Session(config *gorm.Session) IChatDo { + return c.withDO(c.DO.Session(config)) +} + +func (c chatDo) Clauses(conds ...clause.Expression) IChatDo { + return c.withDO(c.DO.Clauses(conds...)) +} + +func (c chatDo) Returning(value interface{}, columns ...string) IChatDo { + return c.withDO(c.DO.Returning(value, columns...)) +} + +func (c chatDo) Not(conds ...gen.Condition) IChatDo { + return c.withDO(c.DO.Not(conds...)) +} + +func (c chatDo) Or(conds ...gen.Condition) IChatDo { + return c.withDO(c.DO.Or(conds...)) +} + +func (c chatDo) Select(conds ...field.Expr) IChatDo { + return c.withDO(c.DO.Select(conds...)) +} + +func (c chatDo) Where(conds ...gen.Condition) IChatDo { + return c.withDO(c.DO.Where(conds...)) +} + +func (c chatDo) Order(conds ...field.Expr) IChatDo { + return c.withDO(c.DO.Order(conds...)) +} + +func (c chatDo) Distinct(cols ...field.Expr) IChatDo { + return c.withDO(c.DO.Distinct(cols...)) +} + +func (c chatDo) Omit(cols ...field.Expr) IChatDo { + return 
c.withDO(c.DO.Omit(cols...)) +} + +func (c chatDo) Join(table schema.Tabler, on ...field.Expr) IChatDo { + return c.withDO(c.DO.Join(table, on...)) +} + +func (c chatDo) LeftJoin(table schema.Tabler, on ...field.Expr) IChatDo { + return c.withDO(c.DO.LeftJoin(table, on...)) +} + +func (c chatDo) RightJoin(table schema.Tabler, on ...field.Expr) IChatDo { + return c.withDO(c.DO.RightJoin(table, on...)) +} + +func (c chatDo) Group(cols ...field.Expr) IChatDo { + return c.withDO(c.DO.Group(cols...)) +} + +func (c chatDo) Having(conds ...gen.Condition) IChatDo { + return c.withDO(c.DO.Having(conds...)) +} + +func (c chatDo) Limit(limit int) IChatDo { + return c.withDO(c.DO.Limit(limit)) +} + +func (c chatDo) Offset(offset int) IChatDo { + return c.withDO(c.DO.Offset(offset)) +} + +func (c chatDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IChatDo { + return c.withDO(c.DO.Scopes(funcs...)) +} + +func (c chatDo) Unscoped() IChatDo { + return c.withDO(c.DO.Unscoped()) +} + +func (c chatDo) Create(values ...*model.Chat) error { + if len(values) == 0 { + return nil + } + return c.DO.Create(values) +} + +func (c chatDo) CreateInBatches(values []*model.Chat, batchSize int) error { + return c.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (c chatDo) Save(values ...*model.Chat) error { + if len(values) == 0 { + return nil + } + return c.DO.Save(values) +} + +func (c chatDo) First() (*model.Chat, error) { + if result, err := c.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Chat), nil + } +} + +func (c chatDo) Take() (*model.Chat, error) { + if result, err := c.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Chat), nil + } +} + +func (c chatDo) Last() (*model.Chat, error) { + if result, err := c.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Chat), nil + } +} + +func (c chatDo) Find() ([]*model.Chat, error) { + result, err := c.DO.Find() + return result.([]*model.Chat), err +} + +func (c chatDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Chat, err error) { + buf := make([]*model.Chat, 0, batchSize) + err = c.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (c chatDo) FindInBatches(result *[]*model.Chat, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return c.DO.FindInBatches(result, batchSize, fc) +} + +func (c chatDo) Attrs(attrs ...field.AssignExpr) IChatDo { + return c.withDO(c.DO.Attrs(attrs...)) +} + +func (c chatDo) Assign(attrs ...field.AssignExpr) IChatDo { + return c.withDO(c.DO.Assign(attrs...)) +} + +func (c chatDo) Joins(fields ...field.RelationField) IChatDo { + for _, _f := range fields { + c = *c.withDO(c.DO.Joins(_f)) + } + return &c +} + +func (c chatDo) Preload(fields ...field.RelationField) IChatDo { + for _, _f := range fields { + c = *c.withDO(c.DO.Preload(_f)) + } + return &c +} + +func (c chatDo) FirstOrInit() (*model.Chat, error) { + if result, err := c.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Chat), nil + } +} + +func (c chatDo) FirstOrCreate() (*model.Chat, error) { + if result, err := c.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Chat), nil + } +} + +func (c chatDo) FindByPage(offset int, limit int) (result []*model.Chat, count int64, err error) { + result, err = c.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = c.Offset(-1).Limit(-1).Count() + return +} + +func (c chatDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = c.Count() + if err != nil { + return + } + + err = c.Offset(offset).Limit(limit).Scan(result) + return +} + +func (c chatDo) Scan(result interface{}) (err error) { + return c.DO.Scan(result) +} + +func (c chatDo) Delete(models ...*model.Chat) (result gen.ResultInfo, err error) { + return c.DO.Delete(models) +} + +func (c *chatDo) withDO(do gen.Dao) *chatDo { + c.DO = *do.(*gen.DO) + return c +} diff --git 
a/next/models_generated/customers.gen.go b/next/models_generated/customers.gen.go new file mode 100644 index 000000000..359f3191e --- /dev/null +++ b/next/models_generated/customers.gen.go @@ -0,0 +1,384 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newCustomer(db *gorm.DB, opts ...gen.DOOption) customer { + _customer := customer{} + + _customer.customerDo.UseDB(db, opts...) + _customer.customerDo.UseModel(&model.Customer{}) + + tableName := _customer.customerDo.TableName() + _customer.ALL = field.NewAsterisk(tableName) + _customer.StripeCustomerID = field.NewString(tableName, "stripe_customer_id") + _customer.OrganizationID = field.NewString(tableName, "organization_id") + + _customer.fillFieldMap() + + return _customer +} + +type customer struct { + customerDo + + ALL field.Asterisk + StripeCustomerID field.String + OrganizationID field.String + + fieldMap map[string]field.Expr +} + +func (c customer) Table(newTableName string) *customer { + c.customerDo.UseTable(newTableName) + return c.updateTableName(newTableName) +} + +func (c customer) As(alias string) *customer { + c.customerDo.DO = *(c.customerDo.As(alias).(*gen.DO)) + return c.updateTableName(alias) +} + +func (c *customer) updateTableName(table string) *customer { + c.ALL = field.NewAsterisk(table) + c.StripeCustomerID = field.NewString(table, "stripe_customer_id") + c.OrganizationID = field.NewString(table, "organization_id") + + c.fillFieldMap() + + return c +} + +func (c *customer) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := c.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + 
return _oe, ok +} + +func (c *customer) fillFieldMap() { + c.fieldMap = make(map[string]field.Expr, 2) + c.fieldMap["stripe_customer_id"] = c.StripeCustomerID + c.fieldMap["organization_id"] = c.OrganizationID +} + +func (c customer) clone(db *gorm.DB) customer { + c.customerDo.ReplaceConnPool(db.Statement.ConnPool) + return c +} + +func (c customer) replaceDB(db *gorm.DB) customer { + c.customerDo.ReplaceDB(db) + return c +} + +type customerDo struct{ gen.DO } + +type ICustomerDo interface { + gen.SubQuery + Debug() ICustomerDo + WithContext(ctx context.Context) ICustomerDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() ICustomerDo + WriteDB() ICustomerDo + As(alias string) gen.Dao + Session(config *gorm.Session) ICustomerDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) ICustomerDo + Not(conds ...gen.Condition) ICustomerDo + Or(conds ...gen.Condition) ICustomerDo + Select(conds ...field.Expr) ICustomerDo + Where(conds ...gen.Condition) ICustomerDo + Order(conds ...field.Expr) ICustomerDo + Distinct(cols ...field.Expr) ICustomerDo + Omit(cols ...field.Expr) ICustomerDo + Join(table schema.Tabler, on ...field.Expr) ICustomerDo + LeftJoin(table schema.Tabler, on ...field.Expr) ICustomerDo + RightJoin(table schema.Tabler, on ...field.Expr) ICustomerDo + Group(cols ...field.Expr) ICustomerDo + Having(conds ...gen.Condition) ICustomerDo + Limit(limit int) ICustomerDo + Offset(offset int) ICustomerDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) ICustomerDo + Unscoped() ICustomerDo + Create(values ...*model.Customer) error + CreateInBatches(values []*model.Customer, batchSize int) error + Save(values ...*model.Customer) error + First() (*model.Customer, error) + Take() (*model.Customer, error) + Last() (*model.Customer, error) + Find() ([]*model.Customer, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Customer, err error) + 
FindInBatches(result *[]*model.Customer, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Customer) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) ICustomerDo + Assign(attrs ...field.AssignExpr) ICustomerDo + Joins(fields ...field.RelationField) ICustomerDo + Preload(fields ...field.RelationField) ICustomerDo + FirstOrInit() (*model.Customer, error) + FirstOrCreate() (*model.Customer, error) + FindByPage(offset int, limit int) (result []*model.Customer, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) ICustomerDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (c customerDo) Debug() ICustomerDo { + return c.withDO(c.DO.Debug()) +} + +func (c customerDo) WithContext(ctx context.Context) ICustomerDo { + return c.withDO(c.DO.WithContext(ctx)) +} + +func (c customerDo) ReadDB() ICustomerDo { + return c.Clauses(dbresolver.Read) +} + +func (c customerDo) WriteDB() ICustomerDo { + return c.Clauses(dbresolver.Write) +} + +func (c customerDo) Session(config *gorm.Session) ICustomerDo { + return c.withDO(c.DO.Session(config)) +} + +func (c customerDo) Clauses(conds ...clause.Expression) ICustomerDo { + return c.withDO(c.DO.Clauses(conds...)) +} + +func (c customerDo) Returning(value interface{}, columns ...string) ICustomerDo { + return 
c.withDO(c.DO.Returning(value, columns...)) +} + +func (c customerDo) Not(conds ...gen.Condition) ICustomerDo { + return c.withDO(c.DO.Not(conds...)) +} + +func (c customerDo) Or(conds ...gen.Condition) ICustomerDo { + return c.withDO(c.DO.Or(conds...)) +} + +func (c customerDo) Select(conds ...field.Expr) ICustomerDo { + return c.withDO(c.DO.Select(conds...)) +} + +func (c customerDo) Where(conds ...gen.Condition) ICustomerDo { + return c.withDO(c.DO.Where(conds...)) +} + +func (c customerDo) Order(conds ...field.Expr) ICustomerDo { + return c.withDO(c.DO.Order(conds...)) +} + +func (c customerDo) Distinct(cols ...field.Expr) ICustomerDo { + return c.withDO(c.DO.Distinct(cols...)) +} + +func (c customerDo) Omit(cols ...field.Expr) ICustomerDo { + return c.withDO(c.DO.Omit(cols...)) +} + +func (c customerDo) Join(table schema.Tabler, on ...field.Expr) ICustomerDo { + return c.withDO(c.DO.Join(table, on...)) +} + +func (c customerDo) LeftJoin(table schema.Tabler, on ...field.Expr) ICustomerDo { + return c.withDO(c.DO.LeftJoin(table, on...)) +} + +func (c customerDo) RightJoin(table schema.Tabler, on ...field.Expr) ICustomerDo { + return c.withDO(c.DO.RightJoin(table, on...)) +} + +func (c customerDo) Group(cols ...field.Expr) ICustomerDo { + return c.withDO(c.DO.Group(cols...)) +} + +func (c customerDo) Having(conds ...gen.Condition) ICustomerDo { + return c.withDO(c.DO.Having(conds...)) +} + +func (c customerDo) Limit(limit int) ICustomerDo { + return c.withDO(c.DO.Limit(limit)) +} + +func (c customerDo) Offset(offset int) ICustomerDo { + return c.withDO(c.DO.Offset(offset)) +} + +func (c customerDo) Scopes(funcs ...func(gen.Dao) gen.Dao) ICustomerDo { + return c.withDO(c.DO.Scopes(funcs...)) +} + +func (c customerDo) Unscoped() ICustomerDo { + return c.withDO(c.DO.Unscoped()) +} + +func (c customerDo) Create(values ...*model.Customer) error { + if len(values) == 0 { + return nil + } + return c.DO.Create(values) +} + +func (c customerDo) CreateInBatches(values 
[]*model.Customer, batchSize int) error { + return c.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (c customerDo) Save(values ...*model.Customer) error { + if len(values) == 0 { + return nil + } + return c.DO.Save(values) +} + +func (c customerDo) First() (*model.Customer, error) { + if result, err := c.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Customer), nil + } +} + +func (c customerDo) Take() (*model.Customer, error) { + if result, err := c.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Customer), nil + } +} + +func (c customerDo) Last() (*model.Customer, error) { + if result, err := c.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Customer), nil + } +} + +func (c customerDo) Find() ([]*model.Customer, error) { + result, err := c.DO.Find() + return result.([]*model.Customer), err +} + +func (c customerDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Customer, err error) { + buf := make([]*model.Customer, 0, batchSize) + err = c.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (c customerDo) FindInBatches(result *[]*model.Customer, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return c.DO.FindInBatches(result, batchSize, fc) +} + +func (c customerDo) Attrs(attrs ...field.AssignExpr) ICustomerDo { + return c.withDO(c.DO.Attrs(attrs...)) +} + +func (c customerDo) Assign(attrs ...field.AssignExpr) ICustomerDo { + return c.withDO(c.DO.Assign(attrs...)) +} + +func (c customerDo) Joins(fields ...field.RelationField) ICustomerDo { + for _, _f := range fields { + c = *c.withDO(c.DO.Joins(_f)) + } + return &c +} + +func (c customerDo) Preload(fields ...field.RelationField) ICustomerDo { + for _, _f := range fields { + c = *c.withDO(c.DO.Preload(_f)) + } + return &c +} + +func (c customerDo) FirstOrInit() (*model.Customer, error) { + if result, err := c.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Customer), nil + } +} + +func (c customerDo) FirstOrCreate() (*model.Customer, error) { + if result, err := c.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Customer), nil + } +} + +func (c customerDo) FindByPage(offset int, limit int) (result []*model.Customer, count int64, err error) { + result, err = c.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = c.Offset(-1).Limit(-1).Count() + return +} + +func (c customerDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = c.Count() + if err != nil { + return + } + + err = c.Offset(offset).Limit(limit).Scan(result) + return +} + +func (c customerDo) Scan(result interface{}) (err error) { + return c.DO.Scan(result) +} + +func (c customerDo) Delete(models ...*model.Customer) (result gen.ResultInfo, err error) { + return c.DO.Delete(models) +} + +func (c *customerDo) 
withDO(do gen.Dao) *customerDo { + c.DO = *do.(*gen.DO) + return c +} diff --git a/next/models_generated/digger_batches.gen.go b/next/models_generated/digger_batches.gen.go new file mode 100644 index 000000000..f07037d30 --- /dev/null +++ b/next/models_generated/digger_batches.gen.go @@ -0,0 +1,432 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerBatch(db *gorm.DB, opts ...gen.DOOption) diggerBatch { + _diggerBatch := diggerBatch{} + + _diggerBatch.diggerBatchDo.UseDB(db, opts...) + _diggerBatch.diggerBatchDo.UseModel(&model.DiggerBatch{}) + + tableName := _diggerBatch.diggerBatchDo.TableName() + _diggerBatch.ALL = field.NewAsterisk(tableName) + _diggerBatch.ID = field.NewString(tableName, "id") + _diggerBatch.PrNumber = field.NewInt64(tableName, "pr_number") + _diggerBatch.Status = field.NewInt16(tableName, "status") + _diggerBatch.BranchName = field.NewString(tableName, "branch_name") + _diggerBatch.DiggerConfig = field.NewString(tableName, "digger_config") + _diggerBatch.GithubInstallationID = field.NewInt64(tableName, "github_installation_id") + _diggerBatch.RepoFullName = field.NewString(tableName, "repo_full_name") + _diggerBatch.RepoOwner = field.NewString(tableName, "repo_owner") + _diggerBatch.RepoName = field.NewString(tableName, "repo_name") + _diggerBatch.BatchType = field.NewString(tableName, "batch_type") + _diggerBatch.CommentID = field.NewInt64(tableName, "comment_id") + _diggerBatch.SourceDetails = field.NewField(tableName, "source_details") + _diggerBatch.Vcs = field.NewString(tableName, "vcs") + _diggerBatch.GitlabProjectID = field.NewInt64(tableName, "gitlab_project_id") + + 
_diggerBatch.fillFieldMap() + + return _diggerBatch +} + +type diggerBatch struct { + diggerBatchDo + + ALL field.Asterisk + ID field.String + PrNumber field.Int64 + Status field.Int16 + BranchName field.String + DiggerConfig field.String + GithubInstallationID field.Int64 + RepoFullName field.String + RepoOwner field.String + RepoName field.String + BatchType field.String + CommentID field.Int64 + SourceDetails field.Field + Vcs field.String + GitlabProjectID field.Int64 + + fieldMap map[string]field.Expr +} + +func (d diggerBatch) Table(newTableName string) *diggerBatch { + d.diggerBatchDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerBatch) As(alias string) *diggerBatch { + d.diggerBatchDo.DO = *(d.diggerBatchDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerBatch) updateTableName(table string) *diggerBatch { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewString(table, "id") + d.PrNumber = field.NewInt64(table, "pr_number") + d.Status = field.NewInt16(table, "status") + d.BranchName = field.NewString(table, "branch_name") + d.DiggerConfig = field.NewString(table, "digger_config") + d.GithubInstallationID = field.NewInt64(table, "github_installation_id") + d.RepoFullName = field.NewString(table, "repo_full_name") + d.RepoOwner = field.NewString(table, "repo_owner") + d.RepoName = field.NewString(table, "repo_name") + d.BatchType = field.NewString(table, "batch_type") + d.CommentID = field.NewInt64(table, "comment_id") + d.SourceDetails = field.NewField(table, "source_details") + d.Vcs = field.NewString(table, "vcs") + d.GitlabProjectID = field.NewInt64(table, "gitlab_project_id") + + d.fillFieldMap() + + return d +} + +func (d *diggerBatch) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerBatch) fillFieldMap() { + d.fieldMap = 
make(map[string]field.Expr, 14) + d.fieldMap["id"] = d.ID + d.fieldMap["pr_number"] = d.PrNumber + d.fieldMap["status"] = d.Status + d.fieldMap["branch_name"] = d.BranchName + d.fieldMap["digger_config"] = d.DiggerConfig + d.fieldMap["github_installation_id"] = d.GithubInstallationID + d.fieldMap["repo_full_name"] = d.RepoFullName + d.fieldMap["repo_owner"] = d.RepoOwner + d.fieldMap["repo_name"] = d.RepoName + d.fieldMap["batch_type"] = d.BatchType + d.fieldMap["comment_id"] = d.CommentID + d.fieldMap["source_details"] = d.SourceDetails + d.fieldMap["vcs"] = d.Vcs + d.fieldMap["gitlab_project_id"] = d.GitlabProjectID +} + +func (d diggerBatch) clone(db *gorm.DB) diggerBatch { + d.diggerBatchDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerBatch) replaceDB(db *gorm.DB) diggerBatch { + d.diggerBatchDo.ReplaceDB(db) + return d +} + +type diggerBatchDo struct{ gen.DO } + +type IDiggerBatchDo interface { + gen.SubQuery + Debug() IDiggerBatchDo + WithContext(ctx context.Context) IDiggerBatchDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerBatchDo + WriteDB() IDiggerBatchDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerBatchDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerBatchDo + Not(conds ...gen.Condition) IDiggerBatchDo + Or(conds ...gen.Condition) IDiggerBatchDo + Select(conds ...field.Expr) IDiggerBatchDo + Where(conds ...gen.Condition) IDiggerBatchDo + Order(conds ...field.Expr) IDiggerBatchDo + Distinct(cols ...field.Expr) IDiggerBatchDo + Omit(cols ...field.Expr) IDiggerBatchDo + Join(table schema.Tabler, on ...field.Expr) IDiggerBatchDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerBatchDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerBatchDo + Group(cols ...field.Expr) IDiggerBatchDo + Having(conds ...gen.Condition) IDiggerBatchDo + Limit(limit int) IDiggerBatchDo + Offset(offset int) IDiggerBatchDo + Count() 
(count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerBatchDo + Unscoped() IDiggerBatchDo + Create(values ...*model.DiggerBatch) error + CreateInBatches(values []*model.DiggerBatch, batchSize int) error + Save(values ...*model.DiggerBatch) error + First() (*model.DiggerBatch, error) + Take() (*model.DiggerBatch, error) + Last() (*model.DiggerBatch, error) + Find() ([]*model.DiggerBatch, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerBatch, err error) + FindInBatches(result *[]*model.DiggerBatch, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerBatch) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerBatchDo + Assign(attrs ...field.AssignExpr) IDiggerBatchDo + Joins(fields ...field.RelationField) IDiggerBatchDo + Preload(fields ...field.RelationField) IDiggerBatchDo + FirstOrInit() (*model.DiggerBatch, error) + FirstOrCreate() (*model.DiggerBatch, error) + FindByPage(offset int, limit int) (result []*model.DiggerBatch, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerBatchDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerBatchDo) Debug() IDiggerBatchDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerBatchDo) WithContext(ctx 
context.Context) IDiggerBatchDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerBatchDo) ReadDB() IDiggerBatchDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerBatchDo) WriteDB() IDiggerBatchDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerBatchDo) Session(config *gorm.Session) IDiggerBatchDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerBatchDo) Clauses(conds ...clause.Expression) IDiggerBatchDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerBatchDo) Returning(value interface{}, columns ...string) IDiggerBatchDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerBatchDo) Not(conds ...gen.Condition) IDiggerBatchDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerBatchDo) Or(conds ...gen.Condition) IDiggerBatchDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerBatchDo) Select(conds ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerBatchDo) Where(conds ...gen.Condition) IDiggerBatchDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerBatchDo) Order(conds ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerBatchDo) Distinct(cols ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerBatchDo) Omit(cols ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerBatchDo) Join(table schema.Tabler, on ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerBatchDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerBatchDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerBatchDo) Group(cols ...field.Expr) IDiggerBatchDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerBatchDo) 
Having(conds ...gen.Condition) IDiggerBatchDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerBatchDo) Limit(limit int) IDiggerBatchDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerBatchDo) Offset(offset int) IDiggerBatchDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerBatchDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerBatchDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerBatchDo) Unscoped() IDiggerBatchDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerBatchDo) Create(values ...*model.DiggerBatch) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerBatchDo) CreateInBatches(values []*model.DiggerBatch, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerBatchDo) Save(values ...*model.DiggerBatch) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerBatchDo) First() (*model.DiggerBatch, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerBatch), nil + } +} + +func (d diggerBatchDo) Take() (*model.DiggerBatch, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerBatch), nil + } +} + +func (d diggerBatchDo) Last() (*model.DiggerBatch, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerBatch), nil + } +} + +func (d diggerBatchDo) Find() ([]*model.DiggerBatch, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerBatch), err +} + +func (d diggerBatchDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerBatch, err error) { + buf := 
make([]*model.DiggerBatch, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) }() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerBatchDo) FindInBatches(result *[]*model.DiggerBatch, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerBatchDo) Attrs(attrs ...field.AssignExpr) IDiggerBatchDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerBatchDo) Assign(attrs ...field.AssignExpr) IDiggerBatchDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerBatchDo) Joins(fields ...field.RelationField) IDiggerBatchDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerBatchDo) Preload(fields ...field.RelationField) IDiggerBatchDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerBatchDo) FirstOrInit() (*model.DiggerBatch, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerBatch), nil + } +} + +func (d diggerBatchDo) FirstOrCreate() (*model.DiggerBatch, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerBatch), nil + } +} + +func (d diggerBatchDo) FindByPage(offset int, limit int) (result []*model.DiggerBatch, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerBatchDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d 
diggerBatchDo) Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerBatchDo) Delete(models ...*model.DiggerBatch) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerBatchDo) withDO(do gen.Dao) *diggerBatchDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_job_parent_links.gen.go b/next/models_generated/digger_job_parent_links.gen.go new file mode 100644 index 000000000..43e6f907d --- /dev/null +++ b/next/models_generated/digger_job_parent_links.gen.go @@ -0,0 +1,400 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerJobParentLink(db *gorm.DB, opts ...gen.DOOption) diggerJobParentLink { + _diggerJobParentLink := diggerJobParentLink{} + + _diggerJobParentLink.diggerJobParentLinkDo.UseDB(db, opts...) 
+ _diggerJobParentLink.diggerJobParentLinkDo.UseModel(&model.DiggerJobParentLink{}) + + tableName := _diggerJobParentLink.diggerJobParentLinkDo.TableName() + _diggerJobParentLink.ALL = field.NewAsterisk(tableName) + _diggerJobParentLink.ID = field.NewInt64(tableName, "id") + _diggerJobParentLink.CreatedAt = field.NewTime(tableName, "created_at") + _diggerJobParentLink.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerJobParentLink.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerJobParentLink.DiggerJobID = field.NewString(tableName, "digger_job_id") + _diggerJobParentLink.ParentDiggerJobID = field.NewString(tableName, "parent_digger_job_id") + + _diggerJobParentLink.fillFieldMap() + + return _diggerJobParentLink +} + +type diggerJobParentLink struct { + diggerJobParentLinkDo + + ALL field.Asterisk + ID field.Int64 + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + DiggerJobID field.String + ParentDiggerJobID field.String + + fieldMap map[string]field.Expr +} + +func (d diggerJobParentLink) Table(newTableName string) *diggerJobParentLink { + d.diggerJobParentLinkDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerJobParentLink) As(alias string) *diggerJobParentLink { + d.diggerJobParentLinkDo.DO = *(d.diggerJobParentLinkDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerJobParentLink) updateTableName(table string) *diggerJobParentLink { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewInt64(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.DiggerJobID = field.NewString(table, "digger_job_id") + d.ParentDiggerJobID = field.NewString(table, "parent_digger_job_id") + + d.fillFieldMap() + + return d +} + +func (d *diggerJobParentLink) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == 
nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerJobParentLink) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 6) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["digger_job_id"] = d.DiggerJobID + d.fieldMap["parent_digger_job_id"] = d.ParentDiggerJobID +} + +func (d diggerJobParentLink) clone(db *gorm.DB) diggerJobParentLink { + d.diggerJobParentLinkDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerJobParentLink) replaceDB(db *gorm.DB) diggerJobParentLink { + d.diggerJobParentLinkDo.ReplaceDB(db) + return d +} + +type diggerJobParentLinkDo struct{ gen.DO } + +type IDiggerJobParentLinkDo interface { + gen.SubQuery + Debug() IDiggerJobParentLinkDo + WithContext(ctx context.Context) IDiggerJobParentLinkDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerJobParentLinkDo + WriteDB() IDiggerJobParentLinkDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerJobParentLinkDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerJobParentLinkDo + Not(conds ...gen.Condition) IDiggerJobParentLinkDo + Or(conds ...gen.Condition) IDiggerJobParentLinkDo + Select(conds ...field.Expr) IDiggerJobParentLinkDo + Where(conds ...gen.Condition) IDiggerJobParentLinkDo + Order(conds ...field.Expr) IDiggerJobParentLinkDo + Distinct(cols ...field.Expr) IDiggerJobParentLinkDo + Omit(cols ...field.Expr) IDiggerJobParentLinkDo + Join(table schema.Tabler, on ...field.Expr) IDiggerJobParentLinkDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerJobParentLinkDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobParentLinkDo + Group(cols ...field.Expr) IDiggerJobParentLinkDo + Having(conds ...gen.Condition) IDiggerJobParentLinkDo + Limit(limit int) IDiggerJobParentLinkDo + Offset(offset int) 
IDiggerJobParentLinkDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobParentLinkDo + Unscoped() IDiggerJobParentLinkDo + Create(values ...*model.DiggerJobParentLink) error + CreateInBatches(values []*model.DiggerJobParentLink, batchSize int) error + Save(values ...*model.DiggerJobParentLink) error + First() (*model.DiggerJobParentLink, error) + Take() (*model.DiggerJobParentLink, error) + Last() (*model.DiggerJobParentLink, error) + Find() ([]*model.DiggerJobParentLink, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJobParentLink, err error) + FindInBatches(result *[]*model.DiggerJobParentLink, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerJobParentLink) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerJobParentLinkDo + Assign(attrs ...field.AssignExpr) IDiggerJobParentLinkDo + Joins(fields ...field.RelationField) IDiggerJobParentLinkDo + Preload(fields ...field.RelationField) IDiggerJobParentLinkDo + FirstOrInit() (*model.DiggerJobParentLink, error) + FirstOrCreate() (*model.DiggerJobParentLink, error) + FindByPage(offset int, limit int) (result []*model.DiggerJobParentLink, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) 
IDiggerJobParentLinkDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerJobParentLinkDo) Debug() IDiggerJobParentLinkDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerJobParentLinkDo) WithContext(ctx context.Context) IDiggerJobParentLinkDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerJobParentLinkDo) ReadDB() IDiggerJobParentLinkDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerJobParentLinkDo) WriteDB() IDiggerJobParentLinkDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerJobParentLinkDo) Session(config *gorm.Session) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerJobParentLinkDo) Clauses(conds ...clause.Expression) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerJobParentLinkDo) Returning(value interface{}, columns ...string) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerJobParentLinkDo) Not(conds ...gen.Condition) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerJobParentLinkDo) Or(conds ...gen.Condition) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerJobParentLinkDo) Select(conds ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerJobParentLinkDo) Where(conds ...gen.Condition) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerJobParentLinkDo) Order(conds ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerJobParentLinkDo) Distinct(cols ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerJobParentLinkDo) Omit(cols ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerJobParentLinkDo) Join(table schema.Tabler, on ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Join(table, 
on...)) +} + +func (d diggerJobParentLinkDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerJobParentLinkDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerJobParentLinkDo) Group(cols ...field.Expr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerJobParentLinkDo) Having(conds ...gen.Condition) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerJobParentLinkDo) Limit(limit int) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerJobParentLinkDo) Offset(offset int) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerJobParentLinkDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerJobParentLinkDo) Unscoped() IDiggerJobParentLinkDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerJobParentLinkDo) Create(values ...*model.DiggerJobParentLink) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerJobParentLinkDo) CreateInBatches(values []*model.DiggerJobParentLink, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerJobParentLinkDo) Save(values ...*model.DiggerJobParentLink) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerJobParentLinkDo) First() (*model.DiggerJobParentLink, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobParentLink), nil + } +} + +func (d diggerJobParentLinkDo) Take() (*model.DiggerJobParentLink, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobParentLink), nil + } +} + +func (d diggerJobParentLinkDo) Last() (*model.DiggerJobParentLink, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobParentLink), nil + } +} + +func (d diggerJobParentLinkDo) Find() ([]*model.DiggerJobParentLink, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerJobParentLink), err +} + +func (d diggerJobParentLinkDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJobParentLink, err error) { + buf := make([]*model.DiggerJobParentLink, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerJobParentLinkDo) FindInBatches(result *[]*model.DiggerJobParentLink, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerJobParentLinkDo) Attrs(attrs ...field.AssignExpr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerJobParentLinkDo) Assign(attrs ...field.AssignExpr) IDiggerJobParentLinkDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerJobParentLinkDo) Joins(fields ...field.RelationField) IDiggerJobParentLinkDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerJobParentLinkDo) Preload(fields ...field.RelationField) IDiggerJobParentLinkDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerJobParentLinkDo) FirstOrInit() (*model.DiggerJobParentLink, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobParentLink), nil + } +} + +func (d diggerJobParentLinkDo) FirstOrCreate() (*model.DiggerJobParentLink, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobParentLink), nil + } +} + +func (d diggerJobParentLinkDo) FindByPage(offset int, limit int) (result []*model.DiggerJobParentLink, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerJobParentLinkDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerJobParentLinkDo) 
Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerJobParentLinkDo) Delete(models ...*model.DiggerJobParentLink) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerJobParentLinkDo) withDO(do gen.Dao) *diggerJobParentLinkDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_job_summaries.gen.go b/next/models_generated/digger_job_summaries.gen.go new file mode 100644 index 000000000..e51b84595 --- /dev/null +++ b/next/models_generated/digger_job_summaries.gen.go @@ -0,0 +1,404 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerJobSummary(db *gorm.DB, opts ...gen.DOOption) diggerJobSummary { + _diggerJobSummary := diggerJobSummary{} + + _diggerJobSummary.diggerJobSummaryDo.UseDB(db, opts...) 
+ _diggerJobSummary.diggerJobSummaryDo.UseModel(&model.DiggerJobSummary{}) + + tableName := _diggerJobSummary.diggerJobSummaryDo.TableName() + _diggerJobSummary.ALL = field.NewAsterisk(tableName) + _diggerJobSummary.ID = field.NewString(tableName, "id") + _diggerJobSummary.CreatedAt = field.NewTime(tableName, "created_at") + _diggerJobSummary.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerJobSummary.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerJobSummary.ResourcesCreated = field.NewInt64(tableName, "resources_created") + _diggerJobSummary.ResourcesDeleted = field.NewInt64(tableName, "resources_deleted") + _diggerJobSummary.ResourcesUpdated = field.NewInt64(tableName, "resources_updated") + + _diggerJobSummary.fillFieldMap() + + return _diggerJobSummary +} + +type diggerJobSummary struct { + diggerJobSummaryDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + ResourcesCreated field.Int64 + ResourcesDeleted field.Int64 + ResourcesUpdated field.Int64 + + fieldMap map[string]field.Expr +} + +func (d diggerJobSummary) Table(newTableName string) *diggerJobSummary { + d.diggerJobSummaryDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerJobSummary) As(alias string) *diggerJobSummary { + d.diggerJobSummaryDo.DO = *(d.diggerJobSummaryDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerJobSummary) updateTableName(table string) *diggerJobSummary { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewString(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.ResourcesCreated = field.NewInt64(table, "resources_created") + d.ResourcesDeleted = field.NewInt64(table, "resources_deleted") + d.ResourcesUpdated = field.NewInt64(table, "resources_updated") + + d.fillFieldMap() + + return d +} + +func (d 
*diggerJobSummary) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerJobSummary) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 7) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["resources_created"] = d.ResourcesCreated + d.fieldMap["resources_deleted"] = d.ResourcesDeleted + d.fieldMap["resources_updated"] = d.ResourcesUpdated +} + +func (d diggerJobSummary) clone(db *gorm.DB) diggerJobSummary { + d.diggerJobSummaryDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerJobSummary) replaceDB(db *gorm.DB) diggerJobSummary { + d.diggerJobSummaryDo.ReplaceDB(db) + return d +} + +type diggerJobSummaryDo struct{ gen.DO } + +type IDiggerJobSummaryDo interface { + gen.SubQuery + Debug() IDiggerJobSummaryDo + WithContext(ctx context.Context) IDiggerJobSummaryDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerJobSummaryDo + WriteDB() IDiggerJobSummaryDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerJobSummaryDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerJobSummaryDo + Not(conds ...gen.Condition) IDiggerJobSummaryDo + Or(conds ...gen.Condition) IDiggerJobSummaryDo + Select(conds ...field.Expr) IDiggerJobSummaryDo + Where(conds ...gen.Condition) IDiggerJobSummaryDo + Order(conds ...field.Expr) IDiggerJobSummaryDo + Distinct(cols ...field.Expr) IDiggerJobSummaryDo + Omit(cols ...field.Expr) IDiggerJobSummaryDo + Join(table schema.Tabler, on ...field.Expr) IDiggerJobSummaryDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerJobSummaryDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobSummaryDo + Group(cols ...field.Expr) IDiggerJobSummaryDo + 
Having(conds ...gen.Condition) IDiggerJobSummaryDo + Limit(limit int) IDiggerJobSummaryDo + Offset(offset int) IDiggerJobSummaryDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobSummaryDo + Unscoped() IDiggerJobSummaryDo + Create(values ...*model.DiggerJobSummary) error + CreateInBatches(values []*model.DiggerJobSummary, batchSize int) error + Save(values ...*model.DiggerJobSummary) error + First() (*model.DiggerJobSummary, error) + Take() (*model.DiggerJobSummary, error) + Last() (*model.DiggerJobSummary, error) + Find() ([]*model.DiggerJobSummary, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJobSummary, err error) + FindInBatches(result *[]*model.DiggerJobSummary, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerJobSummary) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerJobSummaryDo + Assign(attrs ...field.AssignExpr) IDiggerJobSummaryDo + Joins(fields ...field.RelationField) IDiggerJobSummaryDo + Preload(fields ...field.RelationField) IDiggerJobSummaryDo + FirstOrInit() (*model.DiggerJobSummary, error) + FirstOrCreate() (*model.DiggerJobSummary, error) + FindByPage(offset int, limit int) (result []*model.DiggerJobSummary, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + 
Returning(value interface{}, columns ...string) IDiggerJobSummaryDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerJobSummaryDo) Debug() IDiggerJobSummaryDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerJobSummaryDo) WithContext(ctx context.Context) IDiggerJobSummaryDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerJobSummaryDo) ReadDB() IDiggerJobSummaryDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerJobSummaryDo) WriteDB() IDiggerJobSummaryDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerJobSummaryDo) Session(config *gorm.Session) IDiggerJobSummaryDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerJobSummaryDo) Clauses(conds ...clause.Expression) IDiggerJobSummaryDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerJobSummaryDo) Returning(value interface{}, columns ...string) IDiggerJobSummaryDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerJobSummaryDo) Not(conds ...gen.Condition) IDiggerJobSummaryDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerJobSummaryDo) Or(conds ...gen.Condition) IDiggerJobSummaryDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerJobSummaryDo) Select(conds ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerJobSummaryDo) Where(conds ...gen.Condition) IDiggerJobSummaryDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerJobSummaryDo) Order(conds ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerJobSummaryDo) Distinct(cols ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerJobSummaryDo) Omit(cols ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerJobSummaryDo) Join(table schema.Tabler, on ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerJobSummaryDo) 
LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerJobSummaryDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerJobSummaryDo) Group(cols ...field.Expr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerJobSummaryDo) Having(conds ...gen.Condition) IDiggerJobSummaryDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerJobSummaryDo) Limit(limit int) IDiggerJobSummaryDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerJobSummaryDo) Offset(offset int) IDiggerJobSummaryDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerJobSummaryDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobSummaryDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerJobSummaryDo) Unscoped() IDiggerJobSummaryDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerJobSummaryDo) Create(values ...*model.DiggerJobSummary) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerJobSummaryDo) CreateInBatches(values []*model.DiggerJobSummary, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerJobSummaryDo) Save(values ...*model.DiggerJobSummary) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerJobSummaryDo) First() (*model.DiggerJobSummary, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobSummary), nil + } +} + +func (d diggerJobSummaryDo) Take() (*model.DiggerJobSummary, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobSummary), nil + } +} + +func (d diggerJobSummaryDo) Last() (*model.DiggerJobSummary, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobSummary), nil + } +} + +func (d diggerJobSummaryDo) Find() ([]*model.DiggerJobSummary, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerJobSummary), err +} + +func (d diggerJobSummaryDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJobSummary, err error) { + buf := make([]*model.DiggerJobSummary, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerJobSummaryDo) FindInBatches(result *[]*model.DiggerJobSummary, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerJobSummaryDo) Attrs(attrs ...field.AssignExpr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerJobSummaryDo) Assign(attrs ...field.AssignExpr) IDiggerJobSummaryDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerJobSummaryDo) Joins(fields ...field.RelationField) IDiggerJobSummaryDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerJobSummaryDo) Preload(fields ...field.RelationField) IDiggerJobSummaryDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerJobSummaryDo) FirstOrInit() (*model.DiggerJobSummary, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobSummary), nil + } +} + +func (d diggerJobSummaryDo) FirstOrCreate() (*model.DiggerJobSummary, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobSummary), nil + } +} + +func (d diggerJobSummaryDo) FindByPage(offset int, limit int) (result []*model.DiggerJobSummary, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerJobSummaryDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerJobSummaryDo) Scan(result interface{}) (err error) { + return 
d.DO.Scan(result) +} + +func (d diggerJobSummaryDo) Delete(models ...*model.DiggerJobSummary) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerJobSummaryDo) withDO(do gen.Dao) *diggerJobSummaryDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_job_tokens.gen.go b/next/models_generated/digger_job_tokens.gen.go new file mode 100644 index 000000000..81484781a --- /dev/null +++ b/next/models_generated/digger_job_tokens.gen.go @@ -0,0 +1,408 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerJobToken(db *gorm.DB, opts ...gen.DOOption) diggerJobToken { + _diggerJobToken := diggerJobToken{} + + _diggerJobToken.diggerJobTokenDo.UseDB(db, opts...) 
+ _diggerJobToken.diggerJobTokenDo.UseModel(&model.DiggerJobToken{}) + + tableName := _diggerJobToken.diggerJobTokenDo.TableName() + _diggerJobToken.ALL = field.NewAsterisk(tableName) + _diggerJobToken.ID = field.NewInt64(tableName, "id") + _diggerJobToken.CreatedAt = field.NewTime(tableName, "created_at") + _diggerJobToken.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerJobToken.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerJobToken.Value = field.NewString(tableName, "value") + _diggerJobToken.Expiry = field.NewTime(tableName, "expiry") + _diggerJobToken.OrganisationID = field.NewInt64(tableName, "organisation_id") + _diggerJobToken.Type = field.NewString(tableName, "type") + + _diggerJobToken.fillFieldMap() + + return _diggerJobToken +} + +type diggerJobToken struct { + diggerJobTokenDo + + ALL field.Asterisk + ID field.Int64 + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + Value field.String + Expiry field.Time + OrganisationID field.Int64 + Type field.String + + fieldMap map[string]field.Expr +} + +func (d diggerJobToken) Table(newTableName string) *diggerJobToken { + d.diggerJobTokenDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerJobToken) As(alias string) *diggerJobToken { + d.diggerJobTokenDo.DO = *(d.diggerJobTokenDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerJobToken) updateTableName(table string) *diggerJobToken { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewInt64(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.Value = field.NewString(table, "value") + d.Expiry = field.NewTime(table, "expiry") + d.OrganisationID = field.NewInt64(table, "organisation_id") + d.Type = field.NewString(table, "type") + + d.fillFieldMap() + + return d +} + +func (d *diggerJobToken) GetFieldByName(fieldName string) 
(field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerJobToken) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 8) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["value"] = d.Value + d.fieldMap["expiry"] = d.Expiry + d.fieldMap["organisation_id"] = d.OrganisationID + d.fieldMap["type"] = d.Type +} + +func (d diggerJobToken) clone(db *gorm.DB) diggerJobToken { + d.diggerJobTokenDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerJobToken) replaceDB(db *gorm.DB) diggerJobToken { + d.diggerJobTokenDo.ReplaceDB(db) + return d +} + +type diggerJobTokenDo struct{ gen.DO } + +type IDiggerJobTokenDo interface { + gen.SubQuery + Debug() IDiggerJobTokenDo + WithContext(ctx context.Context) IDiggerJobTokenDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerJobTokenDo + WriteDB() IDiggerJobTokenDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerJobTokenDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerJobTokenDo + Not(conds ...gen.Condition) IDiggerJobTokenDo + Or(conds ...gen.Condition) IDiggerJobTokenDo + Select(conds ...field.Expr) IDiggerJobTokenDo + Where(conds ...gen.Condition) IDiggerJobTokenDo + Order(conds ...field.Expr) IDiggerJobTokenDo + Distinct(cols ...field.Expr) IDiggerJobTokenDo + Omit(cols ...field.Expr) IDiggerJobTokenDo + Join(table schema.Tabler, on ...field.Expr) IDiggerJobTokenDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerJobTokenDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobTokenDo + Group(cols ...field.Expr) IDiggerJobTokenDo + Having(conds ...gen.Condition) IDiggerJobTokenDo + Limit(limit int) IDiggerJobTokenDo + Offset(offset int) IDiggerJobTokenDo + 
Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobTokenDo + Unscoped() IDiggerJobTokenDo + Create(values ...*model.DiggerJobToken) error + CreateInBatches(values []*model.DiggerJobToken, batchSize int) error + Save(values ...*model.DiggerJobToken) error + First() (*model.DiggerJobToken, error) + Take() (*model.DiggerJobToken, error) + Last() (*model.DiggerJobToken, error) + Find() ([]*model.DiggerJobToken, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJobToken, err error) + FindInBatches(result *[]*model.DiggerJobToken, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerJobToken) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerJobTokenDo + Assign(attrs ...field.AssignExpr) IDiggerJobTokenDo + Joins(fields ...field.RelationField) IDiggerJobTokenDo + Preload(fields ...field.RelationField) IDiggerJobTokenDo + FirstOrInit() (*model.DiggerJobToken, error) + FirstOrCreate() (*model.DiggerJobToken, error) + FindByPage(offset int, limit int) (result []*model.DiggerJobToken, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerJobTokenDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerJobTokenDo) Debug() IDiggerJobTokenDo { + return 
d.withDO(d.DO.Debug()) +} + +func (d diggerJobTokenDo) WithContext(ctx context.Context) IDiggerJobTokenDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerJobTokenDo) ReadDB() IDiggerJobTokenDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerJobTokenDo) WriteDB() IDiggerJobTokenDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerJobTokenDo) Session(config *gorm.Session) IDiggerJobTokenDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerJobTokenDo) Clauses(conds ...clause.Expression) IDiggerJobTokenDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerJobTokenDo) Returning(value interface{}, columns ...string) IDiggerJobTokenDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerJobTokenDo) Not(conds ...gen.Condition) IDiggerJobTokenDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerJobTokenDo) Or(conds ...gen.Condition) IDiggerJobTokenDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerJobTokenDo) Select(conds ...field.Expr) IDiggerJobTokenDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerJobTokenDo) Where(conds ...gen.Condition) IDiggerJobTokenDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerJobTokenDo) Order(conds ...field.Expr) IDiggerJobTokenDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerJobTokenDo) Distinct(cols ...field.Expr) IDiggerJobTokenDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerJobTokenDo) Omit(cols ...field.Expr) IDiggerJobTokenDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerJobTokenDo) Join(table schema.Tabler, on ...field.Expr) IDiggerJobTokenDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerJobTokenDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerJobTokenDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerJobTokenDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobTokenDo { + return 
d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerJobTokenDo) Group(cols ...field.Expr) IDiggerJobTokenDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerJobTokenDo) Having(conds ...gen.Condition) IDiggerJobTokenDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerJobTokenDo) Limit(limit int) IDiggerJobTokenDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerJobTokenDo) Offset(offset int) IDiggerJobTokenDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerJobTokenDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobTokenDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerJobTokenDo) Unscoped() IDiggerJobTokenDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerJobTokenDo) Create(values ...*model.DiggerJobToken) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerJobTokenDo) CreateInBatches(values []*model.DiggerJobToken, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerJobTokenDo) Save(values ...*model.DiggerJobToken) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerJobTokenDo) First() (*model.DiggerJobToken, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobToken), nil + } +} + +func (d diggerJobTokenDo) Take() (*model.DiggerJobToken, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobToken), nil + } +} + +func (d diggerJobTokenDo) Last() (*model.DiggerJobToken, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobToken), nil + } +} + +func (d diggerJobTokenDo) Find() ([]*model.DiggerJobToken, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerJobToken), err +} + +func (d diggerJobTokenDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJobToken, err error) { + buf := make([]*model.DiggerJobToken, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerJobTokenDo) FindInBatches(result *[]*model.DiggerJobToken, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerJobTokenDo) Attrs(attrs ...field.AssignExpr) IDiggerJobTokenDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerJobTokenDo) Assign(attrs ...field.AssignExpr) IDiggerJobTokenDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerJobTokenDo) Joins(fields ...field.RelationField) IDiggerJobTokenDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerJobTokenDo) Preload(fields ...field.RelationField) IDiggerJobTokenDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerJobTokenDo) FirstOrInit() (*model.DiggerJobToken, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobToken), nil + } +} + +func (d diggerJobTokenDo) FirstOrCreate() (*model.DiggerJobToken, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJobToken), nil + } +} + +func (d diggerJobTokenDo) FindByPage(offset int, limit int) (result []*model.DiggerJobToken, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerJobTokenDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerJobTokenDo) Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerJobTokenDo) 
Delete(models ...*model.DiggerJobToken) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerJobTokenDo) withDO(do gen.Dao) *diggerJobTokenDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_jobs.gen.go b/next/models_generated/digger_jobs.gen.go new file mode 100644 index 000000000..3b4f203f5 --- /dev/null +++ b/next/models_generated/digger_jobs.gen.go @@ -0,0 +1,436 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerJob(db *gorm.DB, opts ...gen.DOOption) diggerJob { + _diggerJob := diggerJob{} + + _diggerJob.diggerJobDo.UseDB(db, opts...) + _diggerJob.diggerJobDo.UseModel(&model.DiggerJob{}) + + tableName := _diggerJob.diggerJobDo.TableName() + _diggerJob.ALL = field.NewAsterisk(tableName) + _diggerJob.ID = field.NewString(tableName, "id") + _diggerJob.CreatedAt = field.NewTime(tableName, "created_at") + _diggerJob.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerJob.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerJob.DiggerJobID = field.NewString(tableName, "digger_job_id") + _diggerJob.Status = field.NewInt16(tableName, "status") + _diggerJob.BatchID = field.NewString(tableName, "batch_id") + _diggerJob.StatusUpdatedAt = field.NewTime(tableName, "status_updated_at") + _diggerJob.DiggerJobSummaryID = field.NewString(tableName, "digger_job_summary_id") + _diggerJob.WorkflowFile = field.NewString(tableName, "workflow_file") + _diggerJob.WorkflowRunURL = field.NewString(tableName, "workflow_run_url") + _diggerJob.PlanFootprint = field.NewField(tableName, "plan_footprint") + _diggerJob.PrCommentURL = field.NewString(tableName, 
"pr_comment_url") + _diggerJob.TerraformOutput = field.NewString(tableName, "terraform_output") + _diggerJob.JobSpec = field.NewField(tableName, "job_spec") + + _diggerJob.fillFieldMap() + + return _diggerJob +} + +type diggerJob struct { + diggerJobDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + DiggerJobID field.String + Status field.Int16 + BatchID field.String + StatusUpdatedAt field.Time + DiggerJobSummaryID field.String + WorkflowFile field.String + WorkflowRunURL field.String + PlanFootprint field.Field + PrCommentURL field.String + TerraformOutput field.String + JobSpec field.Field + + fieldMap map[string]field.Expr +} + +func (d diggerJob) Table(newTableName string) *diggerJob { + d.diggerJobDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerJob) As(alias string) *diggerJob { + d.diggerJobDo.DO = *(d.diggerJobDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerJob) updateTableName(table string) *diggerJob { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewString(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.DiggerJobID = field.NewString(table, "digger_job_id") + d.Status = field.NewInt16(table, "status") + d.BatchID = field.NewString(table, "batch_id") + d.StatusUpdatedAt = field.NewTime(table, "status_updated_at") + d.DiggerJobSummaryID = field.NewString(table, "digger_job_summary_id") + d.WorkflowFile = field.NewString(table, "workflow_file") + d.WorkflowRunURL = field.NewString(table, "workflow_run_url") + d.PlanFootprint = field.NewField(table, "plan_footprint") + d.PrCommentURL = field.NewString(table, "pr_comment_url") + d.TerraformOutput = field.NewString(table, "terraform_output") + d.JobSpec = field.NewField(table, "job_spec") + + d.fillFieldMap() + + return d +} + +func (d *diggerJob) 
GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerJob) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 15) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["digger_job_id"] = d.DiggerJobID + d.fieldMap["status"] = d.Status + d.fieldMap["batch_id"] = d.BatchID + d.fieldMap["status_updated_at"] = d.StatusUpdatedAt + d.fieldMap["digger_job_summary_id"] = d.DiggerJobSummaryID + d.fieldMap["workflow_file"] = d.WorkflowFile + d.fieldMap["workflow_run_url"] = d.WorkflowRunURL + d.fieldMap["plan_footprint"] = d.PlanFootprint + d.fieldMap["pr_comment_url"] = d.PrCommentURL + d.fieldMap["terraform_output"] = d.TerraformOutput + d.fieldMap["job_spec"] = d.JobSpec +} + +func (d diggerJob) clone(db *gorm.DB) diggerJob { + d.diggerJobDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerJob) replaceDB(db *gorm.DB) diggerJob { + d.diggerJobDo.ReplaceDB(db) + return d +} + +type diggerJobDo struct{ gen.DO } + +type IDiggerJobDo interface { + gen.SubQuery + Debug() IDiggerJobDo + WithContext(ctx context.Context) IDiggerJobDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerJobDo + WriteDB() IDiggerJobDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerJobDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerJobDo + Not(conds ...gen.Condition) IDiggerJobDo + Or(conds ...gen.Condition) IDiggerJobDo + Select(conds ...field.Expr) IDiggerJobDo + Where(conds ...gen.Condition) IDiggerJobDo + Order(conds ...field.Expr) IDiggerJobDo + Distinct(cols ...field.Expr) IDiggerJobDo + Omit(cols ...field.Expr) IDiggerJobDo + Join(table schema.Tabler, on ...field.Expr) IDiggerJobDo + LeftJoin(table 
schema.Tabler, on ...field.Expr) IDiggerJobDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobDo + Group(cols ...field.Expr) IDiggerJobDo + Having(conds ...gen.Condition) IDiggerJobDo + Limit(limit int) IDiggerJobDo + Offset(offset int) IDiggerJobDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobDo + Unscoped() IDiggerJobDo + Create(values ...*model.DiggerJob) error + CreateInBatches(values []*model.DiggerJob, batchSize int) error + Save(values ...*model.DiggerJob) error + First() (*model.DiggerJob, error) + Take() (*model.DiggerJob, error) + Last() (*model.DiggerJob, error) + Find() ([]*model.DiggerJob, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJob, err error) + FindInBatches(result *[]*model.DiggerJob, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerJob) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerJobDo + Assign(attrs ...field.AssignExpr) IDiggerJobDo + Joins(fields ...field.RelationField) IDiggerJobDo + Preload(fields ...field.RelationField) IDiggerJobDo + FirstOrInit() (*model.DiggerJob, error) + FirstOrCreate() (*model.DiggerJob, error) + FindByPage(offset int, limit int) (result []*model.DiggerJob, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + 
Returning(value interface{}, columns ...string) IDiggerJobDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerJobDo) Debug() IDiggerJobDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerJobDo) WithContext(ctx context.Context) IDiggerJobDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerJobDo) ReadDB() IDiggerJobDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerJobDo) WriteDB() IDiggerJobDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerJobDo) Session(config *gorm.Session) IDiggerJobDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerJobDo) Clauses(conds ...clause.Expression) IDiggerJobDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerJobDo) Returning(value interface{}, columns ...string) IDiggerJobDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerJobDo) Not(conds ...gen.Condition) IDiggerJobDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerJobDo) Or(conds ...gen.Condition) IDiggerJobDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerJobDo) Select(conds ...field.Expr) IDiggerJobDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerJobDo) Where(conds ...gen.Condition) IDiggerJobDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerJobDo) Order(conds ...field.Expr) IDiggerJobDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerJobDo) Distinct(cols ...field.Expr) IDiggerJobDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerJobDo) Omit(cols ...field.Expr) IDiggerJobDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerJobDo) Join(table schema.Tabler, on ...field.Expr) IDiggerJobDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerJobDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerJobDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerJobDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerJobDo { + return 
d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerJobDo) Group(cols ...field.Expr) IDiggerJobDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerJobDo) Having(conds ...gen.Condition) IDiggerJobDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerJobDo) Limit(limit int) IDiggerJobDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerJobDo) Offset(offset int) IDiggerJobDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerJobDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerJobDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerJobDo) Unscoped() IDiggerJobDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerJobDo) Create(values ...*model.DiggerJob) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerJobDo) CreateInBatches(values []*model.DiggerJob, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerJobDo) Save(values ...*model.DiggerJob) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerJobDo) First() (*model.DiggerJob, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJob), nil + } +} + +func (d diggerJobDo) Take() (*model.DiggerJob, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJob), nil + } +} + +func (d diggerJobDo) Last() (*model.DiggerJob, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJob), nil + } +} + +func (d diggerJobDo) Find() ([]*model.DiggerJob, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerJob), err +} + +func (d diggerJobDo) FindInBatch(batchSize 
int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerJob, err error) { + buf := make([]*model.DiggerJob, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) }() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerJobDo) FindInBatches(result *[]*model.DiggerJob, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerJobDo) Attrs(attrs ...field.AssignExpr) IDiggerJobDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerJobDo) Assign(attrs ...field.AssignExpr) IDiggerJobDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerJobDo) Joins(fields ...field.RelationField) IDiggerJobDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerJobDo) Preload(fields ...field.RelationField) IDiggerJobDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerJobDo) FirstOrInit() (*model.DiggerJob, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJob), nil + } +} + +func (d diggerJobDo) FirstOrCreate() (*model.DiggerJob, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerJob), nil + } +} + +func (d diggerJobDo) FindByPage(offset int, limit int) (result []*model.DiggerJob, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerJobDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = 
d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerJobDo) Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerJobDo) Delete(models ...*model.DiggerJob) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerJobDo) withDO(do gen.Dao) *diggerJobDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_locks.gen.go b/next/models_generated/digger_locks.gen.go new file mode 100644 index 000000000..7f035a09e --- /dev/null +++ b/next/models_generated/digger_locks.gen.go @@ -0,0 +1,404 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerLock(db *gorm.DB, opts ...gen.DOOption) diggerLock { + _diggerLock := diggerLock{} + + _diggerLock.diggerLockDo.UseDB(db, opts...) 
+ _diggerLock.diggerLockDo.UseModel(&model.DiggerLock{}) + + tableName := _diggerLock.diggerLockDo.TableName() + _diggerLock.ALL = field.NewAsterisk(tableName) + _diggerLock.ID = field.NewString(tableName, "id") + _diggerLock.CreatedAt = field.NewTime(tableName, "created_at") + _diggerLock.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerLock.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerLock.Resource = field.NewString(tableName, "resource") + _diggerLock.LockID = field.NewInt64(tableName, "lock_id") + _diggerLock.OrganizationID = field.NewString(tableName, "organization_id") + + _diggerLock.fillFieldMap() + + return _diggerLock +} + +type diggerLock struct { + diggerLockDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + Resource field.String + LockID field.Int64 + OrganizationID field.String + + fieldMap map[string]field.Expr +} + +func (d diggerLock) Table(newTableName string) *diggerLock { + d.diggerLockDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerLock) As(alias string) *diggerLock { + d.diggerLockDo.DO = *(d.diggerLockDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerLock) updateTableName(table string) *diggerLock { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewString(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.Resource = field.NewString(table, "resource") + d.LockID = field.NewInt64(table, "lock_id") + d.OrganizationID = field.NewString(table, "organization_id") + + d.fillFieldMap() + + return d +} + +func (d *diggerLock) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerLock) fillFieldMap() { + d.fieldMap = 
make(map[string]field.Expr, 7) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["resource"] = d.Resource + d.fieldMap["lock_id"] = d.LockID + d.fieldMap["organization_id"] = d.OrganizationID +} + +func (d diggerLock) clone(db *gorm.DB) diggerLock { + d.diggerLockDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerLock) replaceDB(db *gorm.DB) diggerLock { + d.diggerLockDo.ReplaceDB(db) + return d +} + +type diggerLockDo struct{ gen.DO } + +type IDiggerLockDo interface { + gen.SubQuery + Debug() IDiggerLockDo + WithContext(ctx context.Context) IDiggerLockDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerLockDo + WriteDB() IDiggerLockDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerLockDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerLockDo + Not(conds ...gen.Condition) IDiggerLockDo + Or(conds ...gen.Condition) IDiggerLockDo + Select(conds ...field.Expr) IDiggerLockDo + Where(conds ...gen.Condition) IDiggerLockDo + Order(conds ...field.Expr) IDiggerLockDo + Distinct(cols ...field.Expr) IDiggerLockDo + Omit(cols ...field.Expr) IDiggerLockDo + Join(table schema.Tabler, on ...field.Expr) IDiggerLockDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerLockDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerLockDo + Group(cols ...field.Expr) IDiggerLockDo + Having(conds ...gen.Condition) IDiggerLockDo + Limit(limit int) IDiggerLockDo + Offset(offset int) IDiggerLockDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerLockDo + Unscoped() IDiggerLockDo + Create(values ...*model.DiggerLock) error + CreateInBatches(values []*model.DiggerLock, batchSize int) error + Save(values ...*model.DiggerLock) error + First() (*model.DiggerLock, error) + Take() (*model.DiggerLock, error) + Last() 
(*model.DiggerLock, error) + Find() ([]*model.DiggerLock, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerLock, err error) + FindInBatches(result *[]*model.DiggerLock, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerLock) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerLockDo + Assign(attrs ...field.AssignExpr) IDiggerLockDo + Joins(fields ...field.RelationField) IDiggerLockDo + Preload(fields ...field.RelationField) IDiggerLockDo + FirstOrInit() (*model.DiggerLock, error) + FirstOrCreate() (*model.DiggerLock, error) + FindByPage(offset int, limit int) (result []*model.DiggerLock, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerLockDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerLockDo) Debug() IDiggerLockDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerLockDo) WithContext(ctx context.Context) IDiggerLockDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerLockDo) ReadDB() IDiggerLockDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerLockDo) WriteDB() IDiggerLockDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerLockDo) Session(config *gorm.Session) IDiggerLockDo { + return d.withDO(d.DO.Session(config)) +} + 
+func (d diggerLockDo) Clauses(conds ...clause.Expression) IDiggerLockDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerLockDo) Returning(value interface{}, columns ...string) IDiggerLockDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerLockDo) Not(conds ...gen.Condition) IDiggerLockDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerLockDo) Or(conds ...gen.Condition) IDiggerLockDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerLockDo) Select(conds ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerLockDo) Where(conds ...gen.Condition) IDiggerLockDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerLockDo) Order(conds ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerLockDo) Distinct(cols ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerLockDo) Omit(cols ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerLockDo) Join(table schema.Tabler, on ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerLockDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerLockDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerLockDo) Group(cols ...field.Expr) IDiggerLockDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerLockDo) Having(conds ...gen.Condition) IDiggerLockDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerLockDo) Limit(limit int) IDiggerLockDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerLockDo) Offset(offset int) IDiggerLockDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerLockDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerLockDo { + return d.withDO(d.DO.Scopes(funcs...)) 
+} + +func (d diggerLockDo) Unscoped() IDiggerLockDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerLockDo) Create(values ...*model.DiggerLock) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerLockDo) CreateInBatches(values []*model.DiggerLock, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerLockDo) Save(values ...*model.DiggerLock) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerLockDo) First() (*model.DiggerLock, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerLock), nil + } +} + +func (d diggerLockDo) Take() (*model.DiggerLock, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerLock), nil + } +} + +func (d diggerLockDo) Last() (*model.DiggerLock, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerLock), nil + } +} + +func (d diggerLockDo) Find() ([]*model.DiggerLock, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerLock), err +} + +func (d diggerLockDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerLock, err error) { + buf := make([]*model.DiggerLock, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerLockDo) FindInBatches(result *[]*model.DiggerLock, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerLockDo) Attrs(attrs ...field.AssignExpr) IDiggerLockDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerLockDo) Assign(attrs ...field.AssignExpr) IDiggerLockDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerLockDo) Joins(fields ...field.RelationField) IDiggerLockDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerLockDo) Preload(fields ...field.RelationField) IDiggerLockDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerLockDo) FirstOrInit() (*model.DiggerLock, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerLock), nil + } +} + +func (d diggerLockDo) FirstOrCreate() (*model.DiggerLock, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerLock), nil + } +} + +func (d diggerLockDo) FindByPage(offset int, limit int) (result []*model.DiggerLock, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerLockDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerLockDo) Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerLockDo) Delete(models ...*model.DiggerLock) (result gen.ResultInfo, err error) { + return 
d.DO.Delete(models) +} + +func (d *diggerLockDo) withDO(do gen.Dao) *diggerLockDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_run_queue_items.gen.go b/next/models_generated/digger_run_queue_items.gen.go new file mode 100644 index 000000000..35821db8e --- /dev/null +++ b/next/models_generated/digger_run_queue_items.gen.go @@ -0,0 +1,400 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerRunQueueItem(db *gorm.DB, opts ...gen.DOOption) diggerRunQueueItem { + _diggerRunQueueItem := diggerRunQueueItem{} + + _diggerRunQueueItem.diggerRunQueueItemDo.UseDB(db, opts...) + _diggerRunQueueItem.diggerRunQueueItemDo.UseModel(&model.DiggerRunQueueItem{}) + + tableName := _diggerRunQueueItem.diggerRunQueueItemDo.TableName() + _diggerRunQueueItem.ALL = field.NewAsterisk(tableName) + _diggerRunQueueItem.ID = field.NewInt64(tableName, "id") + _diggerRunQueueItem.CreatedAt = field.NewTime(tableName, "created_at") + _diggerRunQueueItem.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerRunQueueItem.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerRunQueueItem.DiggerRunID = field.NewInt64(tableName, "digger_run_id") + _diggerRunQueueItem.ProjectID = field.NewInt64(tableName, "project_id") + + _diggerRunQueueItem.fillFieldMap() + + return _diggerRunQueueItem +} + +type diggerRunQueueItem struct { + diggerRunQueueItemDo + + ALL field.Asterisk + ID field.Int64 + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + DiggerRunID field.Int64 + ProjectID field.Int64 + + fieldMap map[string]field.Expr +} + +func (d diggerRunQueueItem) Table(newTableName string) 
*diggerRunQueueItem { + d.diggerRunQueueItemDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerRunQueueItem) As(alias string) *diggerRunQueueItem { + d.diggerRunQueueItemDo.DO = *(d.diggerRunQueueItemDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerRunQueueItem) updateTableName(table string) *diggerRunQueueItem { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewInt64(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.DiggerRunID = field.NewInt64(table, "digger_run_id") + d.ProjectID = field.NewInt64(table, "project_id") + + d.fillFieldMap() + + return d +} + +func (d *diggerRunQueueItem) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerRunQueueItem) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 6) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["digger_run_id"] = d.DiggerRunID + d.fieldMap["project_id"] = d.ProjectID +} + +func (d diggerRunQueueItem) clone(db *gorm.DB) diggerRunQueueItem { + d.diggerRunQueueItemDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerRunQueueItem) replaceDB(db *gorm.DB) diggerRunQueueItem { + d.diggerRunQueueItemDo.ReplaceDB(db) + return d +} + +type diggerRunQueueItemDo struct{ gen.DO } + +type IDiggerRunQueueItemDo interface { + gen.SubQuery + Debug() IDiggerRunQueueItemDo + WithContext(ctx context.Context) IDiggerRunQueueItemDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerRunQueueItemDo + WriteDB() IDiggerRunQueueItemDo + As(alias string) gen.Dao + Session(config *gorm.Session) 
IDiggerRunQueueItemDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerRunQueueItemDo + Not(conds ...gen.Condition) IDiggerRunQueueItemDo + Or(conds ...gen.Condition) IDiggerRunQueueItemDo + Select(conds ...field.Expr) IDiggerRunQueueItemDo + Where(conds ...gen.Condition) IDiggerRunQueueItemDo + Order(conds ...field.Expr) IDiggerRunQueueItemDo + Distinct(cols ...field.Expr) IDiggerRunQueueItemDo + Omit(cols ...field.Expr) IDiggerRunQueueItemDo + Join(table schema.Tabler, on ...field.Expr) IDiggerRunQueueItemDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerRunQueueItemDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerRunQueueItemDo + Group(cols ...field.Expr) IDiggerRunQueueItemDo + Having(conds ...gen.Condition) IDiggerRunQueueItemDo + Limit(limit int) IDiggerRunQueueItemDo + Offset(offset int) IDiggerRunQueueItemDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerRunQueueItemDo + Unscoped() IDiggerRunQueueItemDo + Create(values ...*model.DiggerRunQueueItem) error + CreateInBatches(values []*model.DiggerRunQueueItem, batchSize int) error + Save(values ...*model.DiggerRunQueueItem) error + First() (*model.DiggerRunQueueItem, error) + Take() (*model.DiggerRunQueueItem, error) + Last() (*model.DiggerRunQueueItem, error) + Find() ([]*model.DiggerRunQueueItem, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerRunQueueItem, err error) + FindInBatches(result *[]*model.DiggerRunQueueItem, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerRunQueueItem) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) 
(info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerRunQueueItemDo + Assign(attrs ...field.AssignExpr) IDiggerRunQueueItemDo + Joins(fields ...field.RelationField) IDiggerRunQueueItemDo + Preload(fields ...field.RelationField) IDiggerRunQueueItemDo + FirstOrInit() (*model.DiggerRunQueueItem, error) + FirstOrCreate() (*model.DiggerRunQueueItem, error) + FindByPage(offset int, limit int) (result []*model.DiggerRunQueueItem, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerRunQueueItemDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerRunQueueItemDo) Debug() IDiggerRunQueueItemDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerRunQueueItemDo) WithContext(ctx context.Context) IDiggerRunQueueItemDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerRunQueueItemDo) ReadDB() IDiggerRunQueueItemDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerRunQueueItemDo) WriteDB() IDiggerRunQueueItemDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerRunQueueItemDo) Session(config *gorm.Session) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerRunQueueItemDo) Clauses(conds ...clause.Expression) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerRunQueueItemDo) Returning(value interface{}, columns ...string) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerRunQueueItemDo) Not(conds ...gen.Condition) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerRunQueueItemDo) Or(conds ...gen.Condition) IDiggerRunQueueItemDo { + return 
d.withDO(d.DO.Or(conds...)) +} + +func (d diggerRunQueueItemDo) Select(conds ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerRunQueueItemDo) Where(conds ...gen.Condition) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerRunQueueItemDo) Order(conds ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerRunQueueItemDo) Distinct(cols ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerRunQueueItemDo) Omit(cols ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerRunQueueItemDo) Join(table schema.Tabler, on ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerRunQueueItemDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerRunQueueItemDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerRunQueueItemDo) Group(cols ...field.Expr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerRunQueueItemDo) Having(conds ...gen.Condition) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerRunQueueItemDo) Limit(limit int) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerRunQueueItemDo) Offset(offset int) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerRunQueueItemDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerRunQueueItemDo) Unscoped() IDiggerRunQueueItemDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerRunQueueItemDo) Create(values ...*model.DiggerRunQueueItem) error { + if len(values) == 0 { + return nil + } + return 
d.DO.Create(values) +} + +func (d diggerRunQueueItemDo) CreateInBatches(values []*model.DiggerRunQueueItem, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerRunQueueItemDo) Save(values ...*model.DiggerRunQueueItem) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerRunQueueItemDo) First() (*model.DiggerRunQueueItem, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunQueueItem), nil + } +} + +func (d diggerRunQueueItemDo) Take() (*model.DiggerRunQueueItem, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunQueueItem), nil + } +} + +func (d diggerRunQueueItemDo) Last() (*model.DiggerRunQueueItem, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunQueueItem), nil + } +} + +func (d diggerRunQueueItemDo) Find() ([]*model.DiggerRunQueueItem, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerRunQueueItem), err +} + +func (d diggerRunQueueItemDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerRunQueueItem, err error) { + buf := make([]*model.DiggerRunQueueItem, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerRunQueueItemDo) FindInBatches(result *[]*model.DiggerRunQueueItem, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerRunQueueItemDo) Attrs(attrs ...field.AssignExpr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerRunQueueItemDo) Assign(attrs ...field.AssignExpr) IDiggerRunQueueItemDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerRunQueueItemDo) Joins(fields ...field.RelationField) IDiggerRunQueueItemDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerRunQueueItemDo) Preload(fields ...field.RelationField) IDiggerRunQueueItemDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerRunQueueItemDo) FirstOrInit() (*model.DiggerRunQueueItem, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunQueueItem), nil + } +} + +func (d diggerRunQueueItemDo) FirstOrCreate() (*model.DiggerRunQueueItem, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunQueueItem), nil + } +} + +func (d diggerRunQueueItemDo) FindByPage(offset int, limit int) (result []*model.DiggerRunQueueItem, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerRunQueueItemDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerRunQueueItemDo) Scan(result 
interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerRunQueueItemDo) Delete(models ...*model.DiggerRunQueueItem) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerRunQueueItemDo) withDO(do gen.Dao) *diggerRunQueueItemDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_run_stages.gen.go b/next/models_generated/digger_run_stages.gen.go new file mode 100644 index 000000000..1d0f45352 --- /dev/null +++ b/next/models_generated/digger_run_stages.gen.go @@ -0,0 +1,396 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerRunStage(db *gorm.DB, opts ...gen.DOOption) diggerRunStage { + _diggerRunStage := diggerRunStage{} + + _diggerRunStage.diggerRunStageDo.UseDB(db, opts...) 
+ _diggerRunStage.diggerRunStageDo.UseModel(&model.DiggerRunStage{}) + + tableName := _diggerRunStage.diggerRunStageDo.TableName() + _diggerRunStage.ALL = field.NewAsterisk(tableName) + _diggerRunStage.ID = field.NewString(tableName, "id") + _diggerRunStage.CreatedAt = field.NewTime(tableName, "created_at") + _diggerRunStage.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerRunStage.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerRunStage.BatchID = field.NewString(tableName, "batch_id") + + _diggerRunStage.fillFieldMap() + + return _diggerRunStage +} + +type diggerRunStage struct { + diggerRunStageDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + BatchID field.String + + fieldMap map[string]field.Expr +} + +func (d diggerRunStage) Table(newTableName string) *diggerRunStage { + d.diggerRunStageDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerRunStage) As(alias string) *diggerRunStage { + d.diggerRunStageDo.DO = *(d.diggerRunStageDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerRunStage) updateTableName(table string) *diggerRunStage { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewString(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.BatchID = field.NewString(table, "batch_id") + + d.fillFieldMap() + + return d +} + +func (d *diggerRunStage) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerRunStage) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 5) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + 
d.fieldMap["batch_id"] = d.BatchID +} + +func (d diggerRunStage) clone(db *gorm.DB) diggerRunStage { + d.diggerRunStageDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerRunStage) replaceDB(db *gorm.DB) diggerRunStage { + d.diggerRunStageDo.ReplaceDB(db) + return d +} + +type diggerRunStageDo struct{ gen.DO } + +type IDiggerRunStageDo interface { + gen.SubQuery + Debug() IDiggerRunStageDo + WithContext(ctx context.Context) IDiggerRunStageDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerRunStageDo + WriteDB() IDiggerRunStageDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerRunStageDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerRunStageDo + Not(conds ...gen.Condition) IDiggerRunStageDo + Or(conds ...gen.Condition) IDiggerRunStageDo + Select(conds ...field.Expr) IDiggerRunStageDo + Where(conds ...gen.Condition) IDiggerRunStageDo + Order(conds ...field.Expr) IDiggerRunStageDo + Distinct(cols ...field.Expr) IDiggerRunStageDo + Omit(cols ...field.Expr) IDiggerRunStageDo + Join(table schema.Tabler, on ...field.Expr) IDiggerRunStageDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerRunStageDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerRunStageDo + Group(cols ...field.Expr) IDiggerRunStageDo + Having(conds ...gen.Condition) IDiggerRunStageDo + Limit(limit int) IDiggerRunStageDo + Offset(offset int) IDiggerRunStageDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerRunStageDo + Unscoped() IDiggerRunStageDo + Create(values ...*model.DiggerRunStage) error + CreateInBatches(values []*model.DiggerRunStage, batchSize int) error + Save(values ...*model.DiggerRunStage) error + First() (*model.DiggerRunStage, error) + Take() (*model.DiggerRunStage, error) + Last() (*model.DiggerRunStage, error) + Find() ([]*model.DiggerRunStage, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) 
error) (results []*model.DiggerRunStage, err error) + FindInBatches(result *[]*model.DiggerRunStage, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerRunStage) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IDiggerRunStageDo + Assign(attrs ...field.AssignExpr) IDiggerRunStageDo + Joins(fields ...field.RelationField) IDiggerRunStageDo + Preload(fields ...field.RelationField) IDiggerRunStageDo + FirstOrInit() (*model.DiggerRunStage, error) + FirstOrCreate() (*model.DiggerRunStage, error) + FindByPage(offset int, limit int) (result []*model.DiggerRunStage, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerRunStageDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerRunStageDo) Debug() IDiggerRunStageDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerRunStageDo) WithContext(ctx context.Context) IDiggerRunStageDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerRunStageDo) ReadDB() IDiggerRunStageDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerRunStageDo) WriteDB() IDiggerRunStageDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerRunStageDo) Session(config *gorm.Session) IDiggerRunStageDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerRunStageDo) Clauses(conds 
...clause.Expression) IDiggerRunStageDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerRunStageDo) Returning(value interface{}, columns ...string) IDiggerRunStageDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerRunStageDo) Not(conds ...gen.Condition) IDiggerRunStageDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerRunStageDo) Or(conds ...gen.Condition) IDiggerRunStageDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerRunStageDo) Select(conds ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerRunStageDo) Where(conds ...gen.Condition) IDiggerRunStageDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerRunStageDo) Order(conds ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerRunStageDo) Distinct(cols ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d diggerRunStageDo) Omit(cols ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerRunStageDo) Join(table schema.Tabler, on ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerRunStageDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerRunStageDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerRunStageDo) Group(cols ...field.Expr) IDiggerRunStageDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerRunStageDo) Having(conds ...gen.Condition) IDiggerRunStageDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerRunStageDo) Limit(limit int) IDiggerRunStageDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerRunStageDo) Offset(offset int) IDiggerRunStageDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerRunStageDo) 
Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerRunStageDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerRunStageDo) Unscoped() IDiggerRunStageDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerRunStageDo) Create(values ...*model.DiggerRunStage) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerRunStageDo) CreateInBatches(values []*model.DiggerRunStage, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerRunStageDo) Save(values ...*model.DiggerRunStage) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerRunStageDo) First() (*model.DiggerRunStage, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunStage), nil + } +} + +func (d diggerRunStageDo) Take() (*model.DiggerRunStage, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunStage), nil + } +} + +func (d diggerRunStageDo) Last() (*model.DiggerRunStage, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunStage), nil + } +} + +func (d diggerRunStageDo) Find() ([]*model.DiggerRunStage, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerRunStage), err +} + +func (d diggerRunStageDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerRunStage, err error) { + buf := make([]*model.DiggerRunStage, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerRunStageDo) FindInBatches(result *[]*model.DiggerRunStage, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerRunStageDo) Attrs(attrs ...field.AssignExpr) IDiggerRunStageDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerRunStageDo) Assign(attrs ...field.AssignExpr) IDiggerRunStageDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerRunStageDo) Joins(fields ...field.RelationField) IDiggerRunStageDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerRunStageDo) Preload(fields ...field.RelationField) IDiggerRunStageDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerRunStageDo) FirstOrInit() (*model.DiggerRunStage, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunStage), nil + } +} + +func (d diggerRunStageDo) FirstOrCreate() (*model.DiggerRunStage, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRunStage), nil + } +} + +func (d diggerRunStageDo) FindByPage(offset int, limit int) (result []*model.DiggerRunStage, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerRunStageDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerRunStageDo) Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerRunStageDo) 
Delete(models ...*model.DiggerRunStage) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + +func (d *diggerRunStageDo) withDO(do gen.Dao) *diggerRunStageDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/digger_runs.gen.go b/next/models_generated/digger_runs.gen.go new file mode 100644 index 000000000..a2ea6901f --- /dev/null +++ b/next/models_generated/digger_runs.gen.go @@ -0,0 +1,448 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newDiggerRun(db *gorm.DB, opts ...gen.DOOption) diggerRun { + _diggerRun := diggerRun{} + + _diggerRun.diggerRunDo.UseDB(db, opts...) + _diggerRun.diggerRunDo.UseModel(&model.DiggerRun{}) + + tableName := _diggerRun.diggerRunDo.TableName() + _diggerRun.ALL = field.NewAsterisk(tableName) + _diggerRun.ID = field.NewString(tableName, "id") + _diggerRun.CreatedAt = field.NewTime(tableName, "created_at") + _diggerRun.UpdatedAt = field.NewTime(tableName, "updated_at") + _diggerRun.DeletedAt = field.NewField(tableName, "deleted_at") + _diggerRun.Triggertype = field.NewString(tableName, "triggertype") + _diggerRun.PrNumber = field.NewInt64(tableName, "pr_number") + _diggerRun.Status = field.NewString(tableName, "status") + _diggerRun.CommitID = field.NewString(tableName, "commit_id") + _diggerRun.DiggerConfig = field.NewString(tableName, "digger_config") + _diggerRun.GithubInstallationID = field.NewInt64(tableName, "github_installation_id") + _diggerRun.RepoID = field.NewInt64(tableName, "repo_id") + _diggerRun.RunType = field.NewString(tableName, "run_type") + _diggerRun.PlanStageID = field.NewString(tableName, "plan_stage_id") + _diggerRun.ApplyStageID = 
field.NewString(tableName, "apply_stage_id") + _diggerRun.ProjectName = field.NewString(tableName, "project_name") + _diggerRun.IsApproved = field.NewBool(tableName, "is_approved") + _diggerRun.ApprovalAuthor = field.NewString(tableName, "approval_author") + _diggerRun.ApprovalDate = field.NewTime(tableName, "approval_date") + + _diggerRun.fillFieldMap() + + return _diggerRun +} + +type diggerRun struct { + diggerRunDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + Triggertype field.String + PrNumber field.Int64 + Status field.String + CommitID field.String + DiggerConfig field.String + GithubInstallationID field.Int64 + RepoID field.Int64 + RunType field.String + PlanStageID field.String + ApplyStageID field.String + ProjectName field.String + IsApproved field.Bool + ApprovalAuthor field.String + ApprovalDate field.Time + + fieldMap map[string]field.Expr +} + +func (d diggerRun) Table(newTableName string) *diggerRun { + d.diggerRunDo.UseTable(newTableName) + return d.updateTableName(newTableName) +} + +func (d diggerRun) As(alias string) *diggerRun { + d.diggerRunDo.DO = *(d.diggerRunDo.As(alias).(*gen.DO)) + return d.updateTableName(alias) +} + +func (d *diggerRun) updateTableName(table string) *diggerRun { + d.ALL = field.NewAsterisk(table) + d.ID = field.NewString(table, "id") + d.CreatedAt = field.NewTime(table, "created_at") + d.UpdatedAt = field.NewTime(table, "updated_at") + d.DeletedAt = field.NewField(table, "deleted_at") + d.Triggertype = field.NewString(table, "triggertype") + d.PrNumber = field.NewInt64(table, "pr_number") + d.Status = field.NewString(table, "status") + d.CommitID = field.NewString(table, "commit_id") + d.DiggerConfig = field.NewString(table, "digger_config") + d.GithubInstallationID = field.NewInt64(table, "github_installation_id") + d.RepoID = field.NewInt64(table, "repo_id") + d.RunType = field.NewString(table, "run_type") + d.PlanStageID = field.NewString(table, 
"plan_stage_id") + d.ApplyStageID = field.NewString(table, "apply_stage_id") + d.ProjectName = field.NewString(table, "project_name") + d.IsApproved = field.NewBool(table, "is_approved") + d.ApprovalAuthor = field.NewString(table, "approval_author") + d.ApprovalDate = field.NewTime(table, "approval_date") + + d.fillFieldMap() + + return d +} + +func (d *diggerRun) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := d.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (d *diggerRun) fillFieldMap() { + d.fieldMap = make(map[string]field.Expr, 18) + d.fieldMap["id"] = d.ID + d.fieldMap["created_at"] = d.CreatedAt + d.fieldMap["updated_at"] = d.UpdatedAt + d.fieldMap["deleted_at"] = d.DeletedAt + d.fieldMap["triggertype"] = d.Triggertype + d.fieldMap["pr_number"] = d.PrNumber + d.fieldMap["status"] = d.Status + d.fieldMap["commit_id"] = d.CommitID + d.fieldMap["digger_config"] = d.DiggerConfig + d.fieldMap["github_installation_id"] = d.GithubInstallationID + d.fieldMap["repo_id"] = d.RepoID + d.fieldMap["run_type"] = d.RunType + d.fieldMap["plan_stage_id"] = d.PlanStageID + d.fieldMap["apply_stage_id"] = d.ApplyStageID + d.fieldMap["project_name"] = d.ProjectName + d.fieldMap["is_approved"] = d.IsApproved + d.fieldMap["approval_author"] = d.ApprovalAuthor + d.fieldMap["approval_date"] = d.ApprovalDate +} + +func (d diggerRun) clone(db *gorm.DB) diggerRun { + d.diggerRunDo.ReplaceConnPool(db.Statement.ConnPool) + return d +} + +func (d diggerRun) replaceDB(db *gorm.DB) diggerRun { + d.diggerRunDo.ReplaceDB(db) + return d +} + +type diggerRunDo struct{ gen.DO } + +type IDiggerRunDo interface { + gen.SubQuery + Debug() IDiggerRunDo + WithContext(ctx context.Context) IDiggerRunDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IDiggerRunDo + WriteDB() IDiggerRunDo + As(alias string) gen.Dao + Session(config *gorm.Session) IDiggerRunDo + 
Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IDiggerRunDo + Not(conds ...gen.Condition) IDiggerRunDo + Or(conds ...gen.Condition) IDiggerRunDo + Select(conds ...field.Expr) IDiggerRunDo + Where(conds ...gen.Condition) IDiggerRunDo + Order(conds ...field.Expr) IDiggerRunDo + Distinct(cols ...field.Expr) IDiggerRunDo + Omit(cols ...field.Expr) IDiggerRunDo + Join(table schema.Tabler, on ...field.Expr) IDiggerRunDo + LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerRunDo + RightJoin(table schema.Tabler, on ...field.Expr) IDiggerRunDo + Group(cols ...field.Expr) IDiggerRunDo + Having(conds ...gen.Condition) IDiggerRunDo + Limit(limit int) IDiggerRunDo + Offset(offset int) IDiggerRunDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerRunDo + Unscoped() IDiggerRunDo + Create(values ...*model.DiggerRun) error + CreateInBatches(values []*model.DiggerRun, batchSize int) error + Save(values ...*model.DiggerRun) error + First() (*model.DiggerRun, error) + Take() (*model.DiggerRun, error) + Last() (*model.DiggerRun, error) + Find() ([]*model.DiggerRun, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerRun, err error) + FindInBatches(result *[]*model.DiggerRun, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.DiggerRun) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) 
IDiggerRunDo + Assign(attrs ...field.AssignExpr) IDiggerRunDo + Joins(fields ...field.RelationField) IDiggerRunDo + Preload(fields ...field.RelationField) IDiggerRunDo + FirstOrInit() (*model.DiggerRun, error) + FirstOrCreate() (*model.DiggerRun, error) + FindByPage(offset int, limit int) (result []*model.DiggerRun, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IDiggerRunDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (d diggerRunDo) Debug() IDiggerRunDo { + return d.withDO(d.DO.Debug()) +} + +func (d diggerRunDo) WithContext(ctx context.Context) IDiggerRunDo { + return d.withDO(d.DO.WithContext(ctx)) +} + +func (d diggerRunDo) ReadDB() IDiggerRunDo { + return d.Clauses(dbresolver.Read) +} + +func (d diggerRunDo) WriteDB() IDiggerRunDo { + return d.Clauses(dbresolver.Write) +} + +func (d diggerRunDo) Session(config *gorm.Session) IDiggerRunDo { + return d.withDO(d.DO.Session(config)) +} + +func (d diggerRunDo) Clauses(conds ...clause.Expression) IDiggerRunDo { + return d.withDO(d.DO.Clauses(conds...)) +} + +func (d diggerRunDo) Returning(value interface{}, columns ...string) IDiggerRunDo { + return d.withDO(d.DO.Returning(value, columns...)) +} + +func (d diggerRunDo) Not(conds ...gen.Condition) IDiggerRunDo { + return d.withDO(d.DO.Not(conds...)) +} + +func (d diggerRunDo) Or(conds ...gen.Condition) IDiggerRunDo { + return d.withDO(d.DO.Or(conds...)) +} + +func (d diggerRunDo) Select(conds ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.Select(conds...)) +} + +func (d diggerRunDo) Where(conds ...gen.Condition) IDiggerRunDo { + return d.withDO(d.DO.Where(conds...)) +} + +func (d diggerRunDo) Order(conds ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.Order(conds...)) +} + +func (d diggerRunDo) Distinct(cols ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.Distinct(cols...)) +} + +func (d 
diggerRunDo) Omit(cols ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.Omit(cols...)) +} + +func (d diggerRunDo) Join(table schema.Tabler, on ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.Join(table, on...)) +} + +func (d diggerRunDo) LeftJoin(table schema.Tabler, on ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.LeftJoin(table, on...)) +} + +func (d diggerRunDo) RightJoin(table schema.Tabler, on ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.RightJoin(table, on...)) +} + +func (d diggerRunDo) Group(cols ...field.Expr) IDiggerRunDo { + return d.withDO(d.DO.Group(cols...)) +} + +func (d diggerRunDo) Having(conds ...gen.Condition) IDiggerRunDo { + return d.withDO(d.DO.Having(conds...)) +} + +func (d diggerRunDo) Limit(limit int) IDiggerRunDo { + return d.withDO(d.DO.Limit(limit)) +} + +func (d diggerRunDo) Offset(offset int) IDiggerRunDo { + return d.withDO(d.DO.Offset(offset)) +} + +func (d diggerRunDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IDiggerRunDo { + return d.withDO(d.DO.Scopes(funcs...)) +} + +func (d diggerRunDo) Unscoped() IDiggerRunDo { + return d.withDO(d.DO.Unscoped()) +} + +func (d diggerRunDo) Create(values ...*model.DiggerRun) error { + if len(values) == 0 { + return nil + } + return d.DO.Create(values) +} + +func (d diggerRunDo) CreateInBatches(values []*model.DiggerRun, batchSize int) error { + return d.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (d diggerRunDo) Save(values ...*model.DiggerRun) error { + if len(values) == 0 { + return nil + } + return d.DO.Save(values) +} + +func (d diggerRunDo) First() (*model.DiggerRun, error) { + if result, err := d.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRun), nil + } +} + +func (d diggerRunDo) Take() (*model.DiggerRun, error) { + if result, err := d.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRun), nil + } +} + +func (d diggerRunDo) Last() (*model.DiggerRun, error) { + if result, err := d.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRun), nil + } +} + +func (d diggerRunDo) Find() ([]*model.DiggerRun, error) { + result, err := d.DO.Find() + return result.([]*model.DiggerRun), err +} + +func (d diggerRunDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.DiggerRun, err error) { + buf := make([]*model.DiggerRun, 0, batchSize) + err = d.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (d diggerRunDo) FindInBatches(result *[]*model.DiggerRun, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return d.DO.FindInBatches(result, batchSize, fc) +} + +func (d diggerRunDo) Attrs(attrs ...field.AssignExpr) IDiggerRunDo { + return d.withDO(d.DO.Attrs(attrs...)) +} + +func (d diggerRunDo) Assign(attrs ...field.AssignExpr) IDiggerRunDo { + return d.withDO(d.DO.Assign(attrs...)) +} + +func (d diggerRunDo) Joins(fields ...field.RelationField) IDiggerRunDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Joins(_f)) + } + return &d +} + +func (d diggerRunDo) Preload(fields ...field.RelationField) IDiggerRunDo { + for _, _f := range fields { + d = *d.withDO(d.DO.Preload(_f)) + } + return &d +} + +func (d diggerRunDo) FirstOrInit() (*model.DiggerRun, error) { + if result, err := d.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRun), nil + } +} + +func (d diggerRunDo) FirstOrCreate() (*model.DiggerRun, error) { + if result, err := d.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.DiggerRun), nil + } +} + +func (d diggerRunDo) FindByPage(offset int, limit int) (result []*model.DiggerRun, count int64, err error) { + result, err = d.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = d.Offset(-1).Limit(-1).Count() + return +} + +func (d diggerRunDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = d.Count() + if err != nil { + return + } + + err = d.Offset(offset).Limit(limit).Scan(result) + return +} + +func (d diggerRunDo) Scan(result interface{}) (err error) { + return d.DO.Scan(result) +} + +func (d diggerRunDo) Delete(models ...*model.DiggerRun) (result gen.ResultInfo, err error) { + return d.DO.Delete(models) +} + 
+func (d *diggerRunDo) withDO(do gen.Dao) *diggerRunDo { + d.DO = *do.(*gen.DO) + return d +} diff --git a/next/models_generated/gen.go b/next/models_generated/gen.go new file mode 100644 index 000000000..d52dfe35a --- /dev/null +++ b/next/models_generated/gen.go @@ -0,0 +1,415 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + "database/sql" + + "gorm.io/gorm" + + "gorm.io/gen" + + "gorm.io/plugin/dbresolver" +) + +var ( + Q = new(Query) + AccountDeleteToken *accountDeleteToken + Chat *chat + Customer *customer + DiggerBatch *diggerBatch + DiggerJob *diggerJob + DiggerJobParentLink *diggerJobParentLink + DiggerJobSummary *diggerJobSummary + DiggerJobToken *diggerJobToken + DiggerLock *diggerLock + DiggerRun *diggerRun + DiggerRunQueueItem *diggerRunQueueItem + DiggerRunStage *diggerRunStage + GithubApp *githubApp + GithubAppInstallation *githubAppInstallation + GithubAppInstallationLink *githubAppInstallationLink + InternalBlogAuthorPost *internalBlogAuthorPost + InternalBlogAuthorProfile *internalBlogAuthorProfile + InternalBlogPost *internalBlogPost + InternalBlogPostTag *internalBlogPostTag + InternalBlogPostTagsRelationship *internalBlogPostTagsRelationship + InternalChangelog *internalChangelog + InternalFeedbackComment *internalFeedbackComment + InternalFeedbackThread *internalFeedbackThread + Organization *organization + OrganizationCredit *organizationCredit + OrganizationJoinInvitation *organizationJoinInvitation + OrganizationMember *organizationMember + OrganizationsPrivateInfo *organizationsPrivateInfo + Price *price + Product *product + Project *project + ProjectComment *projectComment + Repo *repo + Subscription *subscription + UserAPIKey *userAPIKey + UserNotification *userNotification + UserOnboarding *userOnboarding + UserPrivateInfo *userPrivateInfo + UserProfile *userProfile + UserRole *userRole +) 
+ +func SetDefault(db *gorm.DB, opts ...gen.DOOption) { + *Q = *Use(db, opts...) + AccountDeleteToken = &Q.AccountDeleteToken + Chat = &Q.Chat + Customer = &Q.Customer + DiggerBatch = &Q.DiggerBatch + DiggerJob = &Q.DiggerJob + DiggerJobParentLink = &Q.DiggerJobParentLink + DiggerJobSummary = &Q.DiggerJobSummary + DiggerJobToken = &Q.DiggerJobToken + DiggerLock = &Q.DiggerLock + DiggerRun = &Q.DiggerRun + DiggerRunQueueItem = &Q.DiggerRunQueueItem + DiggerRunStage = &Q.DiggerRunStage + GithubApp = &Q.GithubApp + GithubAppInstallation = &Q.GithubAppInstallation + GithubAppInstallationLink = &Q.GithubAppInstallationLink + InternalBlogAuthorPost = &Q.InternalBlogAuthorPost + InternalBlogAuthorProfile = &Q.InternalBlogAuthorProfile + InternalBlogPost = &Q.InternalBlogPost + InternalBlogPostTag = &Q.InternalBlogPostTag + InternalBlogPostTagsRelationship = &Q.InternalBlogPostTagsRelationship + InternalChangelog = &Q.InternalChangelog + InternalFeedbackComment = &Q.InternalFeedbackComment + InternalFeedbackThread = &Q.InternalFeedbackThread + Organization = &Q.Organization + OrganizationCredit = &Q.OrganizationCredit + OrganizationJoinInvitation = &Q.OrganizationJoinInvitation + OrganizationMember = &Q.OrganizationMember + OrganizationsPrivateInfo = &Q.OrganizationsPrivateInfo + Price = &Q.Price + Product = &Q.Product + Project = &Q.Project + ProjectComment = &Q.ProjectComment + Repo = &Q.Repo + Subscription = &Q.Subscription + UserAPIKey = &Q.UserAPIKey + UserNotification = &Q.UserNotification + UserOnboarding = &Q.UserOnboarding + UserPrivateInfo = &Q.UserPrivateInfo + UserProfile = &Q.UserProfile + UserRole = &Q.UserRole +} + +func Use(db *gorm.DB, opts ...gen.DOOption) *Query { + return &Query{ + db: db, + AccountDeleteToken: newAccountDeleteToken(db, opts...), + Chat: newChat(db, opts...), + Customer: newCustomer(db, opts...), + DiggerBatch: newDiggerBatch(db, opts...), + DiggerJob: newDiggerJob(db, opts...), + DiggerJobParentLink: newDiggerJobParentLink(db, 
opts...), + DiggerJobSummary: newDiggerJobSummary(db, opts...), + DiggerJobToken: newDiggerJobToken(db, opts...), + DiggerLock: newDiggerLock(db, opts...), + DiggerRun: newDiggerRun(db, opts...), + DiggerRunQueueItem: newDiggerRunQueueItem(db, opts...), + DiggerRunStage: newDiggerRunStage(db, opts...), + GithubApp: newGithubApp(db, opts...), + GithubAppInstallation: newGithubAppInstallation(db, opts...), + GithubAppInstallationLink: newGithubAppInstallationLink(db, opts...), + InternalBlogAuthorPost: newInternalBlogAuthorPost(db, opts...), + InternalBlogAuthorProfile: newInternalBlogAuthorProfile(db, opts...), + InternalBlogPost: newInternalBlogPost(db, opts...), + InternalBlogPostTag: newInternalBlogPostTag(db, opts...), + InternalBlogPostTagsRelationship: newInternalBlogPostTagsRelationship(db, opts...), + InternalChangelog: newInternalChangelog(db, opts...), + InternalFeedbackComment: newInternalFeedbackComment(db, opts...), + InternalFeedbackThread: newInternalFeedbackThread(db, opts...), + Organization: newOrganization(db, opts...), + OrganizationCredit: newOrganizationCredit(db, opts...), + OrganizationJoinInvitation: newOrganizationJoinInvitation(db, opts...), + OrganizationMember: newOrganizationMember(db, opts...), + OrganizationsPrivateInfo: newOrganizationsPrivateInfo(db, opts...), + Price: newPrice(db, opts...), + Product: newProduct(db, opts...), + Project: newProject(db, opts...), + ProjectComment: newProjectComment(db, opts...), + Repo: newRepo(db, opts...), + Subscription: newSubscription(db, opts...), + UserAPIKey: newUserAPIKey(db, opts...), + UserNotification: newUserNotification(db, opts...), + UserOnboarding: newUserOnboarding(db, opts...), + UserPrivateInfo: newUserPrivateInfo(db, opts...), + UserProfile: newUserProfile(db, opts...), + UserRole: newUserRole(db, opts...), + } +} + +type Query struct { + db *gorm.DB + + AccountDeleteToken accountDeleteToken + Chat chat + Customer customer + DiggerBatch diggerBatch + DiggerJob diggerJob + 
DiggerJobParentLink diggerJobParentLink + DiggerJobSummary diggerJobSummary + DiggerJobToken diggerJobToken + DiggerLock diggerLock + DiggerRun diggerRun + DiggerRunQueueItem diggerRunQueueItem + DiggerRunStage diggerRunStage + GithubApp githubApp + GithubAppInstallation githubAppInstallation + GithubAppInstallationLink githubAppInstallationLink + InternalBlogAuthorPost internalBlogAuthorPost + InternalBlogAuthorProfile internalBlogAuthorProfile + InternalBlogPost internalBlogPost + InternalBlogPostTag internalBlogPostTag + InternalBlogPostTagsRelationship internalBlogPostTagsRelationship + InternalChangelog internalChangelog + InternalFeedbackComment internalFeedbackComment + InternalFeedbackThread internalFeedbackThread + Organization organization + OrganizationCredit organizationCredit + OrganizationJoinInvitation organizationJoinInvitation + OrganizationMember organizationMember + OrganizationsPrivateInfo organizationsPrivateInfo + Price price + Product product + Project project + ProjectComment projectComment + Repo repo + Subscription subscription + UserAPIKey userAPIKey + UserNotification userNotification + UserOnboarding userOnboarding + UserPrivateInfo userPrivateInfo + UserProfile userProfile + UserRole userRole +} + +func (q *Query) Available() bool { return q.db != nil } + +func (q *Query) clone(db *gorm.DB) *Query { + return &Query{ + db: db, + AccountDeleteToken: q.AccountDeleteToken.clone(db), + Chat: q.Chat.clone(db), + Customer: q.Customer.clone(db), + DiggerBatch: q.DiggerBatch.clone(db), + DiggerJob: q.DiggerJob.clone(db), + DiggerJobParentLink: q.DiggerJobParentLink.clone(db), + DiggerJobSummary: q.DiggerJobSummary.clone(db), + DiggerJobToken: q.DiggerJobToken.clone(db), + DiggerLock: q.DiggerLock.clone(db), + DiggerRun: q.DiggerRun.clone(db), + DiggerRunQueueItem: q.DiggerRunQueueItem.clone(db), + DiggerRunStage: q.DiggerRunStage.clone(db), + GithubApp: q.GithubApp.clone(db), + GithubAppInstallation: q.GithubAppInstallation.clone(db), + 
GithubAppInstallationLink: q.GithubAppInstallationLink.clone(db), + InternalBlogAuthorPost: q.InternalBlogAuthorPost.clone(db), + InternalBlogAuthorProfile: q.InternalBlogAuthorProfile.clone(db), + InternalBlogPost: q.InternalBlogPost.clone(db), + InternalBlogPostTag: q.InternalBlogPostTag.clone(db), + InternalBlogPostTagsRelationship: q.InternalBlogPostTagsRelationship.clone(db), + InternalChangelog: q.InternalChangelog.clone(db), + InternalFeedbackComment: q.InternalFeedbackComment.clone(db), + InternalFeedbackThread: q.InternalFeedbackThread.clone(db), + Organization: q.Organization.clone(db), + OrganizationCredit: q.OrganizationCredit.clone(db), + OrganizationJoinInvitation: q.OrganizationJoinInvitation.clone(db), + OrganizationMember: q.OrganizationMember.clone(db), + OrganizationsPrivateInfo: q.OrganizationsPrivateInfo.clone(db), + Price: q.Price.clone(db), + Product: q.Product.clone(db), + Project: q.Project.clone(db), + ProjectComment: q.ProjectComment.clone(db), + Repo: q.Repo.clone(db), + Subscription: q.Subscription.clone(db), + UserAPIKey: q.UserAPIKey.clone(db), + UserNotification: q.UserNotification.clone(db), + UserOnboarding: q.UserOnboarding.clone(db), + UserPrivateInfo: q.UserPrivateInfo.clone(db), + UserProfile: q.UserProfile.clone(db), + UserRole: q.UserRole.clone(db), + } +} + +func (q *Query) ReadDB() *Query { + return q.ReplaceDB(q.db.Clauses(dbresolver.Read)) +} + +func (q *Query) WriteDB() *Query { + return q.ReplaceDB(q.db.Clauses(dbresolver.Write)) +} + +func (q *Query) ReplaceDB(db *gorm.DB) *Query { + return &Query{ + db: db, + AccountDeleteToken: q.AccountDeleteToken.replaceDB(db), + Chat: q.Chat.replaceDB(db), + Customer: q.Customer.replaceDB(db), + DiggerBatch: q.DiggerBatch.replaceDB(db), + DiggerJob: q.DiggerJob.replaceDB(db), + DiggerJobParentLink: q.DiggerJobParentLink.replaceDB(db), + DiggerJobSummary: q.DiggerJobSummary.replaceDB(db), + DiggerJobToken: q.DiggerJobToken.replaceDB(db), + DiggerLock: q.DiggerLock.replaceDB(db), + 
DiggerRun: q.DiggerRun.replaceDB(db), + DiggerRunQueueItem: q.DiggerRunQueueItem.replaceDB(db), + DiggerRunStage: q.DiggerRunStage.replaceDB(db), + GithubApp: q.GithubApp.replaceDB(db), + GithubAppInstallation: q.GithubAppInstallation.replaceDB(db), + GithubAppInstallationLink: q.GithubAppInstallationLink.replaceDB(db), + InternalBlogAuthorPost: q.InternalBlogAuthorPost.replaceDB(db), + InternalBlogAuthorProfile: q.InternalBlogAuthorProfile.replaceDB(db), + InternalBlogPost: q.InternalBlogPost.replaceDB(db), + InternalBlogPostTag: q.InternalBlogPostTag.replaceDB(db), + InternalBlogPostTagsRelationship: q.InternalBlogPostTagsRelationship.replaceDB(db), + InternalChangelog: q.InternalChangelog.replaceDB(db), + InternalFeedbackComment: q.InternalFeedbackComment.replaceDB(db), + InternalFeedbackThread: q.InternalFeedbackThread.replaceDB(db), + Organization: q.Organization.replaceDB(db), + OrganizationCredit: q.OrganizationCredit.replaceDB(db), + OrganizationJoinInvitation: q.OrganizationJoinInvitation.replaceDB(db), + OrganizationMember: q.OrganizationMember.replaceDB(db), + OrganizationsPrivateInfo: q.OrganizationsPrivateInfo.replaceDB(db), + Price: q.Price.replaceDB(db), + Product: q.Product.replaceDB(db), + Project: q.Project.replaceDB(db), + ProjectComment: q.ProjectComment.replaceDB(db), + Repo: q.Repo.replaceDB(db), + Subscription: q.Subscription.replaceDB(db), + UserAPIKey: q.UserAPIKey.replaceDB(db), + UserNotification: q.UserNotification.replaceDB(db), + UserOnboarding: q.UserOnboarding.replaceDB(db), + UserPrivateInfo: q.UserPrivateInfo.replaceDB(db), + UserProfile: q.UserProfile.replaceDB(db), + UserRole: q.UserRole.replaceDB(db), + } +} + +type queryCtx struct { + AccountDeleteToken IAccountDeleteTokenDo + Chat IChatDo + Customer ICustomerDo + DiggerBatch IDiggerBatchDo + DiggerJob IDiggerJobDo + DiggerJobParentLink IDiggerJobParentLinkDo + DiggerJobSummary IDiggerJobSummaryDo + DiggerJobToken IDiggerJobTokenDo + DiggerLock IDiggerLockDo + DiggerRun 
IDiggerRunDo + DiggerRunQueueItem IDiggerRunQueueItemDo + DiggerRunStage IDiggerRunStageDo + GithubApp IGithubAppDo + GithubAppInstallation IGithubAppInstallationDo + GithubAppInstallationLink IGithubAppInstallationLinkDo + InternalBlogAuthorPost IInternalBlogAuthorPostDo + InternalBlogAuthorProfile IInternalBlogAuthorProfileDo + InternalBlogPost IInternalBlogPostDo + InternalBlogPostTag IInternalBlogPostTagDo + InternalBlogPostTagsRelationship IInternalBlogPostTagsRelationshipDo + InternalChangelog IInternalChangelogDo + InternalFeedbackComment IInternalFeedbackCommentDo + InternalFeedbackThread IInternalFeedbackThreadDo + Organization IOrganizationDo + OrganizationCredit IOrganizationCreditDo + OrganizationJoinInvitation IOrganizationJoinInvitationDo + OrganizationMember IOrganizationMemberDo + OrganizationsPrivateInfo IOrganizationsPrivateInfoDo + Price IPriceDo + Product IProductDo + Project IProjectDo + ProjectComment IProjectCommentDo + Repo IRepoDo + Subscription ISubscriptionDo + UserAPIKey IUserAPIKeyDo + UserNotification IUserNotificationDo + UserOnboarding IUserOnboardingDo + UserPrivateInfo IUserPrivateInfoDo + UserProfile IUserProfileDo + UserRole IUserRoleDo +} + +func (q *Query) WithContext(ctx context.Context) *queryCtx { + return &queryCtx{ + AccountDeleteToken: q.AccountDeleteToken.WithContext(ctx), + Chat: q.Chat.WithContext(ctx), + Customer: q.Customer.WithContext(ctx), + DiggerBatch: q.DiggerBatch.WithContext(ctx), + DiggerJob: q.DiggerJob.WithContext(ctx), + DiggerJobParentLink: q.DiggerJobParentLink.WithContext(ctx), + DiggerJobSummary: q.DiggerJobSummary.WithContext(ctx), + DiggerJobToken: q.DiggerJobToken.WithContext(ctx), + DiggerLock: q.DiggerLock.WithContext(ctx), + DiggerRun: q.DiggerRun.WithContext(ctx), + DiggerRunQueueItem: q.DiggerRunQueueItem.WithContext(ctx), + DiggerRunStage: q.DiggerRunStage.WithContext(ctx), + GithubApp: q.GithubApp.WithContext(ctx), + GithubAppInstallation: q.GithubAppInstallation.WithContext(ctx), + 
GithubAppInstallationLink: q.GithubAppInstallationLink.WithContext(ctx), + InternalBlogAuthorPost: q.InternalBlogAuthorPost.WithContext(ctx), + InternalBlogAuthorProfile: q.InternalBlogAuthorProfile.WithContext(ctx), + InternalBlogPost: q.InternalBlogPost.WithContext(ctx), + InternalBlogPostTag: q.InternalBlogPostTag.WithContext(ctx), + InternalBlogPostTagsRelationship: q.InternalBlogPostTagsRelationship.WithContext(ctx), + InternalChangelog: q.InternalChangelog.WithContext(ctx), + InternalFeedbackComment: q.InternalFeedbackComment.WithContext(ctx), + InternalFeedbackThread: q.InternalFeedbackThread.WithContext(ctx), + Organization: q.Organization.WithContext(ctx), + OrganizationCredit: q.OrganizationCredit.WithContext(ctx), + OrganizationJoinInvitation: q.OrganizationJoinInvitation.WithContext(ctx), + OrganizationMember: q.OrganizationMember.WithContext(ctx), + OrganizationsPrivateInfo: q.OrganizationsPrivateInfo.WithContext(ctx), + Price: q.Price.WithContext(ctx), + Product: q.Product.WithContext(ctx), + Project: q.Project.WithContext(ctx), + ProjectComment: q.ProjectComment.WithContext(ctx), + Repo: q.Repo.WithContext(ctx), + Subscription: q.Subscription.WithContext(ctx), + UserAPIKey: q.UserAPIKey.WithContext(ctx), + UserNotification: q.UserNotification.WithContext(ctx), + UserOnboarding: q.UserOnboarding.WithContext(ctx), + UserPrivateInfo: q.UserPrivateInfo.WithContext(ctx), + UserProfile: q.UserProfile.WithContext(ctx), + UserRole: q.UserRole.WithContext(ctx), + } +} + +func (q *Query) Transaction(fc func(tx *Query) error, opts ...*sql.TxOptions) error { + return q.db.Transaction(func(tx *gorm.DB) error { return fc(q.clone(tx)) }, opts...) +} + +func (q *Query) Begin(opts ...*sql.TxOptions) *QueryTx { + tx := q.db.Begin(opts...) 
+ return &QueryTx{Query: q.clone(tx), Error: tx.Error} +} + +type QueryTx struct { + *Query + Error error +} + +func (q *QueryTx) Commit() error { + return q.db.Commit().Error +} + +func (q *QueryTx) Rollback() error { + return q.db.Rollback().Error +} + +func (q *QueryTx) SavePoint(name string) error { + return q.db.SavePoint(name).Error +} + +func (q *QueryTx) RollbackTo(name string) error { + return q.db.RollbackTo(name).Error +} diff --git a/next/models_generated/github_app_installation_links.gen.go b/next/models_generated/github_app_installation_links.gen.go new file mode 100644 index 000000000..5fc5084ae --- /dev/null +++ b/next/models_generated/github_app_installation_links.gen.go @@ -0,0 +1,404 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newGithubAppInstallationLink(db *gorm.DB, opts ...gen.DOOption) githubAppInstallationLink { + _githubAppInstallationLink := githubAppInstallationLink{} + + _githubAppInstallationLink.githubAppInstallationLinkDo.UseDB(db, opts...) 
+ _githubAppInstallationLink.githubAppInstallationLinkDo.UseModel(&model.GithubAppInstallationLink{}) + + tableName := _githubAppInstallationLink.githubAppInstallationLinkDo.TableName() + _githubAppInstallationLink.ALL = field.NewAsterisk(tableName) + _githubAppInstallationLink.ID = field.NewString(tableName, "id") + _githubAppInstallationLink.CreatedAt = field.NewTime(tableName, "created_at") + _githubAppInstallationLink.UpdatedAt = field.NewTime(tableName, "updated_at") + _githubAppInstallationLink.DeletedAt = field.NewField(tableName, "deleted_at") + _githubAppInstallationLink.GithubInstallationID = field.NewInt64(tableName, "github_installation_id") + _githubAppInstallationLink.OrganizationID = field.NewString(tableName, "organization_id") + _githubAppInstallationLink.Status = field.NewInt16(tableName, "status") + + _githubAppInstallationLink.fillFieldMap() + + return _githubAppInstallationLink +} + +type githubAppInstallationLink struct { + githubAppInstallationLinkDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + GithubInstallationID field.Int64 + OrganizationID field.String + Status field.Int16 + + fieldMap map[string]field.Expr +} + +func (g githubAppInstallationLink) Table(newTableName string) *githubAppInstallationLink { + g.githubAppInstallationLinkDo.UseTable(newTableName) + return g.updateTableName(newTableName) +} + +func (g githubAppInstallationLink) As(alias string) *githubAppInstallationLink { + g.githubAppInstallationLinkDo.DO = *(g.githubAppInstallationLinkDo.As(alias).(*gen.DO)) + return g.updateTableName(alias) +} + +func (g *githubAppInstallationLink) updateTableName(table string) *githubAppInstallationLink { + g.ALL = field.NewAsterisk(table) + g.ID = field.NewString(table, "id") + g.CreatedAt = field.NewTime(table, "created_at") + g.UpdatedAt = field.NewTime(table, "updated_at") + g.DeletedAt = field.NewField(table, "deleted_at") + g.GithubInstallationID = 
field.NewInt64(table, "github_installation_id") + g.OrganizationID = field.NewString(table, "organization_id") + g.Status = field.NewInt16(table, "status") + + g.fillFieldMap() + + return g +} + +func (g *githubAppInstallationLink) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := g.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (g *githubAppInstallationLink) fillFieldMap() { + g.fieldMap = make(map[string]field.Expr, 7) + g.fieldMap["id"] = g.ID + g.fieldMap["created_at"] = g.CreatedAt + g.fieldMap["updated_at"] = g.UpdatedAt + g.fieldMap["deleted_at"] = g.DeletedAt + g.fieldMap["github_installation_id"] = g.GithubInstallationID + g.fieldMap["organization_id"] = g.OrganizationID + g.fieldMap["status"] = g.Status +} + +func (g githubAppInstallationLink) clone(db *gorm.DB) githubAppInstallationLink { + g.githubAppInstallationLinkDo.ReplaceConnPool(db.Statement.ConnPool) + return g +} + +func (g githubAppInstallationLink) replaceDB(db *gorm.DB) githubAppInstallationLink { + g.githubAppInstallationLinkDo.ReplaceDB(db) + return g +} + +type githubAppInstallationLinkDo struct{ gen.DO } + +type IGithubAppInstallationLinkDo interface { + gen.SubQuery + Debug() IGithubAppInstallationLinkDo + WithContext(ctx context.Context) IGithubAppInstallationLinkDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IGithubAppInstallationLinkDo + WriteDB() IGithubAppInstallationLinkDo + As(alias string) gen.Dao + Session(config *gorm.Session) IGithubAppInstallationLinkDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IGithubAppInstallationLinkDo + Not(conds ...gen.Condition) IGithubAppInstallationLinkDo + Or(conds ...gen.Condition) IGithubAppInstallationLinkDo + Select(conds ...field.Expr) IGithubAppInstallationLinkDo + Where(conds ...gen.Condition) IGithubAppInstallationLinkDo + Order(conds ...field.Expr) 
IGithubAppInstallationLinkDo + Distinct(cols ...field.Expr) IGithubAppInstallationLinkDo + Omit(cols ...field.Expr) IGithubAppInstallationLinkDo + Join(table schema.Tabler, on ...field.Expr) IGithubAppInstallationLinkDo + LeftJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationLinkDo + RightJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationLinkDo + Group(cols ...field.Expr) IGithubAppInstallationLinkDo + Having(conds ...gen.Condition) IGithubAppInstallationLinkDo + Limit(limit int) IGithubAppInstallationLinkDo + Offset(offset int) IGithubAppInstallationLinkDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IGithubAppInstallationLinkDo + Unscoped() IGithubAppInstallationLinkDo + Create(values ...*model.GithubAppInstallationLink) error + CreateInBatches(values []*model.GithubAppInstallationLink, batchSize int) error + Save(values ...*model.GithubAppInstallationLink) error + First() (*model.GithubAppInstallationLink, error) + Take() (*model.GithubAppInstallationLink, error) + Last() (*model.GithubAppInstallationLink, error) + Find() ([]*model.GithubAppInstallationLink, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.GithubAppInstallationLink, err error) + FindInBatches(result *[]*model.GithubAppInstallationLink, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.GithubAppInstallationLink) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q 
gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IGithubAppInstallationLinkDo + Assign(attrs ...field.AssignExpr) IGithubAppInstallationLinkDo + Joins(fields ...field.RelationField) IGithubAppInstallationLinkDo + Preload(fields ...field.RelationField) IGithubAppInstallationLinkDo + FirstOrInit() (*model.GithubAppInstallationLink, error) + FirstOrCreate() (*model.GithubAppInstallationLink, error) + FindByPage(offset int, limit int) (result []*model.GithubAppInstallationLink, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IGithubAppInstallationLinkDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (g githubAppInstallationLinkDo) Debug() IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Debug()) +} + +func (g githubAppInstallationLinkDo) WithContext(ctx context.Context) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.WithContext(ctx)) +} + +func (g githubAppInstallationLinkDo) ReadDB() IGithubAppInstallationLinkDo { + return g.Clauses(dbresolver.Read) +} + +func (g githubAppInstallationLinkDo) WriteDB() IGithubAppInstallationLinkDo { + return g.Clauses(dbresolver.Write) +} + +func (g githubAppInstallationLinkDo) Session(config *gorm.Session) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Session(config)) +} + +func (g githubAppInstallationLinkDo) Clauses(conds ...clause.Expression) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Clauses(conds...)) +} + +func (g githubAppInstallationLinkDo) Returning(value interface{}, columns ...string) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Returning(value, columns...)) +} + +func (g githubAppInstallationLinkDo) Not(conds ...gen.Condition) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Not(conds...)) +} + +func (g githubAppInstallationLinkDo) Or(conds ...gen.Condition) IGithubAppInstallationLinkDo { + return 
g.withDO(g.DO.Or(conds...)) +} + +func (g githubAppInstallationLinkDo) Select(conds ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Select(conds...)) +} + +func (g githubAppInstallationLinkDo) Where(conds ...gen.Condition) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Where(conds...)) +} + +func (g githubAppInstallationLinkDo) Order(conds ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Order(conds...)) +} + +func (g githubAppInstallationLinkDo) Distinct(cols ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Distinct(cols...)) +} + +func (g githubAppInstallationLinkDo) Omit(cols ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Omit(cols...)) +} + +func (g githubAppInstallationLinkDo) Join(table schema.Tabler, on ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Join(table, on...)) +} + +func (g githubAppInstallationLinkDo) LeftJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.LeftJoin(table, on...)) +} + +func (g githubAppInstallationLinkDo) RightJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.RightJoin(table, on...)) +} + +func (g githubAppInstallationLinkDo) Group(cols ...field.Expr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Group(cols...)) +} + +func (g githubAppInstallationLinkDo) Having(conds ...gen.Condition) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Having(conds...)) +} + +func (g githubAppInstallationLinkDo) Limit(limit int) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Limit(limit)) +} + +func (g githubAppInstallationLinkDo) Offset(offset int) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Offset(offset)) +} + +func (g githubAppInstallationLinkDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Scopes(funcs...)) +} + +func (g githubAppInstallationLinkDo) Unscoped() 
IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Unscoped()) +} + +func (g githubAppInstallationLinkDo) Create(values ...*model.GithubAppInstallationLink) error { + if len(values) == 0 { + return nil + } + return g.DO.Create(values) +} + +func (g githubAppInstallationLinkDo) CreateInBatches(values []*model.GithubAppInstallationLink, batchSize int) error { + return g.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (g githubAppInstallationLinkDo) Save(values ...*model.GithubAppInstallationLink) error { + if len(values) == 0 { + return nil + } + return g.DO.Save(values) +} + +func (g githubAppInstallationLinkDo) First() (*model.GithubAppInstallationLink, error) { + if result, err := g.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallationLink), nil + } +} + +func (g githubAppInstallationLinkDo) Take() (*model.GithubAppInstallationLink, error) { + if result, err := g.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallationLink), nil + } +} + +func (g githubAppInstallationLinkDo) Last() (*model.GithubAppInstallationLink, error) { + if result, err := g.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallationLink), nil + } +} + +func (g githubAppInstallationLinkDo) Find() ([]*model.GithubAppInstallationLink, error) { + result, err := g.DO.Find() + return result.([]*model.GithubAppInstallationLink), err +} + +func (g githubAppInstallationLinkDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.GithubAppInstallationLink, err error) { + buf := make([]*model.GithubAppInstallationLink, 0, batchSize) + err = g.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (g githubAppInstallationLinkDo) FindInBatches(result *[]*model.GithubAppInstallationLink, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return g.DO.FindInBatches(result, batchSize, fc) +} + +func (g githubAppInstallationLinkDo) Attrs(attrs ...field.AssignExpr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Attrs(attrs...)) +} + +func (g githubAppInstallationLinkDo) Assign(attrs ...field.AssignExpr) IGithubAppInstallationLinkDo { + return g.withDO(g.DO.Assign(attrs...)) +} + +func (g githubAppInstallationLinkDo) Joins(fields ...field.RelationField) IGithubAppInstallationLinkDo { + for _, _f := range fields { + g = *g.withDO(g.DO.Joins(_f)) + } + return &g +} + +func (g githubAppInstallationLinkDo) Preload(fields ...field.RelationField) IGithubAppInstallationLinkDo { + for _, _f := range fields { + g = *g.withDO(g.DO.Preload(_f)) + } + return &g +} + +func (g githubAppInstallationLinkDo) FirstOrInit() (*model.GithubAppInstallationLink, error) { + if result, err := g.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallationLink), nil + } +} + +func (g githubAppInstallationLinkDo) FirstOrCreate() (*model.GithubAppInstallationLink, error) { + if result, err := g.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallationLink), nil + } +} + +func (g githubAppInstallationLinkDo) FindByPage(offset int, limit int) (result []*model.GithubAppInstallationLink, count int64, err error) { + result, err = g.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = g.Offset(-1).Limit(-1).Count() + return +} + +func (g githubAppInstallationLinkDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = g.Count() + if err != nil 
{ + return + } + + err = g.Offset(offset).Limit(limit).Scan(result) + return +} + +func (g githubAppInstallationLinkDo) Scan(result interface{}) (err error) { + return g.DO.Scan(result) +} + +func (g githubAppInstallationLinkDo) Delete(models ...*model.GithubAppInstallationLink) (result gen.ResultInfo, err error) { + return g.DO.Delete(models) +} + +func (g *githubAppInstallationLinkDo) withDO(do gen.Dao) *githubAppInstallationLinkDo { + g.DO = *do.(*gen.DO) + return g +} diff --git a/next/models_generated/github_app_installations.gen.go b/next/models_generated/github_app_installations.gen.go new file mode 100644 index 000000000..fba81b10b --- /dev/null +++ b/next/models_generated/github_app_installations.gen.go @@ -0,0 +1,416 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newGithubAppInstallation(db *gorm.DB, opts ...gen.DOOption) githubAppInstallation { + _githubAppInstallation := githubAppInstallation{} + + _githubAppInstallation.githubAppInstallationDo.UseDB(db, opts...) 
+ _githubAppInstallation.githubAppInstallationDo.UseModel(&model.GithubAppInstallation{}) + + tableName := _githubAppInstallation.githubAppInstallationDo.TableName() + _githubAppInstallation.ALL = field.NewAsterisk(tableName) + _githubAppInstallation.ID = field.NewString(tableName, "id") + _githubAppInstallation.CreatedAt = field.NewTime(tableName, "created_at") + _githubAppInstallation.UpdatedAt = field.NewTime(tableName, "updated_at") + _githubAppInstallation.DeletedAt = field.NewField(tableName, "deleted_at") + _githubAppInstallation.GithubInstallationID = field.NewInt64(tableName, "github_installation_id") + _githubAppInstallation.GithubAppID = field.NewInt64(tableName, "github_app_id") + _githubAppInstallation.AccountID = field.NewInt64(tableName, "account_id") + _githubAppInstallation.Login = field.NewString(tableName, "login") + _githubAppInstallation.Repo = field.NewString(tableName, "repo") + _githubAppInstallation.Status = field.NewInt64(tableName, "status") + + _githubAppInstallation.fillFieldMap() + + return _githubAppInstallation +} + +type githubAppInstallation struct { + githubAppInstallationDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + GithubInstallationID field.Int64 + GithubAppID field.Int64 + AccountID field.Int64 + Login field.String + Repo field.String + Status field.Int64 + + fieldMap map[string]field.Expr +} + +func (g githubAppInstallation) Table(newTableName string) *githubAppInstallation { + g.githubAppInstallationDo.UseTable(newTableName) + return g.updateTableName(newTableName) +} + +func (g githubAppInstallation) As(alias string) *githubAppInstallation { + g.githubAppInstallationDo.DO = *(g.githubAppInstallationDo.As(alias).(*gen.DO)) + return g.updateTableName(alias) +} + +func (g *githubAppInstallation) updateTableName(table string) *githubAppInstallation { + g.ALL = field.NewAsterisk(table) + g.ID = field.NewString(table, "id") + g.CreatedAt = field.NewTime(table, 
"created_at") + g.UpdatedAt = field.NewTime(table, "updated_at") + g.DeletedAt = field.NewField(table, "deleted_at") + g.GithubInstallationID = field.NewInt64(table, "github_installation_id") + g.GithubAppID = field.NewInt64(table, "github_app_id") + g.AccountID = field.NewInt64(table, "account_id") + g.Login = field.NewString(table, "login") + g.Repo = field.NewString(table, "repo") + g.Status = field.NewInt64(table, "status") + + g.fillFieldMap() + + return g +} + +func (g *githubAppInstallation) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := g.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (g *githubAppInstallation) fillFieldMap() { + g.fieldMap = make(map[string]field.Expr, 10) + g.fieldMap["id"] = g.ID + g.fieldMap["created_at"] = g.CreatedAt + g.fieldMap["updated_at"] = g.UpdatedAt + g.fieldMap["deleted_at"] = g.DeletedAt + g.fieldMap["github_installation_id"] = g.GithubInstallationID + g.fieldMap["github_app_id"] = g.GithubAppID + g.fieldMap["account_id"] = g.AccountID + g.fieldMap["login"] = g.Login + g.fieldMap["repo"] = g.Repo + g.fieldMap["status"] = g.Status +} + +func (g githubAppInstallation) clone(db *gorm.DB) githubAppInstallation { + g.githubAppInstallationDo.ReplaceConnPool(db.Statement.ConnPool) + return g +} + +func (g githubAppInstallation) replaceDB(db *gorm.DB) githubAppInstallation { + g.githubAppInstallationDo.ReplaceDB(db) + return g +} + +type githubAppInstallationDo struct{ gen.DO } + +type IGithubAppInstallationDo interface { + gen.SubQuery + Debug() IGithubAppInstallationDo + WithContext(ctx context.Context) IGithubAppInstallationDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IGithubAppInstallationDo + WriteDB() IGithubAppInstallationDo + As(alias string) gen.Dao + Session(config *gorm.Session) IGithubAppInstallationDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds 
...clause.Expression) IGithubAppInstallationDo + Not(conds ...gen.Condition) IGithubAppInstallationDo + Or(conds ...gen.Condition) IGithubAppInstallationDo + Select(conds ...field.Expr) IGithubAppInstallationDo + Where(conds ...gen.Condition) IGithubAppInstallationDo + Order(conds ...field.Expr) IGithubAppInstallationDo + Distinct(cols ...field.Expr) IGithubAppInstallationDo + Omit(cols ...field.Expr) IGithubAppInstallationDo + Join(table schema.Tabler, on ...field.Expr) IGithubAppInstallationDo + LeftJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationDo + RightJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationDo + Group(cols ...field.Expr) IGithubAppInstallationDo + Having(conds ...gen.Condition) IGithubAppInstallationDo + Limit(limit int) IGithubAppInstallationDo + Offset(offset int) IGithubAppInstallationDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IGithubAppInstallationDo + Unscoped() IGithubAppInstallationDo + Create(values ...*model.GithubAppInstallation) error + CreateInBatches(values []*model.GithubAppInstallation, batchSize int) error + Save(values ...*model.GithubAppInstallation) error + First() (*model.GithubAppInstallation, error) + Take() (*model.GithubAppInstallation, error) + Last() (*model.GithubAppInstallation, error) + Find() ([]*model.GithubAppInstallation, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.GithubAppInstallation, err error) + FindInBatches(result *[]*model.GithubAppInstallation, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.GithubAppInstallation) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value 
interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IGithubAppInstallationDo + Assign(attrs ...field.AssignExpr) IGithubAppInstallationDo + Joins(fields ...field.RelationField) IGithubAppInstallationDo + Preload(fields ...field.RelationField) IGithubAppInstallationDo + FirstOrInit() (*model.GithubAppInstallation, error) + FirstOrCreate() (*model.GithubAppInstallation, error) + FindByPage(offset int, limit int) (result []*model.GithubAppInstallation, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IGithubAppInstallationDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (g githubAppInstallationDo) Debug() IGithubAppInstallationDo { + return g.withDO(g.DO.Debug()) +} + +func (g githubAppInstallationDo) WithContext(ctx context.Context) IGithubAppInstallationDo { + return g.withDO(g.DO.WithContext(ctx)) +} + +func (g githubAppInstallationDo) ReadDB() IGithubAppInstallationDo { + return g.Clauses(dbresolver.Read) +} + +func (g githubAppInstallationDo) WriteDB() IGithubAppInstallationDo { + return g.Clauses(dbresolver.Write) +} + +func (g githubAppInstallationDo) Session(config *gorm.Session) IGithubAppInstallationDo { + return g.withDO(g.DO.Session(config)) +} + +func (g githubAppInstallationDo) Clauses(conds ...clause.Expression) IGithubAppInstallationDo { + return g.withDO(g.DO.Clauses(conds...)) +} + +func (g githubAppInstallationDo) Returning(value interface{}, columns ...string) IGithubAppInstallationDo { + return g.withDO(g.DO.Returning(value, columns...)) +} + +func (g githubAppInstallationDo) Not(conds ...gen.Condition) IGithubAppInstallationDo { + return g.withDO(g.DO.Not(conds...)) +} + +func (g 
githubAppInstallationDo) Or(conds ...gen.Condition) IGithubAppInstallationDo { + return g.withDO(g.DO.Or(conds...)) +} + +func (g githubAppInstallationDo) Select(conds ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.Select(conds...)) +} + +func (g githubAppInstallationDo) Where(conds ...gen.Condition) IGithubAppInstallationDo { + return g.withDO(g.DO.Where(conds...)) +} + +func (g githubAppInstallationDo) Order(conds ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.Order(conds...)) +} + +func (g githubAppInstallationDo) Distinct(cols ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.Distinct(cols...)) +} + +func (g githubAppInstallationDo) Omit(cols ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.Omit(cols...)) +} + +func (g githubAppInstallationDo) Join(table schema.Tabler, on ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.Join(table, on...)) +} + +func (g githubAppInstallationDo) LeftJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.LeftJoin(table, on...)) +} + +func (g githubAppInstallationDo) RightJoin(table schema.Tabler, on ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.RightJoin(table, on...)) +} + +func (g githubAppInstallationDo) Group(cols ...field.Expr) IGithubAppInstallationDo { + return g.withDO(g.DO.Group(cols...)) +} + +func (g githubAppInstallationDo) Having(conds ...gen.Condition) IGithubAppInstallationDo { + return g.withDO(g.DO.Having(conds...)) +} + +func (g githubAppInstallationDo) Limit(limit int) IGithubAppInstallationDo { + return g.withDO(g.DO.Limit(limit)) +} + +func (g githubAppInstallationDo) Offset(offset int) IGithubAppInstallationDo { + return g.withDO(g.DO.Offset(offset)) +} + +func (g githubAppInstallationDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IGithubAppInstallationDo { + return g.withDO(g.DO.Scopes(funcs...)) +} + +func (g githubAppInstallationDo) Unscoped() IGithubAppInstallationDo { 
+ return g.withDO(g.DO.Unscoped()) +} + +func (g githubAppInstallationDo) Create(values ...*model.GithubAppInstallation) error { + if len(values) == 0 { + return nil + } + return g.DO.Create(values) +} + +func (g githubAppInstallationDo) CreateInBatches(values []*model.GithubAppInstallation, batchSize int) error { + return g.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (g githubAppInstallationDo) Save(values ...*model.GithubAppInstallation) error { + if len(values) == 0 { + return nil + } + return g.DO.Save(values) +} + +func (g githubAppInstallationDo) First() (*model.GithubAppInstallation, error) { + if result, err := g.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallation), nil + } +} + +func (g githubAppInstallationDo) Take() (*model.GithubAppInstallation, error) { + if result, err := g.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallation), nil + } +} + +func (g githubAppInstallationDo) Last() (*model.GithubAppInstallation, error) { + if result, err := g.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallation), nil + } +} + +func (g githubAppInstallationDo) Find() ([]*model.GithubAppInstallation, error) { + result, err := g.DO.Find() + return result.([]*model.GithubAppInstallation), err +} + +func (g githubAppInstallationDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.GithubAppInstallation, err error) { + buf := make([]*model.GithubAppInstallation, 0, batchSize) + err = g.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (g githubAppInstallationDo) FindInBatches(result *[]*model.GithubAppInstallation, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return g.DO.FindInBatches(result, batchSize, fc) +} + +func (g githubAppInstallationDo) Attrs(attrs ...field.AssignExpr) IGithubAppInstallationDo { + return g.withDO(g.DO.Attrs(attrs...)) +} + +func (g githubAppInstallationDo) Assign(attrs ...field.AssignExpr) IGithubAppInstallationDo { + return g.withDO(g.DO.Assign(attrs...)) +} + +func (g githubAppInstallationDo) Joins(fields ...field.RelationField) IGithubAppInstallationDo { + for _, _f := range fields { + g = *g.withDO(g.DO.Joins(_f)) + } + return &g +} + +func (g githubAppInstallationDo) Preload(fields ...field.RelationField) IGithubAppInstallationDo { + for _, _f := range fields { + g = *g.withDO(g.DO.Preload(_f)) + } + return &g +} + +func (g githubAppInstallationDo) FirstOrInit() (*model.GithubAppInstallation, error) { + if result, err := g.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallation), nil + } +} + +func (g githubAppInstallationDo) FirstOrCreate() (*model.GithubAppInstallation, error) { + if result, err := g.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.GithubAppInstallation), nil + } +} + +func (g githubAppInstallationDo) FindByPage(offset int, limit int) (result []*model.GithubAppInstallation, count int64, err error) { + result, err = g.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = g.Offset(-1).Limit(-1).Count() + return +} + +func (g githubAppInstallationDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = g.Count() + if err != nil { + return + } + + err = g.Offset(offset).Limit(limit).Scan(result) + 
return +} + +func (g githubAppInstallationDo) Scan(result interface{}) (err error) { + return g.DO.Scan(result) +} + +func (g githubAppInstallationDo) Delete(models ...*model.GithubAppInstallation) (result gen.ResultInfo, err error) { + return g.DO.Delete(models) +} + +func (g *githubAppInstallationDo) withDO(do gen.Dao) *githubAppInstallationDo { + g.DO = *do.(*gen.DO) + return g +} diff --git a/next/models_generated/github_apps.gen.go b/next/models_generated/github_apps.gen.go new file mode 100644 index 000000000..ce7a1a0be --- /dev/null +++ b/next/models_generated/github_apps.gen.go @@ -0,0 +1,404 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newGithubApp(db *gorm.DB, opts ...gen.DOOption) githubApp { + _githubApp := githubApp{} + + _githubApp.githubAppDo.UseDB(db, opts...) 
+ _githubApp.githubAppDo.UseModel(&model.GithubApp{}) + + tableName := _githubApp.githubAppDo.TableName() + _githubApp.ALL = field.NewAsterisk(tableName) + _githubApp.ID = field.NewString(tableName, "id") + _githubApp.CreatedAt = field.NewTime(tableName, "created_at") + _githubApp.UpdatedAt = field.NewTime(tableName, "updated_at") + _githubApp.DeletedAt = field.NewField(tableName, "deleted_at") + _githubApp.GithubID = field.NewInt64(tableName, "github_id") + _githubApp.Name = field.NewString(tableName, "name") + _githubApp.GithubAppURL = field.NewString(tableName, "github_app_url") + + _githubApp.fillFieldMap() + + return _githubApp +} + +type githubApp struct { + githubAppDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + GithubID field.Int64 + Name field.String + GithubAppURL field.String + + fieldMap map[string]field.Expr +} + +func (g githubApp) Table(newTableName string) *githubApp { + g.githubAppDo.UseTable(newTableName) + return g.updateTableName(newTableName) +} + +func (g githubApp) As(alias string) *githubApp { + g.githubAppDo.DO = *(g.githubAppDo.As(alias).(*gen.DO)) + return g.updateTableName(alias) +} + +func (g *githubApp) updateTableName(table string) *githubApp { + g.ALL = field.NewAsterisk(table) + g.ID = field.NewString(table, "id") + g.CreatedAt = field.NewTime(table, "created_at") + g.UpdatedAt = field.NewTime(table, "updated_at") + g.DeletedAt = field.NewField(table, "deleted_at") + g.GithubID = field.NewInt64(table, "github_id") + g.Name = field.NewString(table, "name") + g.GithubAppURL = field.NewString(table, "github_app_url") + + g.fillFieldMap() + + return g +} + +func (g *githubApp) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := g.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (g *githubApp) fillFieldMap() { + g.fieldMap = make(map[string]field.Expr, 7) + 
g.fieldMap["id"] = g.ID + g.fieldMap["created_at"] = g.CreatedAt + g.fieldMap["updated_at"] = g.UpdatedAt + g.fieldMap["deleted_at"] = g.DeletedAt + g.fieldMap["github_id"] = g.GithubID + g.fieldMap["name"] = g.Name + g.fieldMap["github_app_url"] = g.GithubAppURL +} + +func (g githubApp) clone(db *gorm.DB) githubApp { + g.githubAppDo.ReplaceConnPool(db.Statement.ConnPool) + return g +} + +func (g githubApp) replaceDB(db *gorm.DB) githubApp { + g.githubAppDo.ReplaceDB(db) + return g +} + +type githubAppDo struct{ gen.DO } + +type IGithubAppDo interface { + gen.SubQuery + Debug() IGithubAppDo + WithContext(ctx context.Context) IGithubAppDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IGithubAppDo + WriteDB() IGithubAppDo + As(alias string) gen.Dao + Session(config *gorm.Session) IGithubAppDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IGithubAppDo + Not(conds ...gen.Condition) IGithubAppDo + Or(conds ...gen.Condition) IGithubAppDo + Select(conds ...field.Expr) IGithubAppDo + Where(conds ...gen.Condition) IGithubAppDo + Order(conds ...field.Expr) IGithubAppDo + Distinct(cols ...field.Expr) IGithubAppDo + Omit(cols ...field.Expr) IGithubAppDo + Join(table schema.Tabler, on ...field.Expr) IGithubAppDo + LeftJoin(table schema.Tabler, on ...field.Expr) IGithubAppDo + RightJoin(table schema.Tabler, on ...field.Expr) IGithubAppDo + Group(cols ...field.Expr) IGithubAppDo + Having(conds ...gen.Condition) IGithubAppDo + Limit(limit int) IGithubAppDo + Offset(offset int) IGithubAppDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IGithubAppDo + Unscoped() IGithubAppDo + Create(values ...*model.GithubApp) error + CreateInBatches(values []*model.GithubApp, batchSize int) error + Save(values ...*model.GithubApp) error + First() (*model.GithubApp, error) + Take() (*model.GithubApp, error) + Last() (*model.GithubApp, error) + Find() ([]*model.GithubApp, error) + 
FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.GithubApp, err error) + FindInBatches(result *[]*model.GithubApp, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.GithubApp) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IGithubAppDo + Assign(attrs ...field.AssignExpr) IGithubAppDo + Joins(fields ...field.RelationField) IGithubAppDo + Preload(fields ...field.RelationField) IGithubAppDo + FirstOrInit() (*model.GithubApp, error) + FirstOrCreate() (*model.GithubApp, error) + FindByPage(offset int, limit int) (result []*model.GithubApp, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IGithubAppDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (g githubAppDo) Debug() IGithubAppDo { + return g.withDO(g.DO.Debug()) +} + +func (g githubAppDo) WithContext(ctx context.Context) IGithubAppDo { + return g.withDO(g.DO.WithContext(ctx)) +} + +func (g githubAppDo) ReadDB() IGithubAppDo { + return g.Clauses(dbresolver.Read) +} + +func (g githubAppDo) WriteDB() IGithubAppDo { + return g.Clauses(dbresolver.Write) +} + +func (g githubAppDo) Session(config *gorm.Session) IGithubAppDo { + return g.withDO(g.DO.Session(config)) +} + +func (g githubAppDo) Clauses(conds ...clause.Expression) IGithubAppDo { + return 
g.withDO(g.DO.Clauses(conds...)) +} + +func (g githubAppDo) Returning(value interface{}, columns ...string) IGithubAppDo { + return g.withDO(g.DO.Returning(value, columns...)) +} + +func (g githubAppDo) Not(conds ...gen.Condition) IGithubAppDo { + return g.withDO(g.DO.Not(conds...)) +} + +func (g githubAppDo) Or(conds ...gen.Condition) IGithubAppDo { + return g.withDO(g.DO.Or(conds...)) +} + +func (g githubAppDo) Select(conds ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.Select(conds...)) +} + +func (g githubAppDo) Where(conds ...gen.Condition) IGithubAppDo { + return g.withDO(g.DO.Where(conds...)) +} + +func (g githubAppDo) Order(conds ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.Order(conds...)) +} + +func (g githubAppDo) Distinct(cols ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.Distinct(cols...)) +} + +func (g githubAppDo) Omit(cols ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.Omit(cols...)) +} + +func (g githubAppDo) Join(table schema.Tabler, on ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.Join(table, on...)) +} + +func (g githubAppDo) LeftJoin(table schema.Tabler, on ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.LeftJoin(table, on...)) +} + +func (g githubAppDo) RightJoin(table schema.Tabler, on ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.RightJoin(table, on...)) +} + +func (g githubAppDo) Group(cols ...field.Expr) IGithubAppDo { + return g.withDO(g.DO.Group(cols...)) +} + +func (g githubAppDo) Having(conds ...gen.Condition) IGithubAppDo { + return g.withDO(g.DO.Having(conds...)) +} + +func (g githubAppDo) Limit(limit int) IGithubAppDo { + return g.withDO(g.DO.Limit(limit)) +} + +func (g githubAppDo) Offset(offset int) IGithubAppDo { + return g.withDO(g.DO.Offset(offset)) +} + +func (g githubAppDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IGithubAppDo { + return g.withDO(g.DO.Scopes(funcs...)) +} + +func (g githubAppDo) Unscoped() IGithubAppDo { + return g.withDO(g.DO.Unscoped()) +} + +func (g githubAppDo) 
Create(values ...*model.GithubApp) error { + if len(values) == 0 { + return nil + } + return g.DO.Create(values) +} + +func (g githubAppDo) CreateInBatches(values []*model.GithubApp, batchSize int) error { + return g.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (g githubAppDo) Save(values ...*model.GithubApp) error { + if len(values) == 0 { + return nil + } + return g.DO.Save(values) +} + +func (g githubAppDo) First() (*model.GithubApp, error) { + if result, err := g.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.GithubApp), nil + } +} + +func (g githubAppDo) Take() (*model.GithubApp, error) { + if result, err := g.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.GithubApp), nil + } +} + +func (g githubAppDo) Last() (*model.GithubApp, error) { + if result, err := g.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.GithubApp), nil + } +} + +func (g githubAppDo) Find() ([]*model.GithubApp, error) { + result, err := g.DO.Find() + return result.([]*model.GithubApp), err +} + +func (g githubAppDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.GithubApp, err error) { + buf := make([]*model.GithubApp, 0, batchSize) + err = g.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (g githubAppDo) FindInBatches(result *[]*model.GithubApp, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return g.DO.FindInBatches(result, batchSize, fc) +} + +func (g githubAppDo) Attrs(attrs ...field.AssignExpr) IGithubAppDo { + return g.withDO(g.DO.Attrs(attrs...)) +} + +func (g githubAppDo) Assign(attrs ...field.AssignExpr) IGithubAppDo { + return g.withDO(g.DO.Assign(attrs...)) +} + +func (g githubAppDo) Joins(fields ...field.RelationField) IGithubAppDo { + for _, _f := range fields { + g = *g.withDO(g.DO.Joins(_f)) + } + return &g +} + +func (g githubAppDo) Preload(fields ...field.RelationField) IGithubAppDo { + for _, _f := range fields { + g = *g.withDO(g.DO.Preload(_f)) + } + return &g +} + +func (g githubAppDo) FirstOrInit() (*model.GithubApp, error) { + if result, err := g.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.GithubApp), nil + } +} + +func (g githubAppDo) FirstOrCreate() (*model.GithubApp, error) { + if result, err := g.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.GithubApp), nil + } +} + +func (g githubAppDo) FindByPage(offset int, limit int) (result []*model.GithubApp, count int64, err error) { + result, err = g.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = g.Offset(-1).Limit(-1).Count() + return +} + +func (g githubAppDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = g.Count() + if err != nil { + return + } + + err = g.Offset(offset).Limit(limit).Scan(result) + return +} + +func (g githubAppDo) Scan(result interface{}) (err error) { + return g.DO.Scan(result) +} + +func (g githubAppDo) Delete(models ...*model.GithubApp) (result gen.ResultInfo, err error) { + return g.DO.Delete(models) +} + 
+func (g *githubAppDo) withDO(do gen.Dao) *githubAppDo { + g.DO = *do.(*gen.DO) + return g +} diff --git a/next/models_generated/internal_blog_author_posts.gen.go b/next/models_generated/internal_blog_author_posts.gen.go new file mode 100644 index 000000000..682bd0eea --- /dev/null +++ b/next/models_generated/internal_blog_author_posts.gen.go @@ -0,0 +1,384 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalBlogAuthorPost(db *gorm.DB, opts ...gen.DOOption) internalBlogAuthorPost { + _internalBlogAuthorPost := internalBlogAuthorPost{} + + _internalBlogAuthorPost.internalBlogAuthorPostDo.UseDB(db, opts...) + _internalBlogAuthorPost.internalBlogAuthorPostDo.UseModel(&model.InternalBlogAuthorPost{}) + + tableName := _internalBlogAuthorPost.internalBlogAuthorPostDo.TableName() + _internalBlogAuthorPost.ALL = field.NewAsterisk(tableName) + _internalBlogAuthorPost.AuthorID = field.NewString(tableName, "author_id") + _internalBlogAuthorPost.PostID = field.NewString(tableName, "post_id") + + _internalBlogAuthorPost.fillFieldMap() + + return _internalBlogAuthorPost +} + +type internalBlogAuthorPost struct { + internalBlogAuthorPostDo + + ALL field.Asterisk + AuthorID field.String + PostID field.String + + fieldMap map[string]field.Expr +} + +func (i internalBlogAuthorPost) Table(newTableName string) *internalBlogAuthorPost { + i.internalBlogAuthorPostDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalBlogAuthorPost) As(alias string) *internalBlogAuthorPost { + i.internalBlogAuthorPostDo.DO = *(i.internalBlogAuthorPostDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i 
*internalBlogAuthorPost) updateTableName(table string) *internalBlogAuthorPost { + i.ALL = field.NewAsterisk(table) + i.AuthorID = field.NewString(table, "author_id") + i.PostID = field.NewString(table, "post_id") + + i.fillFieldMap() + + return i +} + +func (i *internalBlogAuthorPost) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := i.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalBlogAuthorPost) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 2) + i.fieldMap["author_id"] = i.AuthorID + i.fieldMap["post_id"] = i.PostID +} + +func (i internalBlogAuthorPost) clone(db *gorm.DB) internalBlogAuthorPost { + i.internalBlogAuthorPostDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalBlogAuthorPost) replaceDB(db *gorm.DB) internalBlogAuthorPost { + i.internalBlogAuthorPostDo.ReplaceDB(db) + return i +} + +type internalBlogAuthorPostDo struct{ gen.DO } + +type IInternalBlogAuthorPostDo interface { + gen.SubQuery + Debug() IInternalBlogAuthorPostDo + WithContext(ctx context.Context) IInternalBlogAuthorPostDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalBlogAuthorPostDo + WriteDB() IInternalBlogAuthorPostDo + As(alias string) gen.Dao + Session(config *gorm.Session) IInternalBlogAuthorPostDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalBlogAuthorPostDo + Not(conds ...gen.Condition) IInternalBlogAuthorPostDo + Or(conds ...gen.Condition) IInternalBlogAuthorPostDo + Select(conds ...field.Expr) IInternalBlogAuthorPostDo + Where(conds ...gen.Condition) IInternalBlogAuthorPostDo + Order(conds ...field.Expr) IInternalBlogAuthorPostDo + Distinct(cols ...field.Expr) IInternalBlogAuthorPostDo + Omit(cols ...field.Expr) IInternalBlogAuthorPostDo + Join(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorPostDo + LeftJoin(table 
schema.Tabler, on ...field.Expr) IInternalBlogAuthorPostDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorPostDo + Group(cols ...field.Expr) IInternalBlogAuthorPostDo + Having(conds ...gen.Condition) IInternalBlogAuthorPostDo + Limit(limit int) IInternalBlogAuthorPostDo + Offset(offset int) IInternalBlogAuthorPostDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogAuthorPostDo + Unscoped() IInternalBlogAuthorPostDo + Create(values ...*model.InternalBlogAuthorPost) error + CreateInBatches(values []*model.InternalBlogAuthorPost, batchSize int) error + Save(values ...*model.InternalBlogAuthorPost) error + First() (*model.InternalBlogAuthorPost, error) + Take() (*model.InternalBlogAuthorPost, error) + Last() (*model.InternalBlogAuthorPost, error) + Find() ([]*model.InternalBlogAuthorPost, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogAuthorPost, err error) + FindInBatches(result *[]*model.InternalBlogAuthorPost, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalBlogAuthorPost) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalBlogAuthorPostDo + Assign(attrs ...field.AssignExpr) IInternalBlogAuthorPostDo + Joins(fields ...field.RelationField) IInternalBlogAuthorPostDo + Preload(fields ...field.RelationField) IInternalBlogAuthorPostDo + FirstOrInit() 
(*model.InternalBlogAuthorPost, error) + FirstOrCreate() (*model.InternalBlogAuthorPost, error) + FindByPage(offset int, limit int) (result []*model.InternalBlogAuthorPost, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalBlogAuthorPostDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalBlogAuthorPostDo) Debug() IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalBlogAuthorPostDo) WithContext(ctx context.Context) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalBlogAuthorPostDo) ReadDB() IInternalBlogAuthorPostDo { + return i.Clauses(dbresolver.Read) +} + +func (i internalBlogAuthorPostDo) WriteDB() IInternalBlogAuthorPostDo { + return i.Clauses(dbresolver.Write) +} + +func (i internalBlogAuthorPostDo) Session(config *gorm.Session) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalBlogAuthorPostDo) Clauses(conds ...clause.Expression) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalBlogAuthorPostDo) Returning(value interface{}, columns ...string) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalBlogAuthorPostDo) Not(conds ...gen.Condition) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalBlogAuthorPostDo) Or(conds ...gen.Condition) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalBlogAuthorPostDo) Select(conds ...field.Expr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalBlogAuthorPostDo) Where(conds ...gen.Condition) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalBlogAuthorPostDo) Order(conds ...field.Expr) IInternalBlogAuthorPostDo 
{ + return i.withDO(i.DO.Order(conds...)) +} + +func (i internalBlogAuthorPostDo) Distinct(cols ...field.Expr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalBlogAuthorPostDo) Omit(cols ...field.Expr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalBlogAuthorPostDo) Join(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalBlogAuthorPostDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.LeftJoin(table, on...)) +} + +func (i internalBlogAuthorPostDo) RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalBlogAuthorPostDo) Group(cols ...field.Expr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Group(cols...)) +} + +func (i internalBlogAuthorPostDo) Having(conds ...gen.Condition) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalBlogAuthorPostDo) Limit(limit int) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalBlogAuthorPostDo) Offset(offset int) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalBlogAuthorPostDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalBlogAuthorPostDo) Unscoped() IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalBlogAuthorPostDo) Create(values ...*model.InternalBlogAuthorPost) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i internalBlogAuthorPostDo) CreateInBatches(values []*model.InternalBlogAuthorPost, batchSize int) error { + return i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalBlogAuthorPostDo) Save(values ...*model.InternalBlogAuthorPost) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalBlogAuthorPostDo) First() (*model.InternalBlogAuthorPost, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorPost), nil + } +} + +func (i internalBlogAuthorPostDo) Take() (*model.InternalBlogAuthorPost, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorPost), nil + } +} + +func (i internalBlogAuthorPostDo) Last() (*model.InternalBlogAuthorPost, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorPost), nil + } +} + +func (i internalBlogAuthorPostDo) Find() ([]*model.InternalBlogAuthorPost, error) { + result, err := i.DO.Find() + return result.([]*model.InternalBlogAuthorPost), err +} + +func (i internalBlogAuthorPostDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogAuthorPost, err error) { + buf := make([]*model.InternalBlogAuthorPost, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (i internalBlogAuthorPostDo) FindInBatches(result *[]*model.InternalBlogAuthorPost, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalBlogAuthorPostDo) Attrs(attrs ...field.AssignExpr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalBlogAuthorPostDo) Assign(attrs ...field.AssignExpr) IInternalBlogAuthorPostDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalBlogAuthorPostDo) Joins(fields ...field.RelationField) IInternalBlogAuthorPostDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalBlogAuthorPostDo) Preload(fields ...field.RelationField) IInternalBlogAuthorPostDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalBlogAuthorPostDo) FirstOrInit() (*model.InternalBlogAuthorPost, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorPost), nil + } +} + +func (i internalBlogAuthorPostDo) FirstOrCreate() (*model.InternalBlogAuthorPost, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorPost), nil + } +} + +func (i internalBlogAuthorPostDo) FindByPage(offset int, limit int) (result []*model.InternalBlogAuthorPost, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i internalBlogAuthorPostDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil { + return + } + + err = 
i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalBlogAuthorPostDo) Scan(result interface{}) (err error) { + return i.DO.Scan(result) +} + +func (i internalBlogAuthorPostDo) Delete(models ...*model.InternalBlogAuthorPost) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalBlogAuthorPostDo) withDO(do gen.Dao) *internalBlogAuthorPostDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/internal_blog_author_profiles.gen.go b/next/models_generated/internal_blog_author_profiles.gen.go new file mode 100644 index 000000000..0d8f6a5c3 --- /dev/null +++ b/next/models_generated/internal_blog_author_profiles.gen.go @@ -0,0 +1,420 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalBlogAuthorProfile(db *gorm.DB, opts ...gen.DOOption) internalBlogAuthorProfile { + _internalBlogAuthorProfile := internalBlogAuthorProfile{} + + _internalBlogAuthorProfile.internalBlogAuthorProfileDo.UseDB(db, opts...) 
+ _internalBlogAuthorProfile.internalBlogAuthorProfileDo.UseModel(&model.InternalBlogAuthorProfile{}) + + tableName := _internalBlogAuthorProfile.internalBlogAuthorProfileDo.TableName() + _internalBlogAuthorProfile.ALL = field.NewAsterisk(tableName) + _internalBlogAuthorProfile.UserID = field.NewString(tableName, "user_id") + _internalBlogAuthorProfile.DisplayName = field.NewString(tableName, "display_name") + _internalBlogAuthorProfile.Bio = field.NewString(tableName, "bio") + _internalBlogAuthorProfile.AvatarURL = field.NewString(tableName, "avatar_url") + _internalBlogAuthorProfile.WebsiteURL = field.NewString(tableName, "website_url") + _internalBlogAuthorProfile.TwitterHandle = field.NewString(tableName, "twitter_handle") + _internalBlogAuthorProfile.FacebookHandle = field.NewString(tableName, "facebook_handle") + _internalBlogAuthorProfile.LinkedinHandle = field.NewString(tableName, "linkedin_handle") + _internalBlogAuthorProfile.InstagramHandle = field.NewString(tableName, "instagram_handle") + _internalBlogAuthorProfile.CreatedAt = field.NewTime(tableName, "created_at") + _internalBlogAuthorProfile.UpdatedAt = field.NewTime(tableName, "updated_at") + + _internalBlogAuthorProfile.fillFieldMap() + + return _internalBlogAuthorProfile +} + +type internalBlogAuthorProfile struct { + internalBlogAuthorProfileDo + + ALL field.Asterisk + UserID field.String + DisplayName field.String + Bio field.String + AvatarURL field.String + WebsiteURL field.String + TwitterHandle field.String + FacebookHandle field.String + LinkedinHandle field.String + InstagramHandle field.String + CreatedAt field.Time + UpdatedAt field.Time + + fieldMap map[string]field.Expr +} + +func (i internalBlogAuthorProfile) Table(newTableName string) *internalBlogAuthorProfile { + i.internalBlogAuthorProfileDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalBlogAuthorProfile) As(alias string) *internalBlogAuthorProfile { + i.internalBlogAuthorProfileDo.DO = 
*(i.internalBlogAuthorProfileDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i *internalBlogAuthorProfile) updateTableName(table string) *internalBlogAuthorProfile { + i.ALL = field.NewAsterisk(table) + i.UserID = field.NewString(table, "user_id") + i.DisplayName = field.NewString(table, "display_name") + i.Bio = field.NewString(table, "bio") + i.AvatarURL = field.NewString(table, "avatar_url") + i.WebsiteURL = field.NewString(table, "website_url") + i.TwitterHandle = field.NewString(table, "twitter_handle") + i.FacebookHandle = field.NewString(table, "facebook_handle") + i.LinkedinHandle = field.NewString(table, "linkedin_handle") + i.InstagramHandle = field.NewString(table, "instagram_handle") + i.CreatedAt = field.NewTime(table, "created_at") + i.UpdatedAt = field.NewTime(table, "updated_at") + + i.fillFieldMap() + + return i +} + +func (i *internalBlogAuthorProfile) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := i.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalBlogAuthorProfile) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 11) + i.fieldMap["user_id"] = i.UserID + i.fieldMap["display_name"] = i.DisplayName + i.fieldMap["bio"] = i.Bio + i.fieldMap["avatar_url"] = i.AvatarURL + i.fieldMap["website_url"] = i.WebsiteURL + i.fieldMap["twitter_handle"] = i.TwitterHandle + i.fieldMap["facebook_handle"] = i.FacebookHandle + i.fieldMap["linkedin_handle"] = i.LinkedinHandle + i.fieldMap["instagram_handle"] = i.InstagramHandle + i.fieldMap["created_at"] = i.CreatedAt + i.fieldMap["updated_at"] = i.UpdatedAt +} + +func (i internalBlogAuthorProfile) clone(db *gorm.DB) internalBlogAuthorProfile { + i.internalBlogAuthorProfileDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalBlogAuthorProfile) replaceDB(db *gorm.DB) internalBlogAuthorProfile { + i.internalBlogAuthorProfileDo.ReplaceDB(db) + return i 
+} + +type internalBlogAuthorProfileDo struct{ gen.DO } + +type IInternalBlogAuthorProfileDo interface { + gen.SubQuery + Debug() IInternalBlogAuthorProfileDo + WithContext(ctx context.Context) IInternalBlogAuthorProfileDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalBlogAuthorProfileDo + WriteDB() IInternalBlogAuthorProfileDo + As(alias string) gen.Dao + Session(config *gorm.Session) IInternalBlogAuthorProfileDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalBlogAuthorProfileDo + Not(conds ...gen.Condition) IInternalBlogAuthorProfileDo + Or(conds ...gen.Condition) IInternalBlogAuthorProfileDo + Select(conds ...field.Expr) IInternalBlogAuthorProfileDo + Where(conds ...gen.Condition) IInternalBlogAuthorProfileDo + Order(conds ...field.Expr) IInternalBlogAuthorProfileDo + Distinct(cols ...field.Expr) IInternalBlogAuthorProfileDo + Omit(cols ...field.Expr) IInternalBlogAuthorProfileDo + Join(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorProfileDo + LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorProfileDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorProfileDo + Group(cols ...field.Expr) IInternalBlogAuthorProfileDo + Having(conds ...gen.Condition) IInternalBlogAuthorProfileDo + Limit(limit int) IInternalBlogAuthorProfileDo + Offset(offset int) IInternalBlogAuthorProfileDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogAuthorProfileDo + Unscoped() IInternalBlogAuthorProfileDo + Create(values ...*model.InternalBlogAuthorProfile) error + CreateInBatches(values []*model.InternalBlogAuthorProfile, batchSize int) error + Save(values ...*model.InternalBlogAuthorProfile) error + First() (*model.InternalBlogAuthorProfile, error) + Take() (*model.InternalBlogAuthorProfile, error) + Last() (*model.InternalBlogAuthorProfile, error) + Find() ([]*model.InternalBlogAuthorProfile, error) + 
FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogAuthorProfile, err error) + FindInBatches(result *[]*model.InternalBlogAuthorProfile, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalBlogAuthorProfile) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalBlogAuthorProfileDo + Assign(attrs ...field.AssignExpr) IInternalBlogAuthorProfileDo + Joins(fields ...field.RelationField) IInternalBlogAuthorProfileDo + Preload(fields ...field.RelationField) IInternalBlogAuthorProfileDo + FirstOrInit() (*model.InternalBlogAuthorProfile, error) + FirstOrCreate() (*model.InternalBlogAuthorProfile, error) + FindByPage(offset int, limit int) (result []*model.InternalBlogAuthorProfile, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalBlogAuthorProfileDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalBlogAuthorProfileDo) Debug() IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalBlogAuthorProfileDo) WithContext(ctx context.Context) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalBlogAuthorProfileDo) ReadDB() IInternalBlogAuthorProfileDo { + return i.Clauses(dbresolver.Read) +} + +func (i 
internalBlogAuthorProfileDo) WriteDB() IInternalBlogAuthorProfileDo { + return i.Clauses(dbresolver.Write) +} + +func (i internalBlogAuthorProfileDo) Session(config *gorm.Session) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalBlogAuthorProfileDo) Clauses(conds ...clause.Expression) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalBlogAuthorProfileDo) Returning(value interface{}, columns ...string) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalBlogAuthorProfileDo) Not(conds ...gen.Condition) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalBlogAuthorProfileDo) Or(conds ...gen.Condition) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalBlogAuthorProfileDo) Select(conds ...field.Expr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalBlogAuthorProfileDo) Where(conds ...gen.Condition) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalBlogAuthorProfileDo) Order(conds ...field.Expr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Order(conds...)) +} + +func (i internalBlogAuthorProfileDo) Distinct(cols ...field.Expr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalBlogAuthorProfileDo) Omit(cols ...field.Expr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalBlogAuthorProfileDo) Join(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalBlogAuthorProfileDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.LeftJoin(table, on...)) +} + +func (i internalBlogAuthorProfileDo) RightJoin(table schema.Tabler, on ...field.Expr) 
IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalBlogAuthorProfileDo) Group(cols ...field.Expr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Group(cols...)) +} + +func (i internalBlogAuthorProfileDo) Having(conds ...gen.Condition) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalBlogAuthorProfileDo) Limit(limit int) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalBlogAuthorProfileDo) Offset(offset int) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalBlogAuthorProfileDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalBlogAuthorProfileDo) Unscoped() IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalBlogAuthorProfileDo) Create(values ...*model.InternalBlogAuthorProfile) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i internalBlogAuthorProfileDo) CreateInBatches(values []*model.InternalBlogAuthorProfile, batchSize int) error { + return i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalBlogAuthorProfileDo) Save(values ...*model.InternalBlogAuthorProfile) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalBlogAuthorProfileDo) First() (*model.InternalBlogAuthorProfile, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorProfile), nil + } +} + +func (i internalBlogAuthorProfileDo) Take() (*model.InternalBlogAuthorProfile, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorProfile), nil + } +} + +func (i internalBlogAuthorProfileDo) Last() (*model.InternalBlogAuthorProfile, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorProfile), nil + } +} + +func (i internalBlogAuthorProfileDo) Find() ([]*model.InternalBlogAuthorProfile, error) { + result, err := i.DO.Find() + return result.([]*model.InternalBlogAuthorProfile), err +} + +func (i internalBlogAuthorProfileDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogAuthorProfile, err error) { + buf := make([]*model.InternalBlogAuthorProfile, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (i internalBlogAuthorProfileDo) FindInBatches(result *[]*model.InternalBlogAuthorProfile, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalBlogAuthorProfileDo) Attrs(attrs ...field.AssignExpr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalBlogAuthorProfileDo) Assign(attrs ...field.AssignExpr) IInternalBlogAuthorProfileDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalBlogAuthorProfileDo) Joins(fields ...field.RelationField) IInternalBlogAuthorProfileDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalBlogAuthorProfileDo) Preload(fields ...field.RelationField) IInternalBlogAuthorProfileDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalBlogAuthorProfileDo) FirstOrInit() (*model.InternalBlogAuthorProfile, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorProfile), nil + } +} + +func (i internalBlogAuthorProfileDo) FirstOrCreate() (*model.InternalBlogAuthorProfile, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogAuthorProfile), nil + } +} + +func (i internalBlogAuthorProfileDo) FindByPage(offset int, limit int) (result []*model.InternalBlogAuthorProfile, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i internalBlogAuthorProfileDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil 
{ + return + } + + err = i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalBlogAuthorProfileDo) Scan(result interface{}) (err error) { + return i.DO.Scan(result) +} + +func (i internalBlogAuthorProfileDo) Delete(models ...*model.InternalBlogAuthorProfile) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalBlogAuthorProfileDo) withDO(do gen.Dao) *internalBlogAuthorProfileDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/internal_blog_post_tags.gen.go b/next/models_generated/internal_blog_post_tags.gen.go new file mode 100644 index 000000000..a0306b179 --- /dev/null +++ b/next/models_generated/internal_blog_post_tags.gen.go @@ -0,0 +1,392 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalBlogPostTag(db *gorm.DB, opts ...gen.DOOption) internalBlogPostTag { + _internalBlogPostTag := internalBlogPostTag{} + + _internalBlogPostTag.internalBlogPostTagDo.UseDB(db, opts...) 
+ _internalBlogPostTag.internalBlogPostTagDo.UseModel(&model.InternalBlogPostTag{}) + + tableName := _internalBlogPostTag.internalBlogPostTagDo.TableName() + _internalBlogPostTag.ALL = field.NewAsterisk(tableName) + _internalBlogPostTag.ID = field.NewInt32(tableName, "id") + _internalBlogPostTag.Slug = field.NewString(tableName, "slug") + _internalBlogPostTag.Name = field.NewString(tableName, "name") + _internalBlogPostTag.Description = field.NewString(tableName, "description") + + _internalBlogPostTag.fillFieldMap() + + return _internalBlogPostTag +} + +type internalBlogPostTag struct { + internalBlogPostTagDo + + ALL field.Asterisk + ID field.Int32 + Slug field.String + Name field.String + Description field.String + + fieldMap map[string]field.Expr +} + +func (i internalBlogPostTag) Table(newTableName string) *internalBlogPostTag { + i.internalBlogPostTagDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalBlogPostTag) As(alias string) *internalBlogPostTag { + i.internalBlogPostTagDo.DO = *(i.internalBlogPostTagDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i *internalBlogPostTag) updateTableName(table string) *internalBlogPostTag { + i.ALL = field.NewAsterisk(table) + i.ID = field.NewInt32(table, "id") + i.Slug = field.NewString(table, "slug") + i.Name = field.NewString(table, "name") + i.Description = field.NewString(table, "description") + + i.fillFieldMap() + + return i +} + +func (i *internalBlogPostTag) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := i.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalBlogPostTag) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 4) + i.fieldMap["id"] = i.ID + i.fieldMap["slug"] = i.Slug + i.fieldMap["name"] = i.Name + i.fieldMap["description"] = i.Description +} + +func (i internalBlogPostTag) clone(db *gorm.DB) internalBlogPostTag { + 
i.internalBlogPostTagDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalBlogPostTag) replaceDB(db *gorm.DB) internalBlogPostTag { + i.internalBlogPostTagDo.ReplaceDB(db) + return i +} + +type internalBlogPostTagDo struct{ gen.DO } + +type IInternalBlogPostTagDo interface { + gen.SubQuery + Debug() IInternalBlogPostTagDo + WithContext(ctx context.Context) IInternalBlogPostTagDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalBlogPostTagDo + WriteDB() IInternalBlogPostTagDo + As(alias string) gen.Dao + Session(config *gorm.Session) IInternalBlogPostTagDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalBlogPostTagDo + Not(conds ...gen.Condition) IInternalBlogPostTagDo + Or(conds ...gen.Condition) IInternalBlogPostTagDo + Select(conds ...field.Expr) IInternalBlogPostTagDo + Where(conds ...gen.Condition) IInternalBlogPostTagDo + Order(conds ...field.Expr) IInternalBlogPostTagDo + Distinct(cols ...field.Expr) IInternalBlogPostTagDo + Omit(cols ...field.Expr) IInternalBlogPostTagDo + Join(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagDo + LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagDo + Group(cols ...field.Expr) IInternalBlogPostTagDo + Having(conds ...gen.Condition) IInternalBlogPostTagDo + Limit(limit int) IInternalBlogPostTagDo + Offset(offset int) IInternalBlogPostTagDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogPostTagDo + Unscoped() IInternalBlogPostTagDo + Create(values ...*model.InternalBlogPostTag) error + CreateInBatches(values []*model.InternalBlogPostTag, batchSize int) error + Save(values ...*model.InternalBlogPostTag) error + First() (*model.InternalBlogPostTag, error) + Take() (*model.InternalBlogPostTag, error) + Last() (*model.InternalBlogPostTag, error) + Find() 
([]*model.InternalBlogPostTag, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogPostTag, err error) + FindInBatches(result *[]*model.InternalBlogPostTag, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalBlogPostTag) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalBlogPostTagDo + Assign(attrs ...field.AssignExpr) IInternalBlogPostTagDo + Joins(fields ...field.RelationField) IInternalBlogPostTagDo + Preload(fields ...field.RelationField) IInternalBlogPostTagDo + FirstOrInit() (*model.InternalBlogPostTag, error) + FirstOrCreate() (*model.InternalBlogPostTag, error) + FindByPage(offset int, limit int) (result []*model.InternalBlogPostTag, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalBlogPostTagDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalBlogPostTagDo) Debug() IInternalBlogPostTagDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalBlogPostTagDo) WithContext(ctx context.Context) IInternalBlogPostTagDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalBlogPostTagDo) ReadDB() IInternalBlogPostTagDo { + return i.Clauses(dbresolver.Read) +} + +func (i internalBlogPostTagDo) WriteDB() IInternalBlogPostTagDo { + return 
i.Clauses(dbresolver.Write) +} + +func (i internalBlogPostTagDo) Session(config *gorm.Session) IInternalBlogPostTagDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalBlogPostTagDo) Clauses(conds ...clause.Expression) IInternalBlogPostTagDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalBlogPostTagDo) Returning(value interface{}, columns ...string) IInternalBlogPostTagDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalBlogPostTagDo) Not(conds ...gen.Condition) IInternalBlogPostTagDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalBlogPostTagDo) Or(conds ...gen.Condition) IInternalBlogPostTagDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalBlogPostTagDo) Select(conds ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalBlogPostTagDo) Where(conds ...gen.Condition) IInternalBlogPostTagDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalBlogPostTagDo) Order(conds ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Order(conds...)) +} + +func (i internalBlogPostTagDo) Distinct(cols ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalBlogPostTagDo) Omit(cols ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalBlogPostTagDo) Join(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalBlogPostTagDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.LeftJoin(table, on...)) +} + +func (i internalBlogPostTagDo) RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalBlogPostTagDo) Group(cols ...field.Expr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Group(cols...)) +} + +func (i internalBlogPostTagDo) 
Having(conds ...gen.Condition) IInternalBlogPostTagDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalBlogPostTagDo) Limit(limit int) IInternalBlogPostTagDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalBlogPostTagDo) Offset(offset int) IInternalBlogPostTagDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalBlogPostTagDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogPostTagDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalBlogPostTagDo) Unscoped() IInternalBlogPostTagDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalBlogPostTagDo) Create(values ...*model.InternalBlogPostTag) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i internalBlogPostTagDo) CreateInBatches(values []*model.InternalBlogPostTag, batchSize int) error { + return i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalBlogPostTagDo) Save(values ...*model.InternalBlogPostTag) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalBlogPostTagDo) First() (*model.InternalBlogPostTag, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTag), nil + } +} + +func (i internalBlogPostTagDo) Take() (*model.InternalBlogPostTag, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTag), nil + } +} + +func (i internalBlogPostTagDo) Last() (*model.InternalBlogPostTag, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTag), nil + } +} + +func (i internalBlogPostTagDo) Find() ([]*model.InternalBlogPostTag, error) { + result, err := i.DO.Find() + 
return result.([]*model.InternalBlogPostTag), err +} + +func (i internalBlogPostTagDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogPostTag, err error) { + buf := make([]*model.InternalBlogPostTag, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) }() + return fc(tx, batch) + }) + return results, err +} + +func (i internalBlogPostTagDo) FindInBatches(result *[]*model.InternalBlogPostTag, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalBlogPostTagDo) Attrs(attrs ...field.AssignExpr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalBlogPostTagDo) Assign(attrs ...field.AssignExpr) IInternalBlogPostTagDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalBlogPostTagDo) Joins(fields ...field.RelationField) IInternalBlogPostTagDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalBlogPostTagDo) Preload(fields ...field.RelationField) IInternalBlogPostTagDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalBlogPostTagDo) FirstOrInit() (*model.InternalBlogPostTag, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTag), nil + } +} + +func (i internalBlogPostTagDo) FirstOrCreate() (*model.InternalBlogPostTag, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTag), nil + } +} + +func (i internalBlogPostTagDo) FindByPage(offset int, limit int) (result []*model.InternalBlogPostTag, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < 
size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i internalBlogPostTagDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil { + return + } + + err = i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalBlogPostTagDo) Scan(result interface{}) (err error) { + return i.DO.Scan(result) +} + +func (i internalBlogPostTagDo) Delete(models ...*model.InternalBlogPostTag) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalBlogPostTagDo) withDO(do gen.Dao) *internalBlogPostTagDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/internal_blog_post_tags_relationship.gen.go b/next/models_generated/internal_blog_post_tags_relationship.gen.go new file mode 100644 index 000000000..74a45d92f --- /dev/null +++ b/next/models_generated/internal_blog_post_tags_relationship.gen.go @@ -0,0 +1,384 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalBlogPostTagsRelationship(db *gorm.DB, opts ...gen.DOOption) internalBlogPostTagsRelationship { + _internalBlogPostTagsRelationship := internalBlogPostTagsRelationship{} + + _internalBlogPostTagsRelationship.internalBlogPostTagsRelationshipDo.UseDB(db, opts...) 
+ _internalBlogPostTagsRelationship.internalBlogPostTagsRelationshipDo.UseModel(&model.InternalBlogPostTagsRelationship{}) + + tableName := _internalBlogPostTagsRelationship.internalBlogPostTagsRelationshipDo.TableName() + _internalBlogPostTagsRelationship.ALL = field.NewAsterisk(tableName) + _internalBlogPostTagsRelationship.BlogPostID = field.NewString(tableName, "blog_post_id") + _internalBlogPostTagsRelationship.TagID = field.NewInt32(tableName, "tag_id") + + _internalBlogPostTagsRelationship.fillFieldMap() + + return _internalBlogPostTagsRelationship +} + +type internalBlogPostTagsRelationship struct { + internalBlogPostTagsRelationshipDo + + ALL field.Asterisk + BlogPostID field.String + TagID field.Int32 + + fieldMap map[string]field.Expr +} + +func (i internalBlogPostTagsRelationship) Table(newTableName string) *internalBlogPostTagsRelationship { + i.internalBlogPostTagsRelationshipDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalBlogPostTagsRelationship) As(alias string) *internalBlogPostTagsRelationship { + i.internalBlogPostTagsRelationshipDo.DO = *(i.internalBlogPostTagsRelationshipDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i *internalBlogPostTagsRelationship) updateTableName(table string) *internalBlogPostTagsRelationship { + i.ALL = field.NewAsterisk(table) + i.BlogPostID = field.NewString(table, "blog_post_id") + i.TagID = field.NewInt32(table, "tag_id") + + i.fillFieldMap() + + return i +} + +func (i *internalBlogPostTagsRelationship) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := i.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalBlogPostTagsRelationship) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 2) + i.fieldMap["blog_post_id"] = i.BlogPostID + i.fieldMap["tag_id"] = i.TagID +} + +func (i internalBlogPostTagsRelationship) clone(db *gorm.DB) 
internalBlogPostTagsRelationship { + i.internalBlogPostTagsRelationshipDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalBlogPostTagsRelationship) replaceDB(db *gorm.DB) internalBlogPostTagsRelationship { + i.internalBlogPostTagsRelationshipDo.ReplaceDB(db) + return i +} + +type internalBlogPostTagsRelationshipDo struct{ gen.DO } + +type IInternalBlogPostTagsRelationshipDo interface { + gen.SubQuery + Debug() IInternalBlogPostTagsRelationshipDo + WithContext(ctx context.Context) IInternalBlogPostTagsRelationshipDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalBlogPostTagsRelationshipDo + WriteDB() IInternalBlogPostTagsRelationshipDo + As(alias string) gen.Dao + Session(config *gorm.Session) IInternalBlogPostTagsRelationshipDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalBlogPostTagsRelationshipDo + Not(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo + Or(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo + Select(conds ...field.Expr) IInternalBlogPostTagsRelationshipDo + Where(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo + Order(conds ...field.Expr) IInternalBlogPostTagsRelationshipDo + Distinct(cols ...field.Expr) IInternalBlogPostTagsRelationshipDo + Omit(cols ...field.Expr) IInternalBlogPostTagsRelationshipDo + Join(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagsRelationshipDo + LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagsRelationshipDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagsRelationshipDo + Group(cols ...field.Expr) IInternalBlogPostTagsRelationshipDo + Having(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo + Limit(limit int) IInternalBlogPostTagsRelationshipDo + Offset(offset int) IInternalBlogPostTagsRelationshipDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogPostTagsRelationshipDo + 
Unscoped() IInternalBlogPostTagsRelationshipDo + Create(values ...*model.InternalBlogPostTagsRelationship) error + CreateInBatches(values []*model.InternalBlogPostTagsRelationship, batchSize int) error + Save(values ...*model.InternalBlogPostTagsRelationship) error + First() (*model.InternalBlogPostTagsRelationship, error) + Take() (*model.InternalBlogPostTagsRelationship, error) + Last() (*model.InternalBlogPostTagsRelationship, error) + Find() ([]*model.InternalBlogPostTagsRelationship, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogPostTagsRelationship, err error) + FindInBatches(result *[]*model.InternalBlogPostTagsRelationship, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalBlogPostTagsRelationship) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalBlogPostTagsRelationshipDo + Assign(attrs ...field.AssignExpr) IInternalBlogPostTagsRelationshipDo + Joins(fields ...field.RelationField) IInternalBlogPostTagsRelationshipDo + Preload(fields ...field.RelationField) IInternalBlogPostTagsRelationshipDo + FirstOrInit() (*model.InternalBlogPostTagsRelationship, error) + FirstOrCreate() (*model.InternalBlogPostTagsRelationship, error) + FindByPage(offset int, limit int) (result []*model.InternalBlogPostTagsRelationship, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err 
error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalBlogPostTagsRelationshipDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalBlogPostTagsRelationshipDo) Debug() IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalBlogPostTagsRelationshipDo) WithContext(ctx context.Context) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalBlogPostTagsRelationshipDo) ReadDB() IInternalBlogPostTagsRelationshipDo { + return i.Clauses(dbresolver.Read) +} + +func (i internalBlogPostTagsRelationshipDo) WriteDB() IInternalBlogPostTagsRelationshipDo { + return i.Clauses(dbresolver.Write) +} + +func (i internalBlogPostTagsRelationshipDo) Session(config *gorm.Session) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalBlogPostTagsRelationshipDo) Clauses(conds ...clause.Expression) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalBlogPostTagsRelationshipDo) Returning(value interface{}, columns ...string) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalBlogPostTagsRelationshipDo) Not(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalBlogPostTagsRelationshipDo) Or(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalBlogPostTagsRelationshipDo) Select(conds ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalBlogPostTagsRelationshipDo) Where(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalBlogPostTagsRelationshipDo) Order(conds ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return 
i.withDO(i.DO.Order(conds...)) +} + +func (i internalBlogPostTagsRelationshipDo) Distinct(cols ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalBlogPostTagsRelationshipDo) Omit(cols ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalBlogPostTagsRelationshipDo) Join(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalBlogPostTagsRelationshipDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.LeftJoin(table, on...)) +} + +func (i internalBlogPostTagsRelationshipDo) RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalBlogPostTagsRelationshipDo) Group(cols ...field.Expr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Group(cols...)) +} + +func (i internalBlogPostTagsRelationshipDo) Having(conds ...gen.Condition) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalBlogPostTagsRelationshipDo) Limit(limit int) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalBlogPostTagsRelationshipDo) Offset(offset int) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalBlogPostTagsRelationshipDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalBlogPostTagsRelationshipDo) Unscoped() IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalBlogPostTagsRelationshipDo) Create(values ...*model.InternalBlogPostTagsRelationship) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i 
internalBlogPostTagsRelationshipDo) CreateInBatches(values []*model.InternalBlogPostTagsRelationship, batchSize int) error { + return i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalBlogPostTagsRelationshipDo) Save(values ...*model.InternalBlogPostTagsRelationship) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalBlogPostTagsRelationshipDo) First() (*model.InternalBlogPostTagsRelationship, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTagsRelationship), nil + } +} + +func (i internalBlogPostTagsRelationshipDo) Take() (*model.InternalBlogPostTagsRelationship, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTagsRelationship), nil + } +} + +func (i internalBlogPostTagsRelationshipDo) Last() (*model.InternalBlogPostTagsRelationship, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTagsRelationship), nil + } +} + +func (i internalBlogPostTagsRelationshipDo) Find() ([]*model.InternalBlogPostTagsRelationship, error) { + result, err := i.DO.Find() + return result.([]*model.InternalBlogPostTagsRelationship), err +} + +func (i internalBlogPostTagsRelationshipDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogPostTagsRelationship, err error) { + buf := make([]*model.InternalBlogPostTagsRelationship, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (i internalBlogPostTagsRelationshipDo) FindInBatches(result *[]*model.InternalBlogPostTagsRelationship, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalBlogPostTagsRelationshipDo) Attrs(attrs ...field.AssignExpr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalBlogPostTagsRelationshipDo) Assign(attrs ...field.AssignExpr) IInternalBlogPostTagsRelationshipDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalBlogPostTagsRelationshipDo) Joins(fields ...field.RelationField) IInternalBlogPostTagsRelationshipDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalBlogPostTagsRelationshipDo) Preload(fields ...field.RelationField) IInternalBlogPostTagsRelationshipDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalBlogPostTagsRelationshipDo) FirstOrInit() (*model.InternalBlogPostTagsRelationship, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTagsRelationship), nil + } +} + +func (i internalBlogPostTagsRelationshipDo) FirstOrCreate() (*model.InternalBlogPostTagsRelationship, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPostTagsRelationship), nil + } +} + +func (i internalBlogPostTagsRelationshipDo) FindByPage(offset int, limit int) (result []*model.InternalBlogPostTagsRelationship, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i 
internalBlogPostTagsRelationshipDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil { + return + } + + err = i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalBlogPostTagsRelationshipDo) Scan(result interface{}) (err error) { + return i.DO.Scan(result) +} + +func (i internalBlogPostTagsRelationshipDo) Delete(models ...*model.InternalBlogPostTagsRelationship) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalBlogPostTagsRelationshipDo) withDO(do gen.Dao) *internalBlogPostTagsRelationshipDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/internal_blog_posts.gen.go b/next/models_generated/internal_blog_posts.gen.go new file mode 100644 index 000000000..6596e7a7a --- /dev/null +++ b/next/models_generated/internal_blog_posts.gen.go @@ -0,0 +1,424 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalBlogPost(db *gorm.DB, opts ...gen.DOOption) internalBlogPost { + _internalBlogPost := internalBlogPost{} + + _internalBlogPost.internalBlogPostDo.UseDB(db, opts...) 
+ _internalBlogPost.internalBlogPostDo.UseModel(&model.InternalBlogPost{}) + + tableName := _internalBlogPost.internalBlogPostDo.TableName() + _internalBlogPost.ALL = field.NewAsterisk(tableName) + _internalBlogPost.ID = field.NewString(tableName, "id") + _internalBlogPost.Slug = field.NewString(tableName, "slug") + _internalBlogPost.Title = field.NewString(tableName, "title") + _internalBlogPost.Summary = field.NewString(tableName, "summary") + _internalBlogPost.Content = field.NewString(tableName, "content") + _internalBlogPost.CreatedAt = field.NewTime(tableName, "created_at") + _internalBlogPost.UpdatedAt = field.NewTime(tableName, "updated_at") + _internalBlogPost.IsFeatured = field.NewBool(tableName, "is_featured") + _internalBlogPost.Status = field.NewString(tableName, "status") + _internalBlogPost.CoverImage = field.NewString(tableName, "cover_image") + _internalBlogPost.SeoData = field.NewString(tableName, "seo_data") + _internalBlogPost.JSONContent = field.NewString(tableName, "json_content") + + _internalBlogPost.fillFieldMap() + + return _internalBlogPost +} + +type internalBlogPost struct { + internalBlogPostDo + + ALL field.Asterisk + ID field.String + Slug field.String + Title field.String + Summary field.String + Content field.String + CreatedAt field.Time + UpdatedAt field.Time + IsFeatured field.Bool + Status field.String + CoverImage field.String + SeoData field.String + JSONContent field.String + + fieldMap map[string]field.Expr +} + +func (i internalBlogPost) Table(newTableName string) *internalBlogPost { + i.internalBlogPostDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalBlogPost) As(alias string) *internalBlogPost { + i.internalBlogPostDo.DO = *(i.internalBlogPostDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i *internalBlogPost) updateTableName(table string) *internalBlogPost { + i.ALL = field.NewAsterisk(table) + i.ID = field.NewString(table, "id") + i.Slug = 
field.NewString(table, "slug") + i.Title = field.NewString(table, "title") + i.Summary = field.NewString(table, "summary") + i.Content = field.NewString(table, "content") + i.CreatedAt = field.NewTime(table, "created_at") + i.UpdatedAt = field.NewTime(table, "updated_at") + i.IsFeatured = field.NewBool(table, "is_featured") + i.Status = field.NewString(table, "status") + i.CoverImage = field.NewString(table, "cover_image") + i.SeoData = field.NewString(table, "seo_data") + i.JSONContent = field.NewString(table, "json_content") + + i.fillFieldMap() + + return i +} + +func (i *internalBlogPost) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := i.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalBlogPost) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 12) + i.fieldMap["id"] = i.ID + i.fieldMap["slug"] = i.Slug + i.fieldMap["title"] = i.Title + i.fieldMap["summary"] = i.Summary + i.fieldMap["content"] = i.Content + i.fieldMap["created_at"] = i.CreatedAt + i.fieldMap["updated_at"] = i.UpdatedAt + i.fieldMap["is_featured"] = i.IsFeatured + i.fieldMap["status"] = i.Status + i.fieldMap["cover_image"] = i.CoverImage + i.fieldMap["seo_data"] = i.SeoData + i.fieldMap["json_content"] = i.JSONContent +} + +func (i internalBlogPost) clone(db *gorm.DB) internalBlogPost { + i.internalBlogPostDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalBlogPost) replaceDB(db *gorm.DB) internalBlogPost { + i.internalBlogPostDo.ReplaceDB(db) + return i +} + +type internalBlogPostDo struct{ gen.DO } + +type IInternalBlogPostDo interface { + gen.SubQuery + Debug() IInternalBlogPostDo + WithContext(ctx context.Context) IInternalBlogPostDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalBlogPostDo + WriteDB() IInternalBlogPostDo + As(alias string) gen.Dao + Session(config *gorm.Session) 
IInternalBlogPostDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalBlogPostDo + Not(conds ...gen.Condition) IInternalBlogPostDo + Or(conds ...gen.Condition) IInternalBlogPostDo + Select(conds ...field.Expr) IInternalBlogPostDo + Where(conds ...gen.Condition) IInternalBlogPostDo + Order(conds ...field.Expr) IInternalBlogPostDo + Distinct(cols ...field.Expr) IInternalBlogPostDo + Omit(cols ...field.Expr) IInternalBlogPostDo + Join(table schema.Tabler, on ...field.Expr) IInternalBlogPostDo + LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostDo + Group(cols ...field.Expr) IInternalBlogPostDo + Having(conds ...gen.Condition) IInternalBlogPostDo + Limit(limit int) IInternalBlogPostDo + Offset(offset int) IInternalBlogPostDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogPostDo + Unscoped() IInternalBlogPostDo + Create(values ...*model.InternalBlogPost) error + CreateInBatches(values []*model.InternalBlogPost, batchSize int) error + Save(values ...*model.InternalBlogPost) error + First() (*model.InternalBlogPost, error) + Take() (*model.InternalBlogPost, error) + Last() (*model.InternalBlogPost, error) + Find() ([]*model.InternalBlogPost, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogPost, err error) + FindInBatches(result *[]*model.InternalBlogPost, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalBlogPost) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + 
UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalBlogPostDo + Assign(attrs ...field.AssignExpr) IInternalBlogPostDo + Joins(fields ...field.RelationField) IInternalBlogPostDo + Preload(fields ...field.RelationField) IInternalBlogPostDo + FirstOrInit() (*model.InternalBlogPost, error) + FirstOrCreate() (*model.InternalBlogPost, error) + FindByPage(offset int, limit int) (result []*model.InternalBlogPost, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalBlogPostDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalBlogPostDo) Debug() IInternalBlogPostDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalBlogPostDo) WithContext(ctx context.Context) IInternalBlogPostDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalBlogPostDo) ReadDB() IInternalBlogPostDo { + return i.Clauses(dbresolver.Read) +} + +func (i internalBlogPostDo) WriteDB() IInternalBlogPostDo { + return i.Clauses(dbresolver.Write) +} + +func (i internalBlogPostDo) Session(config *gorm.Session) IInternalBlogPostDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalBlogPostDo) Clauses(conds ...clause.Expression) IInternalBlogPostDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalBlogPostDo) Returning(value interface{}, columns ...string) IInternalBlogPostDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalBlogPostDo) Not(conds ...gen.Condition) IInternalBlogPostDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalBlogPostDo) Or(conds ...gen.Condition) IInternalBlogPostDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalBlogPostDo) Select(conds ...field.Expr) 
IInternalBlogPostDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalBlogPostDo) Where(conds ...gen.Condition) IInternalBlogPostDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalBlogPostDo) Order(conds ...field.Expr) IInternalBlogPostDo { + return i.withDO(i.DO.Order(conds...)) +} + +func (i internalBlogPostDo) Distinct(cols ...field.Expr) IInternalBlogPostDo { + return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalBlogPostDo) Omit(cols ...field.Expr) IInternalBlogPostDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalBlogPostDo) Join(table schema.Tabler, on ...field.Expr) IInternalBlogPostDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalBlogPostDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostDo { + return i.withDO(i.DO.LeftJoin(table, on...)) +} + +func (i internalBlogPostDo) RightJoin(table schema.Tabler, on ...field.Expr) IInternalBlogPostDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalBlogPostDo) Group(cols ...field.Expr) IInternalBlogPostDo { + return i.withDO(i.DO.Group(cols...)) +} + +func (i internalBlogPostDo) Having(conds ...gen.Condition) IInternalBlogPostDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalBlogPostDo) Limit(limit int) IInternalBlogPostDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalBlogPostDo) Offset(offset int) IInternalBlogPostDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalBlogPostDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalBlogPostDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalBlogPostDo) Unscoped() IInternalBlogPostDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalBlogPostDo) Create(values ...*model.InternalBlogPost) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i internalBlogPostDo) CreateInBatches(values []*model.InternalBlogPost, batchSize int) error { + return 
i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalBlogPostDo) Save(values ...*model.InternalBlogPost) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalBlogPostDo) First() (*model.InternalBlogPost, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPost), nil + } +} + +func (i internalBlogPostDo) Take() (*model.InternalBlogPost, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPost), nil + } +} + +func (i internalBlogPostDo) Last() (*model.InternalBlogPost, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPost), nil + } +} + +func (i internalBlogPostDo) Find() ([]*model.InternalBlogPost, error) { + result, err := i.DO.Find() + return result.([]*model.InternalBlogPost), err +} + +func (i internalBlogPostDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalBlogPost, err error) { + buf := make([]*model.InternalBlogPost, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (i internalBlogPostDo) FindInBatches(result *[]*model.InternalBlogPost, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalBlogPostDo) Attrs(attrs ...field.AssignExpr) IInternalBlogPostDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalBlogPostDo) Assign(attrs ...field.AssignExpr) IInternalBlogPostDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalBlogPostDo) Joins(fields ...field.RelationField) IInternalBlogPostDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalBlogPostDo) Preload(fields ...field.RelationField) IInternalBlogPostDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalBlogPostDo) FirstOrInit() (*model.InternalBlogPost, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPost), nil + } +} + +func (i internalBlogPostDo) FirstOrCreate() (*model.InternalBlogPost, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalBlogPost), nil + } +} + +func (i internalBlogPostDo) FindByPage(offset int, limit int) (result []*model.InternalBlogPost, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i internalBlogPostDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil { + return + } + + err = i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalBlogPostDo) Scan(result interface{}) (err error) { + return 
i.DO.Scan(result) +} + +func (i internalBlogPostDo) Delete(models ...*model.InternalBlogPost) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalBlogPostDo) withDO(do gen.Dao) *internalBlogPostDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/internal_changelog.gen.go b/next/models_generated/internal_changelog.gen.go new file mode 100644 index 000000000..71adf8fba --- /dev/null +++ b/next/models_generated/internal_changelog.gen.go @@ -0,0 +1,404 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalChangelog(db *gorm.DB, opts ...gen.DOOption) internalChangelog { + _internalChangelog := internalChangelog{} + + _internalChangelog.internalChangelogDo.UseDB(db, opts...) 
+ _internalChangelog.internalChangelogDo.UseModel(&model.InternalChangelog{}) + + tableName := _internalChangelog.internalChangelogDo.TableName() + _internalChangelog.ALL = field.NewAsterisk(tableName) + _internalChangelog.ID = field.NewString(tableName, "id") + _internalChangelog.Title = field.NewString(tableName, "title") + _internalChangelog.Changes = field.NewString(tableName, "changes") + _internalChangelog.UserID = field.NewString(tableName, "user_id") + _internalChangelog.CreatedAt = field.NewTime(tableName, "created_at") + _internalChangelog.UpdatedAt = field.NewTime(tableName, "updated_at") + _internalChangelog.CoverImage = field.NewString(tableName, "cover_image") + + _internalChangelog.fillFieldMap() + + return _internalChangelog +} + +type internalChangelog struct { + internalChangelogDo + + ALL field.Asterisk + ID field.String + Title field.String + Changes field.String + UserID field.String + CreatedAt field.Time + UpdatedAt field.Time + CoverImage field.String + + fieldMap map[string]field.Expr +} + +func (i internalChangelog) Table(newTableName string) *internalChangelog { + i.internalChangelogDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalChangelog) As(alias string) *internalChangelog { + i.internalChangelogDo.DO = *(i.internalChangelogDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i *internalChangelog) updateTableName(table string) *internalChangelog { + i.ALL = field.NewAsterisk(table) + i.ID = field.NewString(table, "id") + i.Title = field.NewString(table, "title") + i.Changes = field.NewString(table, "changes") + i.UserID = field.NewString(table, "user_id") + i.CreatedAt = field.NewTime(table, "created_at") + i.UpdatedAt = field.NewTime(table, "updated_at") + i.CoverImage = field.NewString(table, "cover_image") + + i.fillFieldMap() + + return i +} + +func (i *internalChangelog) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := i.fieldMap[fieldName] + if !ok || 
_f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalChangelog) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 7) + i.fieldMap["id"] = i.ID + i.fieldMap["title"] = i.Title + i.fieldMap["changes"] = i.Changes + i.fieldMap["user_id"] = i.UserID + i.fieldMap["created_at"] = i.CreatedAt + i.fieldMap["updated_at"] = i.UpdatedAt + i.fieldMap["cover_image"] = i.CoverImage +} + +func (i internalChangelog) clone(db *gorm.DB) internalChangelog { + i.internalChangelogDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalChangelog) replaceDB(db *gorm.DB) internalChangelog { + i.internalChangelogDo.ReplaceDB(db) + return i +} + +type internalChangelogDo struct{ gen.DO } + +type IInternalChangelogDo interface { + gen.SubQuery + Debug() IInternalChangelogDo + WithContext(ctx context.Context) IInternalChangelogDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalChangelogDo + WriteDB() IInternalChangelogDo + As(alias string) gen.Dao + Session(config *gorm.Session) IInternalChangelogDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalChangelogDo + Not(conds ...gen.Condition) IInternalChangelogDo + Or(conds ...gen.Condition) IInternalChangelogDo + Select(conds ...field.Expr) IInternalChangelogDo + Where(conds ...gen.Condition) IInternalChangelogDo + Order(conds ...field.Expr) IInternalChangelogDo + Distinct(cols ...field.Expr) IInternalChangelogDo + Omit(cols ...field.Expr) IInternalChangelogDo + Join(table schema.Tabler, on ...field.Expr) IInternalChangelogDo + LeftJoin(table schema.Tabler, on ...field.Expr) IInternalChangelogDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalChangelogDo + Group(cols ...field.Expr) IInternalChangelogDo + Having(conds ...gen.Condition) IInternalChangelogDo + Limit(limit int) IInternalChangelogDo + Offset(offset int) IInternalChangelogDo + Count() (count int64, err 
error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalChangelogDo + Unscoped() IInternalChangelogDo + Create(values ...*model.InternalChangelog) error + CreateInBatches(values []*model.InternalChangelog, batchSize int) error + Save(values ...*model.InternalChangelog) error + First() (*model.InternalChangelog, error) + Take() (*model.InternalChangelog, error) + Last() (*model.InternalChangelog, error) + Find() ([]*model.InternalChangelog, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalChangelog, err error) + FindInBatches(result *[]*model.InternalChangelog, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalChangelog) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalChangelogDo + Assign(attrs ...field.AssignExpr) IInternalChangelogDo + Joins(fields ...field.RelationField) IInternalChangelogDo + Preload(fields ...field.RelationField) IInternalChangelogDo + FirstOrInit() (*model.InternalChangelog, error) + FirstOrCreate() (*model.InternalChangelog, error) + FindByPage(offset int, limit int) (result []*model.InternalChangelog, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalChangelogDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalChangelogDo) 
Debug() IInternalChangelogDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalChangelogDo) WithContext(ctx context.Context) IInternalChangelogDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalChangelogDo) ReadDB() IInternalChangelogDo { + return i.Clauses(dbresolver.Read) +} + +func (i internalChangelogDo) WriteDB() IInternalChangelogDo { + return i.Clauses(dbresolver.Write) +} + +func (i internalChangelogDo) Session(config *gorm.Session) IInternalChangelogDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalChangelogDo) Clauses(conds ...clause.Expression) IInternalChangelogDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalChangelogDo) Returning(value interface{}, columns ...string) IInternalChangelogDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalChangelogDo) Not(conds ...gen.Condition) IInternalChangelogDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalChangelogDo) Or(conds ...gen.Condition) IInternalChangelogDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalChangelogDo) Select(conds ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalChangelogDo) Where(conds ...gen.Condition) IInternalChangelogDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalChangelogDo) Order(conds ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.Order(conds...)) +} + +func (i internalChangelogDo) Distinct(cols ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalChangelogDo) Omit(cols ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalChangelogDo) Join(table schema.Tabler, on ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalChangelogDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.LeftJoin(table, on...)) 
+} + +func (i internalChangelogDo) RightJoin(table schema.Tabler, on ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalChangelogDo) Group(cols ...field.Expr) IInternalChangelogDo { + return i.withDO(i.DO.Group(cols...)) +} + +func (i internalChangelogDo) Having(conds ...gen.Condition) IInternalChangelogDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalChangelogDo) Limit(limit int) IInternalChangelogDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalChangelogDo) Offset(offset int) IInternalChangelogDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalChangelogDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalChangelogDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalChangelogDo) Unscoped() IInternalChangelogDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalChangelogDo) Create(values ...*model.InternalChangelog) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i internalChangelogDo) CreateInBatches(values []*model.InternalChangelog, batchSize int) error { + return i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalChangelogDo) Save(values ...*model.InternalChangelog) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalChangelogDo) First() (*model.InternalChangelog, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalChangelog), nil + } +} + +func (i internalChangelogDo) Take() (*model.InternalChangelog, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalChangelog), nil + } +} + +func (i internalChangelogDo) Last() (*model.InternalChangelog, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalChangelog), nil + } +} + +func (i internalChangelogDo) Find() ([]*model.InternalChangelog, error) { + result, err := i.DO.Find() + return result.([]*model.InternalChangelog), err +} + +func (i internalChangelogDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalChangelog, err error) { + buf := make([]*model.InternalChangelog, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (i internalChangelogDo) FindInBatches(result *[]*model.InternalChangelog, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalChangelogDo) Attrs(attrs ...field.AssignExpr) IInternalChangelogDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalChangelogDo) Assign(attrs ...field.AssignExpr) IInternalChangelogDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalChangelogDo) Joins(fields ...field.RelationField) IInternalChangelogDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalChangelogDo) Preload(fields ...field.RelationField) IInternalChangelogDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalChangelogDo) FirstOrInit() (*model.InternalChangelog, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalChangelog), nil + } +} + +func (i internalChangelogDo) FirstOrCreate() (*model.InternalChangelog, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalChangelog), nil + } +} + +func (i internalChangelogDo) FindByPage(offset int, limit int) (result []*model.InternalChangelog, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i internalChangelogDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil { + return + } + + err = i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalChangelogDo) Scan(result interface{}) (err error) { 
+ return i.DO.Scan(result) +} + +func (i internalChangelogDo) Delete(models ...*model.InternalChangelog) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalChangelogDo) withDO(do gen.Dao) *internalChangelogDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/internal_feedback_comments.gen.go b/next/models_generated/internal_feedback_comments.gen.go new file mode 100644 index 000000000..f5c74ca71 --- /dev/null +++ b/next/models_generated/internal_feedback_comments.gen.go @@ -0,0 +1,400 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalFeedbackComment(db *gorm.DB, opts ...gen.DOOption) internalFeedbackComment { + _internalFeedbackComment := internalFeedbackComment{} + + _internalFeedbackComment.internalFeedbackCommentDo.UseDB(db, opts...) 
+ _internalFeedbackComment.internalFeedbackCommentDo.UseModel(&model.InternalFeedbackComment{}) + + tableName := _internalFeedbackComment.internalFeedbackCommentDo.TableName() + _internalFeedbackComment.ALL = field.NewAsterisk(tableName) + _internalFeedbackComment.ID = field.NewString(tableName, "id") + _internalFeedbackComment.UserID = field.NewString(tableName, "user_id") + _internalFeedbackComment.ThreadID = field.NewString(tableName, "thread_id") + _internalFeedbackComment.Content = field.NewString(tableName, "content") + _internalFeedbackComment.CreatedAt = field.NewTime(tableName, "created_at") + _internalFeedbackComment.UpdatedAt = field.NewTime(tableName, "updated_at") + + _internalFeedbackComment.fillFieldMap() + + return _internalFeedbackComment +} + +type internalFeedbackComment struct { + internalFeedbackCommentDo + + ALL field.Asterisk + ID field.String + UserID field.String + ThreadID field.String + Content field.String + CreatedAt field.Time + UpdatedAt field.Time + + fieldMap map[string]field.Expr +} + +func (i internalFeedbackComment) Table(newTableName string) *internalFeedbackComment { + i.internalFeedbackCommentDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalFeedbackComment) As(alias string) *internalFeedbackComment { + i.internalFeedbackCommentDo.DO = *(i.internalFeedbackCommentDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i *internalFeedbackComment) updateTableName(table string) *internalFeedbackComment { + i.ALL = field.NewAsterisk(table) + i.ID = field.NewString(table, "id") + i.UserID = field.NewString(table, "user_id") + i.ThreadID = field.NewString(table, "thread_id") + i.Content = field.NewString(table, "content") + i.CreatedAt = field.NewTime(table, "created_at") + i.UpdatedAt = field.NewTime(table, "updated_at") + + i.fillFieldMap() + + return i +} + +func (i *internalFeedbackComment) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := 
i.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalFeedbackComment) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 6) + i.fieldMap["id"] = i.ID + i.fieldMap["user_id"] = i.UserID + i.fieldMap["thread_id"] = i.ThreadID + i.fieldMap["content"] = i.Content + i.fieldMap["created_at"] = i.CreatedAt + i.fieldMap["updated_at"] = i.UpdatedAt +} + +func (i internalFeedbackComment) clone(db *gorm.DB) internalFeedbackComment { + i.internalFeedbackCommentDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalFeedbackComment) replaceDB(db *gorm.DB) internalFeedbackComment { + i.internalFeedbackCommentDo.ReplaceDB(db) + return i +} + +type internalFeedbackCommentDo struct{ gen.DO } + +type IInternalFeedbackCommentDo interface { + gen.SubQuery + Debug() IInternalFeedbackCommentDo + WithContext(ctx context.Context) IInternalFeedbackCommentDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalFeedbackCommentDo + WriteDB() IInternalFeedbackCommentDo + As(alias string) gen.Dao + Session(config *gorm.Session) IInternalFeedbackCommentDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalFeedbackCommentDo + Not(conds ...gen.Condition) IInternalFeedbackCommentDo + Or(conds ...gen.Condition) IInternalFeedbackCommentDo + Select(conds ...field.Expr) IInternalFeedbackCommentDo + Where(conds ...gen.Condition) IInternalFeedbackCommentDo + Order(conds ...field.Expr) IInternalFeedbackCommentDo + Distinct(cols ...field.Expr) IInternalFeedbackCommentDo + Omit(cols ...field.Expr) IInternalFeedbackCommentDo + Join(table schema.Tabler, on ...field.Expr) IInternalFeedbackCommentDo + LeftJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackCommentDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackCommentDo + Group(cols ...field.Expr) IInternalFeedbackCommentDo + 
Having(conds ...gen.Condition) IInternalFeedbackCommentDo + Limit(limit int) IInternalFeedbackCommentDo + Offset(offset int) IInternalFeedbackCommentDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalFeedbackCommentDo + Unscoped() IInternalFeedbackCommentDo + Create(values ...*model.InternalFeedbackComment) error + CreateInBatches(values []*model.InternalFeedbackComment, batchSize int) error + Save(values ...*model.InternalFeedbackComment) error + First() (*model.InternalFeedbackComment, error) + Take() (*model.InternalFeedbackComment, error) + Last() (*model.InternalFeedbackComment, error) + Find() ([]*model.InternalFeedbackComment, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalFeedbackComment, err error) + FindInBatches(result *[]*model.InternalFeedbackComment, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalFeedbackComment) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalFeedbackCommentDo + Assign(attrs ...field.AssignExpr) IInternalFeedbackCommentDo + Joins(fields ...field.RelationField) IInternalFeedbackCommentDo + Preload(fields ...field.RelationField) IInternalFeedbackCommentDo + FirstOrInit() (*model.InternalFeedbackComment, error) + FirstOrCreate() (*model.InternalFeedbackComment, error) + FindByPage(offset int, limit int) (result []*model.InternalFeedbackComment, 
count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalFeedbackCommentDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalFeedbackCommentDo) Debug() IInternalFeedbackCommentDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalFeedbackCommentDo) WithContext(ctx context.Context) IInternalFeedbackCommentDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalFeedbackCommentDo) ReadDB() IInternalFeedbackCommentDo { + return i.Clauses(dbresolver.Read) +} + +func (i internalFeedbackCommentDo) WriteDB() IInternalFeedbackCommentDo { + return i.Clauses(dbresolver.Write) +} + +func (i internalFeedbackCommentDo) Session(config *gorm.Session) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalFeedbackCommentDo) Clauses(conds ...clause.Expression) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalFeedbackCommentDo) Returning(value interface{}, columns ...string) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalFeedbackCommentDo) Not(conds ...gen.Condition) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalFeedbackCommentDo) Or(conds ...gen.Condition) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalFeedbackCommentDo) Select(conds ...field.Expr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalFeedbackCommentDo) Where(conds ...gen.Condition) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalFeedbackCommentDo) Order(conds ...field.Expr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Order(conds...)) +} + +func (i internalFeedbackCommentDo) Distinct(cols ...field.Expr) IInternalFeedbackCommentDo { + 
return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalFeedbackCommentDo) Omit(cols ...field.Expr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalFeedbackCommentDo) Join(table schema.Tabler, on ...field.Expr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalFeedbackCommentDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.LeftJoin(table, on...)) +} + +func (i internalFeedbackCommentDo) RightJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalFeedbackCommentDo) Group(cols ...field.Expr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Group(cols...)) +} + +func (i internalFeedbackCommentDo) Having(conds ...gen.Condition) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalFeedbackCommentDo) Limit(limit int) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalFeedbackCommentDo) Offset(offset int) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalFeedbackCommentDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalFeedbackCommentDo) Unscoped() IInternalFeedbackCommentDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalFeedbackCommentDo) Create(values ...*model.InternalFeedbackComment) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i internalFeedbackCommentDo) CreateInBatches(values []*model.InternalFeedbackComment, batchSize int) error { + return i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalFeedbackCommentDo) Save(values ...*model.InternalFeedbackComment) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalFeedbackCommentDo) First() (*model.InternalFeedbackComment, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackComment), nil + } +} + +func (i internalFeedbackCommentDo) Take() (*model.InternalFeedbackComment, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackComment), nil + } +} + +func (i internalFeedbackCommentDo) Last() (*model.InternalFeedbackComment, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackComment), nil + } +} + +func (i internalFeedbackCommentDo) Find() ([]*model.InternalFeedbackComment, error) { + result, err := i.DO.Find() + return result.([]*model.InternalFeedbackComment), err +} + +func (i internalFeedbackCommentDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalFeedbackComment, err error) { + buf := make([]*model.InternalFeedbackComment, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (i internalFeedbackCommentDo) FindInBatches(result *[]*model.InternalFeedbackComment, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalFeedbackCommentDo) Attrs(attrs ...field.AssignExpr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalFeedbackCommentDo) Assign(attrs ...field.AssignExpr) IInternalFeedbackCommentDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalFeedbackCommentDo) Joins(fields ...field.RelationField) IInternalFeedbackCommentDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalFeedbackCommentDo) Preload(fields ...field.RelationField) IInternalFeedbackCommentDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalFeedbackCommentDo) FirstOrInit() (*model.InternalFeedbackComment, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackComment), nil + } +} + +func (i internalFeedbackCommentDo) FirstOrCreate() (*model.InternalFeedbackComment, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackComment), nil + } +} + +func (i internalFeedbackCommentDo) FindByPage(offset int, limit int) (result []*model.InternalFeedbackComment, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i internalFeedbackCommentDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil { + return + } + + err = 
i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalFeedbackCommentDo) Scan(result interface{}) (err error) { + return i.DO.Scan(result) +} + +func (i internalFeedbackCommentDo) Delete(models ...*model.InternalFeedbackComment) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalFeedbackCommentDo) withDO(do gen.Dao) *internalFeedbackCommentDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/internal_feedback_threads.gen.go b/next/models_generated/internal_feedback_threads.gen.go new file mode 100644 index 000000000..70acd9a33 --- /dev/null +++ b/next/models_generated/internal_feedback_threads.gen.go @@ -0,0 +1,424 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newInternalFeedbackThread(db *gorm.DB, opts ...gen.DOOption) internalFeedbackThread { + _internalFeedbackThread := internalFeedbackThread{} + + _internalFeedbackThread.internalFeedbackThreadDo.UseDB(db, opts...) 
+ _internalFeedbackThread.internalFeedbackThreadDo.UseModel(&model.InternalFeedbackThread{}) + + tableName := _internalFeedbackThread.internalFeedbackThreadDo.TableName() + _internalFeedbackThread.ALL = field.NewAsterisk(tableName) + _internalFeedbackThread.ID = field.NewString(tableName, "id") + _internalFeedbackThread.Title = field.NewString(tableName, "title") + _internalFeedbackThread.Content = field.NewString(tableName, "content") + _internalFeedbackThread.UserID = field.NewString(tableName, "user_id") + _internalFeedbackThread.CreatedAt = field.NewTime(tableName, "created_at") + _internalFeedbackThread.UpdatedAt = field.NewTime(tableName, "updated_at") + _internalFeedbackThread.Priority = field.NewString(tableName, "priority") + _internalFeedbackThread.Type = field.NewString(tableName, "type") + _internalFeedbackThread.Status = field.NewString(tableName, "status") + _internalFeedbackThread.AddedToRoadmap = field.NewBool(tableName, "added_to_roadmap") + _internalFeedbackThread.OpenForPublicDiscussion = field.NewBool(tableName, "open_for_public_discussion") + _internalFeedbackThread.IsPubliclyVisible = field.NewBool(tableName, "is_publicly_visible") + + _internalFeedbackThread.fillFieldMap() + + return _internalFeedbackThread +} + +type internalFeedbackThread struct { + internalFeedbackThreadDo + + ALL field.Asterisk + ID field.String + Title field.String + Content field.String + UserID field.String + CreatedAt field.Time + UpdatedAt field.Time + Priority field.String + Type field.String + Status field.String + AddedToRoadmap field.Bool + OpenForPublicDiscussion field.Bool + IsPubliclyVisible field.Bool + + fieldMap map[string]field.Expr +} + +func (i internalFeedbackThread) Table(newTableName string) *internalFeedbackThread { + i.internalFeedbackThreadDo.UseTable(newTableName) + return i.updateTableName(newTableName) +} + +func (i internalFeedbackThread) As(alias string) *internalFeedbackThread { + i.internalFeedbackThreadDo.DO = 
*(i.internalFeedbackThreadDo.As(alias).(*gen.DO)) + return i.updateTableName(alias) +} + +func (i *internalFeedbackThread) updateTableName(table string) *internalFeedbackThread { + i.ALL = field.NewAsterisk(table) + i.ID = field.NewString(table, "id") + i.Title = field.NewString(table, "title") + i.Content = field.NewString(table, "content") + i.UserID = field.NewString(table, "user_id") + i.CreatedAt = field.NewTime(table, "created_at") + i.UpdatedAt = field.NewTime(table, "updated_at") + i.Priority = field.NewString(table, "priority") + i.Type = field.NewString(table, "type") + i.Status = field.NewString(table, "status") + i.AddedToRoadmap = field.NewBool(table, "added_to_roadmap") + i.OpenForPublicDiscussion = field.NewBool(table, "open_for_public_discussion") + i.IsPubliclyVisible = field.NewBool(table, "is_publicly_visible") + + i.fillFieldMap() + + return i +} + +func (i *internalFeedbackThread) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := i.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (i *internalFeedbackThread) fillFieldMap() { + i.fieldMap = make(map[string]field.Expr, 12) + i.fieldMap["id"] = i.ID + i.fieldMap["title"] = i.Title + i.fieldMap["content"] = i.Content + i.fieldMap["user_id"] = i.UserID + i.fieldMap["created_at"] = i.CreatedAt + i.fieldMap["updated_at"] = i.UpdatedAt + i.fieldMap["priority"] = i.Priority + i.fieldMap["type"] = i.Type + i.fieldMap["status"] = i.Status + i.fieldMap["added_to_roadmap"] = i.AddedToRoadmap + i.fieldMap["open_for_public_discussion"] = i.OpenForPublicDiscussion + i.fieldMap["is_publicly_visible"] = i.IsPubliclyVisible +} + +func (i internalFeedbackThread) clone(db *gorm.DB) internalFeedbackThread { + i.internalFeedbackThreadDo.ReplaceConnPool(db.Statement.ConnPool) + return i +} + +func (i internalFeedbackThread) replaceDB(db *gorm.DB) internalFeedbackThread { + i.internalFeedbackThreadDo.ReplaceDB(db) + 
return i +} + +type internalFeedbackThreadDo struct{ gen.DO } + +type IInternalFeedbackThreadDo interface { + gen.SubQuery + Debug() IInternalFeedbackThreadDo + WithContext(ctx context.Context) IInternalFeedbackThreadDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IInternalFeedbackThreadDo + WriteDB() IInternalFeedbackThreadDo + As(alias string) gen.Dao + Session(config *gorm.Session) IInternalFeedbackThreadDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IInternalFeedbackThreadDo + Not(conds ...gen.Condition) IInternalFeedbackThreadDo + Or(conds ...gen.Condition) IInternalFeedbackThreadDo + Select(conds ...field.Expr) IInternalFeedbackThreadDo + Where(conds ...gen.Condition) IInternalFeedbackThreadDo + Order(conds ...field.Expr) IInternalFeedbackThreadDo + Distinct(cols ...field.Expr) IInternalFeedbackThreadDo + Omit(cols ...field.Expr) IInternalFeedbackThreadDo + Join(table schema.Tabler, on ...field.Expr) IInternalFeedbackThreadDo + LeftJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackThreadDo + RightJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackThreadDo + Group(cols ...field.Expr) IInternalFeedbackThreadDo + Having(conds ...gen.Condition) IInternalFeedbackThreadDo + Limit(limit int) IInternalFeedbackThreadDo + Offset(offset int) IInternalFeedbackThreadDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalFeedbackThreadDo + Unscoped() IInternalFeedbackThreadDo + Create(values ...*model.InternalFeedbackThread) error + CreateInBatches(values []*model.InternalFeedbackThread, batchSize int) error + Save(values ...*model.InternalFeedbackThread) error + First() (*model.InternalFeedbackThread, error) + Take() (*model.InternalFeedbackThread, error) + Last() (*model.InternalFeedbackThread, error) + Find() ([]*model.InternalFeedbackThread, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results 
[]*model.InternalFeedbackThread, err error) + FindInBatches(result *[]*model.InternalFeedbackThread, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.InternalFeedbackThread) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IInternalFeedbackThreadDo + Assign(attrs ...field.AssignExpr) IInternalFeedbackThreadDo + Joins(fields ...field.RelationField) IInternalFeedbackThreadDo + Preload(fields ...field.RelationField) IInternalFeedbackThreadDo + FirstOrInit() (*model.InternalFeedbackThread, error) + FirstOrCreate() (*model.InternalFeedbackThread, error) + FindByPage(offset int, limit int) (result []*model.InternalFeedbackThread, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IInternalFeedbackThreadDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (i internalFeedbackThreadDo) Debug() IInternalFeedbackThreadDo { + return i.withDO(i.DO.Debug()) +} + +func (i internalFeedbackThreadDo) WithContext(ctx context.Context) IInternalFeedbackThreadDo { + return i.withDO(i.DO.WithContext(ctx)) +} + +func (i internalFeedbackThreadDo) ReadDB() IInternalFeedbackThreadDo { + return i.Clauses(dbresolver.Read) +} + +func (i internalFeedbackThreadDo) WriteDB() IInternalFeedbackThreadDo { + return i.Clauses(dbresolver.Write) +} + +func (i 
internalFeedbackThreadDo) Session(config *gorm.Session) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Session(config)) +} + +func (i internalFeedbackThreadDo) Clauses(conds ...clause.Expression) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Clauses(conds...)) +} + +func (i internalFeedbackThreadDo) Returning(value interface{}, columns ...string) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Returning(value, columns...)) +} + +func (i internalFeedbackThreadDo) Not(conds ...gen.Condition) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Not(conds...)) +} + +func (i internalFeedbackThreadDo) Or(conds ...gen.Condition) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Or(conds...)) +} + +func (i internalFeedbackThreadDo) Select(conds ...field.Expr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Select(conds...)) +} + +func (i internalFeedbackThreadDo) Where(conds ...gen.Condition) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Where(conds...)) +} + +func (i internalFeedbackThreadDo) Order(conds ...field.Expr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Order(conds...)) +} + +func (i internalFeedbackThreadDo) Distinct(cols ...field.Expr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Distinct(cols...)) +} + +func (i internalFeedbackThreadDo) Omit(cols ...field.Expr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Omit(cols...)) +} + +func (i internalFeedbackThreadDo) Join(table schema.Tabler, on ...field.Expr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Join(table, on...)) +} + +func (i internalFeedbackThreadDo) LeftJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.LeftJoin(table, on...)) +} + +func (i internalFeedbackThreadDo) RightJoin(table schema.Tabler, on ...field.Expr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.RightJoin(table, on...)) +} + +func (i internalFeedbackThreadDo) Group(cols ...field.Expr) IInternalFeedbackThreadDo { + return 
i.withDO(i.DO.Group(cols...)) +} + +func (i internalFeedbackThreadDo) Having(conds ...gen.Condition) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Having(conds...)) +} + +func (i internalFeedbackThreadDo) Limit(limit int) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Limit(limit)) +} + +func (i internalFeedbackThreadDo) Offset(offset int) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Offset(offset)) +} + +func (i internalFeedbackThreadDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Scopes(funcs...)) +} + +func (i internalFeedbackThreadDo) Unscoped() IInternalFeedbackThreadDo { + return i.withDO(i.DO.Unscoped()) +} + +func (i internalFeedbackThreadDo) Create(values ...*model.InternalFeedbackThread) error { + if len(values) == 0 { + return nil + } + return i.DO.Create(values) +} + +func (i internalFeedbackThreadDo) CreateInBatches(values []*model.InternalFeedbackThread, batchSize int) error { + return i.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (i internalFeedbackThreadDo) Save(values ...*model.InternalFeedbackThread) error { + if len(values) == 0 { + return nil + } + return i.DO.Save(values) +} + +func (i internalFeedbackThreadDo) First() (*model.InternalFeedbackThread, error) { + if result, err := i.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackThread), nil + } +} + +func (i internalFeedbackThreadDo) Take() (*model.InternalFeedbackThread, error) { + if result, err := i.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackThread), nil + } +} + +func (i internalFeedbackThreadDo) Last() (*model.InternalFeedbackThread, error) { + if result, err := i.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackThread), nil + } +} + +func (i internalFeedbackThreadDo) Find() ([]*model.InternalFeedbackThread, error) { + result, err := i.DO.Find() + return result.([]*model.InternalFeedbackThread), err +} + +func (i internalFeedbackThreadDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.InternalFeedbackThread, err error) { + buf := make([]*model.InternalFeedbackThread, 0, batchSize) + err = i.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (i internalFeedbackThreadDo) FindInBatches(result *[]*model.InternalFeedbackThread, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return i.DO.FindInBatches(result, batchSize, fc) +} + +func (i internalFeedbackThreadDo) Attrs(attrs ...field.AssignExpr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Attrs(attrs...)) +} + +func (i internalFeedbackThreadDo) Assign(attrs ...field.AssignExpr) IInternalFeedbackThreadDo { + return i.withDO(i.DO.Assign(attrs...)) +} + +func (i internalFeedbackThreadDo) Joins(fields ...field.RelationField) IInternalFeedbackThreadDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Joins(_f)) + } + return &i +} + +func (i internalFeedbackThreadDo) Preload(fields ...field.RelationField) IInternalFeedbackThreadDo { + for _, _f := range fields { + i = *i.withDO(i.DO.Preload(_f)) + } + return &i +} + +func (i internalFeedbackThreadDo) FirstOrInit() (*model.InternalFeedbackThread, error) { + if result, err := i.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackThread), nil + } +} + +func (i internalFeedbackThreadDo) FirstOrCreate() (*model.InternalFeedbackThread, error) { + if result, err := i.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.InternalFeedbackThread), nil + } +} + +func (i internalFeedbackThreadDo) FindByPage(offset int, limit int) (result []*model.InternalFeedbackThread, count int64, err error) { + result, err = i.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = i.Offset(-1).Limit(-1).Count() + return +} + +func (i internalFeedbackThreadDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = i.Count() + if err != nil { + return + } + + err = 
i.Offset(offset).Limit(limit).Scan(result) + return +} + +func (i internalFeedbackThreadDo) Scan(result interface{}) (err error) { + return i.DO.Scan(result) +} + +func (i internalFeedbackThreadDo) Delete(models ...*model.InternalFeedbackThread) (result gen.ResultInfo, err error) { + return i.DO.Delete(models) +} + +func (i *internalFeedbackThreadDo) withDO(do gen.Dao) *internalFeedbackThreadDo { + i.DO = *do.(*gen.DO) + return i +} diff --git a/next/models_generated/organization_credits.gen.go b/next/models_generated/organization_credits.gen.go new file mode 100644 index 000000000..6d8fd2325 --- /dev/null +++ b/next/models_generated/organization_credits.gen.go @@ -0,0 +1,384 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newOrganizationCredit(db *gorm.DB, opts ...gen.DOOption) organizationCredit { + _organizationCredit := organizationCredit{} + + _organizationCredit.organizationCreditDo.UseDB(db, opts...) 
+ _organizationCredit.organizationCreditDo.UseModel(&model.OrganizationCredit{}) + + tableName := _organizationCredit.organizationCreditDo.TableName() + _organizationCredit.ALL = field.NewAsterisk(tableName) + _organizationCredit.OrganizationID = field.NewString(tableName, "organization_id") + _organizationCredit.Credits = field.NewInt64(tableName, "credits") + + _organizationCredit.fillFieldMap() + + return _organizationCredit +} + +type organizationCredit struct { + organizationCreditDo + + ALL field.Asterisk + OrganizationID field.String + Credits field.Int64 + + fieldMap map[string]field.Expr +} + +func (o organizationCredit) Table(newTableName string) *organizationCredit { + o.organizationCreditDo.UseTable(newTableName) + return o.updateTableName(newTableName) +} + +func (o organizationCredit) As(alias string) *organizationCredit { + o.organizationCreditDo.DO = *(o.organizationCreditDo.As(alias).(*gen.DO)) + return o.updateTableName(alias) +} + +func (o *organizationCredit) updateTableName(table string) *organizationCredit { + o.ALL = field.NewAsterisk(table) + o.OrganizationID = field.NewString(table, "organization_id") + o.Credits = field.NewInt64(table, "credits") + + o.fillFieldMap() + + return o +} + +func (o *organizationCredit) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := o.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (o *organizationCredit) fillFieldMap() { + o.fieldMap = make(map[string]field.Expr, 2) + o.fieldMap["organization_id"] = o.OrganizationID + o.fieldMap["credits"] = o.Credits +} + +func (o organizationCredit) clone(db *gorm.DB) organizationCredit { + o.organizationCreditDo.ReplaceConnPool(db.Statement.ConnPool) + return o +} + +func (o organizationCredit) replaceDB(db *gorm.DB) organizationCredit { + o.organizationCreditDo.ReplaceDB(db) + return o +} + +type organizationCreditDo struct{ gen.DO } + +type IOrganizationCreditDo 
interface { + gen.SubQuery + Debug() IOrganizationCreditDo + WithContext(ctx context.Context) IOrganizationCreditDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IOrganizationCreditDo + WriteDB() IOrganizationCreditDo + As(alias string) gen.Dao + Session(config *gorm.Session) IOrganizationCreditDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IOrganizationCreditDo + Not(conds ...gen.Condition) IOrganizationCreditDo + Or(conds ...gen.Condition) IOrganizationCreditDo + Select(conds ...field.Expr) IOrganizationCreditDo + Where(conds ...gen.Condition) IOrganizationCreditDo + Order(conds ...field.Expr) IOrganizationCreditDo + Distinct(cols ...field.Expr) IOrganizationCreditDo + Omit(cols ...field.Expr) IOrganizationCreditDo + Join(table schema.Tabler, on ...field.Expr) IOrganizationCreditDo + LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationCreditDo + RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationCreditDo + Group(cols ...field.Expr) IOrganizationCreditDo + Having(conds ...gen.Condition) IOrganizationCreditDo + Limit(limit int) IOrganizationCreditDo + Offset(offset int) IOrganizationCreditDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationCreditDo + Unscoped() IOrganizationCreditDo + Create(values ...*model.OrganizationCredit) error + CreateInBatches(values []*model.OrganizationCredit, batchSize int) error + Save(values ...*model.OrganizationCredit) error + First() (*model.OrganizationCredit, error) + Take() (*model.OrganizationCredit, error) + Last() (*model.OrganizationCredit, error) + Find() ([]*model.OrganizationCredit, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationCredit, err error) + FindInBatches(result *[]*model.OrganizationCredit, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + 
Delete(...*model.OrganizationCredit) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IOrganizationCreditDo + Assign(attrs ...field.AssignExpr) IOrganizationCreditDo + Joins(fields ...field.RelationField) IOrganizationCreditDo + Preload(fields ...field.RelationField) IOrganizationCreditDo + FirstOrInit() (*model.OrganizationCredit, error) + FirstOrCreate() (*model.OrganizationCredit, error) + FindByPage(offset int, limit int) (result []*model.OrganizationCredit, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IOrganizationCreditDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (o organizationCreditDo) Debug() IOrganizationCreditDo { + return o.withDO(o.DO.Debug()) +} + +func (o organizationCreditDo) WithContext(ctx context.Context) IOrganizationCreditDo { + return o.withDO(o.DO.WithContext(ctx)) +} + +func (o organizationCreditDo) ReadDB() IOrganizationCreditDo { + return o.Clauses(dbresolver.Read) +} + +func (o organizationCreditDo) WriteDB() IOrganizationCreditDo { + return o.Clauses(dbresolver.Write) +} + +func (o organizationCreditDo) Session(config *gorm.Session) IOrganizationCreditDo { + return o.withDO(o.DO.Session(config)) +} + +func (o organizationCreditDo) Clauses(conds ...clause.Expression) IOrganizationCreditDo { + return o.withDO(o.DO.Clauses(conds...)) +} + +func (o organizationCreditDo) 
Returning(value interface{}, columns ...string) IOrganizationCreditDo { + return o.withDO(o.DO.Returning(value, columns...)) +} + +func (o organizationCreditDo) Not(conds ...gen.Condition) IOrganizationCreditDo { + return o.withDO(o.DO.Not(conds...)) +} + +func (o organizationCreditDo) Or(conds ...gen.Condition) IOrganizationCreditDo { + return o.withDO(o.DO.Or(conds...)) +} + +func (o organizationCreditDo) Select(conds ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.Select(conds...)) +} + +func (o organizationCreditDo) Where(conds ...gen.Condition) IOrganizationCreditDo { + return o.withDO(o.DO.Where(conds...)) +} + +func (o organizationCreditDo) Order(conds ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.Order(conds...)) +} + +func (o organizationCreditDo) Distinct(cols ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.Distinct(cols...)) +} + +func (o organizationCreditDo) Omit(cols ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.Omit(cols...)) +} + +func (o organizationCreditDo) Join(table schema.Tabler, on ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.Join(table, on...)) +} + +func (o organizationCreditDo) LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.LeftJoin(table, on...)) +} + +func (o organizationCreditDo) RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.RightJoin(table, on...)) +} + +func (o organizationCreditDo) Group(cols ...field.Expr) IOrganizationCreditDo { + return o.withDO(o.DO.Group(cols...)) +} + +func (o organizationCreditDo) Having(conds ...gen.Condition) IOrganizationCreditDo { + return o.withDO(o.DO.Having(conds...)) +} + +func (o organizationCreditDo) Limit(limit int) IOrganizationCreditDo { + return o.withDO(o.DO.Limit(limit)) +} + +func (o organizationCreditDo) Offset(offset int) IOrganizationCreditDo { + return o.withDO(o.DO.Offset(offset)) +} + +func (o organizationCreditDo) 
Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationCreditDo { + return o.withDO(o.DO.Scopes(funcs...)) +} + +func (o organizationCreditDo) Unscoped() IOrganizationCreditDo { + return o.withDO(o.DO.Unscoped()) +} + +func (o organizationCreditDo) Create(values ...*model.OrganizationCredit) error { + if len(values) == 0 { + return nil + } + return o.DO.Create(values) +} + +func (o organizationCreditDo) CreateInBatches(values []*model.OrganizationCredit, batchSize int) error { + return o.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (o organizationCreditDo) Save(values ...*model.OrganizationCredit) error { + if len(values) == 0 { + return nil + } + return o.DO.Save(values) +} + +func (o organizationCreditDo) First() (*model.OrganizationCredit, error) { + if result, err := o.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationCredit), nil + } +} + +func (o organizationCreditDo) Take() (*model.OrganizationCredit, error) { + if result, err := o.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationCredit), nil + } +} + +func (o organizationCreditDo) Last() (*model.OrganizationCredit, error) { + if result, err := o.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationCredit), nil + } +} + +func (o organizationCreditDo) Find() ([]*model.OrganizationCredit, error) { + result, err := o.DO.Find() + return result.([]*model.OrganizationCredit), err +} + +func (o organizationCreditDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationCredit, err error) { + buf := make([]*model.OrganizationCredit, 0, batchSize) + err = o.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (o organizationCreditDo) FindInBatches(result *[]*model.OrganizationCredit, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return o.DO.FindInBatches(result, batchSize, fc) +} + +func (o organizationCreditDo) Attrs(attrs ...field.AssignExpr) IOrganizationCreditDo { + return o.withDO(o.DO.Attrs(attrs...)) +} + +func (o organizationCreditDo) Assign(attrs ...field.AssignExpr) IOrganizationCreditDo { + return o.withDO(o.DO.Assign(attrs...)) +} + +func (o organizationCreditDo) Joins(fields ...field.RelationField) IOrganizationCreditDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Joins(_f)) + } + return &o +} + +func (o organizationCreditDo) Preload(fields ...field.RelationField) IOrganizationCreditDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Preload(_f)) + } + return &o +} + +func (o organizationCreditDo) FirstOrInit() (*model.OrganizationCredit, error) { + if result, err := o.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationCredit), nil + } +} + +func (o organizationCreditDo) FirstOrCreate() (*model.OrganizationCredit, error) { + if result, err := o.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationCredit), nil + } +} + +func (o organizationCreditDo) FindByPage(offset int, limit int) (result []*model.OrganizationCredit, count int64, err error) { + result, err = o.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = o.Offset(-1).Limit(-1).Count() + return +} + +func (o organizationCreditDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = o.Count() + if err != nil { + return + } + + err = o.Offset(offset).Limit(limit).Scan(result) + return +} + +func (o organizationCreditDo) Scan(result 
interface{}) (err error) { + return o.DO.Scan(result) +} + +func (o organizationCreditDo) Delete(models ...*model.OrganizationCredit) (result gen.ResultInfo, err error) { + return o.DO.Delete(models) +} + +func (o *organizationCreditDo) withDO(do gen.Dao) *organizationCreditDo { + o.DO = *do.(*gen.DO) + return o +} diff --git a/next/models_generated/organization_join_invitations.gen.go b/next/models_generated/organization_join_invitations.gen.go new file mode 100644 index 000000000..5080deabb --- /dev/null +++ b/next/models_generated/organization_join_invitations.gen.go @@ -0,0 +1,408 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newOrganizationJoinInvitation(db *gorm.DB, opts ...gen.DOOption) organizationJoinInvitation { + _organizationJoinInvitation := organizationJoinInvitation{} + + _organizationJoinInvitation.organizationJoinInvitationDo.UseDB(db, opts...) 
+ _organizationJoinInvitation.organizationJoinInvitationDo.UseModel(&model.OrganizationJoinInvitation{}) + + tableName := _organizationJoinInvitation.organizationJoinInvitationDo.TableName() + _organizationJoinInvitation.ALL = field.NewAsterisk(tableName) + _organizationJoinInvitation.CreatedAt = field.NewTime(tableName, "created_at") + _organizationJoinInvitation.InviterUserID = field.NewString(tableName, "inviter_user_id") + _organizationJoinInvitation.Status = field.NewString(tableName, "status") + _organizationJoinInvitation.ID = field.NewString(tableName, "id") + _organizationJoinInvitation.InviteeUserEmail = field.NewString(tableName, "invitee_user_email") + _organizationJoinInvitation.OrganizationID = field.NewString(tableName, "organization_id") + _organizationJoinInvitation.InviteeOrganizationRole = field.NewString(tableName, "invitee_organization_role") + _organizationJoinInvitation.InviteeUserID = field.NewString(tableName, "invitee_user_id") + + _organizationJoinInvitation.fillFieldMap() + + return _organizationJoinInvitation +} + +type organizationJoinInvitation struct { + organizationJoinInvitationDo + + ALL field.Asterisk + CreatedAt field.Time + InviterUserID field.String + Status field.String + ID field.String + InviteeUserEmail field.String + OrganizationID field.String + InviteeOrganizationRole field.String + InviteeUserID field.String + + fieldMap map[string]field.Expr +} + +func (o organizationJoinInvitation) Table(newTableName string) *organizationJoinInvitation { + o.organizationJoinInvitationDo.UseTable(newTableName) + return o.updateTableName(newTableName) +} + +func (o organizationJoinInvitation) As(alias string) *organizationJoinInvitation { + o.organizationJoinInvitationDo.DO = *(o.organizationJoinInvitationDo.As(alias).(*gen.DO)) + return o.updateTableName(alias) +} + +func (o *organizationJoinInvitation) updateTableName(table string) *organizationJoinInvitation { + o.ALL = field.NewAsterisk(table) + o.CreatedAt = field.NewTime(table, 
"created_at") + o.InviterUserID = field.NewString(table, "inviter_user_id") + o.Status = field.NewString(table, "status") + o.ID = field.NewString(table, "id") + o.InviteeUserEmail = field.NewString(table, "invitee_user_email") + o.OrganizationID = field.NewString(table, "organization_id") + o.InviteeOrganizationRole = field.NewString(table, "invitee_organization_role") + o.InviteeUserID = field.NewString(table, "invitee_user_id") + + o.fillFieldMap() + + return o +} + +func (o *organizationJoinInvitation) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := o.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (o *organizationJoinInvitation) fillFieldMap() { + o.fieldMap = make(map[string]field.Expr, 8) + o.fieldMap["created_at"] = o.CreatedAt + o.fieldMap["inviter_user_id"] = o.InviterUserID + o.fieldMap["status"] = o.Status + o.fieldMap["id"] = o.ID + o.fieldMap["invitee_user_email"] = o.InviteeUserEmail + o.fieldMap["organization_id"] = o.OrganizationID + o.fieldMap["invitee_organization_role"] = o.InviteeOrganizationRole + o.fieldMap["invitee_user_id"] = o.InviteeUserID +} + +func (o organizationJoinInvitation) clone(db *gorm.DB) organizationJoinInvitation { + o.organizationJoinInvitationDo.ReplaceConnPool(db.Statement.ConnPool) + return o +} + +func (o organizationJoinInvitation) replaceDB(db *gorm.DB) organizationJoinInvitation { + o.organizationJoinInvitationDo.ReplaceDB(db) + return o +} + +type organizationJoinInvitationDo struct{ gen.DO } + +type IOrganizationJoinInvitationDo interface { + gen.SubQuery + Debug() IOrganizationJoinInvitationDo + WithContext(ctx context.Context) IOrganizationJoinInvitationDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IOrganizationJoinInvitationDo + WriteDB() IOrganizationJoinInvitationDo + As(alias string) gen.Dao + Session(config *gorm.Session) IOrganizationJoinInvitationDo + 
Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IOrganizationJoinInvitationDo + Not(conds ...gen.Condition) IOrganizationJoinInvitationDo + Or(conds ...gen.Condition) IOrganizationJoinInvitationDo + Select(conds ...field.Expr) IOrganizationJoinInvitationDo + Where(conds ...gen.Condition) IOrganizationJoinInvitationDo + Order(conds ...field.Expr) IOrganizationJoinInvitationDo + Distinct(cols ...field.Expr) IOrganizationJoinInvitationDo + Omit(cols ...field.Expr) IOrganizationJoinInvitationDo + Join(table schema.Tabler, on ...field.Expr) IOrganizationJoinInvitationDo + LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationJoinInvitationDo + RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationJoinInvitationDo + Group(cols ...field.Expr) IOrganizationJoinInvitationDo + Having(conds ...gen.Condition) IOrganizationJoinInvitationDo + Limit(limit int) IOrganizationJoinInvitationDo + Offset(offset int) IOrganizationJoinInvitationDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationJoinInvitationDo + Unscoped() IOrganizationJoinInvitationDo + Create(values ...*model.OrganizationJoinInvitation) error + CreateInBatches(values []*model.OrganizationJoinInvitation, batchSize int) error + Save(values ...*model.OrganizationJoinInvitation) error + First() (*model.OrganizationJoinInvitation, error) + Take() (*model.OrganizationJoinInvitation, error) + Last() (*model.OrganizationJoinInvitation, error) + Find() ([]*model.OrganizationJoinInvitation, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationJoinInvitation, err error) + FindInBatches(result *[]*model.OrganizationJoinInvitation, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.OrganizationJoinInvitation) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) 
+ UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IOrganizationJoinInvitationDo + Assign(attrs ...field.AssignExpr) IOrganizationJoinInvitationDo + Joins(fields ...field.RelationField) IOrganizationJoinInvitationDo + Preload(fields ...field.RelationField) IOrganizationJoinInvitationDo + FirstOrInit() (*model.OrganizationJoinInvitation, error) + FirstOrCreate() (*model.OrganizationJoinInvitation, error) + FindByPage(offset int, limit int) (result []*model.OrganizationJoinInvitation, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IOrganizationJoinInvitationDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (o organizationJoinInvitationDo) Debug() IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Debug()) +} + +func (o organizationJoinInvitationDo) WithContext(ctx context.Context) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.WithContext(ctx)) +} + +func (o organizationJoinInvitationDo) ReadDB() IOrganizationJoinInvitationDo { + return o.Clauses(dbresolver.Read) +} + +func (o organizationJoinInvitationDo) WriteDB() IOrganizationJoinInvitationDo { + return o.Clauses(dbresolver.Write) +} + +func (o organizationJoinInvitationDo) Session(config *gorm.Session) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Session(config)) +} + +func (o organizationJoinInvitationDo) Clauses(conds ...clause.Expression) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Clauses(conds...)) +} + +func (o 
organizationJoinInvitationDo) Returning(value interface{}, columns ...string) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Returning(value, columns...)) +} + +func (o organizationJoinInvitationDo) Not(conds ...gen.Condition) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Not(conds...)) +} + +func (o organizationJoinInvitationDo) Or(conds ...gen.Condition) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Or(conds...)) +} + +func (o organizationJoinInvitationDo) Select(conds ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Select(conds...)) +} + +func (o organizationJoinInvitationDo) Where(conds ...gen.Condition) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Where(conds...)) +} + +func (o organizationJoinInvitationDo) Order(conds ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Order(conds...)) +} + +func (o organizationJoinInvitationDo) Distinct(cols ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Distinct(cols...)) +} + +func (o organizationJoinInvitationDo) Omit(cols ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Omit(cols...)) +} + +func (o organizationJoinInvitationDo) Join(table schema.Tabler, on ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Join(table, on...)) +} + +func (o organizationJoinInvitationDo) LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.LeftJoin(table, on...)) +} + +func (o organizationJoinInvitationDo) RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.RightJoin(table, on...)) +} + +func (o organizationJoinInvitationDo) Group(cols ...field.Expr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Group(cols...)) +} + +func (o organizationJoinInvitationDo) Having(conds ...gen.Condition) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Having(conds...)) +} + +func (o 
organizationJoinInvitationDo) Limit(limit int) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Limit(limit)) +} + +func (o organizationJoinInvitationDo) Offset(offset int) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Offset(offset)) +} + +func (o organizationJoinInvitationDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Scopes(funcs...)) +} + +func (o organizationJoinInvitationDo) Unscoped() IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Unscoped()) +} + +func (o organizationJoinInvitationDo) Create(values ...*model.OrganizationJoinInvitation) error { + if len(values) == 0 { + return nil + } + return o.DO.Create(values) +} + +func (o organizationJoinInvitationDo) CreateInBatches(values []*model.OrganizationJoinInvitation, batchSize int) error { + return o.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (o organizationJoinInvitationDo) Save(values ...*model.OrganizationJoinInvitation) error { + if len(values) == 0 { + return nil + } + return o.DO.Save(values) +} + +func (o organizationJoinInvitationDo) First() (*model.OrganizationJoinInvitation, error) { + if result, err := o.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationJoinInvitation), nil + } +} + +func (o organizationJoinInvitationDo) Take() (*model.OrganizationJoinInvitation, error) { + if result, err := o.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationJoinInvitation), nil + } +} + +func (o organizationJoinInvitationDo) Last() (*model.OrganizationJoinInvitation, error) { + if result, err := o.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationJoinInvitation), nil + } +} + +func (o organizationJoinInvitationDo) Find() 
([]*model.OrganizationJoinInvitation, error) { + result, err := o.DO.Find() + return result.([]*model.OrganizationJoinInvitation), err +} + +func (o organizationJoinInvitationDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationJoinInvitation, err error) { + buf := make([]*model.OrganizationJoinInvitation, 0, batchSize) + err = o.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) }() + return fc(tx, batch) + }) + return results, err +} + +func (o organizationJoinInvitationDo) FindInBatches(result *[]*model.OrganizationJoinInvitation, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return o.DO.FindInBatches(result, batchSize, fc) +} + +func (o organizationJoinInvitationDo) Attrs(attrs ...field.AssignExpr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Attrs(attrs...)) +} + +func (o organizationJoinInvitationDo) Assign(attrs ...field.AssignExpr) IOrganizationJoinInvitationDo { + return o.withDO(o.DO.Assign(attrs...)) +} + +func (o organizationJoinInvitationDo) Joins(fields ...field.RelationField) IOrganizationJoinInvitationDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Joins(_f)) + } + return &o +} + +func (o organizationJoinInvitationDo) Preload(fields ...field.RelationField) IOrganizationJoinInvitationDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Preload(_f)) + } + return &o +} + +func (o organizationJoinInvitationDo) FirstOrInit() (*model.OrganizationJoinInvitation, error) { + if result, err := o.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationJoinInvitation), nil + } +} + +func (o organizationJoinInvitationDo) FirstOrCreate() (*model.OrganizationJoinInvitation, error) { + if result, err := o.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationJoinInvitation), nil + } +} + +func (o organizationJoinInvitationDo) 
FindByPage(offset int, limit int) (result []*model.OrganizationJoinInvitation, count int64, err error) { + result, err = o.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = o.Offset(-1).Limit(-1).Count() + return +} + +func (o organizationJoinInvitationDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = o.Count() + if err != nil { + return + } + + err = o.Offset(offset).Limit(limit).Scan(result) + return +} + +func (o organizationJoinInvitationDo) Scan(result interface{}) (err error) { + return o.DO.Scan(result) +} + +func (o organizationJoinInvitationDo) Delete(models ...*model.OrganizationJoinInvitation) (result gen.ResultInfo, err error) { + return o.DO.Delete(models) +} + +func (o *organizationJoinInvitationDo) withDO(do gen.Dao) *organizationJoinInvitationDo { + o.DO = *do.(*gen.DO) + return o +} diff --git a/next/models_generated/organization_members.gen.go b/next/models_generated/organization_members.gen.go new file mode 100644 index 000000000..144d58ff4 --- /dev/null +++ b/next/models_generated/organization_members.gen.go @@ -0,0 +1,396 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newOrganizationMember(db *gorm.DB, opts ...gen.DOOption) organizationMember { + _organizationMember := organizationMember{} + + _organizationMember.organizationMemberDo.UseDB(db, opts...) 
+ _organizationMember.organizationMemberDo.UseModel(&model.OrganizationMember{}) + + tableName := _organizationMember.organizationMemberDo.TableName() + _organizationMember.ALL = field.NewAsterisk(tableName) + _organizationMember.ID = field.NewInt64(tableName, "id") + _organizationMember.CreatedAt = field.NewTime(tableName, "created_at") + _organizationMember.MemberID = field.NewString(tableName, "member_id") + _organizationMember.MemberRole = field.NewString(tableName, "member_role") + _organizationMember.OrganizationID = field.NewString(tableName, "organization_id") + + _organizationMember.fillFieldMap() + + return _organizationMember +} + +type organizationMember struct { + organizationMemberDo + + ALL field.Asterisk + ID field.Int64 + CreatedAt field.Time + MemberID field.String + MemberRole field.String + OrganizationID field.String + + fieldMap map[string]field.Expr +} + +func (o organizationMember) Table(newTableName string) *organizationMember { + o.organizationMemberDo.UseTable(newTableName) + return o.updateTableName(newTableName) +} + +func (o organizationMember) As(alias string) *organizationMember { + o.organizationMemberDo.DO = *(o.organizationMemberDo.As(alias).(*gen.DO)) + return o.updateTableName(alias) +} + +func (o *organizationMember) updateTableName(table string) *organizationMember { + o.ALL = field.NewAsterisk(table) + o.ID = field.NewInt64(table, "id") + o.CreatedAt = field.NewTime(table, "created_at") + o.MemberID = field.NewString(table, "member_id") + o.MemberRole = field.NewString(table, "member_role") + o.OrganizationID = field.NewString(table, "organization_id") + + o.fillFieldMap() + + return o +} + +func (o *organizationMember) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := o.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (o *organizationMember) fillFieldMap() { + o.fieldMap = make(map[string]field.Expr, 5) + o.fieldMap["id"] = 
o.ID + o.fieldMap["created_at"] = o.CreatedAt + o.fieldMap["member_id"] = o.MemberID + o.fieldMap["member_role"] = o.MemberRole + o.fieldMap["organization_id"] = o.OrganizationID +} + +func (o organizationMember) clone(db *gorm.DB) organizationMember { + o.organizationMemberDo.ReplaceConnPool(db.Statement.ConnPool) + return o +} + +func (o organizationMember) replaceDB(db *gorm.DB) organizationMember { + o.organizationMemberDo.ReplaceDB(db) + return o +} + +type organizationMemberDo struct{ gen.DO } + +type IOrganizationMemberDo interface { + gen.SubQuery + Debug() IOrganizationMemberDo + WithContext(ctx context.Context) IOrganizationMemberDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IOrganizationMemberDo + WriteDB() IOrganizationMemberDo + As(alias string) gen.Dao + Session(config *gorm.Session) IOrganizationMemberDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IOrganizationMemberDo + Not(conds ...gen.Condition) IOrganizationMemberDo + Or(conds ...gen.Condition) IOrganizationMemberDo + Select(conds ...field.Expr) IOrganizationMemberDo + Where(conds ...gen.Condition) IOrganizationMemberDo + Order(conds ...field.Expr) IOrganizationMemberDo + Distinct(cols ...field.Expr) IOrganizationMemberDo + Omit(cols ...field.Expr) IOrganizationMemberDo + Join(table schema.Tabler, on ...field.Expr) IOrganizationMemberDo + LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationMemberDo + RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationMemberDo + Group(cols ...field.Expr) IOrganizationMemberDo + Having(conds ...gen.Condition) IOrganizationMemberDo + Limit(limit int) IOrganizationMemberDo + Offset(offset int) IOrganizationMemberDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationMemberDo + Unscoped() IOrganizationMemberDo + Create(values ...*model.OrganizationMember) error + CreateInBatches(values []*model.OrganizationMember, batchSize int) error 
+ Save(values ...*model.OrganizationMember) error + First() (*model.OrganizationMember, error) + Take() (*model.OrganizationMember, error) + Last() (*model.OrganizationMember, error) + Find() ([]*model.OrganizationMember, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationMember, err error) + FindInBatches(result *[]*model.OrganizationMember, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.OrganizationMember) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IOrganizationMemberDo + Assign(attrs ...field.AssignExpr) IOrganizationMemberDo + Joins(fields ...field.RelationField) IOrganizationMemberDo + Preload(fields ...field.RelationField) IOrganizationMemberDo + FirstOrInit() (*model.OrganizationMember, error) + FirstOrCreate() (*model.OrganizationMember, error) + FindByPage(offset int, limit int) (result []*model.OrganizationMember, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IOrganizationMemberDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (o organizationMemberDo) Debug() IOrganizationMemberDo { + return o.withDO(o.DO.Debug()) +} + +func (o organizationMemberDo) WithContext(ctx context.Context) IOrganizationMemberDo { + return o.withDO(o.DO.WithContext(ctx)) +} + +func (o 
organizationMemberDo) ReadDB() IOrganizationMemberDo { + return o.Clauses(dbresolver.Read) +} + +func (o organizationMemberDo) WriteDB() IOrganizationMemberDo { + return o.Clauses(dbresolver.Write) +} + +func (o organizationMemberDo) Session(config *gorm.Session) IOrganizationMemberDo { + return o.withDO(o.DO.Session(config)) +} + +func (o organizationMemberDo) Clauses(conds ...clause.Expression) IOrganizationMemberDo { + return o.withDO(o.DO.Clauses(conds...)) +} + +func (o organizationMemberDo) Returning(value interface{}, columns ...string) IOrganizationMemberDo { + return o.withDO(o.DO.Returning(value, columns...)) +} + +func (o organizationMemberDo) Not(conds ...gen.Condition) IOrganizationMemberDo { + return o.withDO(o.DO.Not(conds...)) +} + +func (o organizationMemberDo) Or(conds ...gen.Condition) IOrganizationMemberDo { + return o.withDO(o.DO.Or(conds...)) +} + +func (o organizationMemberDo) Select(conds ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.Select(conds...)) +} + +func (o organizationMemberDo) Where(conds ...gen.Condition) IOrganizationMemberDo { + return o.withDO(o.DO.Where(conds...)) +} + +func (o organizationMemberDo) Order(conds ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.Order(conds...)) +} + +func (o organizationMemberDo) Distinct(cols ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.Distinct(cols...)) +} + +func (o organizationMemberDo) Omit(cols ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.Omit(cols...)) +} + +func (o organizationMemberDo) Join(table schema.Tabler, on ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.Join(table, on...)) +} + +func (o organizationMemberDo) LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.LeftJoin(table, on...)) +} + +func (o organizationMemberDo) RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.RightJoin(table, on...)) +} + +func (o 
organizationMemberDo) Group(cols ...field.Expr) IOrganizationMemberDo { + return o.withDO(o.DO.Group(cols...)) +} + +func (o organizationMemberDo) Having(conds ...gen.Condition) IOrganizationMemberDo { + return o.withDO(o.DO.Having(conds...)) +} + +func (o organizationMemberDo) Limit(limit int) IOrganizationMemberDo { + return o.withDO(o.DO.Limit(limit)) +} + +func (o organizationMemberDo) Offset(offset int) IOrganizationMemberDo { + return o.withDO(o.DO.Offset(offset)) +} + +func (o organizationMemberDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationMemberDo { + return o.withDO(o.DO.Scopes(funcs...)) +} + +func (o organizationMemberDo) Unscoped() IOrganizationMemberDo { + return o.withDO(o.DO.Unscoped()) +} + +func (o organizationMemberDo) Create(values ...*model.OrganizationMember) error { + if len(values) == 0 { + return nil + } + return o.DO.Create(values) +} + +func (o organizationMemberDo) CreateInBatches(values []*model.OrganizationMember, batchSize int) error { + return o.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (o organizationMemberDo) Save(values ...*model.OrganizationMember) error { + if len(values) == 0 { + return nil + } + return o.DO.Save(values) +} + +func (o organizationMemberDo) First() (*model.OrganizationMember, error) { + if result, err := o.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationMember), nil + } +} + +func (o organizationMemberDo) Take() (*model.OrganizationMember, error) { + if result, err := o.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationMember), nil + } +} + +func (o organizationMemberDo) Last() (*model.OrganizationMember, error) { + if result, err := o.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationMember), nil + } +} + +func (o organizationMemberDo) Find() ([]*model.OrganizationMember, error) { + result, err := o.DO.Find() + return result.([]*model.OrganizationMember), err +} + +func (o organizationMemberDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationMember, err error) { + buf := make([]*model.OrganizationMember, 0, batchSize) + err = o.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (o organizationMemberDo) FindInBatches(result *[]*model.OrganizationMember, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return o.DO.FindInBatches(result, batchSize, fc) +} + +func (o organizationMemberDo) Attrs(attrs ...field.AssignExpr) IOrganizationMemberDo { + return o.withDO(o.DO.Attrs(attrs...)) +} + +func (o organizationMemberDo) Assign(attrs ...field.AssignExpr) IOrganizationMemberDo { + return o.withDO(o.DO.Assign(attrs...)) +} + +func (o organizationMemberDo) Joins(fields ...field.RelationField) IOrganizationMemberDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Joins(_f)) + } + return &o +} + +func (o organizationMemberDo) Preload(fields ...field.RelationField) IOrganizationMemberDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Preload(_f)) + } + return &o +} + +func (o organizationMemberDo) FirstOrInit() (*model.OrganizationMember, error) { + if result, err := o.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationMember), nil + } +} + +func (o organizationMemberDo) FirstOrCreate() (*model.OrganizationMember, error) { + if result, err := o.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationMember), nil + } +} + +func (o organizationMemberDo) FindByPage(offset int, limit int) (result []*model.OrganizationMember, count int64, err error) { + result, err = o.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = o.Offset(-1).Limit(-1).Count() + return +} + +func (o organizationMemberDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = o.Count() + if err != nil { + return + } + + err = o.Offset(offset).Limit(limit).Scan(result) + return +} + +func (o organizationMemberDo) Scan(result 
interface{}) (err error) { + return o.DO.Scan(result) +} + +func (o organizationMemberDo) Delete(models ...*model.OrganizationMember) (result gen.ResultInfo, err error) { + return o.DO.Delete(models) +} + +func (o *organizationMemberDo) withDO(do gen.Dao) *organizationMemberDo { + o.DO = *do.(*gen.DO) + return o +} diff --git a/next/models_generated/organizations.gen.go b/next/models_generated/organizations.gen.go new file mode 100644 index 000000000..37a23a412 --- /dev/null +++ b/next/models_generated/organizations.gen.go @@ -0,0 +1,392 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newOrganization(db *gorm.DB, opts ...gen.DOOption) organization { + _organization := organization{} + + _organization.organizationDo.UseDB(db, opts...) 
+ _organization.organizationDo.UseModel(&model.Organization{}) + + tableName := _organization.organizationDo.TableName() + _organization.ALL = field.NewAsterisk(tableName) + _organization.CreatedAt = field.NewTime(tableName, "created_at") + _organization.ID = field.NewString(tableName, "id") + _organization.Title = field.NewString(tableName, "title") + _organization.Slug = field.NewString(tableName, "slug") + + _organization.fillFieldMap() + + return _organization +} + +type organization struct { + organizationDo + + ALL field.Asterisk + CreatedAt field.Time + ID field.String + Title field.String + Slug field.String + + fieldMap map[string]field.Expr +} + +func (o organization) Table(newTableName string) *organization { + o.organizationDo.UseTable(newTableName) + return o.updateTableName(newTableName) +} + +func (o organization) As(alias string) *organization { + o.organizationDo.DO = *(o.organizationDo.As(alias).(*gen.DO)) + return o.updateTableName(alias) +} + +func (o *organization) updateTableName(table string) *organization { + o.ALL = field.NewAsterisk(table) + o.CreatedAt = field.NewTime(table, "created_at") + o.ID = field.NewString(table, "id") + o.Title = field.NewString(table, "title") + o.Slug = field.NewString(table, "slug") + + o.fillFieldMap() + + return o +} + +func (o *organization) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := o.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (o *organization) fillFieldMap() { + o.fieldMap = make(map[string]field.Expr, 4) + o.fieldMap["created_at"] = o.CreatedAt + o.fieldMap["id"] = o.ID + o.fieldMap["title"] = o.Title + o.fieldMap["slug"] = o.Slug +} + +func (o organization) clone(db *gorm.DB) organization { + o.organizationDo.ReplaceConnPool(db.Statement.ConnPool) + return o +} + +func (o organization) replaceDB(db *gorm.DB) organization { + o.organizationDo.ReplaceDB(db) + return o +} + +type organizationDo 
struct{ gen.DO } + +type IOrganizationDo interface { + gen.SubQuery + Debug() IOrganizationDo + WithContext(ctx context.Context) IOrganizationDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IOrganizationDo + WriteDB() IOrganizationDo + As(alias string) gen.Dao + Session(config *gorm.Session) IOrganizationDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IOrganizationDo + Not(conds ...gen.Condition) IOrganizationDo + Or(conds ...gen.Condition) IOrganizationDo + Select(conds ...field.Expr) IOrganizationDo + Where(conds ...gen.Condition) IOrganizationDo + Order(conds ...field.Expr) IOrganizationDo + Distinct(cols ...field.Expr) IOrganizationDo + Omit(cols ...field.Expr) IOrganizationDo + Join(table schema.Tabler, on ...field.Expr) IOrganizationDo + LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationDo + RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationDo + Group(cols ...field.Expr) IOrganizationDo + Having(conds ...gen.Condition) IOrganizationDo + Limit(limit int) IOrganizationDo + Offset(offset int) IOrganizationDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationDo + Unscoped() IOrganizationDo + Create(values ...*model.Organization) error + CreateInBatches(values []*model.Organization, batchSize int) error + Save(values ...*model.Organization) error + First() (*model.Organization, error) + Take() (*model.Organization, error) + Last() (*model.Organization, error) + Find() ([]*model.Organization, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Organization, err error) + FindInBatches(result *[]*model.Organization, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Organization) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns 
...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IOrganizationDo + Assign(attrs ...field.AssignExpr) IOrganizationDo + Joins(fields ...field.RelationField) IOrganizationDo + Preload(fields ...field.RelationField) IOrganizationDo + FirstOrInit() (*model.Organization, error) + FirstOrCreate() (*model.Organization, error) + FindByPage(offset int, limit int) (result []*model.Organization, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IOrganizationDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (o organizationDo) Debug() IOrganizationDo { + return o.withDO(o.DO.Debug()) +} + +func (o organizationDo) WithContext(ctx context.Context) IOrganizationDo { + return o.withDO(o.DO.WithContext(ctx)) +} + +func (o organizationDo) ReadDB() IOrganizationDo { + return o.Clauses(dbresolver.Read) +} + +func (o organizationDo) WriteDB() IOrganizationDo { + return o.Clauses(dbresolver.Write) +} + +func (o organizationDo) Session(config *gorm.Session) IOrganizationDo { + return o.withDO(o.DO.Session(config)) +} + +func (o organizationDo) Clauses(conds ...clause.Expression) IOrganizationDo { + return o.withDO(o.DO.Clauses(conds...)) +} + +func (o organizationDo) Returning(value interface{}, columns ...string) IOrganizationDo { + return o.withDO(o.DO.Returning(value, columns...)) +} + +func (o organizationDo) Not(conds ...gen.Condition) IOrganizationDo { + return o.withDO(o.DO.Not(conds...)) +} + +func (o organizationDo) Or(conds ...gen.Condition) IOrganizationDo { + 
return o.withDO(o.DO.Or(conds...)) +} + +func (o organizationDo) Select(conds ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.Select(conds...)) +} + +func (o organizationDo) Where(conds ...gen.Condition) IOrganizationDo { + return o.withDO(o.DO.Where(conds...)) +} + +func (o organizationDo) Order(conds ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.Order(conds...)) +} + +func (o organizationDo) Distinct(cols ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.Distinct(cols...)) +} + +func (o organizationDo) Omit(cols ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.Omit(cols...)) +} + +func (o organizationDo) Join(table schema.Tabler, on ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.Join(table, on...)) +} + +func (o organizationDo) LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.LeftJoin(table, on...)) +} + +func (o organizationDo) RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.RightJoin(table, on...)) +} + +func (o organizationDo) Group(cols ...field.Expr) IOrganizationDo { + return o.withDO(o.DO.Group(cols...)) +} + +func (o organizationDo) Having(conds ...gen.Condition) IOrganizationDo { + return o.withDO(o.DO.Having(conds...)) +} + +func (o organizationDo) Limit(limit int) IOrganizationDo { + return o.withDO(o.DO.Limit(limit)) +} + +func (o organizationDo) Offset(offset int) IOrganizationDo { + return o.withDO(o.DO.Offset(offset)) +} + +func (o organizationDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationDo { + return o.withDO(o.DO.Scopes(funcs...)) +} + +func (o organizationDo) Unscoped() IOrganizationDo { + return o.withDO(o.DO.Unscoped()) +} + +func (o organizationDo) Create(values ...*model.Organization) error { + if len(values) == 0 { + return nil + } + return o.DO.Create(values) +} + +func (o organizationDo) CreateInBatches(values []*model.Organization, batchSize int) error { + return o.DO.CreateInBatches(values, batchSize) +} + 
+// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (o organizationDo) Save(values ...*model.Organization) error { + if len(values) == 0 { + return nil + } + return o.DO.Save(values) +} + +func (o organizationDo) First() (*model.Organization, error) { + if result, err := o.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Organization), nil + } +} + +func (o organizationDo) Take() (*model.Organization, error) { + if result, err := o.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Organization), nil + } +} + +func (o organizationDo) Last() (*model.Organization, error) { + if result, err := o.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Organization), nil + } +} + +func (o organizationDo) Find() ([]*model.Organization, error) { + result, err := o.DO.Find() + return result.([]*model.Organization), err +} + +func (o organizationDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Organization, err error) { + buf := make([]*model.Organization, 0, batchSize) + err = o.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (o organizationDo) FindInBatches(result *[]*model.Organization, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return o.DO.FindInBatches(result, batchSize, fc) +} + +func (o organizationDo) Attrs(attrs ...field.AssignExpr) IOrganizationDo { + return o.withDO(o.DO.Attrs(attrs...)) +} + +func (o organizationDo) Assign(attrs ...field.AssignExpr) IOrganizationDo { + return o.withDO(o.DO.Assign(attrs...)) +} + +func (o organizationDo) Joins(fields ...field.RelationField) IOrganizationDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Joins(_f)) + } + return &o +} + +func (o organizationDo) Preload(fields ...field.RelationField) IOrganizationDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Preload(_f)) + } + return &o +} + +func (o organizationDo) FirstOrInit() (*model.Organization, error) { + if result, err := o.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Organization), nil + } +} + +func (o organizationDo) FirstOrCreate() (*model.Organization, error) { + if result, err := o.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Organization), nil + } +} + +func (o organizationDo) FindByPage(offset int, limit int) (result []*model.Organization, count int64, err error) { + result, err = o.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = o.Offset(-1).Limit(-1).Count() + return +} + +func (o organizationDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = o.Count() + if err != nil { + return + } + + err = o.Offset(offset).Limit(limit).Scan(result) + return +} + +func (o organizationDo) Scan(result interface{}) (err error) { + return o.DO.Scan(result) +} + +func (o organizationDo) Delete(models ...*model.Organization) 
(result gen.ResultInfo, err error) { + return o.DO.Delete(models) +} + +func (o *organizationDo) withDO(do gen.Dao) *organizationDo { + o.DO = *do.(*gen.DO) + return o +} diff --git a/next/models_generated/organizations_private_info.gen.go b/next/models_generated/organizations_private_info.gen.go new file mode 100644 index 000000000..a54662896 --- /dev/null +++ b/next/models_generated/organizations_private_info.gen.go @@ -0,0 +1,388 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newOrganizationsPrivateInfo(db *gorm.DB, opts ...gen.DOOption) organizationsPrivateInfo { + _organizationsPrivateInfo := organizationsPrivateInfo{} + + _organizationsPrivateInfo.organizationsPrivateInfoDo.UseDB(db, opts...) 
+ _organizationsPrivateInfo.organizationsPrivateInfoDo.UseModel(&model.OrganizationsPrivateInfo{}) + + tableName := _organizationsPrivateInfo.organizationsPrivateInfoDo.TableName() + _organizationsPrivateInfo.ALL = field.NewAsterisk(tableName) + _organizationsPrivateInfo.ID = field.NewString(tableName, "id") + _organizationsPrivateInfo.BillingAddress = field.NewString(tableName, "billing_address") + _organizationsPrivateInfo.PaymentMethod = field.NewString(tableName, "payment_method") + + _organizationsPrivateInfo.fillFieldMap() + + return _organizationsPrivateInfo +} + +type organizationsPrivateInfo struct { + organizationsPrivateInfoDo + + ALL field.Asterisk + ID field.String + BillingAddress field.String + PaymentMethod field.String + + fieldMap map[string]field.Expr +} + +func (o organizationsPrivateInfo) Table(newTableName string) *organizationsPrivateInfo { + o.organizationsPrivateInfoDo.UseTable(newTableName) + return o.updateTableName(newTableName) +} + +func (o organizationsPrivateInfo) As(alias string) *organizationsPrivateInfo { + o.organizationsPrivateInfoDo.DO = *(o.organizationsPrivateInfoDo.As(alias).(*gen.DO)) + return o.updateTableName(alias) +} + +func (o *organizationsPrivateInfo) updateTableName(table string) *organizationsPrivateInfo { + o.ALL = field.NewAsterisk(table) + o.ID = field.NewString(table, "id") + o.BillingAddress = field.NewString(table, "billing_address") + o.PaymentMethod = field.NewString(table, "payment_method") + + o.fillFieldMap() + + return o +} + +func (o *organizationsPrivateInfo) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := o.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (o *organizationsPrivateInfo) fillFieldMap() { + o.fieldMap = make(map[string]field.Expr, 3) + o.fieldMap["id"] = o.ID + o.fieldMap["billing_address"] = o.BillingAddress + o.fieldMap["payment_method"] = o.PaymentMethod +} + +func (o 
organizationsPrivateInfo) clone(db *gorm.DB) organizationsPrivateInfo { + o.organizationsPrivateInfoDo.ReplaceConnPool(db.Statement.ConnPool) + return o +} + +func (o organizationsPrivateInfo) replaceDB(db *gorm.DB) organizationsPrivateInfo { + o.organizationsPrivateInfoDo.ReplaceDB(db) + return o +} + +type organizationsPrivateInfoDo struct{ gen.DO } + +type IOrganizationsPrivateInfoDo interface { + gen.SubQuery + Debug() IOrganizationsPrivateInfoDo + WithContext(ctx context.Context) IOrganizationsPrivateInfoDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IOrganizationsPrivateInfoDo + WriteDB() IOrganizationsPrivateInfoDo + As(alias string) gen.Dao + Session(config *gorm.Session) IOrganizationsPrivateInfoDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IOrganizationsPrivateInfoDo + Not(conds ...gen.Condition) IOrganizationsPrivateInfoDo + Or(conds ...gen.Condition) IOrganizationsPrivateInfoDo + Select(conds ...field.Expr) IOrganizationsPrivateInfoDo + Where(conds ...gen.Condition) IOrganizationsPrivateInfoDo + Order(conds ...field.Expr) IOrganizationsPrivateInfoDo + Distinct(cols ...field.Expr) IOrganizationsPrivateInfoDo + Omit(cols ...field.Expr) IOrganizationsPrivateInfoDo + Join(table schema.Tabler, on ...field.Expr) IOrganizationsPrivateInfoDo + LeftJoin(table schema.Tabler, on ...field.Expr) IOrganizationsPrivateInfoDo + RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationsPrivateInfoDo + Group(cols ...field.Expr) IOrganizationsPrivateInfoDo + Having(conds ...gen.Condition) IOrganizationsPrivateInfoDo + Limit(limit int) IOrganizationsPrivateInfoDo + Offset(offset int) IOrganizationsPrivateInfoDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationsPrivateInfoDo + Unscoped() IOrganizationsPrivateInfoDo + Create(values ...*model.OrganizationsPrivateInfo) error + CreateInBatches(values []*model.OrganizationsPrivateInfo, batchSize int) error + 
Save(values ...*model.OrganizationsPrivateInfo) error + First() (*model.OrganizationsPrivateInfo, error) + Take() (*model.OrganizationsPrivateInfo, error) + Last() (*model.OrganizationsPrivateInfo, error) + Find() ([]*model.OrganizationsPrivateInfo, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationsPrivateInfo, err error) + FindInBatches(result *[]*model.OrganizationsPrivateInfo, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.OrganizationsPrivateInfo) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IOrganizationsPrivateInfoDo + Assign(attrs ...field.AssignExpr) IOrganizationsPrivateInfoDo + Joins(fields ...field.RelationField) IOrganizationsPrivateInfoDo + Preload(fields ...field.RelationField) IOrganizationsPrivateInfoDo + FirstOrInit() (*model.OrganizationsPrivateInfo, error) + FirstOrCreate() (*model.OrganizationsPrivateInfo, error) + FindByPage(offset int, limit int) (result []*model.OrganizationsPrivateInfo, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IOrganizationsPrivateInfoDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (o organizationsPrivateInfoDo) Debug() IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Debug()) +} + +func (o organizationsPrivateInfoDo) 
WithContext(ctx context.Context) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.WithContext(ctx)) +} + +func (o organizationsPrivateInfoDo) ReadDB() IOrganizationsPrivateInfoDo { + return o.Clauses(dbresolver.Read) +} + +func (o organizationsPrivateInfoDo) WriteDB() IOrganizationsPrivateInfoDo { + return o.Clauses(dbresolver.Write) +} + +func (o organizationsPrivateInfoDo) Session(config *gorm.Session) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Session(config)) +} + +func (o organizationsPrivateInfoDo) Clauses(conds ...clause.Expression) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Clauses(conds...)) +} + +func (o organizationsPrivateInfoDo) Returning(value interface{}, columns ...string) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Returning(value, columns...)) +} + +func (o organizationsPrivateInfoDo) Not(conds ...gen.Condition) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Not(conds...)) +} + +func (o organizationsPrivateInfoDo) Or(conds ...gen.Condition) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Or(conds...)) +} + +func (o organizationsPrivateInfoDo) Select(conds ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Select(conds...)) +} + +func (o organizationsPrivateInfoDo) Where(conds ...gen.Condition) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Where(conds...)) +} + +func (o organizationsPrivateInfoDo) Order(conds ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Order(conds...)) +} + +func (o organizationsPrivateInfoDo) Distinct(cols ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Distinct(cols...)) +} + +func (o organizationsPrivateInfoDo) Omit(cols ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Omit(cols...)) +} + +func (o organizationsPrivateInfoDo) Join(table schema.Tabler, on ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Join(table, on...)) +} + +func (o organizationsPrivateInfoDo) LeftJoin(table 
schema.Tabler, on ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.LeftJoin(table, on...)) +} + +func (o organizationsPrivateInfoDo) RightJoin(table schema.Tabler, on ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.RightJoin(table, on...)) +} + +func (o organizationsPrivateInfoDo) Group(cols ...field.Expr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Group(cols...)) +} + +func (o organizationsPrivateInfoDo) Having(conds ...gen.Condition) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Having(conds...)) +} + +func (o organizationsPrivateInfoDo) Limit(limit int) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Limit(limit)) +} + +func (o organizationsPrivateInfoDo) Offset(offset int) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Offset(offset)) +} + +func (o organizationsPrivateInfoDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Scopes(funcs...)) +} + +func (o organizationsPrivateInfoDo) Unscoped() IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Unscoped()) +} + +func (o organizationsPrivateInfoDo) Create(values ...*model.OrganizationsPrivateInfo) error { + if len(values) == 0 { + return nil + } + return o.DO.Create(values) +} + +func (o organizationsPrivateInfoDo) CreateInBatches(values []*model.OrganizationsPrivateInfo, batchSize int) error { + return o.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (o organizationsPrivateInfoDo) Save(values ...*model.OrganizationsPrivateInfo) error { + if len(values) == 0 { + return nil + } + return o.DO.Save(values) +} + +func (o organizationsPrivateInfoDo) First() (*model.OrganizationsPrivateInfo, error) { + if result, err := o.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationsPrivateInfo), nil + } +} + +func (o organizationsPrivateInfoDo) Take() (*model.OrganizationsPrivateInfo, error) { + if result, err := o.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationsPrivateInfo), nil + } +} + +func (o organizationsPrivateInfoDo) Last() (*model.OrganizationsPrivateInfo, error) { + if result, err := o.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationsPrivateInfo), nil + } +} + +func (o organizationsPrivateInfoDo) Find() ([]*model.OrganizationsPrivateInfo, error) { + result, err := o.DO.Find() + return result.([]*model.OrganizationsPrivateInfo), err +} + +func (o organizationsPrivateInfoDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.OrganizationsPrivateInfo, err error) { + buf := make([]*model.OrganizationsPrivateInfo, 0, batchSize) + err = o.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (o organizationsPrivateInfoDo) FindInBatches(result *[]*model.OrganizationsPrivateInfo, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return o.DO.FindInBatches(result, batchSize, fc) +} + +func (o organizationsPrivateInfoDo) Attrs(attrs ...field.AssignExpr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Attrs(attrs...)) +} + +func (o organizationsPrivateInfoDo) Assign(attrs ...field.AssignExpr) IOrganizationsPrivateInfoDo { + return o.withDO(o.DO.Assign(attrs...)) +} + +func (o organizationsPrivateInfoDo) Joins(fields ...field.RelationField) IOrganizationsPrivateInfoDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Joins(_f)) + } + return &o +} + +func (o organizationsPrivateInfoDo) Preload(fields ...field.RelationField) IOrganizationsPrivateInfoDo { + for _, _f := range fields { + o = *o.withDO(o.DO.Preload(_f)) + } + return &o +} + +func (o organizationsPrivateInfoDo) FirstOrInit() (*model.OrganizationsPrivateInfo, error) { + if result, err := o.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationsPrivateInfo), nil + } +} + +func (o organizationsPrivateInfoDo) FirstOrCreate() (*model.OrganizationsPrivateInfo, error) { + if result, err := o.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.OrganizationsPrivateInfo), nil + } +} + +func (o organizationsPrivateInfoDo) FindByPage(offset int, limit int) (result []*model.OrganizationsPrivateInfo, count int64, err error) { + result, err = o.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = o.Offset(-1).Limit(-1).Count() + return +} + +func (o organizationsPrivateInfoDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = o.Count() + if err != nil { + return + } + + 
err = o.Offset(offset).Limit(limit).Scan(result) + return +} + +func (o organizationsPrivateInfoDo) Scan(result interface{}) (err error) { + return o.DO.Scan(result) +} + +func (o organizationsPrivateInfoDo) Delete(models ...*model.OrganizationsPrivateInfo) (result gen.ResultInfo, err error) { + return o.DO.Delete(models) +} + +func (o *organizationsPrivateInfoDo) withDO(do gen.Dao) *organizationsPrivateInfoDo { + o.DO = *do.(*gen.DO) + return o +} diff --git a/next/models_generated/prices.gen.go b/next/models_generated/prices.gen.go new file mode 100644 index 000000000..3adb1b16d --- /dev/null +++ b/next/models_generated/prices.gen.go @@ -0,0 +1,420 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newPrice(db *gorm.DB, opts ...gen.DOOption) price { + _price := price{} + + _price.priceDo.UseDB(db, opts...) 
+ _price.priceDo.UseModel(&model.Price{}) + + tableName := _price.priceDo.TableName() + _price.ALL = field.NewAsterisk(tableName) + _price.ID = field.NewString(tableName, "id") + _price.ProductID = field.NewString(tableName, "product_id") + _price.Active = field.NewBool(tableName, "active") + _price.Description = field.NewString(tableName, "description") + _price.UnitAmount = field.NewInt64(tableName, "unit_amount") + _price.Currency = field.NewString(tableName, "currency") + _price.Type = field.NewString(tableName, "type") + _price.Interval = field.NewString(tableName, "interval") + _price.IntervalCount = field.NewInt64(tableName, "interval_count") + _price.TrialPeriodDays = field.NewInt64(tableName, "trial_period_days") + _price.Metadata = field.NewString(tableName, "metadata") + + _price.fillFieldMap() + + return _price +} + +type price struct { + priceDo + + ALL field.Asterisk + ID field.String + ProductID field.String + Active field.Bool + Description field.String + UnitAmount field.Int64 + Currency field.String + Type field.String + Interval field.String + IntervalCount field.Int64 + TrialPeriodDays field.Int64 + Metadata field.String + + fieldMap map[string]field.Expr +} + +func (p price) Table(newTableName string) *price { + p.priceDo.UseTable(newTableName) + return p.updateTableName(newTableName) +} + +func (p price) As(alias string) *price { + p.priceDo.DO = *(p.priceDo.As(alias).(*gen.DO)) + return p.updateTableName(alias) +} + +func (p *price) updateTableName(table string) *price { + p.ALL = field.NewAsterisk(table) + p.ID = field.NewString(table, "id") + p.ProductID = field.NewString(table, "product_id") + p.Active = field.NewBool(table, "active") + p.Description = field.NewString(table, "description") + p.UnitAmount = field.NewInt64(table, "unit_amount") + p.Currency = field.NewString(table, "currency") + p.Type = field.NewString(table, "type") + p.Interval = field.NewString(table, "interval") + p.IntervalCount = field.NewInt64(table, 
"interval_count") + p.TrialPeriodDays = field.NewInt64(table, "trial_period_days") + p.Metadata = field.NewString(table, "metadata") + + p.fillFieldMap() + + return p +} + +func (p *price) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := p.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (p *price) fillFieldMap() { + p.fieldMap = make(map[string]field.Expr, 11) + p.fieldMap["id"] = p.ID + p.fieldMap["product_id"] = p.ProductID + p.fieldMap["active"] = p.Active + p.fieldMap["description"] = p.Description + p.fieldMap["unit_amount"] = p.UnitAmount + p.fieldMap["currency"] = p.Currency + p.fieldMap["type"] = p.Type + p.fieldMap["interval"] = p.Interval + p.fieldMap["interval_count"] = p.IntervalCount + p.fieldMap["trial_period_days"] = p.TrialPeriodDays + p.fieldMap["metadata"] = p.Metadata +} + +func (p price) clone(db *gorm.DB) price { + p.priceDo.ReplaceConnPool(db.Statement.ConnPool) + return p +} + +func (p price) replaceDB(db *gorm.DB) price { + p.priceDo.ReplaceDB(db) + return p +} + +type priceDo struct{ gen.DO } + +type IPriceDo interface { + gen.SubQuery + Debug() IPriceDo + WithContext(ctx context.Context) IPriceDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IPriceDo + WriteDB() IPriceDo + As(alias string) gen.Dao + Session(config *gorm.Session) IPriceDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IPriceDo + Not(conds ...gen.Condition) IPriceDo + Or(conds ...gen.Condition) IPriceDo + Select(conds ...field.Expr) IPriceDo + Where(conds ...gen.Condition) IPriceDo + Order(conds ...field.Expr) IPriceDo + Distinct(cols ...field.Expr) IPriceDo + Omit(cols ...field.Expr) IPriceDo + Join(table schema.Tabler, on ...field.Expr) IPriceDo + LeftJoin(table schema.Tabler, on ...field.Expr) IPriceDo + RightJoin(table schema.Tabler, on ...field.Expr) IPriceDo + Group(cols ...field.Expr) IPriceDo + 
Having(conds ...gen.Condition) IPriceDo + Limit(limit int) IPriceDo + Offset(offset int) IPriceDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IPriceDo + Unscoped() IPriceDo + Create(values ...*model.Price) error + CreateInBatches(values []*model.Price, batchSize int) error + Save(values ...*model.Price) error + First() (*model.Price, error) + Take() (*model.Price, error) + Last() (*model.Price, error) + Find() ([]*model.Price, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Price, err error) + FindInBatches(result *[]*model.Price, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Price) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IPriceDo + Assign(attrs ...field.AssignExpr) IPriceDo + Joins(fields ...field.RelationField) IPriceDo + Preload(fields ...field.RelationField) IPriceDo + FirstOrInit() (*model.Price, error) + FirstOrCreate() (*model.Price, error) + FindByPage(offset int, limit int) (result []*model.Price, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IPriceDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (p priceDo) Debug() IPriceDo { + return p.withDO(p.DO.Debug()) +} + +func (p priceDo) WithContext(ctx context.Context) IPriceDo { + return 
p.withDO(p.DO.WithContext(ctx)) +} + +func (p priceDo) ReadDB() IPriceDo { + return p.Clauses(dbresolver.Read) +} + +func (p priceDo) WriteDB() IPriceDo { + return p.Clauses(dbresolver.Write) +} + +func (p priceDo) Session(config *gorm.Session) IPriceDo { + return p.withDO(p.DO.Session(config)) +} + +func (p priceDo) Clauses(conds ...clause.Expression) IPriceDo { + return p.withDO(p.DO.Clauses(conds...)) +} + +func (p priceDo) Returning(value interface{}, columns ...string) IPriceDo { + return p.withDO(p.DO.Returning(value, columns...)) +} + +func (p priceDo) Not(conds ...gen.Condition) IPriceDo { + return p.withDO(p.DO.Not(conds...)) +} + +func (p priceDo) Or(conds ...gen.Condition) IPriceDo { + return p.withDO(p.DO.Or(conds...)) +} + +func (p priceDo) Select(conds ...field.Expr) IPriceDo { + return p.withDO(p.DO.Select(conds...)) +} + +func (p priceDo) Where(conds ...gen.Condition) IPriceDo { + return p.withDO(p.DO.Where(conds...)) +} + +func (p priceDo) Order(conds ...field.Expr) IPriceDo { + return p.withDO(p.DO.Order(conds...)) +} + +func (p priceDo) Distinct(cols ...field.Expr) IPriceDo { + return p.withDO(p.DO.Distinct(cols...)) +} + +func (p priceDo) Omit(cols ...field.Expr) IPriceDo { + return p.withDO(p.DO.Omit(cols...)) +} + +func (p priceDo) Join(table schema.Tabler, on ...field.Expr) IPriceDo { + return p.withDO(p.DO.Join(table, on...)) +} + +func (p priceDo) LeftJoin(table schema.Tabler, on ...field.Expr) IPriceDo { + return p.withDO(p.DO.LeftJoin(table, on...)) +} + +func (p priceDo) RightJoin(table schema.Tabler, on ...field.Expr) IPriceDo { + return p.withDO(p.DO.RightJoin(table, on...)) +} + +func (p priceDo) Group(cols ...field.Expr) IPriceDo { + return p.withDO(p.DO.Group(cols...)) +} + +func (p priceDo) Having(conds ...gen.Condition) IPriceDo { + return p.withDO(p.DO.Having(conds...)) +} + +func (p priceDo) Limit(limit int) IPriceDo { + return p.withDO(p.DO.Limit(limit)) +} + +func (p priceDo) Offset(offset int) IPriceDo { + return 
p.withDO(p.DO.Offset(offset)) +} + +func (p priceDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IPriceDo { + return p.withDO(p.DO.Scopes(funcs...)) +} + +func (p priceDo) Unscoped() IPriceDo { + return p.withDO(p.DO.Unscoped()) +} + +func (p priceDo) Create(values ...*model.Price) error { + if len(values) == 0 { + return nil + } + return p.DO.Create(values) +} + +func (p priceDo) CreateInBatches(values []*model.Price, batchSize int) error { + return p.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (p priceDo) Save(values ...*model.Price) error { + if len(values) == 0 { + return nil + } + return p.DO.Save(values) +} + +func (p priceDo) First() (*model.Price, error) { + if result, err := p.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Price), nil + } +} + +func (p priceDo) Take() (*model.Price, error) { + if result, err := p.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Price), nil + } +} + +func (p priceDo) Last() (*model.Price, error) { + if result, err := p.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Price), nil + } +} + +func (p priceDo) Find() ([]*model.Price, error) { + result, err := p.DO.Find() + return result.([]*model.Price), err +} + +func (p priceDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Price, err error) { + buf := make([]*model.Price, 0, batchSize) + err = p.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (p priceDo) FindInBatches(result *[]*model.Price, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return p.DO.FindInBatches(result, batchSize, fc) +} + +func (p priceDo) Attrs(attrs ...field.AssignExpr) IPriceDo { + return p.withDO(p.DO.Attrs(attrs...)) +} + +func (p priceDo) Assign(attrs ...field.AssignExpr) IPriceDo { + return p.withDO(p.DO.Assign(attrs...)) +} + +func (p priceDo) Joins(fields ...field.RelationField) IPriceDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Joins(_f)) + } + return &p +} + +func (p priceDo) Preload(fields ...field.RelationField) IPriceDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Preload(_f)) + } + return &p +} + +func (p priceDo) FirstOrInit() (*model.Price, error) { + if result, err := p.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Price), nil + } +} + +func (p priceDo) FirstOrCreate() (*model.Price, error) { + if result, err := p.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Price), nil + } +} + +func (p priceDo) FindByPage(offset int, limit int) (result []*model.Price, count int64, err error) { + result, err = p.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = p.Offset(-1).Limit(-1).Count() + return +} + +func (p priceDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = p.Count() + if err != nil { + return + } + + err = p.Offset(offset).Limit(limit).Scan(result) + return +} + +func (p priceDo) Scan(result interface{}) (err error) { + return p.DO.Scan(result) +} + +func (p priceDo) Delete(models ...*model.Price) (result gen.ResultInfo, err error) { + return p.DO.Delete(models) +} + +func (p *priceDo) withDO(do gen.Dao) *priceDo { + p.DO = *do.(*gen.DO) + return p +} 
diff --git a/next/models_generated/products.gen.go b/next/models_generated/products.gen.go new file mode 100644 index 000000000..b92716cf3 --- /dev/null +++ b/next/models_generated/products.gen.go @@ -0,0 +1,400 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newProduct(db *gorm.DB, opts ...gen.DOOption) product { + _product := product{} + + _product.productDo.UseDB(db, opts...) + _product.productDo.UseModel(&model.Product{}) + + tableName := _product.productDo.TableName() + _product.ALL = field.NewAsterisk(tableName) + _product.ID = field.NewString(tableName, "id") + _product.Active = field.NewBool(tableName, "active") + _product.Name = field.NewString(tableName, "name") + _product.Description = field.NewString(tableName, "description") + _product.Image = field.NewString(tableName, "image") + _product.Metadata = field.NewString(tableName, "metadata") + + _product.fillFieldMap() + + return _product +} + +type product struct { + productDo + + ALL field.Asterisk + ID field.String + Active field.Bool + Name field.String + Description field.String + Image field.String + Metadata field.String + + fieldMap map[string]field.Expr +} + +func (p product) Table(newTableName string) *product { + p.productDo.UseTable(newTableName) + return p.updateTableName(newTableName) +} + +func (p product) As(alias string) *product { + p.productDo.DO = *(p.productDo.As(alias).(*gen.DO)) + return p.updateTableName(alias) +} + +func (p *product) updateTableName(table string) *product { + p.ALL = field.NewAsterisk(table) + p.ID = field.NewString(table, "id") + p.Active = field.NewBool(table, "active") + p.Name = field.NewString(table, "name") + 
p.Description = field.NewString(table, "description") + p.Image = field.NewString(table, "image") + p.Metadata = field.NewString(table, "metadata") + + p.fillFieldMap() + + return p +} + +func (p *product) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := p.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (p *product) fillFieldMap() { + p.fieldMap = make(map[string]field.Expr, 6) + p.fieldMap["id"] = p.ID + p.fieldMap["active"] = p.Active + p.fieldMap["name"] = p.Name + p.fieldMap["description"] = p.Description + p.fieldMap["image"] = p.Image + p.fieldMap["metadata"] = p.Metadata +} + +func (p product) clone(db *gorm.DB) product { + p.productDo.ReplaceConnPool(db.Statement.ConnPool) + return p +} + +func (p product) replaceDB(db *gorm.DB) product { + p.productDo.ReplaceDB(db) + return p +} + +type productDo struct{ gen.DO } + +type IProductDo interface { + gen.SubQuery + Debug() IProductDo + WithContext(ctx context.Context) IProductDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IProductDo + WriteDB() IProductDo + As(alias string) gen.Dao + Session(config *gorm.Session) IProductDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IProductDo + Not(conds ...gen.Condition) IProductDo + Or(conds ...gen.Condition) IProductDo + Select(conds ...field.Expr) IProductDo + Where(conds ...gen.Condition) IProductDo + Order(conds ...field.Expr) IProductDo + Distinct(cols ...field.Expr) IProductDo + Omit(cols ...field.Expr) IProductDo + Join(table schema.Tabler, on ...field.Expr) IProductDo + LeftJoin(table schema.Tabler, on ...field.Expr) IProductDo + RightJoin(table schema.Tabler, on ...field.Expr) IProductDo + Group(cols ...field.Expr) IProductDo + Having(conds ...gen.Condition) IProductDo + Limit(limit int) IProductDo + Offset(offset int) IProductDo + Count() (count int64, err error) + Scopes(funcs 
...func(gen.Dao) gen.Dao) IProductDo + Unscoped() IProductDo + Create(values ...*model.Product) error + CreateInBatches(values []*model.Product, batchSize int) error + Save(values ...*model.Product) error + First() (*model.Product, error) + Take() (*model.Product, error) + Last() (*model.Product, error) + Find() ([]*model.Product, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Product, err error) + FindInBatches(result *[]*model.Product, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Product) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IProductDo + Assign(attrs ...field.AssignExpr) IProductDo + Joins(fields ...field.RelationField) IProductDo + Preload(fields ...field.RelationField) IProductDo + FirstOrInit() (*model.Product, error) + FirstOrCreate() (*model.Product, error) + FindByPage(offset int, limit int) (result []*model.Product, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IProductDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (p productDo) Debug() IProductDo { + return p.withDO(p.DO.Debug()) +} + +func (p productDo) WithContext(ctx context.Context) IProductDo { + return p.withDO(p.DO.WithContext(ctx)) +} + +func (p productDo) ReadDB() IProductDo { + return 
p.Clauses(dbresolver.Read) +} + +func (p productDo) WriteDB() IProductDo { + return p.Clauses(dbresolver.Write) +} + +func (p productDo) Session(config *gorm.Session) IProductDo { + return p.withDO(p.DO.Session(config)) +} + +func (p productDo) Clauses(conds ...clause.Expression) IProductDo { + return p.withDO(p.DO.Clauses(conds...)) +} + +func (p productDo) Returning(value interface{}, columns ...string) IProductDo { + return p.withDO(p.DO.Returning(value, columns...)) +} + +func (p productDo) Not(conds ...gen.Condition) IProductDo { + return p.withDO(p.DO.Not(conds...)) +} + +func (p productDo) Or(conds ...gen.Condition) IProductDo { + return p.withDO(p.DO.Or(conds...)) +} + +func (p productDo) Select(conds ...field.Expr) IProductDo { + return p.withDO(p.DO.Select(conds...)) +} + +func (p productDo) Where(conds ...gen.Condition) IProductDo { + return p.withDO(p.DO.Where(conds...)) +} + +func (p productDo) Order(conds ...field.Expr) IProductDo { + return p.withDO(p.DO.Order(conds...)) +} + +func (p productDo) Distinct(cols ...field.Expr) IProductDo { + return p.withDO(p.DO.Distinct(cols...)) +} + +func (p productDo) Omit(cols ...field.Expr) IProductDo { + return p.withDO(p.DO.Omit(cols...)) +} + +func (p productDo) Join(table schema.Tabler, on ...field.Expr) IProductDo { + return p.withDO(p.DO.Join(table, on...)) +} + +func (p productDo) LeftJoin(table schema.Tabler, on ...field.Expr) IProductDo { + return p.withDO(p.DO.LeftJoin(table, on...)) +} + +func (p productDo) RightJoin(table schema.Tabler, on ...field.Expr) IProductDo { + return p.withDO(p.DO.RightJoin(table, on...)) +} + +func (p productDo) Group(cols ...field.Expr) IProductDo { + return p.withDO(p.DO.Group(cols...)) +} + +func (p productDo) Having(conds ...gen.Condition) IProductDo { + return p.withDO(p.DO.Having(conds...)) +} + +func (p productDo) Limit(limit int) IProductDo { + return p.withDO(p.DO.Limit(limit)) +} + +func (p productDo) Offset(offset int) IProductDo { + return 
p.withDO(p.DO.Offset(offset)) +} + +func (p productDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IProductDo { + return p.withDO(p.DO.Scopes(funcs...)) +} + +func (p productDo) Unscoped() IProductDo { + return p.withDO(p.DO.Unscoped()) +} + +func (p productDo) Create(values ...*model.Product) error { + if len(values) == 0 { + return nil + } + return p.DO.Create(values) +} + +func (p productDo) CreateInBatches(values []*model.Product, batchSize int) error { + return p.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (p productDo) Save(values ...*model.Product) error { + if len(values) == 0 { + return nil + } + return p.DO.Save(values) +} + +func (p productDo) First() (*model.Product, error) { + if result, err := p.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Product), nil + } +} + +func (p productDo) Take() (*model.Product, error) { + if result, err := p.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Product), nil + } +} + +func (p productDo) Last() (*model.Product, error) { + if result, err := p.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Product), nil + } +} + +func (p productDo) Find() ([]*model.Product, error) { + result, err := p.DO.Find() + return result.([]*model.Product), err +} + +func (p productDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Product, err error) { + buf := make([]*model.Product, 0, batchSize) + err = p.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (p productDo) FindInBatches(result *[]*model.Product, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return p.DO.FindInBatches(result, batchSize, fc) +} + +func (p productDo) Attrs(attrs ...field.AssignExpr) IProductDo { + return p.withDO(p.DO.Attrs(attrs...)) +} + +func (p productDo) Assign(attrs ...field.AssignExpr) IProductDo { + return p.withDO(p.DO.Assign(attrs...)) +} + +func (p productDo) Joins(fields ...field.RelationField) IProductDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Joins(_f)) + } + return &p +} + +func (p productDo) Preload(fields ...field.RelationField) IProductDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Preload(_f)) + } + return &p +} + +func (p productDo) FirstOrInit() (*model.Product, error) { + if result, err := p.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Product), nil + } +} + +func (p productDo) FirstOrCreate() (*model.Product, error) { + if result, err := p.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Product), nil + } +} + +func (p productDo) FindByPage(offset int, limit int) (result []*model.Product, count int64, err error) { + result, err = p.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = p.Offset(-1).Limit(-1).Count() + return +} + +func (p productDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = p.Count() + if err != nil { + return + } + + err = p.Offset(offset).Limit(limit).Scan(result) + return +} + +func (p productDo) Scan(result interface{}) (err error) { + return p.DO.Scan(result) +} + +func (p productDo) Delete(models ...*model.Product) (result gen.ResultInfo, err error) { + return p.DO.Delete(models) +} + +func (p *productDo) withDO(do gen.Dao) 
*productDo { + p.DO = *do.(*gen.DO) + return p +} diff --git a/next/models_generated/project_comments.gen.go b/next/models_generated/project_comments.gen.go new file mode 100644 index 000000000..7e2b169b9 --- /dev/null +++ b/next/models_generated/project_comments.gen.go @@ -0,0 +1,400 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newProjectComment(db *gorm.DB, opts ...gen.DOOption) projectComment { + _projectComment := projectComment{} + + _projectComment.projectCommentDo.UseDB(db, opts...) + _projectComment.projectCommentDo.UseModel(&model.ProjectComment{}) + + tableName := _projectComment.projectCommentDo.TableName() + _projectComment.ALL = field.NewAsterisk(tableName) + _projectComment.ID = field.NewInt64(tableName, "id") + _projectComment.CreatedAt = field.NewTime(tableName, "created_at") + _projectComment.Text = field.NewString(tableName, "text") + _projectComment.UserID = field.NewString(tableName, "user_id") + _projectComment.InReplyTo = field.NewInt64(tableName, "in_reply_to") + _projectComment.ProjectID = field.NewString(tableName, "project_id") + + _projectComment.fillFieldMap() + + return _projectComment +} + +type projectComment struct { + projectCommentDo + + ALL field.Asterisk + ID field.Int64 + CreatedAt field.Time + Text field.String + UserID field.String + InReplyTo field.Int64 + ProjectID field.String + + fieldMap map[string]field.Expr +} + +func (p projectComment) Table(newTableName string) *projectComment { + p.projectCommentDo.UseTable(newTableName) + return p.updateTableName(newTableName) +} + +func (p projectComment) As(alias string) *projectComment { + p.projectCommentDo.DO = 
*(p.projectCommentDo.As(alias).(*gen.DO)) + return p.updateTableName(alias) +} + +func (p *projectComment) updateTableName(table string) *projectComment { + p.ALL = field.NewAsterisk(table) + p.ID = field.NewInt64(table, "id") + p.CreatedAt = field.NewTime(table, "created_at") + p.Text = field.NewString(table, "text") + p.UserID = field.NewString(table, "user_id") + p.InReplyTo = field.NewInt64(table, "in_reply_to") + p.ProjectID = field.NewString(table, "project_id") + + p.fillFieldMap() + + return p +} + +func (p *projectComment) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := p.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (p *projectComment) fillFieldMap() { + p.fieldMap = make(map[string]field.Expr, 6) + p.fieldMap["id"] = p.ID + p.fieldMap["created_at"] = p.CreatedAt + p.fieldMap["text"] = p.Text + p.fieldMap["user_id"] = p.UserID + p.fieldMap["in_reply_to"] = p.InReplyTo + p.fieldMap["project_id"] = p.ProjectID +} + +func (p projectComment) clone(db *gorm.DB) projectComment { + p.projectCommentDo.ReplaceConnPool(db.Statement.ConnPool) + return p +} + +func (p projectComment) replaceDB(db *gorm.DB) projectComment { + p.projectCommentDo.ReplaceDB(db) + return p +} + +type projectCommentDo struct{ gen.DO } + +type IProjectCommentDo interface { + gen.SubQuery + Debug() IProjectCommentDo + WithContext(ctx context.Context) IProjectCommentDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IProjectCommentDo + WriteDB() IProjectCommentDo + As(alias string) gen.Dao + Session(config *gorm.Session) IProjectCommentDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IProjectCommentDo + Not(conds ...gen.Condition) IProjectCommentDo + Or(conds ...gen.Condition) IProjectCommentDo + Select(conds ...field.Expr) IProjectCommentDo + Where(conds ...gen.Condition) IProjectCommentDo + Order(conds 
...field.Expr) IProjectCommentDo + Distinct(cols ...field.Expr) IProjectCommentDo + Omit(cols ...field.Expr) IProjectCommentDo + Join(table schema.Tabler, on ...field.Expr) IProjectCommentDo + LeftJoin(table schema.Tabler, on ...field.Expr) IProjectCommentDo + RightJoin(table schema.Tabler, on ...field.Expr) IProjectCommentDo + Group(cols ...field.Expr) IProjectCommentDo + Having(conds ...gen.Condition) IProjectCommentDo + Limit(limit int) IProjectCommentDo + Offset(offset int) IProjectCommentDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IProjectCommentDo + Unscoped() IProjectCommentDo + Create(values ...*model.ProjectComment) error + CreateInBatches(values []*model.ProjectComment, batchSize int) error + Save(values ...*model.ProjectComment) error + First() (*model.ProjectComment, error) + Take() (*model.ProjectComment, error) + Last() (*model.ProjectComment, error) + Find() ([]*model.ProjectComment, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.ProjectComment, err error) + FindInBatches(result *[]*model.ProjectComment, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.ProjectComment) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IProjectCommentDo + Assign(attrs ...field.AssignExpr) IProjectCommentDo + Joins(fields ...field.RelationField) IProjectCommentDo + Preload(fields ...field.RelationField) 
IProjectCommentDo + FirstOrInit() (*model.ProjectComment, error) + FirstOrCreate() (*model.ProjectComment, error) + FindByPage(offset int, limit int) (result []*model.ProjectComment, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IProjectCommentDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (p projectCommentDo) Debug() IProjectCommentDo { + return p.withDO(p.DO.Debug()) +} + +func (p projectCommentDo) WithContext(ctx context.Context) IProjectCommentDo { + return p.withDO(p.DO.WithContext(ctx)) +} + +func (p projectCommentDo) ReadDB() IProjectCommentDo { + return p.Clauses(dbresolver.Read) +} + +func (p projectCommentDo) WriteDB() IProjectCommentDo { + return p.Clauses(dbresolver.Write) +} + +func (p projectCommentDo) Session(config *gorm.Session) IProjectCommentDo { + return p.withDO(p.DO.Session(config)) +} + +func (p projectCommentDo) Clauses(conds ...clause.Expression) IProjectCommentDo { + return p.withDO(p.DO.Clauses(conds...)) +} + +func (p projectCommentDo) Returning(value interface{}, columns ...string) IProjectCommentDo { + return p.withDO(p.DO.Returning(value, columns...)) +} + +func (p projectCommentDo) Not(conds ...gen.Condition) IProjectCommentDo { + return p.withDO(p.DO.Not(conds...)) +} + +func (p projectCommentDo) Or(conds ...gen.Condition) IProjectCommentDo { + return p.withDO(p.DO.Or(conds...)) +} + +func (p projectCommentDo) Select(conds ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.Select(conds...)) +} + +func (p projectCommentDo) Where(conds ...gen.Condition) IProjectCommentDo { + return p.withDO(p.DO.Where(conds...)) +} + +func (p projectCommentDo) Order(conds ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.Order(conds...)) +} + +func (p projectCommentDo) Distinct(cols ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.Distinct(cols...)) +} + +func (p 
projectCommentDo) Omit(cols ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.Omit(cols...)) +} + +func (p projectCommentDo) Join(table schema.Tabler, on ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.Join(table, on...)) +} + +func (p projectCommentDo) LeftJoin(table schema.Tabler, on ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.LeftJoin(table, on...)) +} + +func (p projectCommentDo) RightJoin(table schema.Tabler, on ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.RightJoin(table, on...)) +} + +func (p projectCommentDo) Group(cols ...field.Expr) IProjectCommentDo { + return p.withDO(p.DO.Group(cols...)) +} + +func (p projectCommentDo) Having(conds ...gen.Condition) IProjectCommentDo { + return p.withDO(p.DO.Having(conds...)) +} + +func (p projectCommentDo) Limit(limit int) IProjectCommentDo { + return p.withDO(p.DO.Limit(limit)) +} + +func (p projectCommentDo) Offset(offset int) IProjectCommentDo { + return p.withDO(p.DO.Offset(offset)) +} + +func (p projectCommentDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IProjectCommentDo { + return p.withDO(p.DO.Scopes(funcs...)) +} + +func (p projectCommentDo) Unscoped() IProjectCommentDo { + return p.withDO(p.DO.Unscoped()) +} + +func (p projectCommentDo) Create(values ...*model.ProjectComment) error { + if len(values) == 0 { + return nil + } + return p.DO.Create(values) +} + +func (p projectCommentDo) CreateInBatches(values []*model.ProjectComment, batchSize int) error { + return p.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (p projectCommentDo) Save(values ...*model.ProjectComment) error { + if len(values) == 0 { + return nil + } + return p.DO.Save(values) +} + +func (p projectCommentDo) First() (*model.ProjectComment, error) { + if result, err := p.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.ProjectComment), nil + } +} + +func (p projectCommentDo) Take() (*model.ProjectComment, error) { + if result, err := p.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.ProjectComment), nil + } +} + +func (p projectCommentDo) Last() (*model.ProjectComment, error) { + if result, err := p.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.ProjectComment), nil + } +} + +func (p projectCommentDo) Find() ([]*model.ProjectComment, error) { + result, err := p.DO.Find() + return result.([]*model.ProjectComment), err +} + +func (p projectCommentDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.ProjectComment, err error) { + buf := make([]*model.ProjectComment, 0, batchSize) + err = p.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (p projectCommentDo) FindInBatches(result *[]*model.ProjectComment, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return p.DO.FindInBatches(result, batchSize, fc) +} + +func (p projectCommentDo) Attrs(attrs ...field.AssignExpr) IProjectCommentDo { + return p.withDO(p.DO.Attrs(attrs...)) +} + +func (p projectCommentDo) Assign(attrs ...field.AssignExpr) IProjectCommentDo { + return p.withDO(p.DO.Assign(attrs...)) +} + +func (p projectCommentDo) Joins(fields ...field.RelationField) IProjectCommentDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Joins(_f)) + } + return &p +} + +func (p projectCommentDo) Preload(fields ...field.RelationField) IProjectCommentDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Preload(_f)) + } + return &p +} + +func (p projectCommentDo) FirstOrInit() (*model.ProjectComment, error) { + if result, err := p.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.ProjectComment), nil + } +} + +func (p projectCommentDo) FirstOrCreate() (*model.ProjectComment, error) { + if result, err := p.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.ProjectComment), nil + } +} + +func (p projectCommentDo) FindByPage(offset int, limit int) (result []*model.ProjectComment, count int64, err error) { + result, err = p.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = p.Offset(-1).Limit(-1).Count() + return +} + +func (p projectCommentDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = p.Count() + if err != nil { + return + } + + err = p.Offset(offset).Limit(limit).Scan(result) + return +} + +func (p projectCommentDo) Scan(result interface{}) (err error) { + return p.DO.Scan(result) +} + +func (p projectCommentDo) 
Delete(models ...*model.ProjectComment) (result gen.ResultInfo, err error) { + return p.DO.Delete(models) +} + +func (p *projectCommentDo) withDO(do gen.Dao) *projectCommentDo { + p.DO = *do.(*gen.DO) + return p +} diff --git a/next/models_generated/projects.gen.go b/next/models_generated/projects.gen.go new file mode 100644 index 000000000..c2f098195 --- /dev/null +++ b/next/models_generated/projects.gen.go @@ -0,0 +1,448 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newProject(db *gorm.DB, opts ...gen.DOOption) project { + _project := project{} + + _project.projectDo.UseDB(db, opts...) + _project.projectDo.UseModel(&model.Project{}) + + tableName := _project.projectDo.TableName() + _project.ALL = field.NewAsterisk(tableName) + _project.ID = field.NewString(tableName, "id") + _project.Name = field.NewString(tableName, "name") + _project.CreatedAt = field.NewTime(tableName, "created_at") + _project.UpdatedAt = field.NewTime(tableName, "updated_at") + _project.OrganizationID = field.NewString(tableName, "organization_id") + _project.TeamID = field.NewInt64(tableName, "team_id") + _project.ProjectStatus = field.NewString(tableName, "project_status") + _project.Slug = field.NewString(tableName, "slug") + _project.LatestActionOn = field.NewString(tableName, "latest_action_on") + _project.RepoID = field.NewInt64(tableName, "repo_id") + _project.ConfigurationYaml = field.NewString(tableName, "configuration_yaml") + _project.Status = field.NewString(tableName, "status") + _project.IsGenerated = field.NewBool(tableName, "is_generated") + _project.IsInMainBranch = field.NewBool(tableName, "is_in_main_branch") + _project.DeletedAt = 
field.NewField(tableName, "deleted_at") + _project.TerraformWorkingDir = field.NewString(tableName, "terraform_working_dir") + _project.IsManagingState = field.NewBool(tableName, "is_managing_state") + _project.Labels = field.NewString(tableName, "labels") + + _project.fillFieldMap() + + return _project +} + +type project struct { + projectDo + + ALL field.Asterisk + ID field.String + Name field.String + CreatedAt field.Time + UpdatedAt field.Time + OrganizationID field.String + TeamID field.Int64 + ProjectStatus field.String + Slug field.String + LatestActionOn field.String + RepoID field.Int64 + ConfigurationYaml field.String + Status field.String + IsGenerated field.Bool + IsInMainBranch field.Bool + DeletedAt field.Field + TerraformWorkingDir field.String + IsManagingState field.Bool + Labels field.String + + fieldMap map[string]field.Expr +} + +func (p project) Table(newTableName string) *project { + p.projectDo.UseTable(newTableName) + return p.updateTableName(newTableName) +} + +func (p project) As(alias string) *project { + p.projectDo.DO = *(p.projectDo.As(alias).(*gen.DO)) + return p.updateTableName(alias) +} + +func (p *project) updateTableName(table string) *project { + p.ALL = field.NewAsterisk(table) + p.ID = field.NewString(table, "id") + p.Name = field.NewString(table, "name") + p.CreatedAt = field.NewTime(table, "created_at") + p.UpdatedAt = field.NewTime(table, "updated_at") + p.OrganizationID = field.NewString(table, "organization_id") + p.TeamID = field.NewInt64(table, "team_id") + p.ProjectStatus = field.NewString(table, "project_status") + p.Slug = field.NewString(table, "slug") + p.LatestActionOn = field.NewString(table, "latest_action_on") + p.RepoID = field.NewInt64(table, "repo_id") + p.ConfigurationYaml = field.NewString(table, "configuration_yaml") + p.Status = field.NewString(table, "status") + p.IsGenerated = field.NewBool(table, "is_generated") + p.IsInMainBranch = field.NewBool(table, "is_in_main_branch") + p.DeletedAt = 
field.NewField(table, "deleted_at") + p.TerraformWorkingDir = field.NewString(table, "terraform_working_dir") + p.IsManagingState = field.NewBool(table, "is_managing_state") + p.Labels = field.NewString(table, "labels") + + p.fillFieldMap() + + return p +} + +func (p *project) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := p.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (p *project) fillFieldMap() { + p.fieldMap = make(map[string]field.Expr, 18) + p.fieldMap["id"] = p.ID + p.fieldMap["name"] = p.Name + p.fieldMap["created_at"] = p.CreatedAt + p.fieldMap["updated_at"] = p.UpdatedAt + p.fieldMap["organization_id"] = p.OrganizationID + p.fieldMap["team_id"] = p.TeamID + p.fieldMap["project_status"] = p.ProjectStatus + p.fieldMap["slug"] = p.Slug + p.fieldMap["latest_action_on"] = p.LatestActionOn + p.fieldMap["repo_id"] = p.RepoID + p.fieldMap["configuration_yaml"] = p.ConfigurationYaml + p.fieldMap["status"] = p.Status + p.fieldMap["is_generated"] = p.IsGenerated + p.fieldMap["is_in_main_branch"] = p.IsInMainBranch + p.fieldMap["deleted_at"] = p.DeletedAt + p.fieldMap["terraform_working_dir"] = p.TerraformWorkingDir + p.fieldMap["is_managing_state"] = p.IsManagingState + p.fieldMap["labels"] = p.Labels +} + +func (p project) clone(db *gorm.DB) project { + p.projectDo.ReplaceConnPool(db.Statement.ConnPool) + return p +} + +func (p project) replaceDB(db *gorm.DB) project { + p.projectDo.ReplaceDB(db) + return p +} + +type projectDo struct{ gen.DO } + +type IProjectDo interface { + gen.SubQuery + Debug() IProjectDo + WithContext(ctx context.Context) IProjectDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IProjectDo + WriteDB() IProjectDo + As(alias string) gen.Dao + Session(config *gorm.Session) IProjectDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IProjectDo + Not(conds ...gen.Condition) 
IProjectDo + Or(conds ...gen.Condition) IProjectDo + Select(conds ...field.Expr) IProjectDo + Where(conds ...gen.Condition) IProjectDo + Order(conds ...field.Expr) IProjectDo + Distinct(cols ...field.Expr) IProjectDo + Omit(cols ...field.Expr) IProjectDo + Join(table schema.Tabler, on ...field.Expr) IProjectDo + LeftJoin(table schema.Tabler, on ...field.Expr) IProjectDo + RightJoin(table schema.Tabler, on ...field.Expr) IProjectDo + Group(cols ...field.Expr) IProjectDo + Having(conds ...gen.Condition) IProjectDo + Limit(limit int) IProjectDo + Offset(offset int) IProjectDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IProjectDo + Unscoped() IProjectDo + Create(values ...*model.Project) error + CreateInBatches(values []*model.Project, batchSize int) error + Save(values ...*model.Project) error + First() (*model.Project, error) + Take() (*model.Project, error) + Last() (*model.Project, error) + Find() ([]*model.Project, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Project, err error) + FindInBatches(result *[]*model.Project, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Project) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IProjectDo + Assign(attrs ...field.AssignExpr) IProjectDo + Joins(fields ...field.RelationField) IProjectDo + Preload(fields ...field.RelationField) IProjectDo + FirstOrInit() 
(*model.Project, error) + FirstOrCreate() (*model.Project, error) + FindByPage(offset int, limit int) (result []*model.Project, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IProjectDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (p projectDo) Debug() IProjectDo { + return p.withDO(p.DO.Debug()) +} + +func (p projectDo) WithContext(ctx context.Context) IProjectDo { + return p.withDO(p.DO.WithContext(ctx)) +} + +func (p projectDo) ReadDB() IProjectDo { + return p.Clauses(dbresolver.Read) +} + +func (p projectDo) WriteDB() IProjectDo { + return p.Clauses(dbresolver.Write) +} + +func (p projectDo) Session(config *gorm.Session) IProjectDo { + return p.withDO(p.DO.Session(config)) +} + +func (p projectDo) Clauses(conds ...clause.Expression) IProjectDo { + return p.withDO(p.DO.Clauses(conds...)) +} + +func (p projectDo) Returning(value interface{}, columns ...string) IProjectDo { + return p.withDO(p.DO.Returning(value, columns...)) +} + +func (p projectDo) Not(conds ...gen.Condition) IProjectDo { + return p.withDO(p.DO.Not(conds...)) +} + +func (p projectDo) Or(conds ...gen.Condition) IProjectDo { + return p.withDO(p.DO.Or(conds...)) +} + +func (p projectDo) Select(conds ...field.Expr) IProjectDo { + return p.withDO(p.DO.Select(conds...)) +} + +func (p projectDo) Where(conds ...gen.Condition) IProjectDo { + return p.withDO(p.DO.Where(conds...)) +} + +func (p projectDo) Order(conds ...field.Expr) IProjectDo { + return p.withDO(p.DO.Order(conds...)) +} + +func (p projectDo) Distinct(cols ...field.Expr) IProjectDo { + return p.withDO(p.DO.Distinct(cols...)) +} + +func (p projectDo) Omit(cols ...field.Expr) IProjectDo { + return p.withDO(p.DO.Omit(cols...)) +} + +func (p projectDo) Join(table schema.Tabler, on ...field.Expr) IProjectDo { + return p.withDO(p.DO.Join(table, on...)) +} + +func (p projectDo) LeftJoin(table 
schema.Tabler, on ...field.Expr) IProjectDo { + return p.withDO(p.DO.LeftJoin(table, on...)) +} + +func (p projectDo) RightJoin(table schema.Tabler, on ...field.Expr) IProjectDo { + return p.withDO(p.DO.RightJoin(table, on...)) +} + +func (p projectDo) Group(cols ...field.Expr) IProjectDo { + return p.withDO(p.DO.Group(cols...)) +} + +func (p projectDo) Having(conds ...gen.Condition) IProjectDo { + return p.withDO(p.DO.Having(conds...)) +} + +func (p projectDo) Limit(limit int) IProjectDo { + return p.withDO(p.DO.Limit(limit)) +} + +func (p projectDo) Offset(offset int) IProjectDo { + return p.withDO(p.DO.Offset(offset)) +} + +func (p projectDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IProjectDo { + return p.withDO(p.DO.Scopes(funcs...)) +} + +func (p projectDo) Unscoped() IProjectDo { + return p.withDO(p.DO.Unscoped()) +} + +func (p projectDo) Create(values ...*model.Project) error { + if len(values) == 0 { + return nil + } + return p.DO.Create(values) +} + +func (p projectDo) CreateInBatches(values []*model.Project, batchSize int) error { + return p.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (p projectDo) Save(values ...*model.Project) error { + if len(values) == 0 { + return nil + } + return p.DO.Save(values) +} + +func (p projectDo) First() (*model.Project, error) { + if result, err := p.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Project), nil + } +} + +func (p projectDo) Take() (*model.Project, error) { + if result, err := p.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Project), nil + } +} + +func (p projectDo) Last() (*model.Project, error) { + if result, err := p.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Project), nil + } +} + +func (p projectDo) Find() ([]*model.Project, error) { + result, err := p.DO.Find() + return result.([]*model.Project), err +} + +func (p projectDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Project, err error) { + buf := make([]*model.Project, 0, batchSize) + err = p.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (p projectDo) FindInBatches(result *[]*model.Project, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return p.DO.FindInBatches(result, batchSize, fc) +} + +func (p projectDo) Attrs(attrs ...field.AssignExpr) IProjectDo { + return p.withDO(p.DO.Attrs(attrs...)) +} + +func (p projectDo) Assign(attrs ...field.AssignExpr) IProjectDo { + return p.withDO(p.DO.Assign(attrs...)) +} + +func (p projectDo) Joins(fields ...field.RelationField) IProjectDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Joins(_f)) + } + return &p +} + +func (p projectDo) Preload(fields ...field.RelationField) IProjectDo { + for _, _f := range fields { + p = *p.withDO(p.DO.Preload(_f)) + } + return &p +} + +func (p projectDo) FirstOrInit() (*model.Project, error) { + if result, err := p.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Project), nil + } +} + +func (p projectDo) FirstOrCreate() (*model.Project, error) { + if result, err := p.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Project), nil + } +} + +func (p projectDo) FindByPage(offset int, limit int) (result []*model.Project, count int64, err error) { + result, err = p.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = p.Offset(-1).Limit(-1).Count() + return +} + +func (p projectDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = p.Count() + if err != nil { + return + } + + err = p.Offset(offset).Limit(limit).Scan(result) + return +} + +func (p projectDo) Scan(result interface{}) (err error) { + return p.DO.Scan(result) +} + +func (p projectDo) Delete(models ...*model.Project) (result gen.ResultInfo, err error) { + return p.DO.Delete(models) +} + +func (p *projectDo) withDO(do gen.Dao) 
*projectDo { + p.DO = *do.(*gen.DO) + return p +} diff --git a/next/models_generated/repos.gen.go b/next/models_generated/repos.gen.go new file mode 100644 index 000000000..ce3f20915 --- /dev/null +++ b/next/models_generated/repos.gen.go @@ -0,0 +1,420 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newRepo(db *gorm.DB, opts ...gen.DOOption) repo { + _repo := repo{} + + _repo.repoDo.UseDB(db, opts...) + _repo.repoDo.UseModel(&model.Repo{}) + + tableName := _repo.repoDo.TableName() + _repo.ALL = field.NewAsterisk(tableName) + _repo.ID = field.NewInt64(tableName, "id") + _repo.CreatedAt = field.NewTime(tableName, "created_at") + _repo.UpdatedAt = field.NewTime(tableName, "updated_at") + _repo.DeletedAt = field.NewField(tableName, "deleted_at") + _repo.Name = field.NewString(tableName, "name") + _repo.OrganizationID = field.NewString(tableName, "organization_id") + _repo.DiggerConfig = field.NewString(tableName, "digger_config") + _repo.RepoName = field.NewString(tableName, "repo_name") + _repo.RepoFullName = field.NewString(tableName, "repo_full_name") + _repo.RepoOrganisation = field.NewString(tableName, "repo_organisation") + _repo.RepoURL = field.NewString(tableName, "repo_url") + + _repo.fillFieldMap() + + return _repo +} + +type repo struct { + repoDo + + ALL field.Asterisk + ID field.Int64 + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Field + Name field.String + OrganizationID field.String + DiggerConfig field.String + RepoName field.String + RepoFullName field.String + RepoOrganisation field.String + RepoURL field.String + + fieldMap map[string]field.Expr +} + +func (r repo) Table(newTableName string) 
*repo { + r.repoDo.UseTable(newTableName) + return r.updateTableName(newTableName) +} + +func (r repo) As(alias string) *repo { + r.repoDo.DO = *(r.repoDo.As(alias).(*gen.DO)) + return r.updateTableName(alias) +} + +func (r *repo) updateTableName(table string) *repo { + r.ALL = field.NewAsterisk(table) + r.ID = field.NewInt64(table, "id") + r.CreatedAt = field.NewTime(table, "created_at") + r.UpdatedAt = field.NewTime(table, "updated_at") + r.DeletedAt = field.NewField(table, "deleted_at") + r.Name = field.NewString(table, "name") + r.OrganizationID = field.NewString(table, "organization_id") + r.DiggerConfig = field.NewString(table, "digger_config") + r.RepoName = field.NewString(table, "repo_name") + r.RepoFullName = field.NewString(table, "repo_full_name") + r.RepoOrganisation = field.NewString(table, "repo_organisation") + r.RepoURL = field.NewString(table, "repo_url") + + r.fillFieldMap() + + return r +} + +func (r *repo) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := r.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (r *repo) fillFieldMap() { + r.fieldMap = make(map[string]field.Expr, 11) + r.fieldMap["id"] = r.ID + r.fieldMap["created_at"] = r.CreatedAt + r.fieldMap["updated_at"] = r.UpdatedAt + r.fieldMap["deleted_at"] = r.DeletedAt + r.fieldMap["name"] = r.Name + r.fieldMap["organization_id"] = r.OrganizationID + r.fieldMap["digger_config"] = r.DiggerConfig + r.fieldMap["repo_name"] = r.RepoName + r.fieldMap["repo_full_name"] = r.RepoFullName + r.fieldMap["repo_organisation"] = r.RepoOrganisation + r.fieldMap["repo_url"] = r.RepoURL +} + +func (r repo) clone(db *gorm.DB) repo { + r.repoDo.ReplaceConnPool(db.Statement.ConnPool) + return r +} + +func (r repo) replaceDB(db *gorm.DB) repo { + r.repoDo.ReplaceDB(db) + return r +} + +type repoDo struct{ gen.DO } + +type IRepoDo interface { + gen.SubQuery + Debug() IRepoDo + WithContext(ctx context.Context) 
IRepoDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IRepoDo + WriteDB() IRepoDo + As(alias string) gen.Dao + Session(config *gorm.Session) IRepoDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IRepoDo + Not(conds ...gen.Condition) IRepoDo + Or(conds ...gen.Condition) IRepoDo + Select(conds ...field.Expr) IRepoDo + Where(conds ...gen.Condition) IRepoDo + Order(conds ...field.Expr) IRepoDo + Distinct(cols ...field.Expr) IRepoDo + Omit(cols ...field.Expr) IRepoDo + Join(table schema.Tabler, on ...field.Expr) IRepoDo + LeftJoin(table schema.Tabler, on ...field.Expr) IRepoDo + RightJoin(table schema.Tabler, on ...field.Expr) IRepoDo + Group(cols ...field.Expr) IRepoDo + Having(conds ...gen.Condition) IRepoDo + Limit(limit int) IRepoDo + Offset(offset int) IRepoDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IRepoDo + Unscoped() IRepoDo + Create(values ...*model.Repo) error + CreateInBatches(values []*model.Repo, batchSize int) error + Save(values ...*model.Repo) error + First() (*model.Repo, error) + Take() (*model.Repo, error) + Last() (*model.Repo, error) + Find() ([]*model.Repo, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Repo, err error) + FindInBatches(result *[]*model.Repo, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Repo) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q 
gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IRepoDo + Assign(attrs ...field.AssignExpr) IRepoDo + Joins(fields ...field.RelationField) IRepoDo + Preload(fields ...field.RelationField) IRepoDo + FirstOrInit() (*model.Repo, error) + FirstOrCreate() (*model.Repo, error) + FindByPage(offset int, limit int) (result []*model.Repo, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IRepoDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (r repoDo) Debug() IRepoDo { + return r.withDO(r.DO.Debug()) +} + +func (r repoDo) WithContext(ctx context.Context) IRepoDo { + return r.withDO(r.DO.WithContext(ctx)) +} + +func (r repoDo) ReadDB() IRepoDo { + return r.Clauses(dbresolver.Read) +} + +func (r repoDo) WriteDB() IRepoDo { + return r.Clauses(dbresolver.Write) +} + +func (r repoDo) Session(config *gorm.Session) IRepoDo { + return r.withDO(r.DO.Session(config)) +} + +func (r repoDo) Clauses(conds ...clause.Expression) IRepoDo { + return r.withDO(r.DO.Clauses(conds...)) +} + +func (r repoDo) Returning(value interface{}, columns ...string) IRepoDo { + return r.withDO(r.DO.Returning(value, columns...)) +} + +func (r repoDo) Not(conds ...gen.Condition) IRepoDo { + return r.withDO(r.DO.Not(conds...)) +} + +func (r repoDo) Or(conds ...gen.Condition) IRepoDo { + return r.withDO(r.DO.Or(conds...)) +} + +func (r repoDo) Select(conds ...field.Expr) IRepoDo { + return r.withDO(r.DO.Select(conds...)) +} + +func (r repoDo) Where(conds ...gen.Condition) IRepoDo { + return r.withDO(r.DO.Where(conds...)) +} + +func (r repoDo) Order(conds ...field.Expr) IRepoDo { + return r.withDO(r.DO.Order(conds...)) +} + +func (r repoDo) Distinct(cols ...field.Expr) IRepoDo { + return r.withDO(r.DO.Distinct(cols...)) +} + +func (r repoDo) Omit(cols ...field.Expr) IRepoDo { + return r.withDO(r.DO.Omit(cols...)) +} + +func (r repoDo) Join(table 
schema.Tabler, on ...field.Expr) IRepoDo { + return r.withDO(r.DO.Join(table, on...)) +} + +func (r repoDo) LeftJoin(table schema.Tabler, on ...field.Expr) IRepoDo { + return r.withDO(r.DO.LeftJoin(table, on...)) +} + +func (r repoDo) RightJoin(table schema.Tabler, on ...field.Expr) IRepoDo { + return r.withDO(r.DO.RightJoin(table, on...)) +} + +func (r repoDo) Group(cols ...field.Expr) IRepoDo { + return r.withDO(r.DO.Group(cols...)) +} + +func (r repoDo) Having(conds ...gen.Condition) IRepoDo { + return r.withDO(r.DO.Having(conds...)) +} + +func (r repoDo) Limit(limit int) IRepoDo { + return r.withDO(r.DO.Limit(limit)) +} + +func (r repoDo) Offset(offset int) IRepoDo { + return r.withDO(r.DO.Offset(offset)) +} + +func (r repoDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IRepoDo { + return r.withDO(r.DO.Scopes(funcs...)) +} + +func (r repoDo) Unscoped() IRepoDo { + return r.withDO(r.DO.Unscoped()) +} + +func (r repoDo) Create(values ...*model.Repo) error { + if len(values) == 0 { + return nil + } + return r.DO.Create(values) +} + +func (r repoDo) CreateInBatches(values []*model.Repo, batchSize int) error { + return r.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (r repoDo) Save(values ...*model.Repo) error { + if len(values) == 0 { + return nil + } + return r.DO.Save(values) +} + +func (r repoDo) First() (*model.Repo, error) { + if result, err := r.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Repo), nil + } +} + +func (r repoDo) Take() (*model.Repo, error) { + if result, err := r.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Repo), nil + } +} + +func (r repoDo) Last() (*model.Repo, error) { + if result, err := r.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Repo), nil + } +} + +func (r repoDo) Find() ([]*model.Repo, error) { + result, err := r.DO.Find() + return result.([]*model.Repo), err +} + +func (r repoDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Repo, err error) { + buf := make([]*model.Repo, 0, batchSize) + err = r.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (r repoDo) FindInBatches(result *[]*model.Repo, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return r.DO.FindInBatches(result, batchSize, fc) +} + +func (r repoDo) Attrs(attrs ...field.AssignExpr) IRepoDo { + return r.withDO(r.DO.Attrs(attrs...)) +} + +func (r repoDo) Assign(attrs ...field.AssignExpr) IRepoDo { + return r.withDO(r.DO.Assign(attrs...)) +} + +func (r repoDo) Joins(fields ...field.RelationField) IRepoDo { + for _, _f := range fields { + r = *r.withDO(r.DO.Joins(_f)) + } + return &r +} + +func (r repoDo) Preload(fields ...field.RelationField) IRepoDo { + for _, _f := range fields { + r = *r.withDO(r.DO.Preload(_f)) + } + return &r +} + +func (r repoDo) FirstOrInit() (*model.Repo, error) { + if result, err := r.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Repo), nil + } +} + +func (r repoDo) FirstOrCreate() (*model.Repo, error) { + if result, err := r.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Repo), nil + } +} + +func (r repoDo) FindByPage(offset int, limit int) (result []*model.Repo, count int64, err error) { + result, err = r.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = r.Offset(-1).Limit(-1).Count() + return +} + +func (r repoDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = r.Count() + if err != nil { + return + } + + err = r.Offset(offset).Limit(limit).Scan(result) + return +} + +func (r repoDo) Scan(result interface{}) (err error) { + return r.DO.Scan(result) +} + +func (r repoDo) Delete(models ...*model.Repo) (result gen.ResultInfo, err error) { + return r.DO.Delete(models) +} + +func (r *repoDo) withDO(do gen.Dao) *repoDo { + r.DO = *do.(*gen.DO) + return r +} diff --git 
a/next/models_generated/subscriptions.gen.go b/next/models_generated/subscriptions.gen.go new file mode 100644 index 000000000..5fba62e39 --- /dev/null +++ b/next/models_generated/subscriptions.gen.go @@ -0,0 +1,436 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newSubscription(db *gorm.DB, opts ...gen.DOOption) subscription { + _subscription := subscription{} + + _subscription.subscriptionDo.UseDB(db, opts...) + _subscription.subscriptionDo.UseModel(&model.Subscription{}) + + tableName := _subscription.subscriptionDo.TableName() + _subscription.ALL = field.NewAsterisk(tableName) + _subscription.ID = field.NewString(tableName, "id") + _subscription.Status = field.NewString(tableName, "status") + _subscription.Metadata = field.NewString(tableName, "metadata") + _subscription.PriceID = field.NewString(tableName, "price_id") + _subscription.Quantity = field.NewInt64(tableName, "quantity") + _subscription.CancelAtPeriodEnd = field.NewBool(tableName, "cancel_at_period_end") + _subscription.Created = field.NewTime(tableName, "created") + _subscription.CurrentPeriodStart = field.NewTime(tableName, "current_period_start") + _subscription.CurrentPeriodEnd = field.NewTime(tableName, "current_period_end") + _subscription.EndedAt = field.NewTime(tableName, "ended_at") + _subscription.CancelAt = field.NewTime(tableName, "cancel_at") + _subscription.CanceledAt = field.NewTime(tableName, "canceled_at") + _subscription.TrialStart = field.NewTime(tableName, "trial_start") + _subscription.TrialEnd = field.NewTime(tableName, "trial_end") + _subscription.OrganizationID = field.NewString(tableName, "organization_id") + + 
_subscription.fillFieldMap() + + return _subscription +} + +type subscription struct { + subscriptionDo + + ALL field.Asterisk + ID field.String + Status field.String + Metadata field.String + PriceID field.String + Quantity field.Int64 + CancelAtPeriodEnd field.Bool + Created field.Time + CurrentPeriodStart field.Time + CurrentPeriodEnd field.Time + EndedAt field.Time + CancelAt field.Time + CanceledAt field.Time + TrialStart field.Time + TrialEnd field.Time + OrganizationID field.String + + fieldMap map[string]field.Expr +} + +func (s subscription) Table(newTableName string) *subscription { + s.subscriptionDo.UseTable(newTableName) + return s.updateTableName(newTableName) +} + +func (s subscription) As(alias string) *subscription { + s.subscriptionDo.DO = *(s.subscriptionDo.As(alias).(*gen.DO)) + return s.updateTableName(alias) +} + +func (s *subscription) updateTableName(table string) *subscription { + s.ALL = field.NewAsterisk(table) + s.ID = field.NewString(table, "id") + s.Status = field.NewString(table, "status") + s.Metadata = field.NewString(table, "metadata") + s.PriceID = field.NewString(table, "price_id") + s.Quantity = field.NewInt64(table, "quantity") + s.CancelAtPeriodEnd = field.NewBool(table, "cancel_at_period_end") + s.Created = field.NewTime(table, "created") + s.CurrentPeriodStart = field.NewTime(table, "current_period_start") + s.CurrentPeriodEnd = field.NewTime(table, "current_period_end") + s.EndedAt = field.NewTime(table, "ended_at") + s.CancelAt = field.NewTime(table, "cancel_at") + s.CanceledAt = field.NewTime(table, "canceled_at") + s.TrialStart = field.NewTime(table, "trial_start") + s.TrialEnd = field.NewTime(table, "trial_end") + s.OrganizationID = field.NewString(table, "organization_id") + + s.fillFieldMap() + + return s +} + +func (s *subscription) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := s.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return 
_oe, ok +} + +func (s *subscription) fillFieldMap() { + s.fieldMap = make(map[string]field.Expr, 15) + s.fieldMap["id"] = s.ID + s.fieldMap["status"] = s.Status + s.fieldMap["metadata"] = s.Metadata + s.fieldMap["price_id"] = s.PriceID + s.fieldMap["quantity"] = s.Quantity + s.fieldMap["cancel_at_period_end"] = s.CancelAtPeriodEnd + s.fieldMap["created"] = s.Created + s.fieldMap["current_period_start"] = s.CurrentPeriodStart + s.fieldMap["current_period_end"] = s.CurrentPeriodEnd + s.fieldMap["ended_at"] = s.EndedAt + s.fieldMap["cancel_at"] = s.CancelAt + s.fieldMap["canceled_at"] = s.CanceledAt + s.fieldMap["trial_start"] = s.TrialStart + s.fieldMap["trial_end"] = s.TrialEnd + s.fieldMap["organization_id"] = s.OrganizationID +} + +func (s subscription) clone(db *gorm.DB) subscription { + s.subscriptionDo.ReplaceConnPool(db.Statement.ConnPool) + return s +} + +func (s subscription) replaceDB(db *gorm.DB) subscription { + s.subscriptionDo.ReplaceDB(db) + return s +} + +type subscriptionDo struct{ gen.DO } + +type ISubscriptionDo interface { + gen.SubQuery + Debug() ISubscriptionDo + WithContext(ctx context.Context) ISubscriptionDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() ISubscriptionDo + WriteDB() ISubscriptionDo + As(alias string) gen.Dao + Session(config *gorm.Session) ISubscriptionDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) ISubscriptionDo + Not(conds ...gen.Condition) ISubscriptionDo + Or(conds ...gen.Condition) ISubscriptionDo + Select(conds ...field.Expr) ISubscriptionDo + Where(conds ...gen.Condition) ISubscriptionDo + Order(conds ...field.Expr) ISubscriptionDo + Distinct(cols ...field.Expr) ISubscriptionDo + Omit(cols ...field.Expr) ISubscriptionDo + Join(table schema.Tabler, on ...field.Expr) ISubscriptionDo + LeftJoin(table schema.Tabler, on ...field.Expr) ISubscriptionDo + RightJoin(table schema.Tabler, on ...field.Expr) ISubscriptionDo + Group(cols ...field.Expr) 
ISubscriptionDo + Having(conds ...gen.Condition) ISubscriptionDo + Limit(limit int) ISubscriptionDo + Offset(offset int) ISubscriptionDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) ISubscriptionDo + Unscoped() ISubscriptionDo + Create(values ...*model.Subscription) error + CreateInBatches(values []*model.Subscription, batchSize int) error + Save(values ...*model.Subscription) error + First() (*model.Subscription, error) + Take() (*model.Subscription, error) + Last() (*model.Subscription, error) + Find() ([]*model.Subscription, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Subscription, err error) + FindInBatches(result *[]*model.Subscription, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.Subscription) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) ISubscriptionDo + Assign(attrs ...field.AssignExpr) ISubscriptionDo + Joins(fields ...field.RelationField) ISubscriptionDo + Preload(fields ...field.RelationField) ISubscriptionDo + FirstOrInit() (*model.Subscription, error) + FirstOrCreate() (*model.Subscription, error) + FindByPage(offset int, limit int) (result []*model.Subscription, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) ISubscriptionDo + UnderlyingDB() 
*gorm.DB + schema.Tabler +} + +func (s subscriptionDo) Debug() ISubscriptionDo { + return s.withDO(s.DO.Debug()) +} + +func (s subscriptionDo) WithContext(ctx context.Context) ISubscriptionDo { + return s.withDO(s.DO.WithContext(ctx)) +} + +func (s subscriptionDo) ReadDB() ISubscriptionDo { + return s.Clauses(dbresolver.Read) +} + +func (s subscriptionDo) WriteDB() ISubscriptionDo { + return s.Clauses(dbresolver.Write) +} + +func (s subscriptionDo) Session(config *gorm.Session) ISubscriptionDo { + return s.withDO(s.DO.Session(config)) +} + +func (s subscriptionDo) Clauses(conds ...clause.Expression) ISubscriptionDo { + return s.withDO(s.DO.Clauses(conds...)) +} + +func (s subscriptionDo) Returning(value interface{}, columns ...string) ISubscriptionDo { + return s.withDO(s.DO.Returning(value, columns...)) +} + +func (s subscriptionDo) Not(conds ...gen.Condition) ISubscriptionDo { + return s.withDO(s.DO.Not(conds...)) +} + +func (s subscriptionDo) Or(conds ...gen.Condition) ISubscriptionDo { + return s.withDO(s.DO.Or(conds...)) +} + +func (s subscriptionDo) Select(conds ...field.Expr) ISubscriptionDo { + return s.withDO(s.DO.Select(conds...)) +} + +func (s subscriptionDo) Where(conds ...gen.Condition) ISubscriptionDo { + return s.withDO(s.DO.Where(conds...)) +} + +func (s subscriptionDo) Order(conds ...field.Expr) ISubscriptionDo { + return s.withDO(s.DO.Order(conds...)) +} + +func (s subscriptionDo) Distinct(cols ...field.Expr) ISubscriptionDo { + return s.withDO(s.DO.Distinct(cols...)) +} + +func (s subscriptionDo) Omit(cols ...field.Expr) ISubscriptionDo { + return s.withDO(s.DO.Omit(cols...)) +} + +func (s subscriptionDo) Join(table schema.Tabler, on ...field.Expr) ISubscriptionDo { + return s.withDO(s.DO.Join(table, on...)) +} + +func (s subscriptionDo) LeftJoin(table schema.Tabler, on ...field.Expr) ISubscriptionDo { + return s.withDO(s.DO.LeftJoin(table, on...)) +} + +func (s subscriptionDo) RightJoin(table schema.Tabler, on ...field.Expr) ISubscriptionDo { + 
return s.withDO(s.DO.RightJoin(table, on...)) +} + +func (s subscriptionDo) Group(cols ...field.Expr) ISubscriptionDo { + return s.withDO(s.DO.Group(cols...)) +} + +func (s subscriptionDo) Having(conds ...gen.Condition) ISubscriptionDo { + return s.withDO(s.DO.Having(conds...)) +} + +func (s subscriptionDo) Limit(limit int) ISubscriptionDo { + return s.withDO(s.DO.Limit(limit)) +} + +func (s subscriptionDo) Offset(offset int) ISubscriptionDo { + return s.withDO(s.DO.Offset(offset)) +} + +func (s subscriptionDo) Scopes(funcs ...func(gen.Dao) gen.Dao) ISubscriptionDo { + return s.withDO(s.DO.Scopes(funcs...)) +} + +func (s subscriptionDo) Unscoped() ISubscriptionDo { + return s.withDO(s.DO.Unscoped()) +} + +func (s subscriptionDo) Create(values ...*model.Subscription) error { + if len(values) == 0 { + return nil + } + return s.DO.Create(values) +} + +func (s subscriptionDo) CreateInBatches(values []*model.Subscription, batchSize int) error { + return s.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (s subscriptionDo) Save(values ...*model.Subscription) error { + if len(values) == 0 { + return nil + } + return s.DO.Save(values) +} + +func (s subscriptionDo) First() (*model.Subscription, error) { + if result, err := s.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.Subscription), nil + } +} + +func (s subscriptionDo) Take() (*model.Subscription, error) { + if result, err := s.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.Subscription), nil + } +} + +func (s subscriptionDo) Last() (*model.Subscription, error) { + if result, err := s.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.Subscription), nil + } +} + +func (s subscriptionDo) Find() ([]*model.Subscription, error) { + result, err := s.DO.Find() + return result.([]*model.Subscription), err +} + +func (s subscriptionDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Subscription, err error) { + buf := make([]*model.Subscription, 0, batchSize) + err = s.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (s subscriptionDo) FindInBatches(result *[]*model.Subscription, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return s.DO.FindInBatches(result, batchSize, fc) +} + +func (s subscriptionDo) Attrs(attrs ...field.AssignExpr) ISubscriptionDo { + return s.withDO(s.DO.Attrs(attrs...)) +} + +func (s subscriptionDo) Assign(attrs ...field.AssignExpr) ISubscriptionDo { + return s.withDO(s.DO.Assign(attrs...)) +} + +func (s subscriptionDo) Joins(fields ...field.RelationField) ISubscriptionDo { + for _, _f := range fields { + s = *s.withDO(s.DO.Joins(_f)) + } + return &s +} + +func (s subscriptionDo) Preload(fields ...field.RelationField) ISubscriptionDo { + for _, _f := range fields { + s = *s.withDO(s.DO.Preload(_f)) + } + return &s +} + +func (s subscriptionDo) FirstOrInit() (*model.Subscription, error) { + if result, err := s.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.Subscription), nil + } +} + +func (s subscriptionDo) FirstOrCreate() (*model.Subscription, error) { + if result, err := s.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.Subscription), nil + } +} + +func (s subscriptionDo) FindByPage(offset int, limit int) (result []*model.Subscription, count int64, err error) { + result, err = s.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = s.Offset(-1).Limit(-1).Count() + return +} + +func (s subscriptionDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = s.Count() + if err != nil { + return + } + + err = s.Offset(offset).Limit(limit).Scan(result) + return +} + +func (s subscriptionDo) Scan(result interface{}) (err error) { + return s.DO.Scan(result) +} + +func (s subscriptionDo) Delete(models ...*model.Subscription) 
(result gen.ResultInfo, err error) { + return s.DO.Delete(models) +} + +func (s *subscriptionDo) withDO(do gen.Dao) *subscriptionDo { + s.DO = *do.(*gen.DO) + return s +} diff --git a/next/models_generated/user_api_keys.gen.go b/next/models_generated/user_api_keys.gen.go new file mode 100644 index 000000000..87feba2b5 --- /dev/null +++ b/next/models_generated/user_api_keys.gen.go @@ -0,0 +1,400 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newUserAPIKey(db *gorm.DB, opts ...gen.DOOption) userAPIKey { + _userAPIKey := userAPIKey{} + + _userAPIKey.userAPIKeyDo.UseDB(db, opts...) + _userAPIKey.userAPIKeyDo.UseModel(&model.UserAPIKey{}) + + tableName := _userAPIKey.userAPIKeyDo.TableName() + _userAPIKey.ALL = field.NewAsterisk(tableName) + _userAPIKey.KeyID = field.NewString(tableName, "key_id") + _userAPIKey.MaskedKey = field.NewString(tableName, "masked_key") + _userAPIKey.CreatedAt = field.NewTime(tableName, "created_at") + _userAPIKey.UserID = field.NewString(tableName, "user_id") + _userAPIKey.ExpiresAt = field.NewTime(tableName, "expires_at") + _userAPIKey.IsRevoked = field.NewBool(tableName, "is_revoked") + + _userAPIKey.fillFieldMap() + + return _userAPIKey +} + +type userAPIKey struct { + userAPIKeyDo + + ALL field.Asterisk + KeyID field.String + MaskedKey field.String + CreatedAt field.Time + UserID field.String + ExpiresAt field.Time + IsRevoked field.Bool + + fieldMap map[string]field.Expr +} + +func (u userAPIKey) Table(newTableName string) *userAPIKey { + u.userAPIKeyDo.UseTable(newTableName) + return u.updateTableName(newTableName) +} + +func (u userAPIKey) As(alias string) *userAPIKey { + u.userAPIKeyDo.DO = 
*(u.userAPIKeyDo.As(alias).(*gen.DO)) + return u.updateTableName(alias) +} + +func (u *userAPIKey) updateTableName(table string) *userAPIKey { + u.ALL = field.NewAsterisk(table) + u.KeyID = field.NewString(table, "key_id") + u.MaskedKey = field.NewString(table, "masked_key") + u.CreatedAt = field.NewTime(table, "created_at") + u.UserID = field.NewString(table, "user_id") + u.ExpiresAt = field.NewTime(table, "expires_at") + u.IsRevoked = field.NewBool(table, "is_revoked") + + u.fillFieldMap() + + return u +} + +func (u *userAPIKey) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := u.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (u *userAPIKey) fillFieldMap() { + u.fieldMap = make(map[string]field.Expr, 6) + u.fieldMap["key_id"] = u.KeyID + u.fieldMap["masked_key"] = u.MaskedKey + u.fieldMap["created_at"] = u.CreatedAt + u.fieldMap["user_id"] = u.UserID + u.fieldMap["expires_at"] = u.ExpiresAt + u.fieldMap["is_revoked"] = u.IsRevoked +} + +func (u userAPIKey) clone(db *gorm.DB) userAPIKey { + u.userAPIKeyDo.ReplaceConnPool(db.Statement.ConnPool) + return u +} + +func (u userAPIKey) replaceDB(db *gorm.DB) userAPIKey { + u.userAPIKeyDo.ReplaceDB(db) + return u +} + +type userAPIKeyDo struct{ gen.DO } + +type IUserAPIKeyDo interface { + gen.SubQuery + Debug() IUserAPIKeyDo + WithContext(ctx context.Context) IUserAPIKeyDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IUserAPIKeyDo + WriteDB() IUserAPIKeyDo + As(alias string) gen.Dao + Session(config *gorm.Session) IUserAPIKeyDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IUserAPIKeyDo + Not(conds ...gen.Condition) IUserAPIKeyDo + Or(conds ...gen.Condition) IUserAPIKeyDo + Select(conds ...field.Expr) IUserAPIKeyDo + Where(conds ...gen.Condition) IUserAPIKeyDo + Order(conds ...field.Expr) IUserAPIKeyDo + Distinct(cols ...field.Expr) IUserAPIKeyDo 
+ Omit(cols ...field.Expr) IUserAPIKeyDo + Join(table schema.Tabler, on ...field.Expr) IUserAPIKeyDo + LeftJoin(table schema.Tabler, on ...field.Expr) IUserAPIKeyDo + RightJoin(table schema.Tabler, on ...field.Expr) IUserAPIKeyDo + Group(cols ...field.Expr) IUserAPIKeyDo + Having(conds ...gen.Condition) IUserAPIKeyDo + Limit(limit int) IUserAPIKeyDo + Offset(offset int) IUserAPIKeyDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IUserAPIKeyDo + Unscoped() IUserAPIKeyDo + Create(values ...*model.UserAPIKey) error + CreateInBatches(values []*model.UserAPIKey, batchSize int) error + Save(values ...*model.UserAPIKey) error + First() (*model.UserAPIKey, error) + Take() (*model.UserAPIKey, error) + Last() (*model.UserAPIKey, error) + Find() ([]*model.UserAPIKey, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserAPIKey, err error) + FindInBatches(result *[]*model.UserAPIKey, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.UserAPIKey) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IUserAPIKeyDo + Assign(attrs ...field.AssignExpr) IUserAPIKeyDo + Joins(fields ...field.RelationField) IUserAPIKeyDo + Preload(fields ...field.RelationField) IUserAPIKeyDo + FirstOrInit() (*model.UserAPIKey, error) + FirstOrCreate() (*model.UserAPIKey, error) + FindByPage(offset int, limit int) (result []*model.UserAPIKey, count int64, 
err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IUserAPIKeyDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (u userAPIKeyDo) Debug() IUserAPIKeyDo { + return u.withDO(u.DO.Debug()) +} + +func (u userAPIKeyDo) WithContext(ctx context.Context) IUserAPIKeyDo { + return u.withDO(u.DO.WithContext(ctx)) +} + +func (u userAPIKeyDo) ReadDB() IUserAPIKeyDo { + return u.Clauses(dbresolver.Read) +} + +func (u userAPIKeyDo) WriteDB() IUserAPIKeyDo { + return u.Clauses(dbresolver.Write) +} + +func (u userAPIKeyDo) Session(config *gorm.Session) IUserAPIKeyDo { + return u.withDO(u.DO.Session(config)) +} + +func (u userAPIKeyDo) Clauses(conds ...clause.Expression) IUserAPIKeyDo { + return u.withDO(u.DO.Clauses(conds...)) +} + +func (u userAPIKeyDo) Returning(value interface{}, columns ...string) IUserAPIKeyDo { + return u.withDO(u.DO.Returning(value, columns...)) +} + +func (u userAPIKeyDo) Not(conds ...gen.Condition) IUserAPIKeyDo { + return u.withDO(u.DO.Not(conds...)) +} + +func (u userAPIKeyDo) Or(conds ...gen.Condition) IUserAPIKeyDo { + return u.withDO(u.DO.Or(conds...)) +} + +func (u userAPIKeyDo) Select(conds ...field.Expr) IUserAPIKeyDo { + return u.withDO(u.DO.Select(conds...)) +} + +func (u userAPIKeyDo) Where(conds ...gen.Condition) IUserAPIKeyDo { + return u.withDO(u.DO.Where(conds...)) +} + +func (u userAPIKeyDo) Order(conds ...field.Expr) IUserAPIKeyDo { + return u.withDO(u.DO.Order(conds...)) +} + +func (u userAPIKeyDo) Distinct(cols ...field.Expr) IUserAPIKeyDo { + return u.withDO(u.DO.Distinct(cols...)) +} + +func (u userAPIKeyDo) Omit(cols ...field.Expr) IUserAPIKeyDo { + return u.withDO(u.DO.Omit(cols...)) +} + +func (u userAPIKeyDo) Join(table schema.Tabler, on ...field.Expr) IUserAPIKeyDo { + return u.withDO(u.DO.Join(table, on...)) +} + +func (u userAPIKeyDo) LeftJoin(table schema.Tabler, on ...field.Expr) IUserAPIKeyDo 
{ + return u.withDO(u.DO.LeftJoin(table, on...)) +} + +func (u userAPIKeyDo) RightJoin(table schema.Tabler, on ...field.Expr) IUserAPIKeyDo { + return u.withDO(u.DO.RightJoin(table, on...)) +} + +func (u userAPIKeyDo) Group(cols ...field.Expr) IUserAPIKeyDo { + return u.withDO(u.DO.Group(cols...)) +} + +func (u userAPIKeyDo) Having(conds ...gen.Condition) IUserAPIKeyDo { + return u.withDO(u.DO.Having(conds...)) +} + +func (u userAPIKeyDo) Limit(limit int) IUserAPIKeyDo { + return u.withDO(u.DO.Limit(limit)) +} + +func (u userAPIKeyDo) Offset(offset int) IUserAPIKeyDo { + return u.withDO(u.DO.Offset(offset)) +} + +func (u userAPIKeyDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IUserAPIKeyDo { + return u.withDO(u.DO.Scopes(funcs...)) +} + +func (u userAPIKeyDo) Unscoped() IUserAPIKeyDo { + return u.withDO(u.DO.Unscoped()) +} + +func (u userAPIKeyDo) Create(values ...*model.UserAPIKey) error { + if len(values) == 0 { + return nil + } + return u.DO.Create(values) +} + +func (u userAPIKeyDo) CreateInBatches(values []*model.UserAPIKey, batchSize int) error { + return u.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (u userAPIKeyDo) Save(values ...*model.UserAPIKey) error { + if len(values) == 0 { + return nil + } + return u.DO.Save(values) +} + +func (u userAPIKeyDo) First() (*model.UserAPIKey, error) { + if result, err := u.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.UserAPIKey), nil + } +} + +func (u userAPIKeyDo) Take() (*model.UserAPIKey, error) { + if result, err := u.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.UserAPIKey), nil + } +} + +func (u userAPIKeyDo) Last() (*model.UserAPIKey, error) { + if result, err := u.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.UserAPIKey), nil + } +} + +func (u userAPIKeyDo) Find() ([]*model.UserAPIKey, error) { + result, err := u.DO.Find() + return result.([]*model.UserAPIKey), err +} + +func (u userAPIKeyDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserAPIKey, err error) { + buf := make([]*model.UserAPIKey, 0, batchSize) + err = u.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (u userAPIKeyDo) FindInBatches(result *[]*model.UserAPIKey, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return u.DO.FindInBatches(result, batchSize, fc) +} + +func (u userAPIKeyDo) Attrs(attrs ...field.AssignExpr) IUserAPIKeyDo { + return u.withDO(u.DO.Attrs(attrs...)) +} + +func (u userAPIKeyDo) Assign(attrs ...field.AssignExpr) IUserAPIKeyDo { + return u.withDO(u.DO.Assign(attrs...)) +} + +func (u userAPIKeyDo) Joins(fields ...field.RelationField) IUserAPIKeyDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Joins(_f)) + } + return &u +} + +func (u userAPIKeyDo) Preload(fields ...field.RelationField) IUserAPIKeyDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Preload(_f)) + } + return &u +} + +func (u userAPIKeyDo) FirstOrInit() (*model.UserAPIKey, error) { + if result, err := u.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.UserAPIKey), nil + } +} + +func (u userAPIKeyDo) FirstOrCreate() (*model.UserAPIKey, error) { + if result, err := u.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.UserAPIKey), nil + } +} + +func (u userAPIKeyDo) FindByPage(offset int, limit int) (result []*model.UserAPIKey, count int64, err error) { + result, err = u.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = u.Offset(-1).Limit(-1).Count() + return +} + +func (u userAPIKeyDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = u.Count() + if err != nil { + return + } + + err = u.Offset(offset).Limit(limit).Scan(result) + return +} + +func (u userAPIKeyDo) Scan(result interface{}) (err error) { + return u.DO.Scan(result) +} + +func (u userAPIKeyDo) Delete(models ...*model.UserAPIKey) (result gen.ResultInfo, err error) { + return 
u.DO.Delete(models) +} + +func (u *userAPIKeyDo) withDO(do gen.Dao) *userAPIKeyDo { + u.DO = *do.(*gen.DO) + return u +} diff --git a/next/models_generated/user_notifications.gen.go b/next/models_generated/user_notifications.gen.go new file mode 100644 index 000000000..88b687f5f --- /dev/null +++ b/next/models_generated/user_notifications.gen.go @@ -0,0 +1,404 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newUserNotification(db *gorm.DB, opts ...gen.DOOption) userNotification { + _userNotification := userNotification{} + + _userNotification.userNotificationDo.UseDB(db, opts...) + _userNotification.userNotificationDo.UseModel(&model.UserNotification{}) + + tableName := _userNotification.userNotificationDo.TableName() + _userNotification.ALL = field.NewAsterisk(tableName) + _userNotification.ID = field.NewString(tableName, "id") + _userNotification.UserID = field.NewString(tableName, "user_id") + _userNotification.IsRead = field.NewBool(tableName, "is_read") + _userNotification.IsSeen = field.NewBool(tableName, "is_seen") + _userNotification.Payload = field.NewString(tableName, "payload") + _userNotification.CreatedAt = field.NewTime(tableName, "created_at") + _userNotification.UpdatedAt = field.NewTime(tableName, "updated_at") + + _userNotification.fillFieldMap() + + return _userNotification +} + +type userNotification struct { + userNotificationDo + + ALL field.Asterisk + ID field.String + UserID field.String + IsRead field.Bool + IsSeen field.Bool + Payload field.String + CreatedAt field.Time + UpdatedAt field.Time + + fieldMap map[string]field.Expr +} + +func (u userNotification) Table(newTableName string) 
*userNotification { + u.userNotificationDo.UseTable(newTableName) + return u.updateTableName(newTableName) +} + +func (u userNotification) As(alias string) *userNotification { + u.userNotificationDo.DO = *(u.userNotificationDo.As(alias).(*gen.DO)) + return u.updateTableName(alias) +} + +func (u *userNotification) updateTableName(table string) *userNotification { + u.ALL = field.NewAsterisk(table) + u.ID = field.NewString(table, "id") + u.UserID = field.NewString(table, "user_id") + u.IsRead = field.NewBool(table, "is_read") + u.IsSeen = field.NewBool(table, "is_seen") + u.Payload = field.NewString(table, "payload") + u.CreatedAt = field.NewTime(table, "created_at") + u.UpdatedAt = field.NewTime(table, "updated_at") + + u.fillFieldMap() + + return u +} + +func (u *userNotification) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := u.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (u *userNotification) fillFieldMap() { + u.fieldMap = make(map[string]field.Expr, 7) + u.fieldMap["id"] = u.ID + u.fieldMap["user_id"] = u.UserID + u.fieldMap["is_read"] = u.IsRead + u.fieldMap["is_seen"] = u.IsSeen + u.fieldMap["payload"] = u.Payload + u.fieldMap["created_at"] = u.CreatedAt + u.fieldMap["updated_at"] = u.UpdatedAt +} + +func (u userNotification) clone(db *gorm.DB) userNotification { + u.userNotificationDo.ReplaceConnPool(db.Statement.ConnPool) + return u +} + +func (u userNotification) replaceDB(db *gorm.DB) userNotification { + u.userNotificationDo.ReplaceDB(db) + return u +} + +type userNotificationDo struct{ gen.DO } + +type IUserNotificationDo interface { + gen.SubQuery + Debug() IUserNotificationDo + WithContext(ctx context.Context) IUserNotificationDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IUserNotificationDo + WriteDB() IUserNotificationDo + As(alias string) gen.Dao + Session(config *gorm.Session) IUserNotificationDo + 
Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IUserNotificationDo + Not(conds ...gen.Condition) IUserNotificationDo + Or(conds ...gen.Condition) IUserNotificationDo + Select(conds ...field.Expr) IUserNotificationDo + Where(conds ...gen.Condition) IUserNotificationDo + Order(conds ...field.Expr) IUserNotificationDo + Distinct(cols ...field.Expr) IUserNotificationDo + Omit(cols ...field.Expr) IUserNotificationDo + Join(table schema.Tabler, on ...field.Expr) IUserNotificationDo + LeftJoin(table schema.Tabler, on ...field.Expr) IUserNotificationDo + RightJoin(table schema.Tabler, on ...field.Expr) IUserNotificationDo + Group(cols ...field.Expr) IUserNotificationDo + Having(conds ...gen.Condition) IUserNotificationDo + Limit(limit int) IUserNotificationDo + Offset(offset int) IUserNotificationDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IUserNotificationDo + Unscoped() IUserNotificationDo + Create(values ...*model.UserNotification) error + CreateInBatches(values []*model.UserNotification, batchSize int) error + Save(values ...*model.UserNotification) error + First() (*model.UserNotification, error) + Take() (*model.UserNotification, error) + Last() (*model.UserNotification, error) + Find() ([]*model.UserNotification, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserNotification, err error) + FindInBatches(result *[]*model.UserNotification, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.UserNotification) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns 
...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IUserNotificationDo + Assign(attrs ...field.AssignExpr) IUserNotificationDo + Joins(fields ...field.RelationField) IUserNotificationDo + Preload(fields ...field.RelationField) IUserNotificationDo + FirstOrInit() (*model.UserNotification, error) + FirstOrCreate() (*model.UserNotification, error) + FindByPage(offset int, limit int) (result []*model.UserNotification, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IUserNotificationDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (u userNotificationDo) Debug() IUserNotificationDo { + return u.withDO(u.DO.Debug()) +} + +func (u userNotificationDo) WithContext(ctx context.Context) IUserNotificationDo { + return u.withDO(u.DO.WithContext(ctx)) +} + +func (u userNotificationDo) ReadDB() IUserNotificationDo { + return u.Clauses(dbresolver.Read) +} + +func (u userNotificationDo) WriteDB() IUserNotificationDo { + return u.Clauses(dbresolver.Write) +} + +func (u userNotificationDo) Session(config *gorm.Session) IUserNotificationDo { + return u.withDO(u.DO.Session(config)) +} + +func (u userNotificationDo) Clauses(conds ...clause.Expression) IUserNotificationDo { + return u.withDO(u.DO.Clauses(conds...)) +} + +func (u userNotificationDo) Returning(value interface{}, columns ...string) IUserNotificationDo { + return u.withDO(u.DO.Returning(value, columns...)) +} + +func (u userNotificationDo) Not(conds ...gen.Condition) IUserNotificationDo { + return u.withDO(u.DO.Not(conds...)) +} + +func (u userNotificationDo) Or(conds ...gen.Condition) IUserNotificationDo { + return u.withDO(u.DO.Or(conds...)) +} + +func (u userNotificationDo) Select(conds ...field.Expr) IUserNotificationDo { + return 
u.withDO(u.DO.Select(conds...)) +} + +func (u userNotificationDo) Where(conds ...gen.Condition) IUserNotificationDo { + return u.withDO(u.DO.Where(conds...)) +} + +func (u userNotificationDo) Order(conds ...field.Expr) IUserNotificationDo { + return u.withDO(u.DO.Order(conds...)) +} + +func (u userNotificationDo) Distinct(cols ...field.Expr) IUserNotificationDo { + return u.withDO(u.DO.Distinct(cols...)) +} + +func (u userNotificationDo) Omit(cols ...field.Expr) IUserNotificationDo { + return u.withDO(u.DO.Omit(cols...)) +} + +func (u userNotificationDo) Join(table schema.Tabler, on ...field.Expr) IUserNotificationDo { + return u.withDO(u.DO.Join(table, on...)) +} + +func (u userNotificationDo) LeftJoin(table schema.Tabler, on ...field.Expr) IUserNotificationDo { + return u.withDO(u.DO.LeftJoin(table, on...)) +} + +func (u userNotificationDo) RightJoin(table schema.Tabler, on ...field.Expr) IUserNotificationDo { + return u.withDO(u.DO.RightJoin(table, on...)) +} + +func (u userNotificationDo) Group(cols ...field.Expr) IUserNotificationDo { + return u.withDO(u.DO.Group(cols...)) +} + +func (u userNotificationDo) Having(conds ...gen.Condition) IUserNotificationDo { + return u.withDO(u.DO.Having(conds...)) +} + +func (u userNotificationDo) Limit(limit int) IUserNotificationDo { + return u.withDO(u.DO.Limit(limit)) +} + +func (u userNotificationDo) Offset(offset int) IUserNotificationDo { + return u.withDO(u.DO.Offset(offset)) +} + +func (u userNotificationDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IUserNotificationDo { + return u.withDO(u.DO.Scopes(funcs...)) +} + +func (u userNotificationDo) Unscoped() IUserNotificationDo { + return u.withDO(u.DO.Unscoped()) +} + +func (u userNotificationDo) Create(values ...*model.UserNotification) error { + if len(values) == 0 { + return nil + } + return u.DO.Create(values) +} + +func (u userNotificationDo) CreateInBatches(values []*model.UserNotification, batchSize int) error { + return u.DO.CreateInBatches(values, batchSize) +} + 
+// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (u userNotificationDo) Save(values ...*model.UserNotification) error { + if len(values) == 0 { + return nil + } + return u.DO.Save(values) +} + +func (u userNotificationDo) First() (*model.UserNotification, error) { + if result, err := u.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.UserNotification), nil + } +} + +func (u userNotificationDo) Take() (*model.UserNotification, error) { + if result, err := u.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.UserNotification), nil + } +} + +func (u userNotificationDo) Last() (*model.UserNotification, error) { + if result, err := u.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.UserNotification), nil + } +} + +func (u userNotificationDo) Find() ([]*model.UserNotification, error) { + result, err := u.DO.Find() + return result.([]*model.UserNotification), err +} + +func (u userNotificationDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserNotification, err error) { + buf := make([]*model.UserNotification, 0, batchSize) + err = u.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (u userNotificationDo) FindInBatches(result *[]*model.UserNotification, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return u.DO.FindInBatches(result, batchSize, fc) +} + +func (u userNotificationDo) Attrs(attrs ...field.AssignExpr) IUserNotificationDo { + return u.withDO(u.DO.Attrs(attrs...)) +} + +func (u userNotificationDo) Assign(attrs ...field.AssignExpr) IUserNotificationDo { + return u.withDO(u.DO.Assign(attrs...)) +} + +func (u userNotificationDo) Joins(fields ...field.RelationField) IUserNotificationDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Joins(_f)) + } + return &u +} + +func (u userNotificationDo) Preload(fields ...field.RelationField) IUserNotificationDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Preload(_f)) + } + return &u +} + +func (u userNotificationDo) FirstOrInit() (*model.UserNotification, error) { + if result, err := u.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.UserNotification), nil + } +} + +func (u userNotificationDo) FirstOrCreate() (*model.UserNotification, error) { + if result, err := u.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.UserNotification), nil + } +} + +func (u userNotificationDo) FindByPage(offset int, limit int) (result []*model.UserNotification, count int64, err error) { + result, err = u.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = u.Offset(-1).Limit(-1).Count() + return +} + +func (u userNotificationDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = u.Count() + if err != nil { + return + } + + err = u.Offset(offset).Limit(limit).Scan(result) + return +} + +func (u userNotificationDo) Scan(result interface{}) (err error) { + return 
u.DO.Scan(result) +} + +func (u userNotificationDo) Delete(models ...*model.UserNotification) (result gen.ResultInfo, err error) { + return u.DO.Delete(models) +} + +func (u *userNotificationDo) withDO(do gen.Dao) *userNotificationDo { + u.DO = *do.(*gen.DO) + return u +} diff --git a/next/models_generated/user_onboarding.gen.go b/next/models_generated/user_onboarding.gen.go new file mode 100644 index 000000000..3df65210b --- /dev/null +++ b/next/models_generated/user_onboarding.gen.go @@ -0,0 +1,388 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newUserOnboarding(db *gorm.DB, opts ...gen.DOOption) userOnboarding { + _userOnboarding := userOnboarding{} + + _userOnboarding.userOnboardingDo.UseDB(db, opts...) 
+ _userOnboarding.userOnboardingDo.UseModel(&model.UserOnboarding{}) + + tableName := _userOnboarding.userOnboardingDo.TableName() + _userOnboarding.ALL = field.NewAsterisk(tableName) + _userOnboarding.UserID = field.NewString(tableName, "user_id") + _userOnboarding.CreatedAt = field.NewTime(tableName, "created_at") + _userOnboarding.AcceptedTerms = field.NewBool(tableName, "accepted_terms") + + _userOnboarding.fillFieldMap() + + return _userOnboarding +} + +type userOnboarding struct { + userOnboardingDo + + ALL field.Asterisk + UserID field.String + CreatedAt field.Time + AcceptedTerms field.Bool + + fieldMap map[string]field.Expr +} + +func (u userOnboarding) Table(newTableName string) *userOnboarding { + u.userOnboardingDo.UseTable(newTableName) + return u.updateTableName(newTableName) +} + +func (u userOnboarding) As(alias string) *userOnboarding { + u.userOnboardingDo.DO = *(u.userOnboardingDo.As(alias).(*gen.DO)) + return u.updateTableName(alias) +} + +func (u *userOnboarding) updateTableName(table string) *userOnboarding { + u.ALL = field.NewAsterisk(table) + u.UserID = field.NewString(table, "user_id") + u.CreatedAt = field.NewTime(table, "created_at") + u.AcceptedTerms = field.NewBool(table, "accepted_terms") + + u.fillFieldMap() + + return u +} + +func (u *userOnboarding) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := u.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (u *userOnboarding) fillFieldMap() { + u.fieldMap = make(map[string]field.Expr, 3) + u.fieldMap["user_id"] = u.UserID + u.fieldMap["created_at"] = u.CreatedAt + u.fieldMap["accepted_terms"] = u.AcceptedTerms +} + +func (u userOnboarding) clone(db *gorm.DB) userOnboarding { + u.userOnboardingDo.ReplaceConnPool(db.Statement.ConnPool) + return u +} + +func (u userOnboarding) replaceDB(db *gorm.DB) userOnboarding { + u.userOnboardingDo.ReplaceDB(db) + return u +} + +type userOnboardingDo 
struct{ gen.DO } + +type IUserOnboardingDo interface { + gen.SubQuery + Debug() IUserOnboardingDo + WithContext(ctx context.Context) IUserOnboardingDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IUserOnboardingDo + WriteDB() IUserOnboardingDo + As(alias string) gen.Dao + Session(config *gorm.Session) IUserOnboardingDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IUserOnboardingDo + Not(conds ...gen.Condition) IUserOnboardingDo + Or(conds ...gen.Condition) IUserOnboardingDo + Select(conds ...field.Expr) IUserOnboardingDo + Where(conds ...gen.Condition) IUserOnboardingDo + Order(conds ...field.Expr) IUserOnboardingDo + Distinct(cols ...field.Expr) IUserOnboardingDo + Omit(cols ...field.Expr) IUserOnboardingDo + Join(table schema.Tabler, on ...field.Expr) IUserOnboardingDo + LeftJoin(table schema.Tabler, on ...field.Expr) IUserOnboardingDo + RightJoin(table schema.Tabler, on ...field.Expr) IUserOnboardingDo + Group(cols ...field.Expr) IUserOnboardingDo + Having(conds ...gen.Condition) IUserOnboardingDo + Limit(limit int) IUserOnboardingDo + Offset(offset int) IUserOnboardingDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IUserOnboardingDo + Unscoped() IUserOnboardingDo + Create(values ...*model.UserOnboarding) error + CreateInBatches(values []*model.UserOnboarding, batchSize int) error + Save(values ...*model.UserOnboarding) error + First() (*model.UserOnboarding, error) + Take() (*model.UserOnboarding, error) + Last() (*model.UserOnboarding, error) + Find() ([]*model.UserOnboarding, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserOnboarding, err error) + FindInBatches(result *[]*model.UserOnboarding, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.UserOnboarding) (info gen.ResultInfo, err error) + Update(column field.Expr, value 
interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IUserOnboardingDo + Assign(attrs ...field.AssignExpr) IUserOnboardingDo + Joins(fields ...field.RelationField) IUserOnboardingDo + Preload(fields ...field.RelationField) IUserOnboardingDo + FirstOrInit() (*model.UserOnboarding, error) + FirstOrCreate() (*model.UserOnboarding, error) + FindByPage(offset int, limit int) (result []*model.UserOnboarding, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IUserOnboardingDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (u userOnboardingDo) Debug() IUserOnboardingDo { + return u.withDO(u.DO.Debug()) +} + +func (u userOnboardingDo) WithContext(ctx context.Context) IUserOnboardingDo { + return u.withDO(u.DO.WithContext(ctx)) +} + +func (u userOnboardingDo) ReadDB() IUserOnboardingDo { + return u.Clauses(dbresolver.Read) +} + +func (u userOnboardingDo) WriteDB() IUserOnboardingDo { + return u.Clauses(dbresolver.Write) +} + +func (u userOnboardingDo) Session(config *gorm.Session) IUserOnboardingDo { + return u.withDO(u.DO.Session(config)) +} + +func (u userOnboardingDo) Clauses(conds ...clause.Expression) IUserOnboardingDo { + return u.withDO(u.DO.Clauses(conds...)) +} + +func (u userOnboardingDo) Returning(value interface{}, columns ...string) IUserOnboardingDo { + return u.withDO(u.DO.Returning(value, columns...)) +} + +func (u userOnboardingDo) Not(conds ...gen.Condition) IUserOnboardingDo { 
+ return u.withDO(u.DO.Not(conds...)) +} + +func (u userOnboardingDo) Or(conds ...gen.Condition) IUserOnboardingDo { + return u.withDO(u.DO.Or(conds...)) +} + +func (u userOnboardingDo) Select(conds ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.Select(conds...)) +} + +func (u userOnboardingDo) Where(conds ...gen.Condition) IUserOnboardingDo { + return u.withDO(u.DO.Where(conds...)) +} + +func (u userOnboardingDo) Order(conds ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.Order(conds...)) +} + +func (u userOnboardingDo) Distinct(cols ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.Distinct(cols...)) +} + +func (u userOnboardingDo) Omit(cols ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.Omit(cols...)) +} + +func (u userOnboardingDo) Join(table schema.Tabler, on ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.Join(table, on...)) +} + +func (u userOnboardingDo) LeftJoin(table schema.Tabler, on ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.LeftJoin(table, on...)) +} + +func (u userOnboardingDo) RightJoin(table schema.Tabler, on ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.RightJoin(table, on...)) +} + +func (u userOnboardingDo) Group(cols ...field.Expr) IUserOnboardingDo { + return u.withDO(u.DO.Group(cols...)) +} + +func (u userOnboardingDo) Having(conds ...gen.Condition) IUserOnboardingDo { + return u.withDO(u.DO.Having(conds...)) +} + +func (u userOnboardingDo) Limit(limit int) IUserOnboardingDo { + return u.withDO(u.DO.Limit(limit)) +} + +func (u userOnboardingDo) Offset(offset int) IUserOnboardingDo { + return u.withDO(u.DO.Offset(offset)) +} + +func (u userOnboardingDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IUserOnboardingDo { + return u.withDO(u.DO.Scopes(funcs...)) +} + +func (u userOnboardingDo) Unscoped() IUserOnboardingDo { + return u.withDO(u.DO.Unscoped()) +} + +func (u userOnboardingDo) Create(values ...*model.UserOnboarding) error { + if len(values) == 0 { + return nil + } + 
return u.DO.Create(values) +} + +func (u userOnboardingDo) CreateInBatches(values []*model.UserOnboarding, batchSize int) error { + return u.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (u userOnboardingDo) Save(values ...*model.UserOnboarding) error { + if len(values) == 0 { + return nil + } + return u.DO.Save(values) +} + +func (u userOnboardingDo) First() (*model.UserOnboarding, error) { + if result, err := u.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.UserOnboarding), nil + } +} + +func (u userOnboardingDo) Take() (*model.UserOnboarding, error) { + if result, err := u.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.UserOnboarding), nil + } +} + +func (u userOnboardingDo) Last() (*model.UserOnboarding, error) { + if result, err := u.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.UserOnboarding), nil + } +} + +func (u userOnboardingDo) Find() ([]*model.UserOnboarding, error) { + result, err := u.DO.Find() + return result.([]*model.UserOnboarding), err +} + +func (u userOnboardingDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserOnboarding, err error) { + buf := make([]*model.UserOnboarding, 0, batchSize) + err = u.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (u userOnboardingDo) FindInBatches(result *[]*model.UserOnboarding, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return u.DO.FindInBatches(result, batchSize, fc) +} + +func (u userOnboardingDo) Attrs(attrs ...field.AssignExpr) IUserOnboardingDo { + return u.withDO(u.DO.Attrs(attrs...)) +} + +func (u userOnboardingDo) Assign(attrs ...field.AssignExpr) IUserOnboardingDo { + return u.withDO(u.DO.Assign(attrs...)) +} + +func (u userOnboardingDo) Joins(fields ...field.RelationField) IUserOnboardingDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Joins(_f)) + } + return &u +} + +func (u userOnboardingDo) Preload(fields ...field.RelationField) IUserOnboardingDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Preload(_f)) + } + return &u +} + +func (u userOnboardingDo) FirstOrInit() (*model.UserOnboarding, error) { + if result, err := u.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.UserOnboarding), nil + } +} + +func (u userOnboardingDo) FirstOrCreate() (*model.UserOnboarding, error) { + if result, err := u.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.UserOnboarding), nil + } +} + +func (u userOnboardingDo) FindByPage(offset int, limit int) (result []*model.UserOnboarding, count int64, err error) { + result, err = u.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = u.Offset(-1).Limit(-1).Count() + return +} + +func (u userOnboardingDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = u.Count() + if err != nil { + return + } + + err = u.Offset(offset).Limit(limit).Scan(result) + return +} + +func (u userOnboardingDo) Scan(result interface{}) (err error) { + return u.DO.Scan(result) +} + +func (u userOnboardingDo) 
Delete(models ...*model.UserOnboarding) (result gen.ResultInfo, err error) { + return u.DO.Delete(models) +} + +func (u *userOnboardingDo) withDO(do gen.Dao) *userOnboardingDo { + u.DO = *do.(*gen.DO) + return u +} diff --git a/next/models_generated/user_private_info.gen.go b/next/models_generated/user_private_info.gen.go new file mode 100644 index 000000000..9c5b39712 --- /dev/null +++ b/next/models_generated/user_private_info.gen.go @@ -0,0 +1,388 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newUserPrivateInfo(db *gorm.DB, opts ...gen.DOOption) userPrivateInfo { + _userPrivateInfo := userPrivateInfo{} + + _userPrivateInfo.userPrivateInfoDo.UseDB(db, opts...) 
+ _userPrivateInfo.userPrivateInfoDo.UseModel(&model.UserPrivateInfo{}) + + tableName := _userPrivateInfo.userPrivateInfoDo.TableName() + _userPrivateInfo.ALL = field.NewAsterisk(tableName) + _userPrivateInfo.ID = field.NewString(tableName, "id") + _userPrivateInfo.CreatedAt = field.NewTime(tableName, "created_at") + _userPrivateInfo.DefaultOrganization = field.NewString(tableName, "default_organization") + + _userPrivateInfo.fillFieldMap() + + return _userPrivateInfo +} + +type userPrivateInfo struct { + userPrivateInfoDo + + ALL field.Asterisk + ID field.String + CreatedAt field.Time + DefaultOrganization field.String + + fieldMap map[string]field.Expr +} + +func (u userPrivateInfo) Table(newTableName string) *userPrivateInfo { + u.userPrivateInfoDo.UseTable(newTableName) + return u.updateTableName(newTableName) +} + +func (u userPrivateInfo) As(alias string) *userPrivateInfo { + u.userPrivateInfoDo.DO = *(u.userPrivateInfoDo.As(alias).(*gen.DO)) + return u.updateTableName(alias) +} + +func (u *userPrivateInfo) updateTableName(table string) *userPrivateInfo { + u.ALL = field.NewAsterisk(table) + u.ID = field.NewString(table, "id") + u.CreatedAt = field.NewTime(table, "created_at") + u.DefaultOrganization = field.NewString(table, "default_organization") + + u.fillFieldMap() + + return u +} + +func (u *userPrivateInfo) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := u.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (u *userPrivateInfo) fillFieldMap() { + u.fieldMap = make(map[string]field.Expr, 3) + u.fieldMap["id"] = u.ID + u.fieldMap["created_at"] = u.CreatedAt + u.fieldMap["default_organization"] = u.DefaultOrganization +} + +func (u userPrivateInfo) clone(db *gorm.DB) userPrivateInfo { + u.userPrivateInfoDo.ReplaceConnPool(db.Statement.ConnPool) + return u +} + +func (u userPrivateInfo) replaceDB(db *gorm.DB) userPrivateInfo { + 
u.userPrivateInfoDo.ReplaceDB(db) + return u +} + +type userPrivateInfoDo struct{ gen.DO } + +type IUserPrivateInfoDo interface { + gen.SubQuery + Debug() IUserPrivateInfoDo + WithContext(ctx context.Context) IUserPrivateInfoDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IUserPrivateInfoDo + WriteDB() IUserPrivateInfoDo + As(alias string) gen.Dao + Session(config *gorm.Session) IUserPrivateInfoDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IUserPrivateInfoDo + Not(conds ...gen.Condition) IUserPrivateInfoDo + Or(conds ...gen.Condition) IUserPrivateInfoDo + Select(conds ...field.Expr) IUserPrivateInfoDo + Where(conds ...gen.Condition) IUserPrivateInfoDo + Order(conds ...field.Expr) IUserPrivateInfoDo + Distinct(cols ...field.Expr) IUserPrivateInfoDo + Omit(cols ...field.Expr) IUserPrivateInfoDo + Join(table schema.Tabler, on ...field.Expr) IUserPrivateInfoDo + LeftJoin(table schema.Tabler, on ...field.Expr) IUserPrivateInfoDo + RightJoin(table schema.Tabler, on ...field.Expr) IUserPrivateInfoDo + Group(cols ...field.Expr) IUserPrivateInfoDo + Having(conds ...gen.Condition) IUserPrivateInfoDo + Limit(limit int) IUserPrivateInfoDo + Offset(offset int) IUserPrivateInfoDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IUserPrivateInfoDo + Unscoped() IUserPrivateInfoDo + Create(values ...*model.UserPrivateInfo) error + CreateInBatches(values []*model.UserPrivateInfo, batchSize int) error + Save(values ...*model.UserPrivateInfo) error + First() (*model.UserPrivateInfo, error) + Take() (*model.UserPrivateInfo, error) + Last() (*model.UserPrivateInfo, error) + Find() ([]*model.UserPrivateInfo, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserPrivateInfo, err error) + FindInBatches(result *[]*model.UserPrivateInfo, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + 
Delete(...*model.UserPrivateInfo) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IUserPrivateInfoDo + Assign(attrs ...field.AssignExpr) IUserPrivateInfoDo + Joins(fields ...field.RelationField) IUserPrivateInfoDo + Preload(fields ...field.RelationField) IUserPrivateInfoDo + FirstOrInit() (*model.UserPrivateInfo, error) + FirstOrCreate() (*model.UserPrivateInfo, error) + FindByPage(offset int, limit int) (result []*model.UserPrivateInfo, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IUserPrivateInfoDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (u userPrivateInfoDo) Debug() IUserPrivateInfoDo { + return u.withDO(u.DO.Debug()) +} + +func (u userPrivateInfoDo) WithContext(ctx context.Context) IUserPrivateInfoDo { + return u.withDO(u.DO.WithContext(ctx)) +} + +func (u userPrivateInfoDo) ReadDB() IUserPrivateInfoDo { + return u.Clauses(dbresolver.Read) +} + +func (u userPrivateInfoDo) WriteDB() IUserPrivateInfoDo { + return u.Clauses(dbresolver.Write) +} + +func (u userPrivateInfoDo) Session(config *gorm.Session) IUserPrivateInfoDo { + return u.withDO(u.DO.Session(config)) +} + +func (u userPrivateInfoDo) Clauses(conds ...clause.Expression) IUserPrivateInfoDo { + return u.withDO(u.DO.Clauses(conds...)) +} + +func (u userPrivateInfoDo) Returning(value interface{}, columns ...string) IUserPrivateInfoDo { + return 
u.withDO(u.DO.Returning(value, columns...)) +} + +func (u userPrivateInfoDo) Not(conds ...gen.Condition) IUserPrivateInfoDo { + return u.withDO(u.DO.Not(conds...)) +} + +func (u userPrivateInfoDo) Or(conds ...gen.Condition) IUserPrivateInfoDo { + return u.withDO(u.DO.Or(conds...)) +} + +func (u userPrivateInfoDo) Select(conds ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.Select(conds...)) +} + +func (u userPrivateInfoDo) Where(conds ...gen.Condition) IUserPrivateInfoDo { + return u.withDO(u.DO.Where(conds...)) +} + +func (u userPrivateInfoDo) Order(conds ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.Order(conds...)) +} + +func (u userPrivateInfoDo) Distinct(cols ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.Distinct(cols...)) +} + +func (u userPrivateInfoDo) Omit(cols ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.Omit(cols...)) +} + +func (u userPrivateInfoDo) Join(table schema.Tabler, on ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.Join(table, on...)) +} + +func (u userPrivateInfoDo) LeftJoin(table schema.Tabler, on ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.LeftJoin(table, on...)) +} + +func (u userPrivateInfoDo) RightJoin(table schema.Tabler, on ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.RightJoin(table, on...)) +} + +func (u userPrivateInfoDo) Group(cols ...field.Expr) IUserPrivateInfoDo { + return u.withDO(u.DO.Group(cols...)) +} + +func (u userPrivateInfoDo) Having(conds ...gen.Condition) IUserPrivateInfoDo { + return u.withDO(u.DO.Having(conds...)) +} + +func (u userPrivateInfoDo) Limit(limit int) IUserPrivateInfoDo { + return u.withDO(u.DO.Limit(limit)) +} + +func (u userPrivateInfoDo) Offset(offset int) IUserPrivateInfoDo { + return u.withDO(u.DO.Offset(offset)) +} + +func (u userPrivateInfoDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IUserPrivateInfoDo { + return u.withDO(u.DO.Scopes(funcs...)) +} + +func (u userPrivateInfoDo) Unscoped() IUserPrivateInfoDo { + 
return u.withDO(u.DO.Unscoped()) +} + +func (u userPrivateInfoDo) Create(values ...*model.UserPrivateInfo) error { + if len(values) == 0 { + return nil + } + return u.DO.Create(values) +} + +func (u userPrivateInfoDo) CreateInBatches(values []*model.UserPrivateInfo, batchSize int) error { + return u.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (u userPrivateInfoDo) Save(values ...*model.UserPrivateInfo) error { + if len(values) == 0 { + return nil + } + return u.DO.Save(values) +} + +func (u userPrivateInfoDo) First() (*model.UserPrivateInfo, error) { + if result, err := u.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.UserPrivateInfo), nil + } +} + +func (u userPrivateInfoDo) Take() (*model.UserPrivateInfo, error) { + if result, err := u.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.UserPrivateInfo), nil + } +} + +func (u userPrivateInfoDo) Last() (*model.UserPrivateInfo, error) { + if result, err := u.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.UserPrivateInfo), nil + } +} + +func (u userPrivateInfoDo) Find() ([]*model.UserPrivateInfo, error) { + result, err := u.DO.Find() + return result.([]*model.UserPrivateInfo), err +} + +func (u userPrivateInfoDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserPrivateInfo, err error) { + buf := make([]*model.UserPrivateInfo, 0, batchSize) + err = u.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (u userPrivateInfoDo) FindInBatches(result *[]*model.UserPrivateInfo, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return u.DO.FindInBatches(result, batchSize, fc) +} + +func (u userPrivateInfoDo) Attrs(attrs ...field.AssignExpr) IUserPrivateInfoDo { + return u.withDO(u.DO.Attrs(attrs...)) +} + +func (u userPrivateInfoDo) Assign(attrs ...field.AssignExpr) IUserPrivateInfoDo { + return u.withDO(u.DO.Assign(attrs...)) +} + +func (u userPrivateInfoDo) Joins(fields ...field.RelationField) IUserPrivateInfoDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Joins(_f)) + } + return &u +} + +func (u userPrivateInfoDo) Preload(fields ...field.RelationField) IUserPrivateInfoDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Preload(_f)) + } + return &u +} + +func (u userPrivateInfoDo) FirstOrInit() (*model.UserPrivateInfo, error) { + if result, err := u.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.UserPrivateInfo), nil + } +} + +func (u userPrivateInfoDo) FirstOrCreate() (*model.UserPrivateInfo, error) { + if result, err := u.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.UserPrivateInfo), nil + } +} + +func (u userPrivateInfoDo) FindByPage(offset int, limit int) (result []*model.UserPrivateInfo, count int64, err error) { + result, err = u.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = u.Offset(-1).Limit(-1).Count() + return +} + +func (u userPrivateInfoDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = u.Count() + if err != nil { + return + } + + err = u.Offset(offset).Limit(limit).Scan(result) + return +} + +func (u userPrivateInfoDo) Scan(result interface{}) (err error) { + return u.DO.Scan(result) +} + +func (u 
userPrivateInfoDo) Delete(models ...*model.UserPrivateInfo) (result gen.ResultInfo, err error) { + return u.DO.Delete(models) +} + +func (u *userPrivateInfoDo) withDO(do gen.Dao) *userPrivateInfoDo { + u.DO = *do.(*gen.DO) + return u +} diff --git a/next/models_generated/user_profiles.gen.go b/next/models_generated/user_profiles.gen.go new file mode 100644 index 000000000..3bed7feaf --- /dev/null +++ b/next/models_generated/user_profiles.gen.go @@ -0,0 +1,392 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newUserProfile(db *gorm.DB, opts ...gen.DOOption) userProfile { + _userProfile := userProfile{} + + _userProfile.userProfileDo.UseDB(db, opts...) 
+ _userProfile.userProfileDo.UseModel(&model.UserProfile{}) + + tableName := _userProfile.userProfileDo.TableName() + _userProfile.ALL = field.NewAsterisk(tableName) + _userProfile.ID = field.NewString(tableName, "id") + _userProfile.FullName = field.NewString(tableName, "full_name") + _userProfile.AvatarURL = field.NewString(tableName, "avatar_url") + _userProfile.CreatedAt = field.NewTime(tableName, "created_at") + + _userProfile.fillFieldMap() + + return _userProfile +} + +type userProfile struct { + userProfileDo + + ALL field.Asterisk + ID field.String + FullName field.String + AvatarURL field.String + CreatedAt field.Time + + fieldMap map[string]field.Expr +} + +func (u userProfile) Table(newTableName string) *userProfile { + u.userProfileDo.UseTable(newTableName) + return u.updateTableName(newTableName) +} + +func (u userProfile) As(alias string) *userProfile { + u.userProfileDo.DO = *(u.userProfileDo.As(alias).(*gen.DO)) + return u.updateTableName(alias) +} + +func (u *userProfile) updateTableName(table string) *userProfile { + u.ALL = field.NewAsterisk(table) + u.ID = field.NewString(table, "id") + u.FullName = field.NewString(table, "full_name") + u.AvatarURL = field.NewString(table, "avatar_url") + u.CreatedAt = field.NewTime(table, "created_at") + + u.fillFieldMap() + + return u +} + +func (u *userProfile) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := u.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (u *userProfile) fillFieldMap() { + u.fieldMap = make(map[string]field.Expr, 4) + u.fieldMap["id"] = u.ID + u.fieldMap["full_name"] = u.FullName + u.fieldMap["avatar_url"] = u.AvatarURL + u.fieldMap["created_at"] = u.CreatedAt +} + +func (u userProfile) clone(db *gorm.DB) userProfile { + u.userProfileDo.ReplaceConnPool(db.Statement.ConnPool) + return u +} + +func (u userProfile) replaceDB(db *gorm.DB) userProfile { + u.userProfileDo.ReplaceDB(db) + 
return u +} + +type userProfileDo struct{ gen.DO } + +type IUserProfileDo interface { + gen.SubQuery + Debug() IUserProfileDo + WithContext(ctx context.Context) IUserProfileDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IUserProfileDo + WriteDB() IUserProfileDo + As(alias string) gen.Dao + Session(config *gorm.Session) IUserProfileDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IUserProfileDo + Not(conds ...gen.Condition) IUserProfileDo + Or(conds ...gen.Condition) IUserProfileDo + Select(conds ...field.Expr) IUserProfileDo + Where(conds ...gen.Condition) IUserProfileDo + Order(conds ...field.Expr) IUserProfileDo + Distinct(cols ...field.Expr) IUserProfileDo + Omit(cols ...field.Expr) IUserProfileDo + Join(table schema.Tabler, on ...field.Expr) IUserProfileDo + LeftJoin(table schema.Tabler, on ...field.Expr) IUserProfileDo + RightJoin(table schema.Tabler, on ...field.Expr) IUserProfileDo + Group(cols ...field.Expr) IUserProfileDo + Having(conds ...gen.Condition) IUserProfileDo + Limit(limit int) IUserProfileDo + Offset(offset int) IUserProfileDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IUserProfileDo + Unscoped() IUserProfileDo + Create(values ...*model.UserProfile) error + CreateInBatches(values []*model.UserProfile, batchSize int) error + Save(values ...*model.UserProfile) error + First() (*model.UserProfile, error) + Take() (*model.UserProfile, error) + Last() (*model.UserProfile, error) + Find() ([]*model.UserProfile, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserProfile, err error) + FindInBatches(result *[]*model.UserProfile, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.UserProfile) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns 
...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IUserProfileDo + Assign(attrs ...field.AssignExpr) IUserProfileDo + Joins(fields ...field.RelationField) IUserProfileDo + Preload(fields ...field.RelationField) IUserProfileDo + FirstOrInit() (*model.UserProfile, error) + FirstOrCreate() (*model.UserProfile, error) + FindByPage(offset int, limit int) (result []*model.UserProfile, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IUserProfileDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (u userProfileDo) Debug() IUserProfileDo { + return u.withDO(u.DO.Debug()) +} + +func (u userProfileDo) WithContext(ctx context.Context) IUserProfileDo { + return u.withDO(u.DO.WithContext(ctx)) +} + +func (u userProfileDo) ReadDB() IUserProfileDo { + return u.Clauses(dbresolver.Read) +} + +func (u userProfileDo) WriteDB() IUserProfileDo { + return u.Clauses(dbresolver.Write) +} + +func (u userProfileDo) Session(config *gorm.Session) IUserProfileDo { + return u.withDO(u.DO.Session(config)) +} + +func (u userProfileDo) Clauses(conds ...clause.Expression) IUserProfileDo { + return u.withDO(u.DO.Clauses(conds...)) +} + +func (u userProfileDo) Returning(value interface{}, columns ...string) IUserProfileDo { + return u.withDO(u.DO.Returning(value, columns...)) +} + +func (u userProfileDo) Not(conds ...gen.Condition) IUserProfileDo { + return u.withDO(u.DO.Not(conds...)) +} + +func (u userProfileDo) Or(conds ...gen.Condition) IUserProfileDo { + return 
u.withDO(u.DO.Or(conds...)) +} + +func (u userProfileDo) Select(conds ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.Select(conds...)) +} + +func (u userProfileDo) Where(conds ...gen.Condition) IUserProfileDo { + return u.withDO(u.DO.Where(conds...)) +} + +func (u userProfileDo) Order(conds ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.Order(conds...)) +} + +func (u userProfileDo) Distinct(cols ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.Distinct(cols...)) +} + +func (u userProfileDo) Omit(cols ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.Omit(cols...)) +} + +func (u userProfileDo) Join(table schema.Tabler, on ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.Join(table, on...)) +} + +func (u userProfileDo) LeftJoin(table schema.Tabler, on ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.LeftJoin(table, on...)) +} + +func (u userProfileDo) RightJoin(table schema.Tabler, on ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.RightJoin(table, on...)) +} + +func (u userProfileDo) Group(cols ...field.Expr) IUserProfileDo { + return u.withDO(u.DO.Group(cols...)) +} + +func (u userProfileDo) Having(conds ...gen.Condition) IUserProfileDo { + return u.withDO(u.DO.Having(conds...)) +} + +func (u userProfileDo) Limit(limit int) IUserProfileDo { + return u.withDO(u.DO.Limit(limit)) +} + +func (u userProfileDo) Offset(offset int) IUserProfileDo { + return u.withDO(u.DO.Offset(offset)) +} + +func (u userProfileDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IUserProfileDo { + return u.withDO(u.DO.Scopes(funcs...)) +} + +func (u userProfileDo) Unscoped() IUserProfileDo { + return u.withDO(u.DO.Unscoped()) +} + +func (u userProfileDo) Create(values ...*model.UserProfile) error { + if len(values) == 0 { + return nil + } + return u.DO.Create(values) +} + +func (u userProfileDo) CreateInBatches(values []*model.UserProfile, batchSize int) error { + return u.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (u userProfileDo) Save(values ...*model.UserProfile) error { + if len(values) == 0 { + return nil + } + return u.DO.Save(values) +} + +func (u userProfileDo) First() (*model.UserProfile, error) { + if result, err := u.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.UserProfile), nil + } +} + +func (u userProfileDo) Take() (*model.UserProfile, error) { + if result, err := u.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.UserProfile), nil + } +} + +func (u userProfileDo) Last() (*model.UserProfile, error) { + if result, err := u.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.UserProfile), nil + } +} + +func (u userProfileDo) Find() ([]*model.UserProfile, error) { + result, err := u.DO.Find() + return result.([]*model.UserProfile), err +} + +func (u userProfileDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserProfile, err error) { + buf := make([]*model.UserProfile, 0, batchSize) + err = u.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (u userProfileDo) FindInBatches(result *[]*model.UserProfile, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return u.DO.FindInBatches(result, batchSize, fc) +} + +func (u userProfileDo) Attrs(attrs ...field.AssignExpr) IUserProfileDo { + return u.withDO(u.DO.Attrs(attrs...)) +} + +func (u userProfileDo) Assign(attrs ...field.AssignExpr) IUserProfileDo { + return u.withDO(u.DO.Assign(attrs...)) +} + +func (u userProfileDo) Joins(fields ...field.RelationField) IUserProfileDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Joins(_f)) + } + return &u +} + +func (u userProfileDo) Preload(fields ...field.RelationField) IUserProfileDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Preload(_f)) + } + return &u +} + +func (u userProfileDo) FirstOrInit() (*model.UserProfile, error) { + if result, err := u.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.UserProfile), nil + } +} + +func (u userProfileDo) FirstOrCreate() (*model.UserProfile, error) { + if result, err := u.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.UserProfile), nil + } +} + +func (u userProfileDo) FindByPage(offset int, limit int) (result []*model.UserProfile, count int64, err error) { + result, err = u.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = u.Offset(-1).Limit(-1).Count() + return +} + +func (u userProfileDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = u.Count() + if err != nil { + return + } + + err = u.Offset(offset).Limit(limit).Scan(result) + return +} + +func (u userProfileDo) Scan(result interface{}) (err error) { + return u.DO.Scan(result) +} + +func (u userProfileDo) Delete(models ...*model.UserProfile) (result gen.ResultInfo, err 
error) { + return u.DO.Delete(models) +} + +func (u *userProfileDo) withDO(do gen.Dao) *userProfileDo { + u.DO = *do.(*gen.DO) + return u +} diff --git a/next/models_generated/user_roles.gen.go b/next/models_generated/user_roles.gen.go new file mode 100644 index 000000000..5dca00309 --- /dev/null +++ b/next/models_generated/user_roles.gen.go @@ -0,0 +1,388 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package models_generated + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/diggerhq/digger/next/model" +) + +func newUserRole(db *gorm.DB, opts ...gen.DOOption) userRole { + _userRole := userRole{} + + _userRole.userRoleDo.UseDB(db, opts...) + _userRole.userRoleDo.UseModel(&model.UserRole{}) + + tableName := _userRole.userRoleDo.TableName() + _userRole.ALL = field.NewAsterisk(tableName) + _userRole.ID = field.NewInt64(tableName, "id") + _userRole.UserID = field.NewString(tableName, "user_id") + _userRole.Role = field.NewString(tableName, "role") + + _userRole.fillFieldMap() + + return _userRole +} + +type userRole struct { + userRoleDo + + ALL field.Asterisk + ID field.Int64 + UserID field.String + Role field.String + + fieldMap map[string]field.Expr +} + +func (u userRole) Table(newTableName string) *userRole { + u.userRoleDo.UseTable(newTableName) + return u.updateTableName(newTableName) +} + +func (u userRole) As(alias string) *userRole { + u.userRoleDo.DO = *(u.userRoleDo.As(alias).(*gen.DO)) + return u.updateTableName(alias) +} + +func (u *userRole) updateTableName(table string) *userRole { + u.ALL = field.NewAsterisk(table) + u.ID = field.NewInt64(table, "id") + u.UserID = field.NewString(table, "user_id") + u.Role = field.NewString(table, "role") + + u.fillFieldMap() + + return u +} + +func (u *userRole) GetFieldByName(fieldName 
string) (field.OrderExpr, bool) { + _f, ok := u.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (u *userRole) fillFieldMap() { + u.fieldMap = make(map[string]field.Expr, 3) + u.fieldMap["id"] = u.ID + u.fieldMap["user_id"] = u.UserID + u.fieldMap["role"] = u.Role +} + +func (u userRole) clone(db *gorm.DB) userRole { + u.userRoleDo.ReplaceConnPool(db.Statement.ConnPool) + return u +} + +func (u userRole) replaceDB(db *gorm.DB) userRole { + u.userRoleDo.ReplaceDB(db) + return u +} + +type userRoleDo struct{ gen.DO } + +type IUserRoleDo interface { + gen.SubQuery + Debug() IUserRoleDo + WithContext(ctx context.Context) IUserRoleDo + WithResult(fc func(tx gen.Dao)) gen.ResultInfo + ReplaceDB(db *gorm.DB) + ReadDB() IUserRoleDo + WriteDB() IUserRoleDo + As(alias string) gen.Dao + Session(config *gorm.Session) IUserRoleDo + Columns(cols ...field.Expr) gen.Columns + Clauses(conds ...clause.Expression) IUserRoleDo + Not(conds ...gen.Condition) IUserRoleDo + Or(conds ...gen.Condition) IUserRoleDo + Select(conds ...field.Expr) IUserRoleDo + Where(conds ...gen.Condition) IUserRoleDo + Order(conds ...field.Expr) IUserRoleDo + Distinct(cols ...field.Expr) IUserRoleDo + Omit(cols ...field.Expr) IUserRoleDo + Join(table schema.Tabler, on ...field.Expr) IUserRoleDo + LeftJoin(table schema.Tabler, on ...field.Expr) IUserRoleDo + RightJoin(table schema.Tabler, on ...field.Expr) IUserRoleDo + Group(cols ...field.Expr) IUserRoleDo + Having(conds ...gen.Condition) IUserRoleDo + Limit(limit int) IUserRoleDo + Offset(offset int) IUserRoleDo + Count() (count int64, err error) + Scopes(funcs ...func(gen.Dao) gen.Dao) IUserRoleDo + Unscoped() IUserRoleDo + Create(values ...*model.UserRole) error + CreateInBatches(values []*model.UserRole, batchSize int) error + Save(values ...*model.UserRole) error + First() (*model.UserRole, error) + Take() (*model.UserRole, error) + Last() (*model.UserRole, error) + Find() 
([]*model.UserRole, error) + FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserRole, err error) + FindInBatches(result *[]*model.UserRole, batchSize int, fc func(tx gen.Dao, batch int) error) error + Pluck(column field.Expr, dest interface{}) error + Delete(...*model.UserRole) (info gen.ResultInfo, err error) + Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + Updates(value interface{}) (info gen.ResultInfo, err error) + UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error) + UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error) + UpdateColumns(value interface{}) (info gen.ResultInfo, err error) + UpdateFrom(q gen.SubQuery) gen.Dao + Attrs(attrs ...field.AssignExpr) IUserRoleDo + Assign(attrs ...field.AssignExpr) IUserRoleDo + Joins(fields ...field.RelationField) IUserRoleDo + Preload(fields ...field.RelationField) IUserRoleDo + FirstOrInit() (*model.UserRole, error) + FirstOrCreate() (*model.UserRole, error) + FindByPage(offset int, limit int) (result []*model.UserRole, count int64, err error) + ScanByPage(result interface{}, offset int, limit int) (count int64, err error) + Scan(result interface{}) (err error) + Returning(value interface{}, columns ...string) IUserRoleDo + UnderlyingDB() *gorm.DB + schema.Tabler +} + +func (u userRoleDo) Debug() IUserRoleDo { + return u.withDO(u.DO.Debug()) +} + +func (u userRoleDo) WithContext(ctx context.Context) IUserRoleDo { + return u.withDO(u.DO.WithContext(ctx)) +} + +func (u userRoleDo) ReadDB() IUserRoleDo { + return u.Clauses(dbresolver.Read) +} + +func (u userRoleDo) WriteDB() IUserRoleDo { + return u.Clauses(dbresolver.Write) +} + +func (u userRoleDo) Session(config *gorm.Session) IUserRoleDo { + return u.withDO(u.DO.Session(config)) +} + +func (u userRoleDo) Clauses(conds ...clause.Expression) IUserRoleDo { + return 
u.withDO(u.DO.Clauses(conds...)) +} + +func (u userRoleDo) Returning(value interface{}, columns ...string) IUserRoleDo { + return u.withDO(u.DO.Returning(value, columns...)) +} + +func (u userRoleDo) Not(conds ...gen.Condition) IUserRoleDo { + return u.withDO(u.DO.Not(conds...)) +} + +func (u userRoleDo) Or(conds ...gen.Condition) IUserRoleDo { + return u.withDO(u.DO.Or(conds...)) +} + +func (u userRoleDo) Select(conds ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.Select(conds...)) +} + +func (u userRoleDo) Where(conds ...gen.Condition) IUserRoleDo { + return u.withDO(u.DO.Where(conds...)) +} + +func (u userRoleDo) Order(conds ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.Order(conds...)) +} + +func (u userRoleDo) Distinct(cols ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.Distinct(cols...)) +} + +func (u userRoleDo) Omit(cols ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.Omit(cols...)) +} + +func (u userRoleDo) Join(table schema.Tabler, on ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.Join(table, on...)) +} + +func (u userRoleDo) LeftJoin(table schema.Tabler, on ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.LeftJoin(table, on...)) +} + +func (u userRoleDo) RightJoin(table schema.Tabler, on ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.RightJoin(table, on...)) +} + +func (u userRoleDo) Group(cols ...field.Expr) IUserRoleDo { + return u.withDO(u.DO.Group(cols...)) +} + +func (u userRoleDo) Having(conds ...gen.Condition) IUserRoleDo { + return u.withDO(u.DO.Having(conds...)) +} + +func (u userRoleDo) Limit(limit int) IUserRoleDo { + return u.withDO(u.DO.Limit(limit)) +} + +func (u userRoleDo) Offset(offset int) IUserRoleDo { + return u.withDO(u.DO.Offset(offset)) +} + +func (u userRoleDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IUserRoleDo { + return u.withDO(u.DO.Scopes(funcs...)) +} + +func (u userRoleDo) Unscoped() IUserRoleDo { + return u.withDO(u.DO.Unscoped()) +} + +func (u userRoleDo) Create(values ...*model.UserRole) 
error { + if len(values) == 0 { + return nil + } + return u.DO.Create(values) +} + +func (u userRoleDo) CreateInBatches(values []*model.UserRole, batchSize int) error { + return u.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (u userRoleDo) Save(values ...*model.UserRole) error { + if len(values) == 0 { + return nil + } + return u.DO.Save(values) +} + +func (u userRoleDo) First() (*model.UserRole, error) { + if result, err := u.DO.First(); err != nil { + return nil, err + } else { + return result.(*model.UserRole), nil + } +} + +func (u userRoleDo) Take() (*model.UserRole, error) { + if result, err := u.DO.Take(); err != nil { + return nil, err + } else { + return result.(*model.UserRole), nil + } +} + +func (u userRoleDo) Last() (*model.UserRole, error) { + if result, err := u.DO.Last(); err != nil { + return nil, err + } else { + return result.(*model.UserRole), nil + } +} + +func (u userRoleDo) Find() ([]*model.UserRole, error) { + result, err := u.DO.Find() + return result.([]*model.UserRole), err +} + +func (u userRoleDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.UserRole, err error) { + buf := make([]*model.UserRole, 0, batchSize) + err = u.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) 
}() + return fc(tx, batch) + }) + return results, err +} + +func (u userRoleDo) FindInBatches(result *[]*model.UserRole, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return u.DO.FindInBatches(result, batchSize, fc) +} + +func (u userRoleDo) Attrs(attrs ...field.AssignExpr) IUserRoleDo { + return u.withDO(u.DO.Attrs(attrs...)) +} + +func (u userRoleDo) Assign(attrs ...field.AssignExpr) IUserRoleDo { + return u.withDO(u.DO.Assign(attrs...)) +} + +func (u userRoleDo) Joins(fields ...field.RelationField) IUserRoleDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Joins(_f)) + } + return &u +} + +func (u userRoleDo) Preload(fields ...field.RelationField) IUserRoleDo { + for _, _f := range fields { + u = *u.withDO(u.DO.Preload(_f)) + } + return &u +} + +func (u userRoleDo) FirstOrInit() (*model.UserRole, error) { + if result, err := u.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*model.UserRole), nil + } +} + +func (u userRoleDo) FirstOrCreate() (*model.UserRole, error) { + if result, err := u.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*model.UserRole), nil + } +} + +func (u userRoleDo) FindByPage(offset int, limit int) (result []*model.UserRole, count int64, err error) { + result, err = u.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = u.Offset(-1).Limit(-1).Count() + return +} + +func (u userRoleDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = u.Count() + if err != nil { + return + } + + err = u.Offset(offset).Limit(limit).Scan(result) + return +} + +func (u userRoleDo) Scan(result interface{}) (err error) { + return u.DO.Scan(result) +} + +func (u userRoleDo) Delete(models ...*model.UserRole) (result gen.ResultInfo, err error) { + return u.DO.Delete(models) +} + +func (u *userRoleDo) 
withDO(do gen.Dao) *userRoleDo {
+	u.DO = *do.(*gen.DO)
+	return u
+}
diff --git a/next/supa/supa.go b/next/supa/supa.go
new file mode 100644
index 000000000..f936e3fe2
--- /dev/null
+++ b/next/supa/supa.go
@@ -0,0 +1,33 @@
+package supa
+
+import (
+	"fmt"
+	"os"
+	"sync"
+
+	"github.com/supabase-community/supabase-go"
+)
+
+var (
+	client     *supabase.Client
+	clientErr  error
+	clientOnce sync.Once
+)
+
+// GetClient lazily creates and returns a process-wide supabase client.
+// The first call initializes the client from the DIGGER_SUPABASE_API_URL and
+// DIGGER_SUPABASE_API_KEY environment variables; subsequent calls return the
+// cached client (or the cached initialization error).
+func GetClient() (*supabase.Client, error) {
+	clientOnce.Do(func() {
+		apiUrl := os.Getenv("DIGGER_SUPABASE_API_URL")
+		apiKey := os.Getenv("DIGGER_SUPABASE_API_KEY")
+		// Fail fast on missing configuration instead of handing empty
+		// credentials to the supabase SDK.
+		if apiUrl == "" || apiKey == "" {
+			clientErr = fmt.Errorf("could not create supabase client: DIGGER_SUPABASE_API_URL and DIGGER_SUPABASE_API_KEY must be set")
+			return
+		}
+		client, clientErr = supabase.NewClient(apiUrl, apiKey, nil)
+		if clientErr != nil {
+			clientErr = fmt.Errorf("could not create supabase client: %v", clientErr)
+		}
+	})
+	return client, clientErr
+}