diff --git a/dynamoid/.dev/vagrant/minion b/dynamoid/.dev/vagrant/minion new file mode 100644 index 000000000..8e3cbc277 --- /dev/null +++ b/dynamoid/.dev/vagrant/minion @@ -0,0 +1,9 @@ +# Masterless Minion Configuration File +master: localhost +id: development +file_client: local + +# Where your salt state exists +file_roots: + base: + - /vagrant/.dev/vagrant/salt diff --git a/dynamoid/.dev/vagrant/salt/apt/init.sls b/dynamoid/.dev/vagrant/salt/apt/init.sls new file mode 100644 index 000000000..9f2139799 --- /dev/null +++ b/dynamoid/.dev/vagrant/salt/apt/init.sls @@ -0,0 +1,13 @@ +apt-pkgs: + pkg.latest: + - pkgs: + - daemontools + - git + - openjdk-11-jre-headless + - tmux + - vim + +# JAVA_HOME +/home/vagrant/.bashrc: + file.append: + - text: export JAVA_HOME="/usr/lib/jvm/java-11-openjdk-amd64" diff --git a/dynamoid/.dev/vagrant/salt/dynamodb/init.sls b/dynamoid/.dev/vagrant/salt/dynamodb/init.sls new file mode 100644 index 000000000..bde1214ee --- /dev/null +++ b/dynamoid/.dev/vagrant/salt/dynamodb/init.sls @@ -0,0 +1,17 @@ +/opt/install/aws/dynamodb.tar.gz: + file.managed: + - source: https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_2019-02-07.tar.gz + - source_hash: sha256=3281b5403d0d397959ce444b86a83b44bc521e8b40077a3c2094fa17c9eb3c43 + - makedirs: True + +/vagrant/spec/DynamoDBLocal-latest: + file.directory: + - name: /vagrant/spec/DynamoDBLocal-latest + - user: vagrant + - group: vagrant + +dynamodb.install: + cmd.wait: + - name: cd /vagrant/spec/DynamoDBLocal-latest && tar xfz /opt/install/aws/dynamodb.tar.gz + - watch: + - file: /opt/install/aws/dynamodb.tar.gz diff --git a/dynamoid/.dev/vagrant/salt/rvm/.gemrc b/dynamoid/.dev/vagrant/salt/rvm/.gemrc new file mode 100644 index 000000000..6153a6e0f --- /dev/null +++ b/dynamoid/.dev/vagrant/salt/rvm/.gemrc @@ -0,0 +1 @@ +gem: --no-ri --no-rdoc diff --git a/dynamoid/.dev/vagrant/salt/rvm/init.sls b/dynamoid/.dev/vagrant/salt/rvm/init.sls new file mode 100644 index 000000000..a089a3966 --- /dev/null +++ b/dynamoid/.dev/vagrant/salt/rvm/init.sls @@ -0,0 +1,79 @@ +# https://docs.saltstack.com/en/latest/ref/states/all/salt.states.rvm.html +rvm-deps: + pkg.installed: + - pkgs: + - bash + - coreutils + - gzip + - bzip2 + - gawk + - sed + - curl + - git + - subversion + - gnupg2 + +mri-deps: + pkg.installed: + - pkgs: + - build-essential + - openssl + - libreadline-dev + - curl + - git + - zlib1g + - zlib1g-dev + - libssl-dev + - libyaml-dev + - libsqlite3-0 + - libsqlite3-dev + - sqlite3 + - libxml2-dev + - libxslt1-dev + - autoconf + - libc6-dev + - libncurses5-dev + - automake + - libtool + - bison + - subversion + - ruby + +gpg-trust: + cmd.run: + - cwd: /home/vagrant + - name: gpg2 --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB + - runas: vagrant + +ruby-{{ pillar['ruby']['version'] }}: + rvm.installed: + - name: {{ pillar['ruby']['version'] }} + - default: True + - user: vagrant + - require: + - pkg: rvm-deps + - pkg: mri-deps + +# Disable Documentation Installation +/home/vagrant/.gemrc: + file.managed: + - user: vagrant + - group: vagrant + - name: /home/vagrant/.gemrc + - source: salt://rvm/.gemrc + - makedirs: True + +# Bundler +bundler.install: + gem.installed: + - user: vagrant + - name: bundler + - ruby: ruby-{{ pillar['ruby']['version'] }} + - rdoc: false + - ri: false + +bundle: + cmd.run: + - cwd: /vagrant + - name: bundle install + - runas: vagrant diff --git a/dynamoid/.dev/vagrant/salt/top.sls b/dynamoid/.dev/vagrant/salt/top.sls new file mode 100644 index 
000000000..c9ad5fdc0 --- /dev/null +++ b/dynamoid/.dev/vagrant/salt/top.sls @@ -0,0 +1,5 @@ +base: + 'development': + - apt + - dynamodb + - rvm diff --git a/dynamoid/.github/FUNDING.yml b/dynamoid/.github/FUNDING.yml new file mode 100644 index 000000000..d5e923b78 --- /dev/null +++ b/dynamoid/.github/FUNDING.yml @@ -0,0 +1,13 @@ +# These are supported funding model platforms + +github: [Dynamoid, pboling] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: dynamoid # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: rubygems/dynamoid # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/dynamoid/.github/dependabot.yml b/dynamoid/.github/dependabot.yml new file mode 100644 index 000000000..1a6aa8772 --- /dev/null +++ b/dynamoid/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + - package-ecosystem: bundler + directory: "/" + schedule: + interval: daily + time: "04:31" + open-pull-requests-limit: 10 \ No newline at end of file diff --git a/dynamoid/.github/workflows/ci.yml b/dynamoid/.github/workflows/ci.yml new file mode 100644 index 000000000..1710b7a25 --- /dev/null +++ b/dynamoid/.github/workflows/ci.yml @@ -0,0 +1,167 @@ +name: CI + +on: + push: + branches: + - master + pull_request: + branches: + - master + # Allow manually triggering the workflow. + workflow_dispatch: + +# Cancels all previous workflow runs for the same branch that have not yet completed. +concurrency: + # The concurrency group contains the workflow name and the branch name. + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + tests: + runs-on: ubuntu-latest + + env: # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps + BUNDLE_GEMFILE: ${{ github.workspace }}/gemfiles/${{ matrix.gemfile }}.gemfile + + # Do not bother building commits with [ci skip] or [skip ci] in the commit message + if: "!contains(github.event.commits[0].message, '[ci skip]') && !contains(github.event.commits[0].message, '[skip ci]')" + + strategy: + fail-fast: false + + matrix: + gemfile: + - rails_4_2 + - rails_5_0 + - rails_5_1 + - rails_5_2 + - rails_6_0 + - rails_6_1 + - rails_7_0 + - rails_7_1 + rubygems: + - default + bundler: + - default + ruby: + - "2.3" + - "2.4" + - "2.5" + - "2.6" + - jruby + - "2.7" + - "3.0" + - "3.1" + - "3.2" + - "3.3" + exclude: + + # Rails 7.1 requires Ruby 2.7 and above + - gemfile: rails_7_1 + ruby: "2.3" + - gemfile: rails_7_1 + ruby: "2.4" + - gemfile: rails_7_1 + ruby: "2.5" + - gemfile: rails_7_1 + ruby: "2.6" + + # Rails 7.0 requires Ruby 2.7 and above + - gemfile: rails_7_0 + ruby: "2.3" + - gemfile: rails_7_0 + ruby: "2.4" + - gemfile: rails_7_0 + ruby: "2.5" + - gemfile: rails_7_0 + ruby: "2.6" + + # Rails 6.1 requires Ruby 2.5 and above. + - gemfile: rails_6_1 + ruby: "2.3" + - gemfile: rails_6_1 + ruby: "2.4" + + # Rails 6.0 requires Ruby 2.5 and above. 
+ - gemfile: rails_6_0 + ruby: "2.3" + - gemfile: rails_6_0 + ruby: "2.4" + + # Rails supports Ruby 3.0 since 6.0 only. So skip all the other Rails versions. + - ruby: "3.0" + gemfile: rails_4_2 + - ruby: "3.0" + gemfile: rails_5_0 + - ruby: "3.0" + gemfile: rails_5_1 + - ruby: "3.0" + gemfile: rails_5_2 + - ruby: "3.1" + gemfile: rails_4_2 + - ruby: "3.1" + gemfile: rails_5_0 + - ruby: "3.1" + gemfile: rails_5_1 + - ruby: "3.1" + gemfile: rails_5_2 + - ruby: "3.2" + gemfile: rails_4_2 + - ruby: "3.2" + gemfile: rails_5_0 + - ruby: "3.2" + gemfile: rails_5_1 + - ruby: "3.2" + gemfile: rails_5_2 + - ruby: "3.3" + gemfile: rails_4_2 + - ruby: "3.3" + gemfile: rails_5_0 + - ruby: "3.3" + gemfile: rails_5_1 + - ruby: "3.3" + gemfile: rails_5_2 + + - ruby: "jruby" + gemfile: rails_4_2 + - ruby: "jruby" + gemfile: rails_5_0 + - ruby: "jruby" + gemfile: rails_5_1 + - ruby: "jruby" + gemfile: rails_5_2 + + include: + - ruby: "jruby-9.3.9.0" + gemfile: rails_4_2 + - ruby: "jruby-9.3.9.0" + gemfile: rails_5_0 + - ruby: "jruby-9.3.9.0" + gemfile: rails_5_1 + - ruby: "jruby-9.3.9.0" + gemfile: rails_5_2 + + name: ${{ matrix.gemfile }}, Ruby ${{ matrix.ruby }} + + steps: + - uses: actions/checkout@v3 + + - name: Setup Ruby & Bundle + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.ruby }} + rubygems: ${{ matrix.rubygems }} + bundler: ${{ matrix.bundler }} + bundler-cache: true + + - name: Start dynamodb-local + run: | + docker-compose up -d + + - name: Run RSpec tests + run: | + bundle exec rspec + + - name: Stop dynamodb-local + run: | + docker-compose down diff --git a/dynamoid/.github/workflows/codeql.yml b/dynamoid/.github/workflows/codeql.yml new file mode 100644 index 000000000..3c45d7ca5 --- /dev/null +++ b/dynamoid/.github/workflows/codeql.yml @@ -0,0 +1,76 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "master" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "master" ] + schedule: + - cron: '29 13 * * 1' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'ruby' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Use only 'java' to analyze code written in Java, Kotlin or both + # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹī¸ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" diff --git a/dynamoid/.github/workflows/coverage.yml b/dynamoid/.github/workflows/coverage.yml new file mode 100644 index 000000000..42d2ed219 --- /dev/null +++ b/dynamoid/.github/workflows/coverage.yml @@ -0,0 +1,111 @@ +name: Code Coverage + +env: + CI_CODECOV: true + COVER_ALL: true + +on: + push: + branches: + - 'main' + - 'master' + tags: + - '!*' # Do not execute on tags + pull_request: + branches: + - '*' + # Allow manually triggering the workflow. + workflow_dispatch: + +# Cancels all previous workflow runs for the same branch that have not yet completed. +concurrency: + # The concurrency group contains the workflow name and the branch name. 
+ group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + name: Specs with Coverage - Ruby ${{ matrix.ruby }} ${{ matrix.name_extra || '' }} + if: "!contains(github.event.commits[0].message, '[ci skip]') && !contains(github.event.commits[0].message, '[skip ci]')" + strategy: + fail-fast: false + matrix: + experimental: [false] + rubygems: + - latest + bundler: + - latest + ruby: + - "2.7" + + runs-on: ubuntu-latest + env: # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps + BUNDLE_GEMFILE: ${{ github.workspace }}/gemfiles/coverage.gemfile + steps: + - uses: amancevice/setup-code-climate@v0 + name: CodeClimate Install + if: matrix.ruby == '2.7' && github.event_name != 'pull_request' && always() + with: + cc_test_reporter_id: ${{ secrets.CC_TEST_REPORTER_ID }} + + - name: Checkout + uses: actions/checkout@v3 + + - name: Setup Ruby & Bundle + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.ruby }} + rubygems: ${{ matrix.rubygems }} + bundler: ${{ matrix.bundler }} + bundler-cache: true + + - name: CodeClimate Pre-build Notification + run: cc-test-reporter before-build + if: matrix.ruby == '2.7' && github.event_name != 'pull_request' && always() + continue-on-error: ${{ matrix.experimental != 'false' }} + + - name: Start dynamodb-local + run: | + docker-compose up -d + + - name: Run RSpec tests + run: | + bundle exec rspec + + - name: Stop dynamodb-local + run: | + docker-compose down + + - name: CodeClimate Post-build Notification + run: cc-test-reporter after-build + if: matrix.ruby == '2.7' && github.event_name != 'pull_request' && always() + continue-on-error: ${{ matrix.experimental != 'false' }} + + - name: Code Coverage Summary Report + uses: irongut/CodeCoverageSummary@v1.2.0 + with: + filename: ./coverage/coverage.xml + badge: true + fail_below_min: true + format: markdown + hide_branch_rate: true + hide_complexity: true + indicators: true + output: both + thresholds: '90 89' + continue-on-error: ${{ matrix.experimental != 'false' }} + + - name: Add Coverage PR Comment + uses: marocchino/sticky-pull-request-comment@v2 + if: matrix.ruby == '2.7' && always() + with: + recreate: true + path: code-coverage-results.md + continue-on-error: ${{ matrix.experimental != 'false' }} + + - name: Coveralls + uses: coverallsapp/github-action@master + if: matrix.ruby == '2.7' && github.event_name != 'pull_request' && always() + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + continue-on-error: ${{ matrix.experimental != 'false' }} diff --git a/dynamoid/.github/workflows/dependency-review.yml b/dynamoid/.github/workflows/dependency-review.yml new file mode 100644 index 000000000..fe461b424 --- /dev/null +++ b/dynamoid/.github/workflows/dependency-review.yml @@ -0,0 +1,20 @@ +# Dependency Review Action +# +# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. 
+# +# Source repository: https://github.com/actions/dependency-review-action +# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement +name: 'Dependency Review' +on: [pull_request] + +permissions: + contents: read + +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - name: 'Checkout Repository' + uses: actions/checkout@v3 + - name: 'Dependency Review' + uses: actions/dependency-review-action@v2 diff --git a/dynamoid/.github/workflows/style.yml b/dynamoid/.github/workflows/style.yml new file mode 100644 index 000000000..c9059d4b6 --- /dev/null +++ b/dynamoid/.github/workflows/style.yml @@ -0,0 +1,41 @@ +name: Code Style Checks + +on: + push: + branches: + - 'main' + - 'master' + tags: + - '!*' # Do not execute on tags + pull_request: + branches: + - '*' + +jobs: + rubocop: + name: Rubocop + strategy: + fail-fast: false + matrix: + experimental: [false] + rubygems: + - latest + bundler: + - latest + ruby: + - "3.3" + runs-on: ubuntu-latest + env: # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps + BUNDLE_GEMFILE: ${{ github.workspace }}/gemfiles/style.gemfile + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup Ruby & Bundle + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.ruby }} + rubygems: ${{ matrix.rubygems }} + bundler: ${{ matrix.bundler }} + bundler-cache: true + - name: Run Rubocop + run: bundle exec rubocop -DESP diff --git a/dynamoid/.gitignore b/dynamoid/.gitignore new file mode 100644 index 000000000..cdf97f15b --- /dev/null +++ b/dynamoid/.gitignore @@ -0,0 +1,73 @@ +.project + +# rcov generated +coverage + +# rdoc generated +rdoc + +# yardoc generated +.yardoc +/_yardoc/ + +# bundler +/.bundle/ + +# jeweler generated +/pkg/ + +# Have editor/IDE/OS specific files you need to ignore? Consider using a global gitignore: +# +# * Create a file at ~/.gitignore +# * Include files you want ignored +# * Run: git config --global core.excludesfile ~/.gitignore +# +# After doing this, these files will be ignored in all your git projects, +# saving you from having to 'pollute' every project you touch with them +# +# Not sure what needs to be ignored for particular editors/OSes? Here are some ideas to get you started. (Remember, remove the leading # of the line) +# +# For MacOS: +# +#.DS_Store + +# For TextMate +#*.tmproj +#tmtags + +# For emacs: +#*~ +#\#* +#.\#* + +# For vim: +#*.swp + +# For redcar: +#.redcar + +# For rubinius: +#*.rbc + +# for RVM +.rvmrc + +# For RubyMine: +/.idea/ + +# For Ctags +.gemtags +.tags +.tags_sorted_by_file + +/doc/ +/spec/reports/ +/tmp/ +/spec/DynamoDBLocal-latest/ +/vendor/ + +# For vagrant +.vagrant + +# For Appraisals +gemfiles/*.gemfile.lock diff --git a/dynamoid/.overcommit.yml b/dynamoid/.overcommit.yml new file mode 100644 index 000000000..0e2124901 --- /dev/null +++ b/dynamoid/.overcommit.yml @@ -0,0 +1,33 @@ +# Use this file to configure the Overcommit hooks you wish to use. This will +# extend the default configuration defined in: +# https://github.com/sds/overcommit/blob/master/config/default.yml +# +# At the topmost level of this YAML file is a key representing type of hook +# being run (e.g. pre-commit, commit-msg, etc.). Within each type you can +# customize each hook, such as whether to only run it on certain files (via +# `include`), whether to only display output if it fails (via `quiet`), etc.
+# +# For a complete list of hooks, see: +# https://github.com/sds/overcommit/tree/master/lib/overcommit/hook +# +# For a complete list of options that you can use to customize hooks, see: +# https://github.com/sds/overcommit#configuration +# +# Uncomment the following lines to make the configuration take effect. + +#PreCommit: +# RuboCop: +# enabled: true +# on_warn: fail # Treat all warnings as failures +# + TrailingWhitespace: + enabled: true +# exclude: +# - '**/db/structure.sql' # Ignore trailing whitespace in generated files +# +#PostCheckout: +# ALL: # Special hook name that customizes all hooks of this type +# quiet: true # Change all post-checkout hooks to only display output on failure +# +# IndexTags: +# enabled: true # Generate a tags file with `ctags` each time HEAD changes diff --git a/dynamoid/.rspec b/dynamoid/.rspec new file mode 100644 index 000000000..8c18f1abd --- /dev/null +++ b/dynamoid/.rspec @@ -0,0 +1,2 @@ +--format documentation +--color diff --git a/dynamoid/.rubocop.yml b/dynamoid/.rubocop.yml new file mode 100644 index 000000000..5f3b3092c --- /dev/null +++ b/dynamoid/.rubocop.yml @@ -0,0 +1,111 @@ +# We chose not to make these changes +inherit_from: + - .rubocop_gemspec.yml + - .rubocop_performance.yml + - .rubocop_rspec.yml + - .rubocop_thread_safety.yml + - .rubocop_todo.yml + +require: + - rubocop-md + - rubocop-packaging + - rubocop-performance + - rubocop-rake + - rubocop-rspec + - rubocop-thread_safety + +# It's the lowest supported Ruby version +AllCops: + DisplayCopNames: true # Display the name of the failing cops + TargetRubyVersion: 2.3 + NewCops: enable + +# It's a matter of taste +Layout/ParameterAlignment: + EnforcedStyle: with_fixed_indentation +Layout/HashAlignment: + Enabled: false +Lint/RaiseException: + Enabled: true +Lint/StructNewOverride: + Enabled: true +Style/HashEachMethods: + Enabled: true +Style/HashTransformKeys: + Enabled: true +Style/HashTransformValues: + Enabled: true +Style/GuardClause: + Enabled: false +Style/FormatStringToken: + Enabled: false +Style/DoubleNegation: + Enabled: false +Style/IfUnlessModifier: + Enabled: false +Style/EachWithObject: + Enabled: false +Style/SafeNavigation: + Enabled: false +Style/BlockDelimiters: + Enabled: false +Layout/MultilineMethodCallIndentation: + EnforcedStyle: indented +Naming/VariableNumber: + Enabled: false +Style/MultilineBlockChain: + Enabled: false +Style/TrailingCommaInHashLiteral: + Enabled: false +Style/TrailingCommaInArrayLiteral: + Enabled: false +Style/TrailingCommaInArguments: + Enabled: false +Style/UnlessElse: + Enabled: false + +# We aren't so brave to tackle all these issues right now +Layout/LineLength: + Enabled: false +Metrics/BlockLength: + Enabled: false +Metrics/MethodLength: + Enabled: false +Metrics/CyclomaticComplexity: + Enabled: false +Metrics/AbcSize: + Enabled: false +Metrics/ModuleLength: + Enabled: false +Metrics/BlockNesting: + Enabled: false +Metrics/PerceivedComplexity: + Enabled: false +Metrics/ClassLength: + Enabled: false + +# Minor annoying issues +Lint/UselessAssignment: + Enabled: false +Lint/AmbiguousBlockAssociation: + Enabled: false +Lint/AssignmentInCondition: + Enabled: false +Style/Documentation: + Enabled: false +Style/DateTime: + Enabled: false +Style/MissingRespondToMissing: + Enabled: false +Naming/PredicateName: + Enabled: false +Security/YAMLLoad: + Enabled: false + +Lint/EmptyClass: + Exclude: + - README.md +Lint/EmptyBlock: + Exclude: + - README.md + diff --git a/dynamoid/.rubocop_gemspec.yml b/dynamoid/.rubocop_gemspec.yml new file 
mode 100644 index 000000000..bcdcb697a --- /dev/null +++ b/dynamoid/.rubocop_gemspec.yml @@ -0,0 +1,9 @@ +# specifying Ruby version would be a breaking change +# we may consider adding `required_ruby_version` in the next major release +Gemspec/RequiredRubyVersion: + Enabled: false + +# development dependencies specified in the gemspec file are shared +# by the main Gemfile and gemfiles in the gemfiles/ directory that are used on CI +Gemspec/DevelopmentDependencies: + Enabled: false diff --git a/dynamoid/.rubocop_performance.yml b/dynamoid/.rubocop_performance.yml new file mode 100644 index 000000000..5ef13e728 --- /dev/null +++ b/dynamoid/.rubocop_performance.yml @@ -0,0 +1,6 @@ +# See: https://github.com/rubocop/rubocop-performance/issues/322 +Performance/RegexpMatch: + Enabled: false + +Performance/MethodObjectAsBlock: + Enabled: false diff --git a/dynamoid/.rubocop_rspec.yml b/dynamoid/.rubocop_rspec.yml new file mode 100644 index 000000000..7c1f669c9 --- /dev/null +++ b/dynamoid/.rubocop_rspec.yml @@ -0,0 +1,44 @@ +RSpec/FilePath: + Enabled: false + +RSpec/MultipleExpectations: + Enabled: false + +RSpec/MultipleMemoizedHelpers: + Enabled: false + +RSpec/NamedSubject: + Enabled: false + +RSpec/ExampleLength: + Enabled: false + +RSpec/VerifiedDoubles: + Enabled: false + +RSpec/MessageSpies: + Enabled: false + +RSpec/InstanceVariable: + Enabled: false + +RSpec/NestedGroups: + Enabled: false + +RSpec/ExpectInHook: + Enabled: false + +# NOTE: for many tests of equality `eql` works, while `be` does not, because +# expected # => 101 +# got # => 101.0 (0.101e3) +RSpec/BeEql: + Enabled: false + +RSpec/BeEq: + Enabled: false + +RSpec/StubbedMock: + Enabled: false + +RSpec/IndexedLet: + Enabled: false diff --git a/dynamoid/.rubocop_thread_safety.yml b/dynamoid/.rubocop_thread_safety.yml new file mode 100644 index 000000000..e9fb06c03 --- /dev/null +++ b/dynamoid/.rubocop_thread_safety.yml @@ -0,0 +1,6 @@ +# It would be good to make the gem more thread safe, but at the moment it is not entirely. +# TODO: Comment out the following to see code needing to be refactored for thread safety! +ThreadSafety/ClassAndModuleAttributes: + Enabled: false +ThreadSafety/InstanceVariableInClassMethod: + Enabled: false diff --git a/dynamoid/.rubocop_todo.yml b/dynamoid/.rubocop_todo.yml new file mode 100644 index 000000000..e2bc59bcb --- /dev/null +++ b/dynamoid/.rubocop_todo.yml @@ -0,0 +1,122 @@ +# This configuration was generated by +# `rubocop --auto-gen-config` +# on 2022-12-16 17:45:21 -0700 using RuboCop version 0.81.0. +# The point is for the user to remove these configuration records +# one by one as the offenses are removed from the code base. +# Note that changes in the inspected code, or installation of new +# versions of RuboCop, may require this file to be generated again. + +# Offense count: 1 +# Cop supports --auto-correct. +Lint/OrderedMagicComments: + Exclude: + - 'lib/dynamoid/persistence.rb' + +# Offense count: 3 +# Configuration parameters: AllowComments. +Lint/SuppressedException: + Exclude: + - 'lib/dynamoid/dirty.rb' + - 'lib/dynamoid/persistence/update_fields.rb' + - 'lib/dynamoid/persistence/upsert.rb' + +# Offense count: 1 +# Configuration parameters: EnforcedStyleForLeadingUnderscores. 
+# SupportedStylesForLeadingUnderscores: disallowed, required, optional +Naming/MemoizedInstanceVariableName: + Exclude: + - 'lib/dynamoid/dirty.rb' + +# Offense count: 13 +RSpec/AnyInstance: + Exclude: + - 'spec/dynamoid/adapter_plugin/aws_sdk_v3_spec.rb' + - 'spec/dynamoid/adapter_spec.rb' + - 'spec/dynamoid/criteria/chain_spec.rb' + - 'spec/dynamoid/persistence_spec.rb' + +# Offense count: 125 +# Configuration parameters: Prefixes. +# Prefixes: when, with, without +RSpec/ContextWording: + Enabled: false + +# Offense count: 2 +RSpec/DescribeClass: + Exclude: + - 'spec/dynamoid/before_type_cast_spec.rb' + - 'spec/dynamoid/type_casting_spec.rb' + +# Offense count: 4 +# Configuration parameters: CustomIncludeMethods. +RSpec/EmptyExampleGroup: + Exclude: + - 'spec/dynamoid/persistence_spec.rb' + - 'spec/dynamoid/type_casting_spec.rb' + +# Offense count: 8 +RSpec/LeakyConstantDeclaration: + Exclude: + - 'spec/dynamoid/criteria/chain_spec.rb' + - 'spec/dynamoid/indexes_spec.rb' + - 'spec/dynamoid/sti_spec.rb' + +# Offense count: 1 +RSpec/LetSetup: + Exclude: + - 'spec/dynamoid/sti_spec.rb' + +# Offense count: 2 +RSpec/RepeatedDescription: + Exclude: + - 'spec/dynamoid/associations/belongs_to_spec.rb' + +# Offense count: 2 +RSpec/RepeatedExample: + Exclude: + - 'spec/dynamoid/associations/has_one_spec.rb' + +# Offense count: 6 +RSpec/RepeatedExampleGroupDescription: + Exclude: + - 'spec/dynamoid/adapter_plugin/aws_sdk_v3_spec.rb' + - 'spec/dynamoid/finders_spec.rb' + +# Offense count: 9 +RSpec/SubjectStub: + Exclude: + - 'spec/dynamoid/adapter_spec.rb' + +# Offense count: 2 +Style/CommentedKeyword: + Exclude: + - 'lib/dynamoid/dirty.rb' + +# Offense count: 2 +# Cop supports --auto-correct. +# Configuration parameters: EnforcedStyle, Autocorrect. +# SupportedStyles: module_function, extend_self, forbidden +Style/ModuleFunction: + Exclude: + - 'lib/dynamoid.rb' + - 'lib/dynamoid/config.rb' + +# Offense count: 3 +Style/OptionalArguments: + Exclude: + - 'lib/dynamoid/persistence.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +# Configuration parameters: AllowAsExpressionSeparator. +Style/Semicolon: + Exclude: + - 'spec/dynamoid/adapter_plugin/aws_sdk_v3_spec.rb' + +# Offense count: 1 +# Cop supports --auto-correct. +# Configuration parameters: ExactNameMatch, AllowPredicates, AllowDSLWriters, IgnoreClassMethods, AllowedMethods. +# AllowedMethods: to_ary, to_a, to_c, to_enum, to_h, to_hash, to_i, to_int, to_io, to_open, to_path, to_proc, to_r, to_regexp, to_str, to_s, to_sym +Style/TrivialAccessors: + Exclude: + - 'lib/dynamoid/adapter_plugin/aws_sdk_v3.rb' diff --git a/dynamoid/.ruby-version b/dynamoid/.ruby-version new file mode 100644 index 000000000..15a279981 --- /dev/null +++ b/dynamoid/.ruby-version @@ -0,0 +1 @@ +3.3.0 diff --git a/dynamoid/.simplecov b/dynamoid/.simplecov new file mode 100644 index 000000000..32fe34e75 --- /dev/null +++ b/dynamoid/.simplecov @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +# To get coverage +# On Local, default (HTML) output coverage is turned on with Ruby 2.6+: +# bundle exec rspec spec +# On Local, all output formats with Ruby 2.6+: +# COVER_ALL=true bundle exec rspec spec +# +# On CI, all output formats, the ENV variables CI is always set, +# and COVER_ALL, and CI_CODECOV, are set in the coverage.yml workflow only, +# so coverage only runs in that workflow, and outputs all formats. +# + +if RUN_COVERAGE + SimpleCov.start do + enable_coverage :branch + primary_coverage :branch + add_filter 'spec' + # Why exclude version.rb? 
See: https://github.com/simplecov-ruby/simplecov/issues/557#issuecomment-410105995 + add_filter 'lib/dynamoid/version.rb' + track_files '**/*.rb' + + if ALL_FORMATTERS + command_name "#{ENV.fetch('GITHUB_WORKFLOW')} Job #{ENV.fetch('GITHUB_RUN_ID')}:#{ENV.fetch('GITHUB_RUN_NUMBER')}" + else + formatter SimpleCov::Formatter::HTMLFormatter + end + + minimum_coverage(line: 90, branch: 89) + end +else + puts "Not running coverage on #{RUBY_VERSION}-#{RUBY_ENGINE}" +end diff --git a/dynamoid/.yardopts b/dynamoid/.yardopts new file mode 100644 index 000000000..22ce944f6 --- /dev/null +++ b/dynamoid/.yardopts @@ -0,0 +1 @@ +--no-private diff --git a/dynamoid/Appraisals b/dynamoid/Appraisals new file mode 100644 index 000000000..6c52ab406 --- /dev/null +++ b/dynamoid/Appraisals @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +appraise 'rails-4-2' do + gem 'activemodel', '~> 4.2.0' + + # Add bigdecimal gem to support Ruby 2.7 and above: + # https://github.com/rails/rails/issues/34822 + + # Compatibility with Ruby versions: + # https://github.com/ruby/bigdecimal#which-version-should-you-select + # + # Actually bigdecimal 1.4.x works on all the Ruby versions till Ruby 3.0 + gem 'bigdecimal', '~> 1.4.0', platform: :mri +end + +appraise 'rails-5-0' do + gem 'activemodel', '~> 5.0.0' +end + +appraise 'rails-5-1' do + gem 'activemodel', '~> 5.1.0' +end + +appraise 'rails-5-2' do + gem 'activemodel', '~> 5.2.0' +end + +appraise 'rails-6-0' do + gem 'activemodel', '~> 6.0.0' +end + +appraise 'rails-6-1' do + gem 'activemodel', '~> 6.1.0' +end + +appraise 'rails-7-0' do + gem 'activemodel', '~> 7.0.0' +end + +appraise 'rails-7-1' do + gem 'activemodel', '~> 7.1.0' +end diff --git a/dynamoid/CHANGELOG.md b/dynamoid/CHANGELOG.md new file mode 100644 index 000000000..db4d61e2f --- /dev/null +++ b/dynamoid/CHANGELOG.md @@ -0,0 +1,449 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
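The `.simplecov` file above references `RUN_COVERAGE` and `ALL_FORMATTERS`, which must be defined before SimpleCov loads it, based on the `CI`, `COVER_ALL`, and `CI_CODECOV` variables set in the workflows. A minimal sketch of that bootstrap, assuming it lives in `spec/spec_helper.rb` (the constant names come from `.simplecov`; everything else here is illustrative):

```ruby
# Hypothetical spec/spec_helper.rb excerpt; not part of this changeset.
# Derives the constants used by .simplecov from the CI environment variables.
modern_ruby = Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.6') && RUBY_ENGINE == 'ruby'
cover_all   = modern_ruby && ENV.fetch('COVER_ALL', nil)

# Run coverage locally on modern MRI, or on CI only when COVER_ALL is set (coverage.yml).
RUN_COVERAGE   = modern_ruby && (ENV.fetch('CI', nil).nil? || cover_all)
# Emit every report format only on CI (coverage.yml also sets CI_CODECOV).
ALL_FORMATTERS = cover_all && (ENV['CI_CODECOV'] || ENV['CI'])

require 'simplecov' if RUN_COVERAGE # SimpleCov picks up the .simplecov file automatically
```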
+ +## [Unreleased] + +### Fixed +### Added +### Changed +### Removed + +## 3.10.0 +### Fixed +* [#681](https://github.com/Dynamoid/dynamoid/pull/681) Fixed saving persisted model and deleting attributes with `nil` value if `config.store_attribute_with_nil_value` is `false` +* [#716](https://github.com/Dynamoid/dynamoid/pull/716), [#691](https://github.com/Dynamoid/dynamoid/pull/691), [#687](https://github.com/Dynamoid/dynamoid/pull/687), [#660](https://github.com/Dynamoid/dynamoid/pull/660) Numerous fixes in README.md and RDoc documentation (@ndjndj, @kiharito, @dunkOnIT) +### Added +* [#656](https://github.com/Dynamoid/dynamoid/pull/656) Added a `create_table_on_save` configuration flag to create table on save (@imaximix) +* [#697](https://github.com/Dynamoid/dynamoid/pull/697) Ensure Ruby 3.3 and Rails 7.1 versions are supported and added them on CI +### Changed +* [#655](https://github.com/Dynamoid/dynamoid/pull/655) Support multiple `where` in the same chain with multiple conditions for the same field + +## 3.9.0 / 2023-04-13 +### Fixed +* [#610](https://github.com/Dynamoid/dynamoid/pull/610) Specs in JRuby; Support for JRuby 9.4.0.0 (@pboling) +* [#624](https://github.com/Dynamoid/dynamoid/pull/624) Fixed `#increment!`/`#decrement!` methods and made them compatible with Rails counterparts +* [#626](https://github.com/Dynamoid/dynamoid/pull/626) Fixed saving empty Set and String and replacing with `nil` in `#update`, `#update!`, `.update_fields`, and `.upsert` methods +* [#628](https://github.com/Dynamoid/dynamoid/pull/628) Fixed `.import` method to mark persisted model attributes as not changed/not dirty +* [#632](https://github.com/Dynamoid/dynamoid/pull/632) Fixed `#save` called with `touch: false` option to set `updated_at` attribute even for a new record (to comply with Rails) +* [#634](https://github.com/Dynamoid/dynamoid/pull/634) Fixed model callbacks: + * changed order of `save` and `create`/`update` callbacks - `save` callbacks are outer for the `create`/`update` ones + * removed `before_initialize` and `around_initialize` callbacks - there should be only `after_initialize` one +* [#634](https://github.com/Dynamoid/dynamoid/pull/634) Fixed `#touch` method compatibility with a Rails counterpart: + * don't save other modified attributes - only timestamps + * don't perform validation and don't call `save`/`create`/`update` callbacks + * accept a list of attribute names, but not one name + * accept a `:time` option +### Added +* [#611](https://github.com/Dynamoid/dynamoid/pull/611) Add `rubocop-md` (@pboling) +* [#612](https://github.com/Dynamoid/dynamoid/pull/612) Add `rubocop-rspec` (@pboling) +* [#613](https://github.com/Dynamoid/dynamoid/pull/613) Add `rubocop-performance` and `rubocop-rake` (@pboling) + * Added `funding_uri` set to open collective: https://opencollective.com/dynamoid + * Added `required_ruby_version` as `>= 2.3.0` (which was already the minimum supported version of Ruby) +* [#616](https://github.com/Dynamoid/dynamoid/pull/616) Upgrade `simplecov` (& remove `coveralls`) (@pboling) + * Setup GitHub actions for Code Coverage + * Setup GitHub actions for RuboCop linting + * Automate coverage feedback on Pull Requests via GitHub Actions and CodeCov +* [#618](https://github.com/Dynamoid/dynamoid/pull/618) Upgrade README Badges (@pboling) +* [#624](https://github.com/Dynamoid/dynamoid/pull/624) Added `:touch` option for `.inc` method to be more compatible with the Rails counterpart method `.update_counters` +* [#627](https://github.com/Dynamoid/dynamoid/pull/627) 
Made the following methods in the Dirty API public (to comply with Rails): + * `clear_changes_information` + * `changes_applied` + * `clear_attribute_changes` +* [#630](https://github.com/Dynamoid/dynamoid/pull/630) Added `Dynamoid::Adapter#execute` method to run PartiQL queries +* [#634](https://github.com/Dynamoid/dynamoid/pull/634) Added `after_touch` callback and run it in the following methods: + * `#touch` + * `#increment!` + * `#decrement!` +* [#642](https://github.com/Dynamoid/dynamoid/pull/642) Run specs on CI against Ruby 3.2 +* [#645](https://github.com/Dynamoid/dynamoid/pull/645) Added `after_find` callback +### Changed +* [#610](https://github.com/Dynamoid/dynamoid/pull/610) Switch to [`rubocop-lts`](https://rubocop-lts.gitlab.io/) (@pboling) +* [#633](https://github.com/Dynamoid/dynamoid/pull/633) Change `#inspect` method to return only attributes +* [#623](https://github.com/Dynamoid/dynamoid/pull/623) Optimized performance of persisting to send only changed attributes in a request to DynamoDB + +## 3.8.0 / 2022-11-09 +### Fixed +* [#525](https://github.com/Dynamoid/dynamoid/pull/525) Don't mark an attribute as changed if new assigned value equals the old one (@a5-stable) +* Minor changes in the documentation: + * [#526](https://github.com/Dynamoid/dynamoid/pull/526) (@a5-stable) + * [#528](https://github.com/Dynamoid/dynamoid/pull/528) (@a5-stable) + * [#589](https://github.com/Dynamoid/dynamoid/pull/589) (@jrhoads) +* [#527](https://github.com/Dynamoid/dynamoid/pull/527) Fix `update_fields` and `upsert` methods - take into account the table-specific `timestamps` option for setting the `updated_at` attribute +(@oieioi) +* [#533](https://github.com/Dynamoid/dynamoid/pull/533) Allow attribute with custom type to be used as a partition or sort key of DynamoDB index (@rahul342) +* [#558](https://github.com/Dynamoid/dynamoid/pull/558) Change the way how a filename is derived from a model class name in a warning about using the Scan operation (@a5-stable) +* [#567](https://github.com/Dynamoid/dynamoid/pull/567) Fix `#reload` and mark a reloaded model as persisted +* [#573](https://github.com/Dynamoid/dynamoid/pull/573) Fix proxying of method calls with keyword arguments called on an association for Ruby 2.7 and above (@xeger) +* [#579](https://github.com/Dynamoid/dynamoid/pull/579) Fix `create_table` method when a `table_name` option passed in case a model has TTL setting enabled (@ta1kt0me) +* [#591](https://github.com/Dynamoid/dynamoid/pull/591) Multiple fixes: + * Fix `#update` and `#update!` - take into account the table-specific `timestamps` option for setting the `updated_at` attribute + * Fix `#update_attribute` - skip validation + * Return `self` in some public methods to enable methods chaining +* [#601](https://github.com/Dynamoid/dynamoid/pull/601) Fix `#first` and `#pluck` methods: do not mutate query parameters and affect other query methods. 
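A short illustration of the 3.9.0 behaviour described above; the `User` model and its attribute are invented for the example, while the method names and options come from the entries themselves:

```ruby
# Illustrative only: the User model is not part of this changeset.
class User
  include Dynamoid::Document
  field :name
end

user = User.create(name: 'Alice')
user.name = 'Bob'
user.changes            # => { "name" => ["Alice", "Bob"] }
user.save
user.previous_changes   # => { "name" => ["Alice", "Bob"] }

# PR #627 made these Dirty API methods public, mirroring the Rails API:
user.clear_changes_information
user.changes            # => {}

# PR #634: #touch updates timestamps only, skips validation, and accepts a :time option
user.touch(time: Time.now.utc)

# PR #630: run a PartiQL statement through the adapter (table name is illustrative)
Dynamoid.adapter.execute("SELECT * FROM dynamoid_users WHERE name = 'Bob'")
```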
+ +### Added +* [#531](https://github.com/Dynamoid/dynamoid/pull/531) Run `before_update` and `after_update` callbacks at saving a model (@a5-stable) +* [#536](https://github.com/Dynamoid/dynamoid/pull/536) Modernization + * Support for Ruby 3.1 + * Support for Rails 7.0 + * Keep a Changelog Format + +## 3.7.1 / 2021-06-30 +### Fixed +* [#484](https://github.com/Dynamoid/dynamoid/pull/484) Fix model deletion - update cached foreign keys in associated models when delete a model +* [#492](https://github.com/Dynamoid/dynamoid/pull/492) Fix using `:date` field as an index hash/range key (@yoshida-eth0) +* [#503](https://github.com/Dynamoid/dynamoid/pull/503) Fix explicit forcing index with `#with_index` method when perform Scan operation (@bmalinconico) +* [#511](https://github.com/Dynamoid/dynamoid/pull/511) Fix `BatchGetItem` operation when partial result returned (so there are unprocessed keys) and table has a range key. The fix affects `.find_all` method only (@Bajena) + +## 3.7.0 / 2021-02-02 +### Added +* [#476](https://github.com/Dynamoid/dynamoid/pull/476) Added `#with_index` method to force an index in query (@bmalinconico) +* [#481](https://github.com/Dynamoid/dynamoid/pull/481) Added `alias` option to the `field` method to declare a field alias and use more conventional name to set and get value + +### Changed +* [#482](https://github.com/Dynamoid/dynamoid/pull/482) Support Ruby 3.0 and Rails 6.1 +* [#461](https://github.com/Dynamoid/dynamoid/pull/461) Allow to delete item attribute with `#update` method (@jkirsteins) +* [#463](https://github.com/Dynamoid/dynamoid/pull/463) Raise `UnknownAttribute` exception when specified not declared attribute name (@AlexGascon) + +### Fixed +* [#480](https://github.com/Dynamoid/dynamoid/pull/480) Repair `.consistent`/`.delete_all`/`.destroy_all` calls directly on a model class +* [#484](https://github.com/Dynamoid/dynamoid/pull/484) Fix broken foreign keys after model deleting (@kkan) +* Fixed in Readme.md: [#470](https://github.com/Dynamoid/dynamoid/pull/470) (@rromanchuk), [#473](https://github.com/Dynamoid/dynamoid/pull/473) (@Rulikkk) + +## 3.6.0 / 2020-07-13 +### Added +* [#458](https://github.com/Dynamoid/dynamoid/pull/458) Added `binary` field type +* [#459](https://github.com/Dynamoid/dynamoid/pull/459) Added `log_formatter` config option and changed default logging format + +### Changed +* [#423](https://github.com/Dynamoid/dynamoid/pull/423) Added warning when generated for a field methods override existing ones +* [#429](https://github.com/Dynamoid/dynamoid/pull/429) Added `raise_error` option for `find` method +* [#440](https://github.com/Dynamoid/dynamoid/pull/440) Optimized performance of `first` method when there are only conditions on key attribute in a query (@mrkamel) +* [#445](https://github.com/Dynamoid/dynamoid/pull/445) Support `limit` parameter in `first` method (@mrkamel) +* [#454](https://github.com/Dynamoid/dynamoid/pull/454) Added block argument to `create`/`create!` methods +* [#456](https://github.com/Dynamoid/dynamoid/pull/456) Detect when `find` method requires a range key argument and raise `Dynamoid::Errors::MissingRangeKey` exception if it's missing +* YARD documentation: + * added missing documentation so now all the public methods are documented + * hid all the private methods and classes + +### Removed +* [#450](https://github.com/Dynamoid/dynamoid/pull/450) Got rid of `null-logger` gem to make Dynamoid dependencies license suitable for commercial use (@yakjuly) + +### Fixed +* 
[#425](https://github.com/Dynamoid/dynamoid/pull/425) Fixed typos in the README.md file (@omarsotillo) +* [#432](https://github.com/Dynamoid/dynamoid/pull/432) Support tables that use "hash_key" as their partition key name (@remomueller) +* [#434](https://github.com/Dynamoid/dynamoid/pull/434) Support tables that have attribute with name "range_value" +* [#453](https://github.com/Dynamoid/dynamoid/pull/453) Fixed issue with using `type` attribute as a GSI hash key + +## 3.5.0 / 2020-04-04 +### Added +* [#405](https://github.com/Dynamoid/dynamoid/pull/405) Added `update!` class method (@UrsaDK) +* [#408](https://github.com/Dynamoid/dynamoid/pull/408) Added `ActiveSupport` load hook on `Dynamoid` load (@aaronmallen) +* [#422](https://github.com/Dynamoid/dynamoid/pull/422) Added `.pluck` method + +### Fixed +* [#410](https://github.com/Dynamoid/dynamoid/pull/410) Fixed creating GSI when table uses on-demand capacity provisioning (@icy-arctic-fox) +* [#414](https://github.com/Dynamoid/dynamoid/pull/414) Fixed lazy table creation +* [#415](https://github.com/Dynamoid/dynamoid/pull/415) Fixed RubyDoc comment (@walkersumida) +* [#420](https://github.com/Dynamoid/dynamoid/pull/420) Fixed `#persisted?` for deleted/destroyed models + +### Changed +* [#416](https://github.com/Dynamoid/dynamoid/pull/416) Improved speed of Adapter's `truncate` method. It now uses `#batch_delete_item` method (@TheSmartnik) +* [#421](https://github.com/Dynamoid/dynamoid/pull/421) Added `touch: false` option of the #save method +* [#423](https://github.com/Dynamoid/dynamoid/pull/423) Added warning when generated for a field methods override existing ones + +## 3.4.1 +### Fixed +* Fix: [#398](https://github.com/Dynamoid/dynamoid/pull/398) Fix broken configuration + +## 3.4.0 +### Added +* Feature: [#386](https://github.com/Dynamoid/dynamoid/pull/386) Disable timestamps fields on a table level with new + table option `timestamps` +* Feature: [#387](https://github.com/Dynamoid/dynamoid/pull/387) Add TTL support with table option `expires` +* Feature: [#393](https://github.com/Dynamoid/dynamoid/pull/393) Support pre-configured credentials with new config + option `credentials` (@emmajhyde) +* Feature: [#397](https://github.com/Dynamoid/dynamoid/pull/397) Configure on-demand table capacity mode with `capacity_mode` option + +### Changed +* Improvement: [#388](https://github.com/Dynamoid/dynamoid/pull/388) Minor memory optimization - don't allocate excessive + hash (@arjes) + +### Fixed +* Fix: [#382](https://github.com/Dynamoid/dynamoid/pull/382) Fixed deprecation warning about `Module#parent_name` in Rails 6 (@tmandke) +* Fix: Typos in Readme.md (@romeuhcf) + +## 3.3.0 +### Added +* [#374](https://github.com/Dynamoid/dynamoid/pull/374) Add `#project` query method to load only specified fields + +### Changed +* [#359](https://github.com/Dynamoid/dynamoid/pull/359) Add support of `NULL` and `NOT_NULL` operators +* [#360](https://github.com/Dynamoid/dynamoid/pull/360) Add `store_attribute_with_nil_value` config option +* [#368](https://github.com/Dynamoid/dynamoid/pull/368) Support Rails 6 (RC1) + +### Fixed +* [#357](https://github.com/Dynamoid/dynamoid/pull/357) Fix synchronous table creation issue +* [#362](https://github.com/Dynamoid/dynamoid/pull/362) Fix issue with selecting Global Secondary Index (@atyndall) +* [#368](https://github.com/Dynamoid/dynamoid/pull/368) Repair `#previous_changes` method from Dirty API +* [#373](https://github.com/Dynamoid/dynamoid/pull/373) Fix threadsafety of loading `Dynamoid::Adapter` 
(@tsub) + +## 3.2.0 +### Added +* [#341](https://github.com/Dynamoid/dynamoid/pull/341), [#342](https://github.com/Dynamoid/dynamoid/pull/342) Add `find_by_pages` method to provide access to DynamoDB query result pagination mechanism (@bmalinconico, @arjes) +* [#354](https://github.com/Dynamoid/dynamoid/pull/354) Add `map` field type + +### Changed +* [#340](https://github.com/Dynamoid/dynamoid/pull/340) Improve selecting more optimal GSI for Query operation - choose GSI with sort key if it's used in criteria (@ryz310) +* [#351](https://github.com/Dynamoid/dynamoid/pull/351) Add warnings about nonexistent fields in `where` conditions +* [#352](https://github.com/Dynamoid/dynamoid/pull/352) Add warning about skipped conditions +* [#356](https://github.com/Dynamoid/dynamoid/pull/356) Simplify requiring Rake tasks in non-Rails application +* Readme.md. Minor improvements and Fixed (@cabello) + +## 3.1.0 +### Added +* [#302](https://github.com/Dynamoid/dynamoid/pull/302) Add methods similar to `ActiveRecord::AttributeMethods::BeforeTypeCast`: + * method `attributes_before_type_cast` + * method `read_attribte_before_type_cast` + * methods `_before_type_cast` +* [#303](https://github.com/Dynamoid/dynamoid/pull/303) Add `#update_attributes!` method +* [#304](https://github.com/Dynamoid/dynamoid/pull/304) Add `inheritance_field` option for `Document.table` method to specify column name for supporting STI and storing class name +* [#305](https://github.com/Dynamoid/dynamoid/pull/305) Add increment/decrement methods: + * `#increment` + * `#increment!` + * `#decrement` + * `#decrement!` + * `.inc` +* [#307](https://github.com/Dynamoid/dynamoid/pull/307) Allow to declare type of elements in `array`/`set` fields with `of` option. Only scalar types are supported as well as custom types +* [#312](https://github.com/Dynamoid/dynamoid/pull/312) Add Ability to specify network timeout connection settings (@lulu-ulul) +* [#313](https://github.com/Dynamoid/dynamoid/pull/313) Add support for backoff in scan and query (@bonty) + +### Changed +* [#314](https://github.com/Dynamoid/dynamoid/pull/314) Re-implement `count` for `where`-chain query efficiently. So now `where(...).count` doesn't load all the documents, just statistics + +### Fixed +* [#298](https://github.com/Dynamoid/dynamoid/pull/298) Fix `raw` field storing when value is a Hash with non-string keys +* [#299](https://github.com/Dynamoid/dynamoid/pull/299) Fix `raw` fields - skip empty strings and sets +* [#309](https://github.com/Dynamoid/dynamoid/pull/309) Fix loading of a document that contains not declared in model class fields +* [#310](https://github.com/Dynamoid/dynamoid/pull/310) Fix `Adapter#list_tables` method to return names of all tables, not just first page (@knovoselic) +* [#311](https://github.com/Dynamoid/dynamoid/pull/311) Fix `consistent_read` option of `.find` (@kokuyouwind) +* [#319](https://github.com/Dynamoid/dynamoid/pull/319) Repair consistent reading for `find_all` +* [#317](https://github.com/Dynamoid/dynamoid/pull/317) Fix `create_tables` rake task + +## 3.0.0 +### Changed +* BREAKING [#267](https://github.com/Dynamoid/dynamoid/pull/267) Upgrade AWS SDK to V3 +* BREAKING [#268](https://github.com/Dynamoid/dynamoid/pull/268) Drop support of old Ruby versions. Support Ruby since 2.3 version +* BREAKING [#268](https://github.com/Dynamoid/dynamoid/pull/268) Drop support of old Rails versions. 
Support Rails since 4.2 version +* BREAKING [#278](https://github.com/Dynamoid/dynamoid/pull/278) Add type casting for finders (`find`, `find_by_id` and `find_all`) +* BREAKING [#279](https://github.com/Dynamoid/dynamoid/pull/279) Change default value of `application_timezone` config option from `:local` to `:utc` +* BREAKING [#288](https://github.com/Dynamoid/dynamoid/pull/288) Add `store_boolean_as_native` config option and set it to `true` by default. So all boolean fields are stored not as string `'t'` and `'f'` but as native boolean values now +* BREAKING [#289](https://github.com/Dynamoid/dynamoid/pull/289) Add `dynamodb_timezone` config option and set it to `:utc` by default. So now all `date` and `datetime` fields stored in string format will be converted to UTC time zone by default +* [#261](https://github.com/Dynamoid/Dynamoid/pull/261) Improve documentation (@walkersumida) +* [#264](https://github.com/Dynamoid/Dynamoid/pull/264) Improve documentation (@xbx) +* [#278](https://github.com/Dynamoid/Dynamoid/pull/278) Add Rails-like type casting +* [#281](https://github.com/Dynamoid/Dynamoid/pull/281) Deprecate dynamic finders, `find_all`, `find_by_id`, `find_by_composite_key`, `find_all_by_composite_key` and `find_all_by_secondary_index` +* [#285](https://github.com/Dynamoid/Dynamoid/pull/285) Set timestamps (`created_at` and `updated_at`) in `upsert`, `update_fields`, `import` and `update` methods +* [#286](https://github.com/Dynamoid/Dynamoid/pull/286) Disable scan warning when intentionally loading all items from a collection (@knovoselic) + +### Fixed +* Bug: [#275](https://github.com/Dynamoid/Dynamoid/pull/275) Fix custom type serialization/deserialization +* Bug: [#283](https://github.com/Dynamoid/Dynamoid/pull/283) Fix using string formats for partition and sort keys of `date`/`datetime` type +* Bug: [#283](https://github.com/Dynamoid/Dynamoid/pull/283) Fix type declaration of custom type fields. 
Returned by `.dynamoid_field_type` value is treated as Dynamoid's type now +* Bug: [#287](https://github.com/Dynamoid/Dynamoid/pull/287) Fix logging disabling (@ghiculescu) + +## 2.2.0 +### Changed +* Feature: [#256](https://github.com/Dynamoid/Dynamoid/pull/256) Support Rails 5.2 (@andrykonchin) + +### Fixed +* Bug: [#255](https://github.com/Dynamoid/Dynamoid/pull/255) Fix Vagrant RVM configuration and upgrade to Ruby 2.4.1 (@richardhsu) + +## 2.1.0 +### Changed +* Feature: [#221](https://github.com/Dynamoid/Dynamoid/pull/221) Add field declaration option `of` to specify the type of `set` elements (@pratik60) +* Feature: [#223](https://github.com/Dynamoid/Dynamoid/pull/223) Add field declaration option `store_as_string` to store `datetime` as ISO-8601 formatted strings (@william101) +* Feature: [#228](https://github.com/Dynamoid/Dynamoid/pull/228) Add field declaration option `store_as_string` to store `date` as ISO-8601 formatted strings (@andrykonchin) +* Feature: [#229](https://github.com/Dynamoid/Dynamoid/pull/229) Support hash argument for `start` chain method (@mnussbaumer) +* Feature: [#236](https://github.com/Dynamoid/Dynamoid/pull/236) Change log level from `info` to `debug` for benchmark logging (@kicktheken) +* Feature: [#239](https://github.com/Dynamoid/Dynamoid/pull/239) Add methods for low-level updating: `.update`, `.update_fields` and `.upsert` (@andrykonchin) +* Feature: [#243](https://github.com/Dynamoid/Dynamoid/pull/243) Support `ne` condition operator (@andrykonchin) +* Feature: [#246](https://github.com/Dynamoid/Dynamoid/pull/246) Added support of backoff in batch operations (@andrykonchin) + * added global config options `backoff` and `backoff_strategies` to configure backoff + * added `constant` and `exponential` built-in backoff strategies + * `.find_all` and `.import` support new backoff options + +### Fixed +* Bug: [#216](https://github.com/Dynamoid/Dynamoid/pull/216) Fix global index detection in queries with conditions other than equal (@andrykonchin) +* Bug: [#224](https://github.com/Dynamoid/Dynamoid/pull/224) Fix how `contains` operator works with `set` and `array` field types (@andrykonchin) +* Bug: [#225](https://github.com/Dynamoid/Dynamoid/pull/225) Fix equal conditions for `array` fields (@andrykonchin) +* Bug: [#229](https://github.com/Dynamoid/Dynamoid/pull/229) Repair support `start` chain method on Scan operation (@mnussbaumer) +* Bug: [#238](https://github.com/Dynamoid/Dynamoid/pull/238) Fix default value of `models_dir` config option (@baloran) +* Bug: [#244](https://github.com/Dynamoid/Dynamoid/pull/244) Allow to pass empty strings and sets to `.import` (@andrykonchin) +* Bug: [#246](https://github.com/Dynamoid/Dynamoid/pull/246) Batch operations (`batch_write_item` and `batch_read_item`) handle unprocessed items themselves (@andrykonchin) +* Bug: [#250](https://github.com/Dynamoid/Dynamoid/pull/250) Update outdated warning message about inefficient query and missing indices (@andrykonchin) +* Bug: [252](https://github.com/Dynamoid/Dynamoid/pull/252) Don't loose nanoseconds when store DateTime as float number + +## 2.0.0 +Breaking changes in this release generally bring Dynamoid behavior closer to the Rails-way. 
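To make the 2.1.0 additions above concrete, a small sketch; the `Event` model is invented, while the `of`, `store_as_string`, and backoff options come from the entries themselves:

```ruby
# Illustrative only: the Event model is not part of this changeset.
require 'set'

class Event
  include Dynamoid::Document

  field :tags,        :set,      of: :string            # PR #221: typed set elements
  field :occurred_at, :datetime, store_as_string: true  # PR #223: store as ISO-8601 string
end

# PR #239: low-level writes that bypass callbacks and validations
Event.update_fields('event-1', tags: Set['billing', 'urgent'])
Event.upsert('event-1', occurred_at: Time.now.utc)

# PR #246: backoff for batch operations (built-in :constant and :exponential strategies)
Dynamoid.configure do |config|
  config.backoff = { constant: 2 }
end
```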
+ +### Added +* [#199](https://github.com/Dynamoid/Dynamoid/pull/199) Added `Document.import` method (@andrykonchin) +* [#212](https://github.com/Dynamoid/Dynamoid/pull/212) Add foreign_key option (@andrykonchin) +* [#213](https://github.com/Dynamoid/Dynamoid/pull/213) Support Boolean raw type (@andrykonchin) + +### Changed +* BREAKING [#186](https://github.com/Dynamoid/Dynamoid/pull/186) Consistent behavior for `Model.where({}).all` (@andrykonchin) + * <= 1.3.x behaviour - + * load lazily if user specified batch size + * load all collection into memory otherwise + * New behaviour - + * always return lazy evaluated collection + * It means Model.where({}).all returns Enumerator instead of Array. + * If you need Array interface you have to convert collection to Array manually with to_a method call +* BREAKING [#195](https://github.com/Dynamoid/Dynamoid/pull/195) Failed `#find` returns error (@andrykonchin) + * <= 1.3.x behaviour - find returns nil or smaller array. + * New behaviour - it raises RecordNotFound if one or more records can not be found for the requested ids +* BREAKING [#196](https://github.com/Dynamoid/Dynamoid/pull/196) Return value of `#save` (@andrykonchin) + * <= 1.3.x behaviour - save returns self if model is saved successfully + * New behaviour - it returns true +* [#185](https://github.com/Dynamoid/Dynamoid/pull/185) `where`, finders and friends take into account STI (single table inheritance) now (@andrykonchin) + * query will return items of the model class and all subclasses +* [#190](https://github.com/Dynamoid/Dynamoid/pull/190) Allow passing options to range when defining attributes of the document (@richardhsu) + * Allows for serialized fields and passing the serializer option. +* [#198](https://github.com/Dynamoid/Dynamoid/pull/198) Enhanced `#create` and `#create!` to allow multiple document creation like `#import` (@andrykonchin) + * `User.create([{name: 'Josh'}, {name: 'Nick'}])` +* [#205](https://github.com/Dynamoid/Dynamoid/pull/205) Use batch deletion via `batch_write_item` for `delete_all` (@andrykonchin) +* [#205](https://github.com/Dynamoid/Dynamoid/pull/205) Rename `Chain#destroy_all` as `Chain#delete_all`, to better match Rails conventions when no callbacks are run (@andrykonchin) + * kept the old name as an alias, for backwards compatibility +* [#207](https://github.com/Dynamoid/Dynamoid/pull/207) Added slicing by 25 requests in #batch_write_item (@andrykonchin) +* [#211](https://github.com/Dynamoid/Dynamoid/pull/211) Improved Vagrant setup for testing (@richardhsu) +* Improved Documentation (@pboling, @andrykonchin) + +### Fixed +* [#191](https://github.com/Dynamoid/Dynamoid/pull/191), [#192](https://github.com/Dynamoid/Dynamoid/pull/192) Support lambdas as fix for value types were not able to be used as default values (@andrykonchin)(@richardhsu) +* [#202](https://github.com/Dynamoid/Dynamoid/pull/202) Fix several issues with associations (@andrykonchin) + * setting `nil` value raises an exception + * document doesn't keep assigned model and loads it from the storage + * delete call doesn't update cached ids of associated models + * fix clearing old `has_many` association while add model to new `has_many` association +* [#204](https://github.com/Dynamoid/Dynamoid/pull/204) Fixed issue where `Document.where(:"id.in" => [])` would do `Query` operation instead of `Scan` (@andrykonchin) + * Fixed `Chain#key_present?` +* [#205](https://github.com/Dynamoid/Dynamoid/pull/205) Fixed `delete_all` (@andrykonchin) + * Fixed exception when makes scan and sort 
key is declared in model + * Fixed exception when makes scan and any condition is specified in where clause (like Document.where().delete_all) + * Fixed exception when makes query and sort key isn't declared in model +* [#207](https://github.com/Dynamoid/Dynamoid/pull/207) Fixed `#delete` method for case `adapter.delete(table_name, [1, 2, 3], range_key: 1)` (@andrykonchin) + +## 1.3.4 +### Added +* Added `Chain#last` method (@andrykonchin) +* Added `date` field type (@andrykonchin) +* Added `application_timezone` config option (@andrykonchin) +* Support condition operators for non-key fields for Query request (@andrykonchin) +* Support condition operators for Scan request (@andrykonchin) +* Support additional operators `in`, `contains`, `not_contains` (@andrykonchin) +* Support type casting in `where` clause (@andrykonchin) +* Allow consistent reading for Scan request (@andrykonchin) +* Add `Chain#scan_limit` (@richardhsu) +* Support batch loading for Query requests (@richardhsu) +* Support querying Global/Local Secondary Indices in `where` clause (@richardhsu) + +### Changed +* Use Query instead of Scan if there are no conditions for sort (range) key in where clause (@andrykonchin) +* Rename `Chain#eval_limit` to `#record_limit` (@richardhsu) +* Only query on GSI if projects all attributes in `where` clause (@richardhsu) + +### Fixed +* Fix incorrect applying of default field value (#36 and #117, @andrykonchin) +* Fix sync table creation/deletion (#160, @mirokuxy) +* Allow to override document timestamps (@andrykonchin) +* Fix storing empty array as nil (#8, @andrykonchin) +* Fix `limit` handling for Query requests (#85, @richardhsu) +* Fix `limit` handling for Scan requests (#85, @richardhsu) +* Fix paginating for Query requests (@richardhsu) +* Fix paginating for Scan requests (@richardhsu) +* Fix `batch_get_item` method call for integer partition key (@mudasirraza) + +## 1.3.3 +### Added +* Allow configuration of the Dynamoid models directory, as not everyone keeps non AR models in app/models + - Dynamoid::Config.models_dir = "app/whatever" + +## 1.3.2 +### Fixed +* Fix migrations by stopping the loading of all rails models outside the rails env. + +## 1.3.1 +* Implements #135 + * dump values for :integer, :string, :boolean fields passed to where query + * e.g. You can search for booleans with any of: `[true, false, "t", "f", "true", "false"]` +* Adds support for Rails 5 without warnings. +* Adds rake tasks for working with a DynamoDB database: + * rake dynamoid:create_tables + * rake dynamoid:ping +* Automatically requires the Railtie when in Rails (which loads the rake tasks) +* Prevent duplicate entries in Dynamoid.included_models +* Added wwtd and appraisal to spec suite for easier verification of the compatibility matrix +* Support is now officially Ruby 2.0+, (including JRuby 9000) and Rails 4.0+ + +## 1.3.0 +* Fixed specs (@AlexNisnevich & @pboling) +* Fix `blank?` and `present?` behavior for single associations (#110, @AlexNisnevich & @bayesimpact) +* Support BatchGet for more than 100 items (#80, @getninjas) +* Add ability to specify connection settings specific to Dynamoid (#116, @NielsKSchjoedt) +* Adds Support for Rails 5! 
(#109, @gastzars) +* Table Namespace Fix (#79, @alexperto) +* Improve Testing Docs (#103, @tadast) +* Query All Items by Looping (#102, @richardhsu) +* Store document in DocumentNotValid error for easier debugging (#98, holyketzer) +* Better support for raw datatype (#104, @OpenGov) +* Fix associative tables with non-id primary keys (#86, @everett-wetchler) + +## 1.2.1 +* Remove accidental Gemfile.lock; fix .gitignore (#95, @pboling) +* Allow options to put_items (#95, @alexperto) +* Support range key in secondary index queries (#95, @pboling) +* Better handling of options generally (#95, @pboling) +* Support for batch_delete_item API (#95, @pboling) +* Support for batch_write_item API (#95, @alexperto) + +## 1.2.0 +* Add create_table_syncronously, and sync: option to regular create_table (@pboling) + * make required for tables created with secondary indexes +* Expose and fix truncate method on adapter (#52, @pcorpet) +* Enable saving without updating timestamps (#58, @cignoir) +* Fix projected attributes by checking for :include (#56, @yoshida_tetsuhiro) +* Make behavior of association where method closer to AR by cloning instead of modifying (#51, @pcorpet) +* Add boolean field presence validator (#50, @pcorpet) +* Add association build method (#49, @pcorpet) +* Fix association create method (#47, #48, @pcorpet) +* Support range_between (#42, @ayemos) +* Fix problems with range query (#42, @ayemos) +* Don't prefix table names when namespace is nil (#40, @brenden) +* Added basic secondary index support (#34, @sumocoder) +* Fix query attribute behavior for booleans (#35, @amirmanji) +* Ignore unknown fields on model initialize (PR #33, @sumocoder) + +## 1.1.0 +* Added support for optimistic locking on delete (PR #29, @sumocoder) +* upgrade concurrent-ruby requirement to 1.0 (PR #31, @keithmgould) + +## 1.0.0 +* Add support for AWS SDK v2. +* Add support for custom class type for fields. +* Remove partitioning support. +* Remove support for Dynamoid's (pseudo)indexes, now that DynamoDB offers + local and global indexes. +* Rename :float field type to :number. +* Rename Chain#limit to Chain#eval_limit. + +Housekeeping: + +* Switch from `fake_dynamo` for unit tests to DynamoDBLocal. This is the new authoritative + implementation of DynamoDB for testing, and it supports AWS SDK v2. +* Use Travis CI to auto-run unit tests on multiple Rubies. +* Randomize spec order. diff --git a/dynamoid/Gemfile b/dynamoid/Gemfile new file mode 100644 index 000000000..0b5de506f --- /dev/null +++ b/dynamoid/Gemfile @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +# NOTE: This Gemfile is only relevant to local development. +# It allows during local development: +# - code coverage reports to be generated +# - style linting to be run with RuboCop & extensions +# All CI builds use files in gemfiles/*. +source 'https://rubygems.org' + +# Specify your gem's dependencies in dynamoid.gemspec +gemspec + +# Only add to the set of gems from the gemspec when running on local. +# All CI jobs must use a discrete Gemfile located at gemfiles/*.gemfile. They will not use this Gemfile +if ENV['CI'].nil? 
+ ruby_version = Gem::Version.new(RUBY_VERSION) + minimum_version = ->(version, engine = 'ruby') { ruby_version >= Gem::Version.new(version) && engine == RUBY_ENGINE } + committing = minimum_version.call('2.4') + linting = minimum_version.call('2.7') + coverage = minimum_version.call('2.7') + + platforms :mri do + if committing + gem 'overcommit' + end + if linting + gem 'rubocop-md', require: false + gem 'rubocop-packaging', require: false + gem 'rubocop-performance', require: false + gem 'rubocop-rake', require: false + gem 'rubocop-rspec', require: false + gem 'rubocop-thread_safety', require: false + end + if coverage + gem 'codecov', '~> 0.6' # For CodeCov + gem 'simplecov', '~> 0.21', require: false + gem 'simplecov-cobertura' # XML for Jenkins + gem 'simplecov-json' # For CodeClimate + gem 'simplecov-lcov', '~> 0.8', require: false + end + end + + platforms :jruby do + # Add `binding.pry` to your code where you want to drop to REPL + gem 'pry-debugger-jruby' + end + + platforms :ruby do + gem 'pry-byebug' + end +end diff --git a/dynamoid/LICENSE.txt b/dynamoid/LICENSE.txt new file mode 100644 index 000000000..725c24957 --- /dev/null +++ b/dynamoid/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2012 Josh Symonds +Copyright (c) 2013 - 2022 Dynamoid, https://github.com/Dynamoid + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/dynamoid/README.md b/dynamoid/README.md new file mode 100644 index 000000000..3a2c6034e --- /dev/null +++ b/dynamoid/README.md @@ -0,0 +1,1465 @@ +# Dynamoid + +[![Gem Version][â›ŗī¸version-img]][â›ŗī¸gem] +[![Supported Build Status][🏘sup-wf-img]][🏘sup-wf] +[![Maintainability][â›ŗcclim-maint-imgâ™ģī¸]][â›ŗcclim-maint] +[![Coveralls][🏘coveralls-img]][🏘coveralls] +[![CodeCov][🖇codecov-imgâ™ģī¸]][🖇codecov] +[![Helpers][🖇triage-help-img]][🖇triage-help] +[![Contributors][🖐contributors-img]][🖐contributors] +[![RubyDoc.info][🚎yard-img]][🚎yard] +[![License][🖇src-license-img]][🖇src-license] +[![GitMoji][🖐gitmoji-img]][🖐gitmoji] +[![SemVer 2.0.0][🧮semver-img]][🧮semver] +[![Keep-A-Changelog 1.0.0][📗keep-changelog-img]][📗keep-changelog] +[![Sponsor Project][🖇sponsor-img]][🖇sponsor] + +Dynamoid is an ORM for Amazon's DynamoDB for Ruby applications. It +provides similar functionality to ActiveRecord and improves on Amazon's +existing +[HashModel](http://docs.amazonwebservices.com/AWSRubySDK/latest/AWS/Record/HashModel.html) +by providing better searching tools and native association support. 
+
+DynamoDB is not like other document-based databases you might know, and
+is very different indeed from relational databases. It sacrifices
+anything beyond the simplest relational queries and transactional
+support to provide a fast, cost-efficient, and highly durable storage
+solution. If your database requires complicated relational queries and
+transaction support, then this modest Gem cannot provide them for you,
+and neither can DynamoDB. In those cases you would do better to look
+elsewhere for your database needs.
+
+But if you want a fast, scalable, simple, easy-to-use database (and a
+Gem that supports it) then look no further!
+
+## Installation
+
+Installing Dynamoid is pretty simple. First include the Gem in your
+Gemfile:
+
+```ruby
+gem 'dynamoid'
+```
+
+## Prerequisites
+
+Dynamoid depends on the aws-sdk gem and is tested against the current
+version of aws-sdk (~> 3) and rails (>= 4). The configuration needed
+for AWS access is therefore handled by the AWS SDK setup.
+
+### AWS SDK Version Compatibility
+
+Make sure you are using the Dynamoid version that matches your AWS SDK
+version.
+
+| Dynamoid version | AWS SDK Version |
+| ---------------- | --------------- |
+| 0.x              | 1.x             |
+| 1.x              | 2.x             |
+| 2.x              | 2.x             |
+| 3.x              | 3.x             |
+
+### AWS Configuration
+
+Configure AWS access:
+[Reference](https://github.com/aws/aws-sdk-ruby)
+
+For example, create `config/initializers/aws.rb` as follows:
+
+```ruby
+Aws.config.update(
+  region: 'us-west-2',
+  credentials: Aws::Credentials.new('REPLACE_WITH_ACCESS_KEY_ID', 'REPLACE_WITH_SECRET_ACCESS_KEY'),
+)
+```
+
+Alternatively, if you don't want the AWS connection settings to be
+overridden for your entire project, you can specify connection settings
+for Dynamoid only by setting them in the `Dynamoid.configure` block:
+
+```ruby
+require 'dynamoid'
+Dynamoid.configure do |config|
+  config.access_key = 'REPLACE_WITH_ACCESS_KEY_ID'
+  config.secret_key = 'REPLACE_WITH_SECRET_ACCESS_KEY'
+  config.region = 'us-west-2'
+end
+```
+
+Additionally, if you would like to pass in pre-configured AWS credentials
+(e.g. you have an IAM role credential, you configure your credentials
+elsewhere in your project, etc.), you may do so:
+
+```ruby
+require 'dynamoid'
+
+credentials = Aws::AssumeRoleCredentials.new(
+  region: region,
+  access_key_id: key,
+  secret_access_key: secret,
+  role_arn: role_arn,
+  role_session_name: 'our-session'
+)
+
+Dynamoid.configure do |config|
+  config.region = 'us-west-2'
+  config.credentials = credentials
+end
+```
+
+For a full list of the DynamoDB regions, you can go
+[here](http://docs.aws.amazon.com/general/latest/gr/rande.html#ddb_region).
+
+Then you need to initialize Dynamoid config to get it going. Put code
+similar to this somewhere (a Rails initializer would be a great place
+for this if you're using Rails):
+
+```ruby
+require 'dynamoid'
+Dynamoid.configure do |config|
+  # To namespace tables created by Dynamoid from other tables you might have.
+  # Set to nil to avoid namespacing.
+  config.namespace = 'dynamoid_app_development'
+
+  # [Optional]. If provided, it communicates with the DB listening at the endpoint.
+  # This is useful for testing with [DynamoDB Local](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Tools.DynamoDBLocal.html).
+  config.endpoint = 'http://localhost:3000'
+end
+```
+
+### Ruby & Rails Compatibility
+
+Dynamoid supports Ruby >= 2.3 and Rails >= 4.2.
+ +Its compatibility is tested against following Ruby versions: 2.3, 2.4, +2.5, 2.6, 2.7, 3.0, 3.1, 3.2 and 3.3, JRuby 9.4.x and against Rails versions: 4.2, 5.0, 5.1, +5.2, 6.0, 6.1, 7.0 and 7.1. + +## Setup + +You *must* include `Dynamoid::Document` in every Dynamoid model. + +```ruby +class User + include Dynamoid::Document + + # fields declaration +end +``` + +### Table + +Dynamoid has some sensible defaults for you when you create a new table, +including the table name and the primary key column. But you can change +those if you like on table creation. + +```ruby +class User + include Dynamoid::Document + + table name: :awesome_users, key: :user_id, read_capacity: 5, write_capacity: 5 +end +``` + +These fields will not change an existing table: so specifying a new +read_capacity and write_capacity here only works correctly for entirely +new tables. Similarly, while Dynamoid will look for a table named +`awesome_users` in your namespace, it won't change any existing tables +to use that name; and if it does find a table with the correct name, it +won't change its hash key, which it expects will be `user_id`. If this +table doesn't exist yet, however, Dynamoid will create it with these +options. + +There is a basic support of DynamoDB's [Time To Live (TTL) +mechanism](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/TTL.html). +If you declare a field as TTL field - it will be initialised if doesn't +have value yet. Default value is current time + specified seconds. + +```ruby +class User + include Dynamoid::Document + + table expires: { field: :ttl, after: 60 } + + field :ttl, :integer +end +``` + +Field used to store expiration time (e.g. `ttl`) should be declared +explicitly and should have numeric type (`integer`, `number`) only. +`datetime` type is also possible but only if it's stored as number +(there is a way to store time as a string also). + +It's also possible to override a global option `Dynamoid::Config.timestamps` +on a table level: + +```ruby +table timestamps: false +``` + +This option controls generation of timestamp fields +`created_at`/`updated_at`. + +It's also possible to override table capacity mode configured globally +with table level option `capacity_mode`. Valid values are +`:provisioned`, `:on_demand` and `nil`: + +```ruby +table capacity_mode: :on_demand +``` + +If table capacity mode is on-demand, another related table-level options +`read_capacity` and `write_capacity` will be ignored. + +### Fields + +You'll have to define all the fields on the model and the data type of +each field. Every field on the object must be included here; if you miss +any they'll be completely bypassed during DynamoDB's initialization and +will not appear on the model objects. + +By default, fields are assumed to be of type `string`. Other built-in +types are `integer`, `number`, `set`, `array`, `map`, `datetime`, +`date`, `boolean`, `binary`, `raw` and `serialized`. `array` and +`map` match List and Map DynamoDB types respectively. `raw` type means +you can store Ruby Array, Hash, String and numbers. If built-in types do +not suit you, you can use a custom field type represented by an +arbitrary class, provided that the class supports a compatible +serialization interface. The primary use case for using a custom field +type is to represent your business logic with high-level types, while +ensuring portability or backward-compatibility of the serialized +representation. 
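+
+For example, a model combining several of the built-in types could be
+declared as follows (a minimal sketch; the class and field names are
+illustrative):
+
+```ruby
+class Post
+  include Dynamoid::Document
+
+  field :title                                # :string is the default type
+  field :view_count, :integer
+  field :published_on, :date, store_as_string: true
+  field :scores, :set, of: :integer
+  field :settings, :map
+  field :payload, :raw
+end
+```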
+ +#### Note on boolean type + +The boolean fields are stored as DynamoDB boolean values by default. +Dynamoid can store boolean values as strings as well - `'t'` and `'f'`. +So if you want to change the default format of boolean field you can +easily achieve this with `store_as_native_boolean` field option: + +```ruby +class Document + include Dynamoid::Document + + field :active, :boolean, store_as_native_boolean: false +end +``` + +#### Note on date type + +By default date fields are persisted as days count since 1 January 1970 +like UNIX time. If you prefer dates to be stored as ISO-8601 formatted +strings instead then set `store_as_string` to `true` + +```ruby +class Document + include Dynamoid::Document + + field :sent_on, :date, store_as_string: true +end +``` + +#### Note on datetime type + +By default datetime fields are persisted as UNIX timestamps with +millisecond precision in DynamoDB. If you prefer datetimes to be stored +as ISO-8601 formatted strings instead then set `store_as_string` to +`true` + +```ruby +class Document + include Dynamoid::Document + + field :sent_at, :datetime, store_as_string: true +end +``` + +**WARNING:** Fields in numeric format are stored with nanoseconds as a +fraction part and precision could be lost. That's why `datetime` field +in numeric format shouldn't be used as a range key. + +You have two options if you need to use a `datetime` field as a range +key: +* string format +* store `datetime` values without milliseconds (e.g. cut + them manually with `change` method - `Time.now.change(usec: 0)` + +#### Note on set type + +`Dynamoid`'s type `set` is stored as DynamoDB's Set attribute type. +DynamoDB supports only Set of strings, numbers and binary. Moreover Set +*must* contain elements of the same type only. + +In order to use some other `Dynamoid`'s types you can specify `of` +option to declare the type of set elements. + +As a result of that DynamoDB limitation, in Dynamoid only the following +scalar types are supported (note: does not support `boolean`): +`integer`, `number`, `date`, `datetime`, `serializable` and custom +types. + +```ruby +class Document + include Dynamoid::Document + + field :tags, :set, of: :integer +end +``` + +It's possible to specify field options like `store_as_string` for +`datetime` field or `serializer` for `serializable` field for `set` +elements type: + +```ruby +class Document + include Dynamoid::Document + + field :values, :set, of: { serialized: { serializer: JSON } } + field :dates, :set, of: { date: { store_as_string: true } } + field :datetimes, :set, of: { datetime: { store_as_string: false } } +end +``` + +DynamoDB doesn't allow empty strings in fields configured as `set`. +Abiding by this restriction, when `Dynamoid` saves a document it removes +all empty strings in set fields. + +#### Note on array type + +`Dynamoid`'s type `array` is stored as DynamoDB's List attribute type. +It can contain elements of different types (in contrast to Set attribute +type). + +If you need to store in array field elements of `datetime`, `date`, +`serializable` or some custom type, which DynamoDB doesn't support +natively, you should specify element type with `of` option: + +```ruby +class Document + include Dynamoid::Document + + field :dates, :array, of: :date +end +``` + +#### Magic Columns + +You get magic columns of `id` (`string`), `created_at` (`datetime`), and +`updated_at` (`datetime`) for free. 
+
+```ruby
+class User
+  include Dynamoid::Document
+
+  field :name
+  field :email
+  field :rank, :integer
+  field :number, :number
+  field :joined_at, :datetime
+  field :hash, :serialized
+end
+```
+
+#### Default Values
+
+You can optionally set a default value on a field using either a plain
+value or a lambda:
+
+```ruby
+field :actions_taken, :integer, default: 0
+field :joined_at, :datetime, default: -> { Time.now }
+```
+
+#### Aliases
+
+It might be helpful to define an alias for an existing field when the
+naming convention used for a table differs from the conventions common
+in Ruby:
+
+```ruby
+field :firstName, :string, alias: :first_name
+```
+
+This way setters, getters, `?` and `_before_type_cast` methods are
+generated for both the original field name (`firstName`) and the alias
+(`first_name`).
+
+```ruby
+user = User.new(first_name: 'Michael')
+user.first_name # => 'Michael'
+user.firstName # => 'Michael'
+```
+
+#### Custom Types
+
+To use a custom type for a field, suppose you have a `Money` type.
+
+```ruby
+class Money
+  # ... your business logic ...
+
+  def dynamoid_dump
+    'serialized representation as a string'
+  end
+
+  def self.dynamoid_load(_serialized_str)
+    # parse serialized representation and return a Money instance
+    Money.new(1.23)
+  end
+end
+
+class User
+  include Dynamoid::Document
+
+  field :balance, Money
+end
+```
+
+If you want to use a third-party class (which does not support
+`#dynamoid_dump` and `.dynamoid_load`) as your field type, you can use
+an adapter class providing `.dynamoid_dump` and `.dynamoid_load` class
+methods for your third-party class. `.dynamoid_load` can remain the same
+from the previous example; here we just add a level of indirection for
+serializing. Example:
+
+```ruby
+# Third-party Money class
+class Money; end
+
+class MoneyAdapter
+  def self.dynamoid_load(_money_serialized_str)
+    Money.new(1.23)
+  end
+
+  def self.dynamoid_dump(money_obj)
+    money_obj.value.to_s
+  end
+end
+
+class User
+  include Dynamoid::Document
+
+  field :balance, MoneyAdapter
+end
+```
+
+Lastly, you can control the data type of your custom-class-backed field
+at the DynamoDB level. This is especially important if you want to use
+your custom field as a numeric range or for number-oriented queries. By
+default custom fields are persisted as a string attribute, but your
+custom class can override this with a `.dynamoid_field_type` class
+method, which would return either `:string` or `:number`.
+
+DynamoDB may support some other attribute types that are not yet
+supported by Dynamoid.
+
+### Sort key
+
+Along with a partition key, a table may have a sort key. To declare it
+in a model, use the `range` class method:
+
+```ruby
+class Post
+  include Dynamoid::Document
+
+  range :posted_at, :datetime
+end
+```
+
+The second argument, the type, is optional and defaults to `string`.
+
+### Associations
+
+Just like in ActiveRecord (or your other favorite ORM), Dynamoid uses
+associations to create links between models.
+
+**WARNING:** Associations are not supported for models with a compound
+primary key. If a model declares a range key, it should not declare any
+associations itself and should only be referenced by associations in
+other models.
+
+The only supported associations (so far) are `has_many`, `has_one`,
+`has_and_belongs_to_many`, and `belongs_to`. Associations are very
+simple to create: just specify the type, the name, and then any options
+you'd like to pass to the association.
If there's an inverse association +either inferred or specified directly, Dynamoid will update both objects +to point at each other. + +```ruby +class User + include Dynamoid::Document + + # ... + + has_many :addresses + has_many :students, class: User + belongs_to :teacher, class_name: :user + belongs_to :group + belongs_to :group, foreign_key: :group_id + has_one :role + has_and_belongs_to_many :friends, inverse_of: :friending_users +end + +class Address + include Dynamoid::Document + + # ... + + belongs_to :user # Automatically links up with the user model +end +``` + +Contrary to what you'd expect, association information is always +contained on the object specifying the association, even if it seems +like the association has a foreign key. This is a side effect of +DynamoDB's structure: it's very difficult to find foreign keys without +an index. Usually you won't find this to be a problem, but it does mean +that association methods that build new models will not work correctly - +for example, `user.addresses.new` returns an address that is not +associated to the user. We'll be correcting this ~soon~ maybe someday, +if we get a pull request. + +### Validations + +Dynamoid bakes in ActiveModel validations, just like ActiveRecord does. + +```ruby +class User + include Dynamoid::Document + + # ... + + validates_presence_of :name + validates_format_of :email, with: /@/ +end +``` + +To see more usage and examples of ActiveModel validations, check out the +[ActiveModel validation +documentation](http://api.rubyonrails.org/classes/ActiveModel/Validations.html). + +If you want to bypass model validation, pass `validate: false` to `save` +call: + +```ruby +model.save(validate: false) +``` + +### Callbacks + +Dynamoid also employs ActiveModel callbacks. Right now the following +callbacks are supported: +- `save` (before, after, around) +- `create` (before, after, around) +- `update` (before, after, around) +- `validation` (before, after) +- `destroy` (before, after, around) +- `after_touch` +- `after_initialize` +- `after_find` + +Example: + +```ruby +class User + include Dynamoid::Document + + # ... + + before_save :set_default_password + after_create :notify_friends + after_destroy :delete_addresses +end +``` + +### STI + +Dynamoid supports STI (Single Table Inheritance) like Active Record +does. You need just specify `type` field in a base class. Example: + +```ruby +class Animal + include Dynamoid::Document + + field :name + field :type +end + +class Cat < Animal + field :lives, :integer +end + +cat = Cat.create(name: 'Morgan') +animal = Animal.find(cat.id) +animal.class +#=> Cat +``` + +If you already have DynamoDB tables and `type` field already exists and +has its own semantic it leads to conflict. It's possible to tell +Dynamoid to use another field (even not existing) instead of `type` one +with `inheritance_field` table option: + +```ruby +class Car + include Dynamoid::Document + table inheritance_field: :my_new_type + + field :my_new_type +end + +c = Car.create +c.my_new_type +#=> "Car" +``` + +### Type casting + +Dynamoid supports type casting and tries to do it in the most convenient +way. Values for all fields (except custom type) are coerced to declared +field types. 
+ +Some obvious rules are used, e.g.: + +for boolean field: +```ruby +document.boolean_field = 'off' +# => false +document.boolean_field = 'false' +# => false +document.boolean_field = 'some string' +# => true +``` + +or for integer field: +```ruby +document.integer_field = 42.3 +# => 42 +document.integer_field = '42.3' +# => 42 +document.integer_field = true +# => 1 +``` + +If time zone isn't specified for `datetime` value - application time +zone is used. + +To access field value before type casting following method could be +used: `attributes_before_type_cast` and +`read_attribute_before_type_cast`. + +There is `_before_type_cast` method for every field in a model as +well. + +### Dirty API + +Dynamoid supports Dirty API which is equivalent to [Rails 5.2 +`ActiveModel::Dirty`](https://api.rubyonrails.org/v5.2/classes/ActiveModel/Dirty.html). +There is only one limitation - change in place of field isn't detected +automatically. + +## Usage + +### Object Creation + +Dynamoid's syntax is generally very similar to ActiveRecord's. Making +new objects is simple: + +```ruby +u = User.new(name: 'Josh') +u.email = 'josh@joshsymonds.com' +u.save +``` + +Save forces persistence to the data store: a unique ID is also assigned, +but it is a string and not an auto-incrementing number. + +```ruby +u.id # => '3a9f7216-4726-4aea-9fbc-8554ae9292cb' +``` + +To use associations, you use association methods very similar to +ActiveRecord's: + +```ruby +address = u.addresses.create +address.city = 'Chicago' +address.save +``` + +To create multiple documents at once: + +```ruby +User.create([{ name: 'Josh' }, { name: 'Nick' }]) +``` + +There is an efficient and low-level way to create multiple documents +(without validation and callbacks running): + +```ruby +users = User.import([{ name: 'Josh' }, { name: 'Nick' }]) +``` + +### Querying + +Querying can be done in one of three ways: + +```ruby +Address.find(address.id) # Find directly by ID. +Address.where(city: 'Chicago').all # Find by any number of matching criteria... + # Though presently only "where" is supported. +Address.find_by_city('Chicago') # The same as above, but using ActiveRecord's older syntax. +``` + +And you can also query on associations: + +```ruby +u.addresses.where(city: 'Chicago').all +``` + +But keep in mind Dynamoid - and document-based storage systems in +general - are not drop-in replacements for existing relational +databases. The above query does not efficiently perform a conditional +join, but instead finds all the user's addresses and naively filters +them in Ruby. For large associations this is a performance hit compared +to relational database engines. + +**Warning:** There is a caveat with filtering documents by `nil` value +attribute. By default Dynamoid ignores attributes with `nil` value and +doesn't store them in a DynamoDB document. This behavior could be +changed with `store_attribute_with_nil_value` config option. + +If Dynamoid ignores `nil` value attributes `null`/`not_null` operators +should be used in query: + +```ruby +Address.where('postcode.null': true) +Address.where('postcode.not_null': true) +``` + +If Dynamoid keeps `nil` value attributes `eq`/`ne` operators should be +used instead: + +```ruby +Address.where(postcode: nil) +Address.where('postcode.ne': nil) +``` + +#### Limits + +There are three types of limits that you can query with: + +1. `record_limit` - The number of evaluated records that are returned by + the query. +2. 
`scan_limit` - The number of scanned records that DynamoDB will look + at before returning. +3. `batch_size` - The number of records requested to DynamoDB per + underlying request, good for large queries! + +Using these in various combinations results in the underlying requests +to be made in the smallest size possible and the query returns once +`record_limit` or `scan_limit` is satisfied. It will attempt to batch +whenever possible. + +You can thus limit the number of evaluated records, or select a record +from which to start in order to support pagination. + +```ruby +Address.record_limit(5).start(address) # Only 5 addresses starting at `address` +``` +Where `address` is an instance of the model or a hash +`{the_model_hash_key: 'value', the_model_range_key: 'value'}`. Keep in +mind that if you are passing a hash to `.start()` you need to explicitly +define all required keys in it including range keys, depending on table +or secondary indexes signatures, otherwise you'll get an +`Aws::DynamoDB::Errors::ValidationException` either for `Exclusive Start +Key must have same size as table's key schema` or `The provided starting +key is invalid` + +If you are potentially running over a large data set and this is +especially true when using certain filters, you may want to consider +limiting the number of scanned records (the number of records DynamoDB +infrastructure looks through when evaluating data to return): + +```ruby +Address.scan_limit(5).start(address) # Only scan at most 5 records and return what's found starting from `address` +``` + +For large queries that return many rows, Dynamoid can use AWS' support +for requesting documents in batches: + +```ruby +# Do some maintenance on the entire table without flooding DynamoDB +Address.batch(100).each { |addr| addr.do_some_work && sleep(0.01) } +Address.record_limit(10_000).batch(100).each { |addr| addr.do_some_work && sleep(0.01) } # Batch specified as part of a chain +``` + +The implication of batches is that the underlying requests are done in +the batch sizes to make the request and responses more manageable. Note +that this batching is for `Query` and `Scans` and not `BatchGetItem` +commands. + +#### DynamoDB pagination + +At times it can be useful to rely on DynamoDB [low-level +pagination](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Query.html#Query.Pagination) +instead of fixed pages sizes. Each page results in a single Query or +Scan call to DynamoDB, but returns an unknown number of records. + +Access to the native DynamoDB pages can be obtained via the +`find_by_pages` method, which yields arrays of records. + +```ruby +Address.find_by_pages do |addresses, metadata| +end +``` + +Each yielded pages returns page metadata as the second argument, which +is a hash including a key `:last_evaluated_key`. The value of this key +can be used for the `start` method to fetch the next page of records. + +This way it can be used for instance to implement efficiently pagination +in web-applications: + +```ruby +class UserController < ApplicationController + def index + next_page = params[:next_page_token] ? JSON.parse(Base64.decode64(params[:next_page_token])) : nil + + records, metadata = User.start(next_page).find_by_pages.first + + render json: { + records: records, + next_page_token: Base64.encode64(metadata[:last_evaluated_key].to_json) + } + end +end +``` + +#### Sort Conditions and Filters + +You are able to optimize query with condition for sort key. 
Following +operators are available: `gt`, `lt`, `gte`, `lte`, `begins_with`, +`between` as well as equality: + +```ruby +Address.where(latitude: 10_212) +Address.where('latitude.gt': 10_212) +Address.where('latitude.lt': 10_212) +Address.where('latitude.gte': 10_212) +Address.where('latitude.lte': 10_212) +Address.where('city.begins_with': 'Lon') +Address.where('latitude.between': [10_212, 20_000]) +``` + +You are able to filter results on the DynamoDB side and specify +conditions for non-key fields. Following additional operators are +available: `in`, `contains`, `not_contains`, `null`, `not_null`: + +```ruby +Address.where('city.in': %w[London Edenburg Birmingham]) +Address.where('city.contains': ['on']) +Address.where('city.not_contains': ['ing']) +Address.where('postcode.null': false) +Address.where('postcode.not_null': true) +``` + +**WARNING:** Please take into account that `NULL` and `NOT_NULL` +operators check attribute presence in a document, not value. So if +attribute `postcode`'s value is `NULL`, `NULL` operator will return +false because attribute exists even if has `NULL` value. + +#### Selecting some specific fields only + +It could be done with `project` method: + +```ruby +class User + include Dynamoid::Document + field :name +end + +User.create(name: 'Alex') +user = User.project(:name).first + +user.id # => nil +user.name # => 'Alex' +user.created_at # => nil +``` + +Returned models with have filled specified fields only. + +Several fields could be specified: + +```ruby +user = User.project(:name, :created_at) +``` + +### Consistent Reads + +Querying supports consistent reading. By default, DynamoDB reads are +eventually consistent: if you do a write and then a read immediately +afterwards, the results of the previous write may not be reflected. If +you need to do a consistent read (that is, you need to read the results +of a write immediately) you can do so, but keep in mind that consistent +reads are twice as expensive as regular reads for DynamoDB. + +```ruby +Address.find(address.id, consistent_read: true) # Find an address, ensure the read is consistent. +Address.where(city: 'Chicago').consistent.all # Find all addresses where the city is Chicago, with a consistent read. +``` + +### Range Finding + +If you have a range index, Dynamoid provides a number of additional +other convenience methods to make your life a little easier: + +```ruby +User.where('created_at.gt': DateTime.now - 1.day).all +User.where('created_at.lt': DateTime.now - 1.day).all +``` + +It also supports `gte` and `lte`. Turning those into symbols and +allowing a Rails SQL-style string syntax is in the works. You can only +have one range argument per query, because of DynamoDB inherent +limitations, so use it sensibly! + + +### Updating + +In order to update document you can use high level methods +`#update_attributes`, `#update_attribute` and `.update`. They run +validation and callbacks. + +```ruby +Address.find(id).update_attributes(city: 'Chicago') +Address.find(id).update_attribute(:city, 'Chicago') +Address.update(id, city: 'Chicago') +``` + +There are also some low level methods `#update`, `.update_fields` and +`.upsert`. They don't run validation and callbacks (except `#update` - +it runs `update` callbacks). All of them support conditional updates. +`#upsert` will create new document if document with specified `id` +doesn't exist. 
+ +```ruby +Address.find(id).update do |i| + i.set city: 'Chicago' + i.add latitude: 100 + i.delete set_of_numbers: 10 +end +Address.find(id).update(if: { deliverable: true }) do |i| + i.set city: 'Chicago' +end +Address.update_fields(id, city: 'Chicago') +Address.update_fields(id, { city: 'Chicago' }, if: { deliverable: true }) +Address.upsert(id, city: 'Chicago') +Address.upsert(id, { city: 'Chicago' }, if: { deliverable: true }) +``` + +By default, `#upsert` will update all attributes of the document if it already exists. +To idempotently create-but-not-update a record, apply the `unless_exists` condition +to its keys when you upsert. + +```ruby +Address.upsert(id, { city: 'Chicago' }, { unless_exists: [:id] }) +``` + +### Deleting + +In order to delete some items `delete_all` method should be used. Any +callback won't be called. Items delete in efficient way in batch. + +```ruby +Address.where(city: 'London').delete_all +``` + +### Global Secondary Indexes + +You can define index with `global_secondary_index`: + +```ruby +class User + include Dynamoid::Document + + field :name + field :age, :number + + global_secondary_index hash_key: :age # Must come after field definitions. +end +``` + +There are the following options: +* `hash_key` - is used as hash key of an index, +* `range_key` - is used as range key of an index, +* `projected_attributes` - list of fields to store in an index or has a + predefined value `:keys_only`, `:all`; `:keys_only` is a default, +* `name` - an index will be created with this name when a table is + created; by default name is generated and contains table name and keys + names, +* `read_capacity` - is used when table created and used as an index + capacity; by default equals `Dynamoid::Config.read_capacity`, +* `write_capacity` - is used when table created and used as an index + capacity; by default equals `Dynamoid::Config.write_capacity` + +The only mandatory option is `name`. + +**WARNING:** In order to use global secondary index in `Document.where` +implicitly you need to have all the attributes of the original table in +the index and declare it with option `projected_attributes: :all`: + +```ruby +class User + # ... + + global_secondary_index hash_key: :age, projected_attributes: :all +end +``` + +There is only one implicit way to query Global and Local Secondary +Indexes (GSI/LSI). + +#### Implicit + +The second way implicitly uses your GSI through the `where` clauses and +deduces the index based on the query fields provided. Another added +benefit is that it is built into query chaining so you can use all the +methods used in normal querying. The explicit way from above would be +rewritten as follows: + +```ruby +where(dynamo_primary_key_column_name => dynamo_primary_key_value, + "#{range_column}.#{range_modifier}" => range_value) + .scan_index_forward(false) +``` + +The only caveat with this method is that because it is also used for +general querying, it WILL NOT use a GSI unless it explicitly has defined +`projected_attributes: :all` on the GSI in your model. This is because +GSIs that do not have all attributes projected will only contain the +index keys and therefore will not return objects with fully resolved +field values. It currently opts to provide the complete results rather +than partial results unless you've explicitly looked up the data. 
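+
+For example, with the `User` model above (whose index is declared with
+`hash_key: :age` and `projected_attributes: :all`), a query on the index
+hash key is routed to the GSI implicitly. This is only a sketch; the
+condition values are illustrative:
+
+```ruby
+# Deduced from the query fields: served by the GSI on :age,
+# because the index projects all attributes
+User.where(age: 30).all
+
+# A condition on a non-key, non-indexed field cannot use the index
+# and is answered with a (slower) Scan instead
+User.where(name: 'Alex').all
+```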
+ +*Future TODO could involve implementing `select` in chaining as well as +resolving the fields with a second query against the table since a query +against GSI then a query on base table is still likely faster than scan +on the base table* + +### Transaction Writes + +Multiple write actions can be grouped together and submitted as an all-or-nothing operation. +See the [transation documentation](README_transact.md). + +### PartiQL + +To run PartiQL statements `Dynamoid.adapter.execute` method should be +used: + +```ruby +Dynamoid.adapter.execute("UPDATE users SET name = 'Mike' WHERE id = '1'") +``` + +Parameters are also supported: + +```ruby +Dynamoid.adapter.execute('SELECT * FROM users WHERE id = ?', ['1']) +``` + +## Configuration + +Listed below are all configuration options. + +* `adapter` - useful only for the gem developers to switch to a new + adapter. Default and the only available value is `aws_sdk_v3` +* `namespace` - prefix for table names, default is + `dynamoid_#{application_name}_#{environment}` for Rails application + and `dynamoid` otherwise +* `logger` - by default it's a `Rails.logger` in Rails application and + `stdout` otherwise. You can disable logging by setting `nil` or + `false` values. Set `true` value to use defaults +* `access_key` - DynamoDb custom access key for AWS credentials, override global + AWS credentials if they're present +* `secret_key` - DynamoDb custom secret key for AWS credentials, override global + AWS credentials if they're present +* `credentials` - DynamoDb custom pre-configured credentials, override global + AWS credentials if they're present +* `region` - DynamoDb custom credentials for AWS, override global AWS + credentials if they're present +* `batch_size` - when you try to load multiple items at once with +* `batch_get_item` call Dynamoid loads them not with one api call but + piece by piece. Default is 100 items +* `capacity_mode` - used at a table creation and means whether a table + read/write capacity mode will be on-demand or provisioned. Allowed + values are `:on_demand` and `:provisioned`. Default value is `nil` which + means provisioned mode will be used. +* `read_capacity` - is used at table or indices creation. Default is 100 + (units) +* `write_capacity` - is used at table or indices creation. Default is 20 + (units) +* `warn_on_scan` - log warnings when scan table. Default is `true` +* `endpoint` - if provided, it communicates with the DynamoDB listening + at the endpoint. This is useful for testing with + [DynamoDB Local](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Tools.DynamoDBLocal.html) +* `identity_map` - ensures that each object gets loaded only once by + keeping every loaded object in a map. Looks up objects using the map + when referring to them. Isn't thread safe. Default is `false`. + `Use Dynamoid::Middleware::IdentityMap` to clear identity map for each HTTP request +* `timestamps` - by default Dynamoid sets `created_at` and `updated_at` + fields at model creation and updating. You can disable this + behavior by setting `false` value +* `sync_retry_max_times` - when Dynamoid creates or deletes table + synchronously it checks for completion specified times. Default is 60 + (times). It's a bit over 2 minutes by default +* `sync_retry_wait_seconds` - time to wait between retries. Default is 2 + (seconds) +* `convert_big_decimal` - if `true` then Dynamoid converts numbers + stored in `Hash` in `raw` field to float. 
Default is `false` +* `store_attribute_with_nil_value` - if `true` Dynamoid keeps attribute + with `nil` value in a document. Otherwise Dynamoid removes it while + saving a document. Default is `nil` which equals behaviour with `false` + value. +* `models_dir` - `dynamoid:create_tables` rake task loads DynamoDb + models from this directory. Default is `./app/models`. +* `application_timezone` - Dynamoid converts all `datetime` fields to + specified time zone when loads data from the storage. + Acceptable values - `:utc`, `:local` (to use system time zone) and + time zone name e.g. `Eastern Time (US & Canada)`. Default is `utc` +* `dynamodb_timezone` - When a datetime field is stored in string format + Dynamoid converts it to specified time zone when saves a value to the + storage. Acceptable values - `:utc`, `:local` (to use system time + zone) and time zone name e.g. `Eastern Time (US & Canada)`. Default is + `utc` +* `store_datetime_as_string` - if `true` then Dynamoid stores :datetime + fields in ISO 8601 string format. Default is `false` +* `store_date_as_string` - if `true` then Dynamoid stores :date fields + in ISO 8601 string format. Default is `false` +* `store_boolean_as_native` - if `true` Dynamoid stores boolean fields + as native DynamoDB boolean values. Otherwise boolean fields are stored + as string values `'t'` and `'f'`. Default is `true` +* `backoff` - is a hash: key is a backoff strategy (symbol), value is + parameters for the strategy. Is used in batch operations. Default id + `nil` +* `backoff_strategies`: is a hash and contains all available strategies. + Default is `{ constant: ..., exponential: ...}` +* `log_formatter`: overrides default AWS SDK formatter. There are + several canned formatters: `Aws::Log::Formatter.default`, + `Aws::Log::Formatter.colored` and `Aws::Log::Formatter.short`. Please + look into `Aws::Log::Formatter` AWS SDK documentation in order to + provide own formatter. +* `http_continue_timeout`: The number of seconds to wait for a + 100-continue HTTP response before sending the request body. Default + option value is `nil`. If not specified effected value is `1` +* `http_idle_timeout`: The number of seconds an HTTP connection is + allowed to sit idle before it is considered stale. Default option + value is `nil`. If not specified effected value is `5` +* `http_open_timeout`: The number of seconds to wait when opening a HTTP + session. Default option value is `nil`. If not specified effected + value is `15` +* `http_read_timeout`:The number of seconds to wait for HTTP response + data. Default option value is `nil`. If not specified effected value + is `60` +* `create_table_on_save`: if `true` then Dynamoid creates a + corresponding table in DynamoDB at model persisting if the table + doesn't exist yet. Default is `true` + + +## Concurrency + +Dynamoid supports basic, ActiveRecord-like optimistic locking on save +operations. Simply add a `lock_version` column to your table like so: + +```ruby +class MyTable + # ... + + field :lock_version, :integer + + # ... +end +``` + +In this example, all saves to `MyTable` will raise an +`Dynamoid::Errors::StaleObjectError` if a concurrent process loaded, +edited, and saved the same row. Your code should trap this exception, +reload the row (so that it will pick up the newest values), and try the +save again. + +Calls to `update` and `update!` also increment the `lock_version`, +however, they do not check the existing value. 
This guarantees that a +update operation will raise an exception in a concurrent save operation, +however a save operation will never cause an update to fail. Thus, +`update` is useful & safe only for doing atomic operations (e.g. +increment a value, add/remove from a set, etc), but should not be used +in a read-modify-write pattern. + + +### Backoff strategies + + +You can use several methods that run efficiently in batch mode like +`.find_all` and `.import`. It affects `Query` and `Scan` operations as +well. + +The backoff strategy will be used when, for any reason, some items could +not be processed as part of a batch mode command. Operations will be +re-run to process these items. + +Exponential backoff is the recommended way to handle throughput limits +exceeding and throttling on the table. + +There are two built-in strategies - constant delay and truncated binary +exponential backoff. By default no backoff is used but you can specify +one of the built-in ones: + +```ruby +Dynamoid.configure do |config| + config.backoff = { constant: 2.second } +end + +Dynamoid.configure do |config| + config.backoff = { exponential: { base_backoff: 0.2.seconds, ceiling: 10 } } +end + +``` + +You can just specify strategy without any arguments to use default +presets: + +```ruby +Dynamoid.configure do |config| + config.backoff = :constant +end +``` + +You can use your own strategy in the following way: + +```ruby +Dynamoid.configure do |config| + config.backoff_strategies[:custom] = lambda do |n| + -> { sleep rand(n) } + end + + config.backoff = { custom: 10 } +end +``` + + +## Rake Tasks + +There are a few Rake tasks available out of the box: + +* `rake dynamoid:create_tables` +* `rake dynamoid:ping` + +In order to use them in non-Rails application they should be required +explicitly: + +```ruby +# Rakefile + +Rake::Task.define_task(:environment) +require 'dynamoid/tasks' +``` + +The Rake tasks depend on `:environment` task so it should be declared as +well. + +## Test Environment + +In test environment you will most likely want to clean the database +between test runs to keep tests completely isolated. This can be +achieved like so + +```ruby +module DynamoidReset + def self.all + Dynamoid.adapter.list_tables.each do |table| + # Only delete tables in our namespace + if table =~ /^#{Dynamoid::Config.namespace}/ + Dynamoid.adapter.delete_table(table) + end + end + Dynamoid.adapter.tables.clear + # Recreate all tables to avoid unexpected errors + Dynamoid.included_models.each { |m| m.create_table(sync: true) } + end +end + +# Reduce noise in test output +Dynamoid.logger.level = Logger::FATAL +``` + +If you're using RSpec you can invoke the above like so: + +```ruby +RSpec.configure do |config| + config.before(:each) do + DynamoidReset.all + end +end +``` + +In addition, the first test for each model may fail if the relevant models are not included in `included_models`. This can be fixed by adding this line before the `DynamoidReset` module: +```ruby +Dir[File.join(Dynamoid::Config.models_dir, '**/*.rb')].sort.each { |file| require file } +``` +Note that this will require _all_ models in your models folder - you can also explicitly require only certain models if you would prefer to. 
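+
+If you prefer to require only certain models, a sketch along these lines
+could be used instead (the file names are hypothetical):
+
+```ruby
+# Load only the models these specs actually rely on
+%w[user.rb address.rb].each do |file|
+  require File.join(Dynamoid::Config.models_dir, file)
+end
+```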
+ +In Rails, you may also want to ensure you do not delete non-test data +accidentally by adding the following to your test environment setup: + +```ruby +raise "Tests should be run in 'test' environment only" if Rails.env != 'test' + +Dynamoid.configure do |config| + config.namespace = "#{Rails.application.railtie_name}_#{Rails.env}" +end +``` + +## Logging + +There is a config option `logger`. Dynamoid writes requests and +responses to DynamoDB using this logger on the `debug` level. So in +order to troubleshoot and debug issues just set it: + +```ruby +class User + include Dynamoid::Document + field name +end + +Dynamoid.config.logger.level = :debug +Dynamoid.config.endpoint = 'http://localhost:8000' + +User.create(name: 'Alex') + +# => D, [2019-05-12T20:01:07.840051 #75059] DEBUG -- : put_item | Request "{\"TableName\":\"dynamoid_users\",\"Item\":{\"created_at\":{\"N\":\"1557680467.608749\"},\"updated_at\":{\"N\":\"1557680467.608809\"},\"id\":{\"S\":\"1227eea7-2c96-4b8a-90d9-77b38eb85cd0\"}},\"Expected\":{\"id\":{\"Exists\":false}}}" | Response "{}" + +# => D, [2019-05-12T20:01:07.842397 #75059] DEBUG -- : (231.28 ms) PUT ITEM - ["dynamoid_users", {:created_at=>0.1557680467608749e10, :updated_at=>0.1557680467608809e10, :id=>"1227eea7-2c96-4b8a-90d9-77b38eb85cd0", :User=>nil}, {}] +``` + +The first line is a body of HTTP request and response. The second line - +Dynamoid internal logging of API call (`PUT ITEM` in our case) with +timing (231.28 ms). + +## Credits + +Dynamoid borrows code, structure, and even its name very liberally from +the truly amazing [Mongoid](https://github.com/mongoid/mongoid). Without +Mongoid to crib from none of this would have been possible, and I hope +they don't mind me reusing their very awesome ideas to make DynamoDB +just as accessible to the Ruby world as MongoDB. + +Also, without contributors the project wouldn't be nearly as awesome. So +many thanks to: + +* [Chris Hobbs](https://github.com/ckhsponge) +* [Logan Bowers](https://github.com/loganb) +* [Lane LaRue](https://github.com/luxx) +* [Craig Heneveld](https://github.com/cheneveld) +* [Anantha Kumaran](https://github.com/ananthakumaran) +* [Jason Dew](https://github.com/jasondew) +* [Luis Arias](https://github.com/luisantonioa) +* [Stefan Neculai](https://github.com/stefanneculai) +* [Philip White](https://github.com/philipmw) * +* [Peeyush Kumar](https://github.com/peeyush1234) +* [Sumanth Ravipati](https://github.com/sumocoder) +* [Pascal Corpet](https://github.com/pcorpet) +* [Brian Glusman](https://github.com/bglusman) * +* [Peter Boling](https://github.com/pboling) * +* [Andrew Konchin](https://github.com/andrykonchin) * + +\* Current Maintainers + +## Running the tests + +Running the tests is fairly simple. You should have an instance of +DynamoDB running locally. Follow these steps to setup your test +environment. + + * First download and unpack the latest version of DynamoDB. We have a + script that will do this for you if you use bash, and homebrew on a Mac. + + ```shell + bin/setup + ``` + + * Start the local instance of DynamoDB to listen in ***8000*** port + + ```shell + bin/start_dynamodblocal + ``` + + * and lastly, use `rake` to run the tests. + + ```shell + rake + ``` + + * When you are done, remember to stop the local test instance of + dynamodb + + ```shell + bin/stop_dynamodblocal + ``` + +If you run into issues, please try these steps first. 
+NOTE: You can use any version manager: rvm, rbenv, chruby, asdf-ruby +```shell +asdf install ruby 3.1.1 +asdf local ruby 3.1.1 +gem update --system +bundle install +``` + +## Security + +See [SECURITY.md][security]. + +## Related links + +This documentation may be useful for the contributors: +- +- + +## License + +The gem is available as open source under the terms of +the [MIT License][license] [![License: MIT](https://img.shields.io/badge/License-MIT-green.svg)][license-ref]. +See [LICENSE][license] for the official [Copyright Notice][copyright-notice-explainer]. + +[copyright-notice-explainer]: https://opensource.stackexchange.com/questions/5778/why-do-licenses-such-as-the-mit-license-specify-a-single-year + +[license]: https://github.com/Dynamoid/dynamoid/blob/master/LICENSE.txt + +[license-ref]: https://opensource.org/licenses/MIT + +[security]: https://github.com/Dynamoid/dynamoid/blob/master/SECURITY.md + +[â›ŗī¸gem]: https://rubygems.org/gems/dynamoid +[â›ŗī¸version-img]: http://img.shields.io/gem/v/dynamoid.svg +[â›ŗcclim-maint]: https://codeclimate.com/github/Dynamoid/dynamoid/maintainability +[â›ŗcclim-maint-imgâ™ģī¸]: https://api.codeclimate.com/v1/badges/27fd8b6b7ff338fa4914/maintainability +[🏘coveralls]: https://coveralls.io/github/Dynamoid/dynamoid?branch=master +[🏘coveralls-img]: https://coveralls.io/repos/github/Dynamoid/dynamoid/badge.svg?branch=master +[🖇codecov]: https://codecov.io/gh/Dynamoid/dynamoid +[🖇codecov-imgâ™ģī¸]: https://codecov.io/gh/Dynamoid/dynamoid/branch/master/graph/badge.svg?token=84WeeoxaN9 +[🖇src-license]: https://github.com/Dynamoid/dynamoid/blob/master/LICENSE.txt +[🖇src-license-img]: https://img.shields.io/badge/License-MIT-green.svg +[🖐gitmoji]: https://gitmoji.dev +[🖐gitmoji-img]: https://img.shields.io/badge/gitmoji-3.9.0-FFDD67.svg?style=flat +[🚎yard]: https://www.rubydoc.info/gems/dynamoid +[🚎yard-img]: https://img.shields.io/badge/yard-docs-blue.svg?style=flat +[🧮semver]: http://semver.org/ +[🧮semver-img]: https://img.shields.io/badge/semver-2.0.0-FFDD67.svg?style=flat +[🖐contributors]: https://github.com/Dynamoid/dynamoid/graphs/contributors +[🖐contributors-img]: https://img.shields.io/github/contributors-anon/Dynamoid/dynamoid +[📗keep-changelog]: https://keepachangelog.com/en/1.0.0/ +[📗keep-changelog-img]: https://img.shields.io/badge/keep--a--changelog-1.0.0-FFDD67.svg?style=flat +[🖇sponsor-img]: https://img.shields.io/opencollective/all/dynamoid +[🖇sponsor]: https://opencollective.com/dynamoid +[🖇triage-help]: https://www.codetriage.com/dynamoid/dynamoid +[🖇triage-help-img]: https://www.codetriage.com/dynamoid/dynamoid/badges/users.svg +[🏘sup-wf]: https://github.com/Dynamoid/dynamoid/actions/workflows/ci.yml?query=branch%3Amaster +[🏘sup-wf-img]: https://github.com/Dynamoid/dynamoid/actions/workflows/ci.yml/badge.svg?branch=master diff --git a/dynamoid/README_transact.md b/dynamoid/README_transact.md new file mode 100644 index 000000000..dc85049d8 --- /dev/null +++ b/dynamoid/README_transact.md @@ -0,0 +1,144 @@ +# Transactions in Dynamoid + +Synchronous write operations are supported in Dynamoid using transactions. +If any action in the transaction fails they all fail. 
+
+The following actions are supported:
+
+* Create - add a new item if it does not already exist
+* Upsert - add a new item or update an existing item; no callbacks are run
+* Update - modify one or more attributes of an existing item
+* Delete - remove an item without callbacks, validations, or an existence check
+* Destroy - remove an item; fails if the item does not exist
+
+## Examples
+
+
+
+### Save models
+Models can be saved in a transaction.
+New records are created; existing records are updated.
+Save, create, update, validate and destroy callbacks are called around the transaction as appropriate.
+Validation failures raise Dynamoid::Errors::DocumentNotValid.
+
+```ruby
+user = User.find(1)
+article = Article.new(body: 'New article text', user_id: user.id)
+Dynamoid::TransactionWrite.execute do |txn|
+  txn.save!(article)
+  user.last_article_id = article.id
+  txn.save!(user)
+end
+```
+
+### Create items
+Items can be created inside a transaction.
+The hash key and range key, if applicable, are used to determine uniqueness.
+Creating will fail with Aws::DynamoDB::Errors::TransactionCanceledException if an item already exists, unless `skip_existence_check` is true.
+This example creates a user with a unique id and unique email address by creating 2 items.
+An additional item is upserted in the same transaction.
+Upserts will update `updated_at` but will not set `created_at`.
+
+```ruby
+user_id = SecureRandom.uuid
+email = 'bob@bob.bob'
+Dynamoid::TransactionWrite.execute do |txn|
+  txn.create!(User, id: user_id)
+  txn.create!(UserEmail, id: "UserEmail##{email}", user_id: user_id)
+  txn.create!(Address, { id: 'A#2', street: '456' }, { skip_existence_check: true })
+  txn.upsert!(Address, id: 'A#1', street: '123')
+end
+```
+
+### Update items
+An item can be updated by providing the hash key, the range key if applicable, and the fields to update.
+Updating fields can also be done within a block using the `set()` method.
+To increment a numeric value or to add values to a set, use `add()` within the block.
+Similarly, a field can be removed, or values can be removed from a set, by using `delete()` in the block.
+
+```ruby
+Dynamoid::TransactionWrite.execute do |txn|
+  # sets the name and title for user 1
+  # The user is found by id
+  txn.update!(User, id: 1, name: 'bob', title: 'mister')
+
+  # sets the name, increments a count and deletes a field
+  txn.update!(user) do |u| # a User instance is provided
+    u.set(name: 'bob')
+    u.add(article_count: 1)
+    u.delete(:title)
+  end
+
+  # adds to a set of integers and deletes from a set of strings
+  txn.update!(User, id: 3) do |u|
+    u.add(friend_ids: [1, 2])
+    u.delete(child_names: ['bebe'])
+  end
+end
+```
+
+### Destroy or delete items
+Model instances can be used, or the model class and key can be specified.
+When the key is a single column it is specified as a single value or as a hash
+keyed by the name of the hash key.
+When using a composite key, the key must be a hash containing both the hash key and the range key.
+`destroy()` runs callbacks and validations and fails if the item does not exist.
+Use `delete()` to skip callbacks, validations, and the existence check.
+
+```ruby
+article = Article.find(1)
+tag = article.tag
+Dynamoid::TransactionWrite.execute do |txn|
+  txn.destroy!(article)
+  txn.destroy!(Article, 2) # performs find() automatically and then runs destroy callbacks
+  txn.destroy!(tag)
+  txn.delete(Tag, 2) # delete record with hash key '2' if it exists
+  txn.delete(Tag, id: 2) # equivalent of the above if the hash key column is 'id'
+  txn.delete(Tag, id: 'key#abcd', my_sort_key: 'range#1') # when range key is required
+end
+```
+
+### Skipping callbacks and validations
+Validations and callbacks can be skipped per action.
+Validation failures raise Dynamoid::Errors::DocumentNotValid when using the bang methods.
+Note that validation callbacks still run whenever validation happens, even when other callbacks are skipped.
+Skipping both callbacks and validation guarantees that no callbacks run.
+
+```ruby
+user = User.find(1)
+user.red = true
+Dynamoid::TransactionWrite.execute do |txn|
+  txn.save!(user, skip_callbacks: true)
+  txn.create!(User, { name: 'bob' }, { skip_callbacks: true })
+end
+Dynamoid::TransactionWrite.execute do |txn|
+  txn.save!(user, skip_validation: true)
+  txn.create!(User, { name: 'bob' }, { skip_validation: true })
+end
+```
+
+### Validation failures that don't raise
+All of the transaction methods can be called without the bang; they then return
+false instead of raising an exception when validation fails.
+Ignoring validation failures can lead to confusion or bugs, so always check the return value when not using a bang method.
+
+```ruby
+user = User.find(1)
+user.red = true
+Dynamoid::TransactionWrite.execute do |txn|
+  if txn.save(user) # won't raise validation exception
+    txn.update(UserCount, id: 'UserCount#Red', count: 5)
+  else
+    puts 'ALERT: user not valid, skipping'
+  end
+end
+```
+
+### Incrementally building a transaction
+Transactions can also be built without a block.
+
+```ruby
+transaction = Dynamoid::TransactionWrite.new
+transaction.create!(User, id: user_id)
+transaction.create!(UserEmail, id: "UserEmail##{email}", user_id: user_id)
+transaction.upsert!(Address, id: 'A#1', street: '123')
+transaction.commit
+``` diff --git a/dynamoid/Rakefile b/dynamoid/Rakefile new file mode 100644 index 000000000..638792c85 --- /dev/null +++ b/dynamoid/Rakefile @@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'bundler/gem_tasks'
+
+require 'bundler/setup'
+begin
+  Bundler.setup(:default, :development)
+rescue Bundler::BundlerError => e
+  warn e.message
+  warn 'Run `bundle install` to install missing gems'
+  exit e.status_code
+end
+load './lib/dynamoid/tasks/database.rake' if defined?(Rails)
+
+require 'rspec/core/rake_task'
+RSpec::Core::RakeTask.new(:spec) do |spec|
+  spec.pattern = FileList['spec/**/*_spec.rb']
+end
+desc 'alias test task to spec'
+task test: :spec
+
+ruby_version = Gem::Version.new(RUBY_VERSION)
+minimum_version = ->(version, engine = 'ruby') { ruby_version >= Gem::Version.new(version) && engine == RUBY_ENGINE }
+linting = minimum_version.call('2.7')
+def rubocop_task(warning)
+  desc 'rubocop task stub'
+  task :rubocop do
+    warn warning
+  end
+end
+
+if linting
+  begin
+    require 'rubocop/rake_task'
+    RuboCop::RakeTask.new do |task|
+      task.options = ['-DESP'] # Display the name of the failing cops
+    end
+  rescue LoadError
+    rubocop_task("RuboCop is unexpectedly disabled locally for #{RUBY_ENGINE}-#{RUBY_VERSION}.
Have you run bundle install?") + end +else + rubocop_task("RuboCop is disabled locally for #{RUBY_ENGINE}-#{RUBY_VERSION}.\nIf you need it locally on #{RUBY_ENGINE}-#{RUBY_VERSION}, run BUNDLE_GEMFILE=gemfiles/style.gemfile bundle install && BUNDLE_GEMFILE=gemfiles/style.gemfile bundle exec rubocop") +end + +require 'yard' +YARD::Rake::YardocTask.new do |t| + t.files = ['lib/**/*.rb', 'README.md', 'LICENSE.txt'] # optional + t.options = ['-m', 'markdown'] # optional +end + +desc 'Publish documentation to gh-pages' +task :publish do + Rake::Task['yard'].invoke + `git add .` + `git commit -m 'Regenerated documentation'` + `git checkout gh-pages` + `git clean -fdx` + `git checkout master -- doc` + `cp -R doc/* .` + `git rm -rf doc/` + `git add .` + `git commit -m 'Regenerated documentation'` + `git pull` + `git push` + `git checkout master` +end + +task default: %i[test rubocop] diff --git a/dynamoid/SECURITY.md b/dynamoid/SECURITY.md new file mode 100644 index 000000000..d8ba4b904 --- /dev/null +++ b/dynamoid/SECURITY.md @@ -0,0 +1,17 @@ +# Security Policy + +## Supported Versions + +| Version | Supported | +|---------|-----------| +| 3.7.x | ✅ | +| <= 3.6 | ❌ | +| 2.x | ❌ | +| 1.x | ❌ | +| 0.x | ❌ | + +## Reporting a Vulnerability + +Peter Boling is responsible for the security maintenance of this gem. Please find a way +to [contact him directly](https://railsbling.com/contact) to report the issue. Include as much relevant information as +possible. diff --git a/dynamoid/Vagrantfile b/dynamoid/Vagrantfile new file mode 100644 index 000000000..ffaf6c8e8 --- /dev/null +++ b/dynamoid/Vagrantfile @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +Vagrant.configure('2') do |config| + # Choose base box + config.vm.box = 'bento/ubuntu-18.04' + + config.vm.provider 'virtualbox' do |vb| + # Prevent clock skew when host goes to sleep while VM is running + vb.customize ['guestproperty', 'set', :id, '/VirtualBox/GuestAdd/VBoxService/--timesync-set-threshold', 10_000] + + vb.cpus = 2 + vb.memory = 2048 + end + + # Defaults + config.vm.provision :salt do |salt| + salt.masterless = true + salt.minion_config = '.dev/vagrant/minion' + + # Pillars + salt.pillar( + 'ruby' => { + 'version' => '2.6.2' + } + ) + + salt.run_highstate = true + end +end diff --git a/dynamoid/bin/_dynamodblocal b/dynamoid/bin/_dynamodblocal new file mode 100644 index 000000000..2ec58dd5f --- /dev/null +++ b/dynamoid/bin/_dynamodblocal @@ -0,0 +1,4 @@ +DIST_DIR=spec/DynamoDBLocal-latest +PIDFILE=dynamodb.pid +LISTEN_PORT=8000 +LOG_DIR="logs" diff --git a/dynamoid/bin/console b/dynamoid/bin/console new file mode 100644 index 000000000..01c204e7f --- /dev/null +++ b/dynamoid/bin/console @@ -0,0 +1,28 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require 'bundler/setup' +require 'dynamoid' +require 'dynamoid/log/formatter' + +# You can add fixtures and/or initialization code here to make experimenting +# with your gem easier. You can also use a different console, if you like. + +# (If you use this, don't forget to add pry to your Gemfile!) +# require "pry" +# Pry.start + +Dynamoid.configure do |config| + # DynamoDB local version 2.0.0 and greater AWS_ACCESS_KEY_ID can contain + # the only letters (A–Z, a–z) and numbers (0–9). 
+ # See https://hub.docker.com/r/amazon/dynamodb-local + config.access_key = 'accesskey' + config.secret_key = 'secretkey' + + config.region = 'us-west-2' + config.endpoint = 'http://localhost:8000' + config.log_formatter = Dynamoid::Log::Formatter::Compact.new +end + +require 'irb' +IRB.start diff --git a/dynamoid/bin/setup b/dynamoid/bin/setup new file mode 100644 index 000000000..06e7b4780 --- /dev/null +++ b/dynamoid/bin/setup @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -euo pipefail +IFS=$'\n\t' +set -vx + +bundle install + +# Do any other automated setup that you need to do here +if ! brew info wget &>/dev/null; then + brew install wget +else + echo wget is already installed +fi +wget http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.zip --quiet -O spec/dynamodb_temp.zip +unzip -qq spec/dynamodb_temp.zip -d spec/DynamoDBLocal-latest +rm spec/dynamodb_temp.zip diff --git a/dynamoid/bin/start_dynamodblocal b/dynamoid/bin/start_dynamodblocal new file mode 100644 index 000000000..b9f65a1cf --- /dev/null +++ b/dynamoid/bin/start_dynamodblocal @@ -0,0 +1,47 @@ +#!/bin/sh + +# Source variables +. $(dirname $0)/_dynamodblocal + +if [ -z $JAVA_HOME ]; then + echo >&2 'ERROR: DynamoDBLocal requires JAVA_HOME to be set.' + exit 1 +fi + +if [ ! -x $JAVA_HOME/bin/java ]; then + echo >&2 'ERROR: JAVA_HOME is set, but I do not see the java executable there.' + exit 1 +fi + +cd $DIST_DIR + +if [ ! -f DynamoDBLocal.jar ] || [ ! -d DynamoDBLocal_lib ]; then + echo >&2 "ERROR: Could not find DynamoDBLocal files in $DIST_DIR." + exit 1 +fi + +mkdir -p $LOG_DIR +echo "DynamoDB Local output will save to ${DIST_DIR}/${LOG_DIR}/" +hash lsof 2>/dev/null && lsof -i :$LISTEN_PORT && { echo >&2 "Something is already listening on port $LISTEN_PORT; I will not attempt to start DynamoDBLocal."; exit 1; } + +NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ") +nohup $JAVA_HOME/bin/java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -delayTransientStatuses -port $LISTEN_PORT -inMemory 1>"${LOG_DIR}/${NOW}.out.log" 2>"${LOG_DIR}/${NOW}.err.log" & +PID=$! + +echo 'Verifying that DynamoDBLocal actually started...' + +# Allow some seconds for the JDK to start and die. +counter=0 +while [ $counter -le 5 ]; do + kill -0 $PID + if [ $? -ne 0 ]; then + echo >&2 'ERROR: DynamoDBLocal died after we tried to start it!' + exit 1 + else + counter=$(($counter + 1)) + sleep 1 + fi +done + +echo "DynamoDB Local started with pid $PID listening on port $LISTEN_PORT." +echo $PID > $PIDFILE diff --git a/dynamoid/bin/stop_dynamodblocal b/dynamoid/bin/stop_dynamodblocal new file mode 100644 index 000000000..08b262839 --- /dev/null +++ b/dynamoid/bin/stop_dynamodblocal @@ -0,0 +1,34 @@ +#!/bin/sh + +# Source variables +. $(dirname $0)/_dynamodblocal + +cd $DIST_DIR + +if [ ! -f $PIDFILE ]; then + echo 'ERROR: There is no pidfile, so if DynamoDBLocal is running you will need to kill it yourself.' + exit 1 +fi + +pid=$(<$PIDFILE) + +echo "Killing DynamoDBLocal at pid $pid..." +kill $pid + +counter=0 +while [ $counter -le 5 ]; do + kill -0 $pid 2>/dev/null + if [ $? -ne 0 ]; then + echo 'Successfully shut down DynamoDBLocal.' + rm -f $PIDFILE + exit 0 + else + echo 'Still waiting for DynamoDBLocal to shut down...' + counter=$(($counter + 1)) + sleep 1 + fi +done + +echo 'Unable to shut down DynamoDBLocal; you may need to kill it yourself.' 
+rm -f $PIDFILE +exit 1 diff --git a/dynamoid/docker-compose.yml b/dynamoid/docker-compose.yml new file mode 100644 index 000000000..5d718bcf1 --- /dev/null +++ b/dynamoid/docker-compose.yml @@ -0,0 +1,7 @@ +version: '2' + +services: + dynamodb: + image: amazon/dynamodb-local + ports: + - 8000:8000 diff --git a/dynamoid/dynamoid.gemspec b/dynamoid/dynamoid.gemspec new file mode 100644 index 000000000..198a8dfb4 --- /dev/null +++ b/dynamoid/dynamoid.gemspec @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +lib = File.expand_path('lib', __dir__) +$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) +require 'dynamoid/version' + +Gem::Specification.new do |spec| + spec.name = 'dynamoid' + spec.version = Dynamoid::VERSION + + # Keep in sync with README + spec.authors = [ + 'Josh Symonds', + 'Logan Bowers', + 'Craig Heneveld', + 'Anatha Kumaran', + 'Jason Dew', + 'Luis Arias', + 'Stefan Neculai', + 'Philip White', + 'Peeyush Kumar', + 'Sumanth Ravipati', + 'Pascal Corpet', + 'Brian Glusman', + 'Peter Boling', + 'Andrew Konchin' + ] + spec.email = ['andry.konchin@gmail.com', 'peter.boling@gmail.com', 'brian@stellaservice.com'] + + spec.description = "Dynamoid is an ORM for Amazon's DynamoDB that supports offline development, associations, querying, and everything else you'd expect from an ActiveRecord-style replacement." + spec.summary = "Dynamoid is an ORM for Amazon's DynamoDB" + # Ignore not commited files + spec.files = Dir[ + 'CHANGELOG.md', + 'dynamoid.gemspec', + 'lib/**/*', + 'LICENSE.txt', + 'README.md', + 'SECURITY.md' + ] + spec.homepage = 'http://github.com/Dynamoid/dynamoid' + spec.licenses = ['MIT'] + spec.require_paths = ['lib'] + + spec.metadata['homepage_uri'] = spec.homepage + spec.metadata['source_code_uri'] = "https://github.com/Dynamoid/dynamoid/tree/v#{spec.version}" + spec.metadata['changelog_uri'] = "https://github.com/Dynamoid/dynamoid/blob/v#{spec.version}/CHANGELOG.md" + spec.metadata['bug_tracker_uri'] = 'https://github.com/Dynamoid/dynamoid/issues' + spec.metadata['documentation_uri'] = "https://www.rubydoc.info/gems/dynamoid/#{spec.version}" + spec.metadata['funding_uri'] = 'https://opencollective.com/dynamoid' + spec.metadata['wiki_uri'] = 'https://github.com/Dynamoid/dynamoid/wiki' + spec.metadata['rubygems_mfa_required'] = 'true' + + spec.add_runtime_dependency 'activemodel', '>=4' + spec.add_runtime_dependency 'aws-sdk-dynamodb', '~> 1.0' + spec.add_runtime_dependency 'concurrent-ruby', '>= 1.0' + + spec.add_development_dependency 'appraisal' + spec.add_development_dependency 'bundler' + spec.add_development_dependency 'pry', '~> 0.14' + spec.add_development_dependency 'rake', '~> 13.0' + spec.add_development_dependency 'rexml' + spec.add_development_dependency 'rspec', '~> 3.12' + spec.add_development_dependency 'yard' +end diff --git a/dynamoid/gemfiles/coverage.gemfile b/dynamoid/gemfiles/coverage.gemfile new file mode 100644 index 000000000..686559a78 --- /dev/null +++ b/dynamoid/gemfiles/coverage.gemfile @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +source 'https://rubygems.org' + +# Some tests require Rails. 
+gem 'activemodel', '~> 7.0.2' + +gem 'codecov', '~> 0.6', require: false # For CodeCov +gem 'simplecov', '~> 0.21', require: false +gem 'simplecov-cobertura', require: false # XML for Jenkins +gem 'simplecov-json', require: false # For CodeClimate +gem 'simplecov-lcov', '~> 0.8', require: false + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_4_2.gemfile b/dynamoid/gemfiles/rails_4_2.gemfile new file mode 100644 index 000000000..1cc89f4ba --- /dev/null +++ b/dynamoid/gemfiles/rails_4_2.gemfile @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 4.2.0' +gem 'bigdecimal', '~> 1.4.0', platform: :mri +gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_5_0.gemfile b/dynamoid/gemfiles/rails_5_0.gemfile new file mode 100644 index 000000000..01d6960d9 --- /dev/null +++ b/dynamoid/gemfiles/rails_5_0.gemfile @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 5.0.0' +gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_5_1.gemfile b/dynamoid/gemfiles/rails_5_1.gemfile new file mode 100644 index 000000000..d402f15ee --- /dev/null +++ b/dynamoid/gemfiles/rails_5_1.gemfile @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 5.1.0' +gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_5_2.gemfile b/dynamoid/gemfiles/rails_5_2.gemfile new file mode 100644 index 000000000..3ec182ab3 --- /dev/null +++ b/dynamoid/gemfiles/rails_5_2.gemfile @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 5.2.0' +gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_6_0.gemfile b/dynamoid/gemfiles/rails_6_0.gemfile new file mode 100644 index 000000000..7bf77fbe3 --- /dev/null +++ b/dynamoid/gemfiles/rails_6_0.gemfile @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 6.0.0' +gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_6_1.gemfile b/dynamoid/gemfiles/rails_6_1.gemfile new file mode 100644 index 000000000..a12692a37 --- /dev/null +++ b/dynamoid/gemfiles/rails_6_1.gemfile @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 6.1.0' +gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_7_0.gemfile b/dynamoid/gemfiles/rails_7_0.gemfile new file mode 100644 index 000000000..a2f42f6d1 --- /dev/null +++ b/dynamoid/gemfiles/rails_7_0.gemfile @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 7.0.2' +gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/rails_7_1.gemfile b/dynamoid/gemfiles/rails_7_1.gemfile new file mode 100644 index 000000000..a24915c6d --- /dev/null +++ b/dynamoid/gemfiles/rails_7_1.gemfile @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +# This file was generated by Appraisal + +source 'https://rubygems.org' + +gem 'activemodel', '~> 7.1.0' 
+gem 'pry-byebug', platforms: :ruby + +gemspec path: '../' diff --git a/dynamoid/gemfiles/style.gemfile b/dynamoid/gemfiles/style.gemfile new file mode 100644 index 000000000..9108f4605 --- /dev/null +++ b/dynamoid/gemfiles/style.gemfile @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +source 'https://rubygems.org' + +gem 'rubocop-md', require: false +gem 'rubocop-packaging', require: false +gem 'rubocop-performance', require: false +gem 'rubocop-rake', require: false +gem 'rubocop-rspec', require: false +gem 'rubocop-thread_safety', require: false + +gemspec path: '../' diff --git a/dynamoid/lib/dynamoid.rb b/dynamoid/lib/dynamoid.rb new file mode 100644 index 000000000..743fd2bfd --- /dev/null +++ b/dynamoid/lib/dynamoid.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'aws-sdk-dynamodb' +require 'delegate' +require 'time' +require 'securerandom' +require 'set' +require 'active_support' +require 'active_support/core_ext' +require 'active_support/json' +require 'active_support/inflector' +require 'active_support/lazy_load_hooks' +require 'active_support/time_with_zone' +require 'active_model' + +require 'dynamoid/version' +require 'dynamoid/errors' +require 'dynamoid/application_time_zone' +require 'dynamoid/dynamodb_time_zone' +require 'dynamoid/fields' +require 'dynamoid/indexes' +require 'dynamoid/associations' +require 'dynamoid/persistence' +require 'dynamoid/dumping' +require 'dynamoid/undumping' +require 'dynamoid/type_casting' +require 'dynamoid/primary_key_type_mapping' +require 'dynamoid/dirty' +require 'dynamoid/validations' +require 'dynamoid/criteria' +require 'dynamoid/finders' +require 'dynamoid/identity_map' +require 'dynamoid/config' +require 'dynamoid/loadable' +require 'dynamoid/components' +require 'dynamoid/document' +require 'dynamoid/adapter' +require 'dynamoid/transaction_write' + +require 'dynamoid/tasks/database' + +require 'dynamoid/middleware/identity_map' + +require 'dynamoid/railtie' if defined?(Rails) + +module Dynamoid + extend self + + def configure + block_given? ? yield(Dynamoid::Config) : Dynamoid::Config + end + alias config configure + + def logger + Dynamoid::Config.logger + end + + def included_models + @included_models ||= [] + end + + # @private + def adapter + @adapter ||= Adapter.new + end +end diff --git a/dynamoid/lib/dynamoid/adapter.rb b/dynamoid/lib/dynamoid/adapter.rb new file mode 100644 index 000000000..664a67c29 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter.rb @@ -0,0 +1,199 @@ +# frozen_string_literal: true + +require 'concurrent/atom' +require 'dynamoid/adapter_plugin/aws_sdk_v3' + +module Dynamoid + # Adapter's value-add: + # 1) For the rest of Dynamoid, the gateway to DynamoDB. + # 2) Allows switching `config.adapter` to ease development of a new adapter. + # 3) Caches the list of tables Dynamoid knows about. + # @private + class Adapter + def initialize + @adapter_ = Concurrent::Atom.new(nil) + @tables_ = Concurrent::Atom.new(nil) + end + + def tables + unless @tables_.value + @tables_.swap { |_value, _args| benchmark('Cache Tables') { list_tables || [] } } + end + @tables_.value + end + + # The actual adapter currently in use. + # + # @since 0.2.0 + def adapter + unless @adapter_.value + adapter = self.class.adapter_plugin_class.new + adapter.connect! + @adapter_.compare_and_set(nil, adapter) + clear_cache! + end + @adapter_.value + end + + def clear_cache! + @tables_.swap { |_value, _args| nil } + end + + # Shows how long it takes a method to run on the adapter. Useful for generating logged output. 
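+    #
+    # @example Time an arbitrary block (the label and argument are illustrative)
+    #   Dynamoid.adapter.benchmark('custom_work', 'users') { sleep(0.1) }
+    #   # logs something like: (100.35 ms) CUSTOM WORK - ["users"]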
+ # + # @param [Symbol|String] method the name of the method to appear in the log + # @param [Array] args the arguments to the method to appear in the log + # @yield the actual code to benchmark + # + # @return the result of the yield + # + # @since 0.2.0 + def benchmark(method, *args) + start = Time.now + result = yield + Dynamoid.logger.debug "(#{((Time.now - start) * 1000.0).round(2)} ms) #{method.to_s.split('_').collect(&:upcase).join(' ')}#{" - #{args.inspect}" unless args.nil? || args.empty?}" + result + end + + # Write an object to the adapter. + # + # @param [String] table the name of the table to write the object to + # @param [Object] object the object itself + # @param [Hash] options Options that are passed to the put_item call + # + # @return [Object] the persisted object + # + # @since 0.2.0 + def write(table, object, options = nil) + put_item(table, object, options) + end + + # Read one or many keys from the selected table. + # This method intelligently calls batch_get or get on the underlying adapter + # depending on whether ids is a range or a single key. + # If a range key is present, it will also interpolate that into the ids so + # that the batch get will acquire the correct record. + # + # @param [String] table the name of the table to write the object to + # @param [String, Array] ids to fetch; can also be a string of just one id + # @param [Hash] options Passed to the underlying query. The :range_key option is required whenever the table has a range key, + # unless multiple ids are passed in. + # + # @since 0.2.0 + def read(table, ids, options = {}, &blk) + if ids.respond_to?(:each) + batch_get_item({ table => ids }, options, &blk) + else + get_item(table, ids, options) + end + end + + # Delete an item from a table. + # + # @param [String] table the name of the table to write the object to + # @param [String, Array] ids to delete; can also be a string of just one id + # @param [Hash] options allowed only +range_key+ - range key or array of + # range keys of the record to delete, can also be + # a string of just one range_key, and +conditions+ + # + def delete(table, ids, options = {}) + range_key = options[:range_key] # array of range keys that matches the ids passed in + if ids.respond_to?(:each) + ids = if range_key.respond_to?(:each) + # turn ids into array of arrays each element being hash_key, range_key + ids.each_with_index.map { |id, i| [id, range_key[i]] } + else + range_key ? ids.map { |id| [id, range_key] } : ids + end + + batch_delete_item(table => ids) + else + delete_item(table, ids, options) + end + end + + # Scans a table. Generally quite slow; try to avoid using scan if at all possible. 
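+    #
+    # @example Enumerate every item in a table (the table name is illustrative)
+    #   Dynamoid.adapter.scan('dynamoid_users').flat_map { |items| items }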
+ # + # @param [String] table the name of the table to write the object to + # @param [Hash] query a hash of attributes: matching records will be returned by the scan + # + # @since 0.2.0 + def scan(table, query = {}, opts = {}) + benchmark('Scan', table, query) { adapter.scan(table, query, opts) } + end + + def create_table(table_name, key, options = {}) + unless tables.include?(table_name) + result = nil + benchmark('Create Table') { result = adapter.create_table(table_name, key, options) } + tables << table_name + result + else + false + end + end + + # @since 0.2.0 + def delete_table(table_name, options = {}) + if tables.include?(table_name) + benchmark('Delete Table') { adapter.delete_table(table_name, options) } + idx = tables.index(table_name) + tables.delete_at(idx) + end + end + + %i[batch_get_item delete_item get_item list_tables put_item truncate batch_write_item batch_delete_item execute].each do |m| + # Method delegation with benchmark to the underlying adapter. Faster than relying on method_missing. + # + # @since 0.2.0 + define_method(m) do |*args, &blk| + benchmark(m, *args) { adapter.send(m, *args, &blk) } + end + end + + # Delegate all methods that aren't defind here to the underlying adapter. + # + # @since 0.2.0 + def method_missing(method, *args, &block) + # Don't use keywork arguments delegating (with **kw). It works in + # different way in different Ruby versions: <= 2.6, 2.7, 3.0 and in some + # future 3.x versions. Providing that there are no downstream methods + # with keyword arguments in adapter. + # + # https://eregon.me/blog/2019/11/10/the-delegation-challenge-of-ruby27.html + + return benchmark(method, *args) { adapter.send(method, *args, &block) } if adapter.respond_to?(method) + + super + end + + # Query the DynamoDB table. This employs DynamoDB's indexes so is generally faster than scanning, but is + # only really useful for range queries, since it can only find by one hash key at once. Only provide + # one range key to the hash. + # + # Dynamoid.adapter.query('users', { id: [[:eq, '1']], age: [[:between, [10, 30]]] }, { batch_size: 1000 }) + # + # @param [String] table_name the name of the table + # @param [Array[Array]] key_conditions conditions for the primary key attributes + # @param [Array[Array]] non_key_conditions (optional) conditions for non-primary key attributes + # @param [Hash] options (optional) the options to query the table with + # @option options [Boolean] :consistent_read You can set the ConsistentRead parameter to true and obtain a strongly consistent result + # @option options [Boolean] :scan_index_forward Specifies the order for index traversal: If true (default), the traversal is performed in ascending order; if false, the traversal is performed in descending order. + # @option options [Symbop] :select The attributes to be returned in the result (one of ALL_ATTRIBUTES, ALL_PROJECTED_ATTRIBUTES, ...) + # @option options [Symbol] :index_name The name of an index to query. This index can be any local secondary index or global secondary index on the table. + # @option options [Hash] :exclusive_start_key The primary key of the first item that this operation will evaluate. 
+    # @option options [Integer] :batch_size The number of items to lazily load one by one
+    # @option options [Integer] :record_limit The maximum number of items to return (not necessarily the number of evaluated items)
+    # @option options [Integer] :scan_limit The maximum number of items to evaluate (not necessarily the number of matching items)
+    # @option options [Array[Symbol]] :project The attributes to retrieve from the table
+    #
+    # @return [Enumerable] matching items
+    def query(table_name, key_conditions, non_key_conditions = {}, options = {})
+      adapter.query(table_name, key_conditions, non_key_conditions, options)
+    end
+
+    def self.adapter_plugin_class
+      Dynamoid::AdapterPlugin.const_get(Dynamoid::Config.adapter.camelcase)
+    end
+  end
+end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3.rb new file mode 100644 index 000000000..3778c3f07 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3.rb @@ -0,0 +1,678 @@
+# frozen_string_literal: true
+
+require_relative 'aws_sdk_v3/query'
+require_relative 'aws_sdk_v3/scan'
+require_relative 'aws_sdk_v3/execute_statement'
+require_relative 'aws_sdk_v3/create_table'
+require_relative 'aws_sdk_v3/batch_get_item'
+require_relative 'aws_sdk_v3/item_updater'
+require_relative 'aws_sdk_v3/table'
+require_relative 'aws_sdk_v3/until_past_table_status'
+require_relative 'aws_sdk_v3/transact'
+
+module Dynamoid
+  # @private
+  module AdapterPlugin
+    # The AwsSdkV3 adapter provides support for the aws-sdk version 3 for Ruby.
+
+    # NOTE: Don't use keyword arguments in public methods, since method
+    # calls on the adapter are delegated to the plugin.
+    #
+    # There are breaking changes in Ruby related to delegating keyword
+    # arguments, so we have decided just to avoid them when using delegation.
+    #
+    # https://eregon.me/blog/2019/11/10/the-delegation-challenge-of-ruby27.html
+
+    class AwsSdkV3
+      EQ = 'EQ'
+      HASH_KEY = 'HASH'
+      RANGE_KEY = 'RANGE'
+      STRING_TYPE = 'S'
+      NUM_TYPE = 'N'
+      BINARY_TYPE = 'B'
+      TABLE_STATUSES = {
+        creating: 'CREATING',
+        updating: 'UPDATING',
+        deleting: 'DELETING',
+        active: 'ACTIVE'
+      }.freeze
+      PARSE_TABLE_STATUS = lambda { |resp, lookup = :table|
+        # lookup is table for describe_table API
+        # lookup is table_description for create_table API
+        # because Amazon, damnit.
+ resp.send(lookup).table_status + } + BATCH_WRITE_ITEM_REQUESTS_LIMIT = 25 + + CONNECTION_CONFIG_OPTIONS = %i[endpoint region http_continue_timeout http_idle_timeout http_open_timeout http_read_timeout].freeze + + # See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html + # rubocop:disable Metrics/CollectionLiteralLength + RESERVED_WORDS = Set.new( + %i[ + ABORT ABSOLUTE ACTION ADD AFTER AGENT AGGREGATE ALL ALLOCATE ALTER ANALYZE + AND ANY ARCHIVE ARE ARRAY AS ASC ASCII ASENSITIVE ASSERTION ASYMMETRIC AT + ATOMIC ATTACH ATTRIBUTE AUTH AUTHORIZATION AUTHORIZE AUTO AVG BACK BACKUP + BASE BATCH BEFORE BEGIN BETWEEN BIGINT BINARY BIT BLOB BLOCK BOOLEAN BOTH + BREADTH BUCKET BULK BY BYTE CALL CALLED CALLING CAPACITY CASCADE CASCADED + CASE CAST CATALOG CHAR CHARACTER CHECK CLASS CLOB CLOSE CLUSTER CLUSTERED + CLUSTERING CLUSTERS COALESCE COLLATE COLLATION COLLECTION COLUMN COLUMNS + COMBINE COMMENT COMMIT COMPACT COMPILE COMPRESS CONDITION CONFLICT CONNECT + CONNECTION CONSISTENCY CONSISTENT CONSTRAINT CONSTRAINTS CONSTRUCTOR + CONSUMED CONTINUE CONVERT COPY CORRESPONDING COUNT COUNTER CREATE CROSS + CUBE CURRENT CURSOR CYCLE DATA DATABASE DATE DATETIME DAY DEALLOCATE DEC + DECIMAL DECLARE DEFAULT DEFERRABLE DEFERRED DEFINE DEFINED DEFINITION + DELETE DELIMITED DEPTH DEREF DESC DESCRIBE DESCRIPTOR DETACH DETERMINISTIC + DIAGNOSTICS DIRECTORIES DISABLE DISCONNECT DISTINCT DISTRIBUTE DO DOMAIN + DOUBLE DROP DUMP DURATION DYNAMIC EACH ELEMENT ELSE ELSEIF EMPTY ENABLE + END EQUAL EQUALS ERROR ESCAPE ESCAPED EVAL EVALUATE EXCEEDED EXCEPT + EXCEPTION EXCEPTIONS EXCLUSIVE EXEC EXECUTE EXISTS EXIT EXPLAIN EXPLODE + EXPORT EXPRESSION EXTENDED EXTERNAL EXTRACT FAIL FALSE FAMILY FETCH FIELDS + FILE FILTER FILTERING FINAL FINISH FIRST FIXED FLATTERN FLOAT FOR FORCE + FOREIGN FORMAT FORWARD FOUND FREE FROM FULL FUNCTION FUNCTIONS GENERAL + GENERATE GET GLOB GLOBAL GO GOTO GRANT GREATER GROUP GROUPING HANDLER HASH + HAVE HAVING HEAP HIDDEN HOLD HOUR IDENTIFIED IDENTITY IF IGNORE IMMEDIATE + IMPORT IN INCLUDING INCLUSIVE INCREMENT INCREMENTAL INDEX INDEXED INDEXES + INDICATOR INFINITE INITIALLY INLINE INNER INNTER INOUT INPUT INSENSITIVE + INSERT INSTEAD INT INTEGER INTERSECT INTERVAL INTO INVALIDATE IS ISOLATION + ITEM ITEMS ITERATE JOIN KEY KEYS LAG LANGUAGE LARGE LAST LATERAL LEAD + LEADING LEAVE LEFT LENGTH LESS LEVEL LIKE LIMIT LIMITED LINES LIST LOAD + LOCAL LOCALTIME LOCALTIMESTAMP LOCATION LOCATOR LOCK LOCKS LOG LOGED LONG + LOOP LOWER MAP MATCH MATERIALIZED MAX MAXLEN MEMBER MERGE METHOD METRICS + MIN MINUS MINUTE MISSING MOD MODE MODIFIES MODIFY MODULE MONTH MULTI + MULTISET NAME NAMES NATIONAL NATURAL NCHAR NCLOB NEW NEXT NO NONE NOT NULL + NULLIF NUMBER NUMERIC OBJECT OF OFFLINE OFFSET OLD ON ONLINE ONLY OPAQUE + OPEN OPERATOR OPTION OR ORDER ORDINALITY OTHER OTHERS OUT OUTER OUTPUT + OVER OVERLAPS OVERRIDE OWNER PAD PARALLEL PARAMETER PARAMETERS PARTIAL + PARTITION PARTITIONED PARTITIONS PATH PERCENT PERCENTILE PERMISSION + PERMISSIONS PIPE PIPELINED PLAN POOL POSITION PRECISION PREPARE PRESERVE + PRIMARY PRIOR PRIVATE PRIVILEGES PROCEDURE PROCESSED PROJECT PROJECTION + PROPERTY PROVISIONING PUBLIC PUT QUERY QUIT QUORUM RAISE RANDOM RANGE RANK + RAW READ READS REAL REBUILD RECORD RECURSIVE REDUCE REF REFERENCE + REFERENCES REFERENCING REGEXP REGION REINDEX RELATIVE RELEASE REMAINDER + RENAME REPEAT REPLACE REQUEST RESET RESIGNAL RESOURCE RESPONSE RESTORE + RESTRICT RESULT RETURN RETURNING RETURNS REVERSE REVOKE RIGHT ROLE ROLES + ROLLBACK ROLLUP ROUTINE ROW ROWS 
RULE RULES SAMPLE SATISFIES SAVE SAVEPOINT + SCAN SCHEMA SCOPE SCROLL SEARCH SECOND SECTION SEGMENT SEGMENTS SELECT SELF + SEMI SENSITIVE SEPARATE SEQUENCE SERIALIZABLE SESSION SET SETS SHARD SHARE + SHARED SHORT SHOW SIGNAL SIMILAR SIZE SKEWED SMALLINT SNAPSHOT SOME SOURCE + SPACE SPACES SPARSE SPECIFIC SPECIFICTYPE SPLIT SQL SQLCODE SQLERROR + SQLEXCEPTION SQLSTATE SQLWARNING START STATE STATIC STATUS STORAGE STORE + STORED STREAM STRING STRUCT STYLE SUB SUBMULTISET SUBPARTITION SUBSTRING + SUBTYPE SUM SUPER SYMMETRIC SYNONYM SYSTEM TABLE TABLESAMPLE TEMP TEMPORARY + TERMINATED TEXT THAN THEN THROUGHPUT TIME TIMESTAMP TIMEZONE TINYINT TO + TOKEN TOTAL TOUCH TRAILING TRANSACTION TRANSFORM TRANSLATE TRANSLATION + TREAT TRIGGER TRIM TRUE TRUNCATE TTL TUPLE TYPE UNDER UNDO UNION UNIQUE UNIT + UNKNOWN UNLOGGED UNNEST UNPROCESSED UNSIGNED UNTIL UPDATE UPPER URL USAGE + USE USER USERS USING UUID VACUUM VALUE VALUED VALUES VARCHAR VARIABLE + VARIANCE VARINT VARYING VIEW VIEWS VIRTUAL VOID WAIT WHEN WHENEVER WHERE + WHILE WINDOW WITH WITHIN WITHOUT WORK WRAPPED WRITE YEAR ZONE + ] + ).freeze + # rubocop:enable Metrics/CollectionLiteralLength + + attr_reader :table_cache + + # Establish the connection to DynamoDB. + # + # @return [Aws::DynamoDB::Client] the DynamoDB connection + def connect! + @client = Aws::DynamoDB::Client.new(connection_config) + @table_cache = {} + end + + def connection_config + @connection_hash = {} + + (Dynamoid::Config.settings.compact.keys & CONNECTION_CONFIG_OPTIONS).each do |option| + @connection_hash[option] = Dynamoid::Config.send(option) + end + + # if credentials are passed, they already contain access key & secret key + if Dynamoid::Config.credentials? + @connection_hash[:credentials] = Dynamoid::Config.credentials + else + # otherwise, pass access key & secret key for credentials creation + if Dynamoid::Config.access_key? + @connection_hash[:access_key_id] = Dynamoid::Config.access_key + end + if Dynamoid::Config.secret_key? + @connection_hash[:secret_access_key] = Dynamoid::Config.secret_key + end + end + + @connection_hash[:logger] = Dynamoid::Config.logger + @connection_hash[:log_level] = :debug + + # https://github.com/aws/aws-sdk-ruby/blob/master/gems/aws-sdk-core/lib/aws-sdk-core/plugins/logging.rb + # https://github.com/aws/aws-sdk-ruby/blob/master/gems/aws-sdk-core/lib/aws-sdk-core/log/formatter.rb + if Dynamoid::Config.log_formatter + @connection_hash[:log_formatter] = Dynamoid::Config.log_formatter + end + + @connection_hash + end + + # Return the client object. + # + # @since 1.0.0 + def client + @client + end + + # Puts multiple items in one table + # + # If optional block is passed it will be called for each written batch of items, meaning once per batch. + # Block receives boolean flag which is true if there are some unprocessed items, otherwise false. 
+ # + # @example Saves several items to the table testtable + # Dynamoid::AdapterPlugin::AwsSdkV3.batch_write_item('table1', [{ id: '1', name: 'a' }, { id: '2', name: 'b'}]) + # + # @example Pass block + # Dynamoid::AdapterPlugin::AwsSdkV3.batch_write_item('table1', items) do |bool| + # if bool + # puts 'there are unprocessed items' + # end + # end + # + # @param [String] table_name the name of the table + # @param [Array] objects to be processed + # @param [Hash] options additional options + # @yield [true|false] invokes an optional block with argument - whether there are unprocessed items + # + # See: + # * http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html + # * http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#batch_write_item-instance_method + def batch_write_item(table_name, objects, options = {}) + items = objects.map { |o| sanitize_item(o) } + + while items.present? + batch = items.shift(BATCH_WRITE_ITEM_REQUESTS_LIMIT) + requests = batch.map { |item| { put_request: { item: item } } } + + response = client.batch_write_item( + { + request_items: { + table_name => requests + }, + return_consumed_capacity: 'TOTAL', + return_item_collection_metrics: 'SIZE' + }.merge!(options) + ) + + yield(response.unprocessed_items.present?) if block_given? + + if response.unprocessed_items.present? + items += response.unprocessed_items[table_name].map { |r| r.put_request.item } + end + end + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException => e + raise Dynamoid::Errors::ConditionalCheckFailedException, e + end + + # Get many items at once from DynamoDB. More efficient than getting each item individually. + # + # If optional block is passed `nil` will be returned and the block will be called for each read batch of items, + # meaning once per batch. + # + # Block receives parameters: + # * hash with items like `{ table_name: [items]}` + # * and boolean flag is true if there are some unprocessed keys, otherwise false. + # + # @example Retrieve IDs 1 and 2 from the table testtable + # Dynamoid::AdapterPlugin::AwsSdkV3.batch_get_item('table1' => ['1', '2']) + # + # @example Pass block to receive each batch + # Dynamoid::AdapterPlugin::AwsSdkV3.batch_get_item('table1' => ids) do |hash, bool| + # puts hash['table1'] + # + # if bool + # puts 'there are unprocessed keys' + # end + # end + # + # @param [Hash] table_names_with_ids the hash of tables and IDs to retrieve + # @param [Hash] options to be passed to underlying BatchGet call + # @param [Proc] block optional block can be passed to handle each batch of items + # + # @return [Hash] a hash where keys are the table names and the values are the retrieved items + # + # See: + # * http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#batch_get_item-instance_method + # + # @since 1.0.0 + # + # @todo: Provide support for passing options to underlying batch_get_item + def batch_get_item(table_names_with_ids, options = {}, &block) + tables_with_ids = table_names_with_ids.transform_keys do |name| + describe_table(name) + end + BatchGetItem.new(client, tables_with_ids, options).call(&block) + end + + # Delete many items at once from DynamoDB. More efficient than delete each item individually. 
+ # + # @example Delete IDs 1 and 2 from the table testtable + # Dynamoid::AdapterPlugin::AwsSdk.batch_delete_item('table1' => ['1', '2']) + # or + # Dynamoid::AdapterPlugin::AwsSdkV3.batch_delete_item('table1' => [['hk1', 'rk2'], ['hk1', 'rk2']]])) + # + # @param [Hash] options the hash of tables and IDs to delete + # + # See: + # * http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html + # * http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#batch_write_item-instance_method + # + # TODO handle rejections because of internal processing failures + def batch_delete_item(options) + requests = [] + + options.each_pair do |table_name, ids| + table = describe_table(table_name) + + ids.each_slice(BATCH_WRITE_ITEM_REQUESTS_LIMIT) do |sliced_ids| + delete_requests = sliced_ids.map do |id| + { delete_request: { key: key_stanza(table, *id) } } + end + + requests << { table_name => delete_requests } + end + end + + requests.each do |items| + client.batch_write_item( + request_items: items, + return_consumed_capacity: 'TOTAL', + return_item_collection_metrics: 'SIZE' + ) + end + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException => e + raise Dynamoid::Errors::ConditionalCheckFailedException, e + end + + def transact_write_items(items) + Transact.new(client).transact_write_items(items) + end + + # Create a table on DynamoDB. This usually takes a long time to complete. + # + # @param [String] table_name the name of the table to create + # @param [Symbol] key the table's primary key (defaults to :id) + # @param [Hash] options provide a range key here if the table has a composite key + # @option options [Array] local_secondary_indexes + # @option options [Array] global_secondary_indexes + # @option options [Symbol] hash_key_type The type of the hash key + # @option options [Boolean] sync Wait for table status to be ACTIVE? + # @since 1.0.0 + def create_table(table_name, key = :id, options = {}) + Dynamoid.logger.info "Creating #{table_name} table. This could take a while." + CreateTable.new(client, table_name, key, options).call + true + rescue Aws::DynamoDB::Errors::ResourceInUseException => e + Dynamoid.logger.error "Table #{table_name} cannot be created as it already exists" + false + end + + def update_time_to_live(table_name, attribute) + request = { + table_name: table_name, + time_to_live_specification: { + attribute_name: attribute, + enabled: true, + } + } + + client.update_time_to_live(request) + end + + # Create a table on DynamoDB *synchronously*. + # This usually takes a long time to complete. + # CreateTable is normally an asynchronous operation. + # You can optionally define secondary indexes on the new table, + # as part of the CreateTable operation. + # If you want to create multiple tables with secondary indexes on them, + # you must create the tables sequentially. + # Only one table with secondary indexes can be + # in the CREATING state at any given time. 
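+      #
+      # @example Create a table with a composite key and wait until it is ACTIVE (names are illustrative)
+      #   Dynamoid.adapter.create_table_synchronously('dynamoid_events', :id,
+      #                                               range_key: { created_at: :number })
+      #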
+ # See: http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#create_table-instance_method + # + # @param [String] table_name the name of the table to create + # @param [Symbol] key the table's primary key (defaults to :id) + # @param [Hash] options provide a range key here if the table has a composite key + # @option options [Array] local_secondary_indexes + # @option options [Array] global_secondary_indexes + # @option options [Symbol] hash_key_type The type of the hash key + # @since 1.2.0 + def create_table_synchronously(table_name, key = :id, options = {}) + create_table(table_name, key, options.merge(sync: true)) + end + + # Removes an item from DynamoDB. + # + # @param [String] table_name the name of the table + # @param [String] key the hash key of the item to delete + # @param [Hash] options provide a range key here if the table has a composite key + # + # @since 1.0.0 + # + # @todo: Provide support for various options http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#delete_item-instance_method + def delete_item(table_name, key, options = {}) + options ||= {} + range_key = options[:range_key] + conditions = options[:conditions] + table = describe_table(table_name) + client.delete_item( + table_name: table_name, + key: key_stanza(table, key, range_key), + expected: expected_stanza(conditions) + ) + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException => e + raise Dynamoid::Errors::ConditionalCheckFailedException, e + end + + # Deletes an entire table from DynamoDB. + # + # @param [String] table_name the name of the table to destroy + # @option options [Boolean] sync Wait for table status check to raise ResourceNotFoundException + # + # @since 1.0.0 + def delete_table(table_name, options = {}) + resp = client.delete_table(table_name: table_name) + + if options[:sync] + status = PARSE_TABLE_STATUS.call(resp, :table_description) + if status == TABLE_STATUSES[:deleting] + UntilPastTableStatus.new(client, table_name, :deleting).call + end + end + + table_cache.delete(table_name) + rescue Aws::DynamoDB::Errors::ResourceInUseException => e + Dynamoid.logger.error "Table #{table_name} cannot be deleted as it is in use" + raise e + end + + def delete_table_synchronously(table_name, options = {}) + delete_table(table_name, options.merge(sync: true)) + end + + # @todo Add a DescribeTable method. + + # Fetches an item from DynamoDB. + # + # @param [String] table_name the name of the table + # @param [String] key the hash key of the item to find + # @param [Hash] options provide a range key here if the table has a composite key + # + # @return [Hash] a hash representing the raw item in DynamoDB + # + # @since 1.0.0 + # + # @todo Provide support for various options http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#get_item-instance_method + def get_item(table_name, key, options = {}) + options = options.dup + options ||= {} + + table = describe_table(table_name) + range_key = options.delete(:range_key) + consistent_read = options.delete(:consistent_read) + + item = client.get_item(table_name: table_name, + key: key_stanza(table, key, range_key), + consistent_read: consistent_read)[:item] + item ? item_to_hash(item) : nil + end + + # Edits an existing item's attributes, or adds a new item to the table if it does not already exist. 
You can put, delete, or add attribute values + # + # @param [String] table_name the name of the table + # @param [String] key the hash key of the item to find + # @param [Hash] options provide a range key here if the table has a composite key + # + # @return new attributes for the record + # + # @todo Provide support for various options http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#update_item-instance_method + def update_item(table_name, key, options = {}) + options = options.dup + + range_key = options.delete(:range_key) + conditions = options.delete(:conditions) + table = describe_table(table_name) + + item_updater = ItemUpdater.new(table, key, range_key) + yield(item_updater) + + raise "non-empty options: #{options}" unless options.empty? + + result = client.update_item(table_name: table_name, + key: key_stanza(table, key, range_key), + attribute_updates: item_updater.attribute_updates, + expected: expected_stanza(conditions), + return_values: 'ALL_NEW') + item_to_hash(result[:attributes]) + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException => e + raise Dynamoid::Errors::ConditionalCheckFailedException, e + end + + # List all tables on DynamoDB. + # + # @since 1.0.0 + def list_tables + [].tap do |result| + start_table_name = nil + loop do + result_page = client.list_tables exclusive_start_table_name: start_table_name + start_table_name = result_page.last_evaluated_table_name + result.concat result_page.table_names + break unless start_table_name + end + end + end + + # Persists an item on DynamoDB. + # + # @param [String] table_name the name of the table + # @param [Object] object a hash or Dynamoid object to persist + # + # @since 1.0.0 + # + # See: http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#put_item-instance_method + def put_item(table_name, object, options = {}) + options ||= {} + item = sanitize_item(object) + + client.put_item( + { + table_name: table_name, + item: item, + expected: expected_stanza(options) + }.merge!(options) + ) + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException => e + raise Dynamoid::Errors::ConditionalCheckFailedException, e + end + + # Query the DynamoDB table. This employs DynamoDB's indexes so is generally faster than scanning, but is + # only really useful for range queries, since it can only find by one hash key at once. Only provide + # one range key to the hash. + # + # Dynamoid.adapter.query('users', { id: [[:eq, '1']], age: [[:between, [10, 30]]] }, { batch_size: 1000 }) + # + # @param [String] table_name the name of the table + # @param [Array[Array]] key_conditions conditions for the primary key attributes + # @param [Array[Array]] non_key_conditions (optional) conditions for non-primary key attributes + # @param [Hash] options (optional) the options to query the table with + # @option options [Boolean] :consistent_read You can set the ConsistentRead parameter to true and obtain a strongly consistent result + # @option options [Boolean] :scan_index_forward Specifies the order for index traversal: If true (default), the traversal is performed in ascending order; if false, the traversal is performed in descending order. + # @option options [Symbop] :select The attributes to be returned in the result (one of ALL_ATTRIBUTES, ALL_PROJECTED_ATTRIBUTES, ...) + # @option options [Symbol] :index_name The name of an index to query. This index can be any local secondary index or global secondary index on the table. 
+ # @option options [Hash] :exclusive_start_key The primary key of the first item that this operation will evaluate. + # @option options [Integer] :batch_size The number of items to lazily load one by one + # @option options [Integer] :record_limit The maximum number of items to return (not necessarily the number of evaluated items) + # @option options [Integer] :scan_limit The maximum number of items to evaluate (not necessarily the number of matching items) + # @option options [Array[Symbol]] :project The attributes to retrieve from the table + # + # @return [Enumerable] matching items + # + # @since 1.0.0 + # + # @todo Provide support for various other options http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#query-instance_method + def query(table_name, key_conditions, non_key_conditions = {}, options = {}) + Enumerator.new do |yielder| + table = describe_table(table_name) + + Query.new(client, table, key_conditions, non_key_conditions, options).call.each do |page| + yielder.yield( + page.items.map { |item| item_to_hash(item) }, + last_evaluated_key: page.last_evaluated_key + ) + end + end + end + + def query_count(table_name, key_conditions, non_key_conditions, options) + table = describe_table(table_name) + options[:select] = 'COUNT' + + Query.new(client, table, key_conditions, non_key_conditions, options).call + .map(&:count) + .reduce(:+) + end + + # Scan the DynamoDB table. This is usually a very slow operation as it naively filters all data on + # the DynamoDB servers. + # + # @param [String] table_name the name of the table + # @param [Hash] conditions a hash of attributes: matching records will be returned by the scan + # + # @return [Enumerable] matching items + # + # @since 1.0.0 + # + # @todo: Provide support for various options http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#scan-instance_method + def scan(table_name, conditions = {}, options = {}) + Enumerator.new do |yielder| + table = describe_table(table_name) + + Scan.new(client, table, conditions, options).call.each do |page| + yielder.yield( + page.items.map { |item| item_to_hash(item) }, + last_evaluated_key: page.last_evaluated_key + ) + end + end + end + + def scan_count(table_name, conditions = {}, options = {}) + table = describe_table(table_name) + options[:select] = 'COUNT' + + Scan.new(client, table, conditions, options).call + .map(&:count) + .reduce(:+) + end + + # + # Truncates all records in the given table + # + # @param [String] table_name the name of the table + # + # @since 1.0.0 + def truncate(table_name) + table = describe_table(table_name) + hk = table.hash_key + rk = table.range_key + + ids = scan(table_name, {}, {}).flat_map { |i| i }.map do |attributes| + rk ? [attributes[hk], attributes[rk.to_sym]] : attributes[hk] + end + + batch_delete_item(table_name => ids) + end + + def count(table_name) + describe_table(table_name, reload: true).item_count + end + + # Run PartiQL query. 
+ # + # Dynamoid.adapter.execute("SELECT * FROM users WHERE id = ?", ["758"]) + # + # @param [String] statement PartiQL statement + # @param [Array] parameters a list of bind parameters + # @param [Hash] options + # @option [Boolean] consistent_read + # @return [[] | Array[Hash] | Enumerator::Lazy[Hash]] items when used a SELECT statement and empty Array otherwise + # + def execute(statement, parameters = [], options = {}) + items = ExecuteStatement.new(client, statement, parameters, options).call + + if items.is_a?(Array) + items + else + items.lazy.flat_map { |array| array } + end + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException + [] + end + + protected + + # + # The key hash passed on get_item, put_item, delete_item, update_item, etc + # + def key_stanza(table, hash_key, range_key = nil) + key = { table.hash_key.to_s => hash_key } + key[table.range_key.to_s] = range_key if range_key + key + end + + # + # @param [Hash] conditions Conditions to enforce on operation (e.g. { :if => { :count => 5 }, :unless_exists => ['id']}) + # @return an Expected stanza for the given conditions hash + # + def expected_stanza(conditions = nil) + expected = Hash.new { |h, k| h[k] = {} } + return expected unless conditions + + conditions.delete(:unless_exists).try(:each) do |col| + expected[col.to_s][:exists] = false + end + + conditions.delete(:if).try(:each) do |col, val| + expected[col.to_s][:value] = val + end + + expected + end + + # + # New, semi-arbitrary API to get data on the table + # + def describe_table(table_name, reload: false) + (!reload && table_cache[table_name]) || begin + table_cache[table_name] = Table.new(client.describe_table(table_name: table_name).data) + end + end + + # + # Converts a hash returned by get_item, scan, etc. into a key-value hash + # + def item_to_hash(hash) + hash.symbolize_keys + end + + def sanitize_item(attributes) + config_value = Dynamoid.config.store_attribute_with_nil_value + store_attribute_with_nil_value = config_value.nil? ? false : !!config_value + + attributes.reject do |_, v| + ((v.is_a?(Set) || v.is_a?(String)) && v.empty?) || + (!store_attribute_with_nil_value && v.nil?) + end.transform_values do |v| + v.is_a?(Hash) ? v.stringify_keys : v + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/batch_get_item.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/batch_get_item.rb new file mode 100644 index 000000000..39f3f6795 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/batch_get_item.rb @@ -0,0 +1,117 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + # Documentation + # https://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#batch_get_item-instance_method + class BatchGetItem + attr_reader :client, :tables_with_ids, :options + + def initialize(client, tables_with_ids, options = {}) + @client = client + @tables_with_ids = tables_with_ids + @options = options + end + + def call + results = {} + + tables_with_ids.each do |table, ids| + if ids.blank? + results[table.name] = [] + next + end + + ids = Array(ids).dup + + while ids.present? + batch = ids.shift(Dynamoid::Config.batch_size) + request = build_request(table, batch) + api_response = client.batch_get_item(request) + response = Response.new(api_response) + + if block_given? + # return batch items as a result + batch_results = Hash.new([].freeze) + batch_results.update(response.items_grouped_by_table) + + yield(batch_results, response.successful_partially?) 
+ else + # collect all the batches to return at the end + results.update(response.items_grouped_by_table) { |_, its1, its2| its1 + its2 } + end + + if response.successful_partially? + ids += response.unprocessed_ids(table) + end + end + end + + results unless block_given? + end + + private + + def build_request(table, ids) + ids = Array(ids) + + keys = if table.range_key.nil? + ids.map { |hk| { table.hash_key => hk } } + else + ids.map { |hk, rk| { table.hash_key => hk, table.range_key => rk } } + end + + { + request_items: { + table.name => { + keys: keys, + consistent_read: options[:consistent_read] + } + } + } + end + + # Helper class to work with response + class Response + def initialize(api_response) + @api_response = api_response + end + + def successful_partially? + @api_response.unprocessed_keys.present? + end + + def unprocessed_ids(table) + # unprocessed_keys Hash contains as values instances of + # Aws::DynamoDB::Types::KeysAndAttributes + @api_response.unprocessed_keys[table.name].keys.map do |h| + # If a table has a composite primary key then we need to return an array + # of [hash key, range key]. Otherwise just return hash key's + # value. + if table.range_key.nil? + h[table.hash_key.to_s] + else + [h[table.hash_key.to_s], h[table.range_key.to_s]] + end + end + end + + def items_grouped_by_table + # data[:responses] is a Hash[table_name -> items] + @api_response.data[:responses].transform_values do |items| + items.map(&method(:item_to_hash)) + end + end + + private + + def item_to_hash(item) + item.symbolize_keys + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/create_table.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/create_table.rb new file mode 100644 index 000000000..b9f041981 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/create_table.rb @@ -0,0 +1,247 @@ +# frozen_string_literal: true + +require_relative 'until_past_table_status' + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + class CreateTable + attr_reader :client, :table_name, :key, :options + + def initialize(client, table_name, key, options) + @client = client + @table_name = table_name + @key = key + @options = options + end + + def call + billing_mode = options[:billing_mode] + read_capacity = options[:read_capacity] || Dynamoid::Config.read_capacity + write_capacity = options[:write_capacity] || Dynamoid::Config.write_capacity + + secondary_indexes = options.slice( + :local_secondary_indexes, + :global_secondary_indexes + ) + ls_indexes = options[:local_secondary_indexes] + gs_indexes = options[:global_secondary_indexes] + + key_schema = { + hash_key_schema: { key => options[:hash_key_type] || :string }, + range_key_schema: options[:range_key] + } + attribute_definitions = build_all_attribute_definitions( + key_schema, + secondary_indexes + ) + key_schema = aws_key_schema( + key_schema[:hash_key_schema], + key_schema[:range_key_schema] + ) + + client_opts = { + table_name: table_name, + key_schema: key_schema, + attribute_definitions: attribute_definitions + } + + if billing_mode == :on_demand + client_opts[:billing_mode] = 'PAY_PER_REQUEST' + else + client_opts[:billing_mode] = 'PROVISIONED' + client_opts[:provisioned_throughput] = { + read_capacity_units: read_capacity, + write_capacity_units: write_capacity + } + end + + if ls_indexes.present? + client_opts[:local_secondary_indexes] = ls_indexes.map do |index| + index_to_aws_hash(index) + end + end + + if gs_indexes.present? 
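+ # Each Dynamoid::Indexes::Index is converted into the AWS index hash
+ # built by #index_to_aws_hash below (index_name, key_schema, projection
+ # and, for provisioned billing, provisioned_throughput).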
+ client_opts[:global_secondary_indexes] = gs_indexes.map do |index| + index_to_aws_hash(index) + end + end + resp = client.create_table(client_opts) + options[:sync] = true if (!options.key?(:sync) && ls_indexes.present?) || gs_indexes.present? + + if options[:sync] + status = PARSE_TABLE_STATUS.call(resp, :table_description) + if status == TABLE_STATUSES[:creating] + UntilPastTableStatus.new(client, table_name, :creating).call + end + end + + # Response to original create_table, which, if options[:sync] + # may have an outdated table_description.table_status of "CREATING" + resp + end + + private + + # Builds aws attributes definitions based off of primary hash/range and + # secondary indexes + # + # @param key_schema + # @option key_schema [Hash] hash_key_schema - eg: {:id => :string} + # @option key_schema [Hash] range_key_schema - eg: {:created_at => :number} + # @param [Hash] secondary_indexes + # @option secondary_indexes [Array] :local_secondary_indexes + # @option secondary_indexes [Array] :global_secondary_indexes + def build_all_attribute_definitions(key_schema, secondary_indexes = {}) + ls_indexes = secondary_indexes[:local_secondary_indexes] + gs_indexes = secondary_indexes[:global_secondary_indexes] + + attribute_definitions = [] + + attribute_definitions << build_attribute_definitions( + key_schema[:hash_key_schema], + key_schema[:range_key_schema] + ) + + if ls_indexes.present? + ls_indexes.map do |index| + attribute_definitions << build_attribute_definitions( + index.hash_key_schema, + index.range_key_schema + ) + end + end + + if gs_indexes.present? + gs_indexes.map do |index| + attribute_definitions << build_attribute_definitions( + index.hash_key_schema, + index.range_key_schema + ) + end + end + + attribute_definitions.flatten! + # uniq these definitions because range keys might be common between + # primary and secondary indexes + attribute_definitions.uniq! + attribute_definitions + end + + # Builds an attribute definitions based on hash key and range key + # @param [Hash] hash_key_schema - eg: {:id => :string} + # @param [Hash] range_key_schema - eg: {:created_at => :datetime} + # @return [Array] + def build_attribute_definitions(hash_key_schema, range_key_schema = nil) + attrs = [] + + attrs << attribute_definition_element( + hash_key_schema.keys.first, + hash_key_schema.values.first + ) + + if range_key_schema.present? + attrs << attribute_definition_element( + range_key_schema.keys.first, + range_key_schema.values.first + ) + end + + attrs + end + + # Builds an aws attribute definition based on name and dynamoid type + # @param [Symbol] name - eg: :id + # @param [Symbol] dynamoid_type - eg: :string + # @return [Hash] + def attribute_definition_element(name, dynamoid_type) + aws_type = api_type(dynamoid_type) + + { + attribute_name: name.to_s, + attribute_type: aws_type + } + end + + # Converts from symbol to the API string for the given data type + # E.g. :number -> 'N' + def api_type(type) + case type + when :string then STRING_TYPE + when :number then NUM_TYPE + when :binary then BINARY_TYPE + else raise "Unknown type: #{type}" + end + end + + # Converts a Dynamoid::Indexes::Index to an AWS API-compatible hash. 
+ # This resulting hash is of the form: + # + # { + # index_name: String + # keys: { + # hash_key: aws_key_schema (hash) + # range_key: aws_key_schema (hash) + # } + # projection: { + # projection_type: (ALL, KEYS_ONLY, INCLUDE) String + # non_key_attributes: (optional) Array + # } + # provisioned_throughput: { + # read_capacity_units: Integer + # write_capacity_units: Integer + # } + # } + # + # @param [Dynamoid::Indexes::Index] index the index. + # @return [Hash] hash representing an AWS Index definition. + def index_to_aws_hash(index) + key_schema = aws_key_schema(index.hash_key_schema, index.range_key_schema) + + hash = { + index_name: index.name, + key_schema: key_schema, + projection: { + projection_type: index.projection_type.to_s.upcase + } + } + + # If the projection type is include, specify the non key attributes + if index.projection_type == :include + hash[:projection][:non_key_attributes] = index.projected_attributes + end + + # Only global secondary indexes have a separate throughput. + if index.type == :global_secondary && options[:billing_mode] != :on_demand + hash[:provisioned_throughput] = { + read_capacity_units: index.read_capacity, + write_capacity_units: index.write_capacity + } + end + hash + end + + # Converts hash_key_schema and range_key_schema to aws_key_schema + # @param [Hash] hash_key_schema eg: {:id => :string} + # @param [Hash] range_key_schema eg: {:created_at => :number} + # @return [Array] + def aws_key_schema(hash_key_schema, range_key_schema) + schema = [{ + attribute_name: hash_key_schema.keys.first.to_s, + key_type: HASH_KEY + }] + + if range_key_schema.present? + schema << { + attribute_name: range_key_schema.keys.first.to_s, + key_type: RANGE_KEY + } + end + schema + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/execute_statement.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/execute_statement.rb new file mode 100644 index 000000000..f0b0d9ea4 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/execute_statement.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + # Excecute a PartiQL query + # + # Documentation: + # - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ExecuteStatement.html + # - https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/DynamoDB/Client.html#execute_statement-instance_method + # + # NOTE: For reads result may be paginated. Only pagination with NextToken + # is implemented. Currently LastEvaluatedKey in response cannot be fed to + # ExecuteStatement to get the next page. 
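+ #
+ # The adapter's #execute flattens the paginated pages into a lazy
+ # enumerator of items, so a consumption sketch (table name illustrative)
+ # looks like:
+ #
+ #   Dynamoid.adapter.execute('SELECT * FROM users').each do |item|
+ #     # item is a Hash with symbolized attribute names
+ #   end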
+ # + # See also: + # - https://repost.aws/questions/QUgNPbBYWiRoOlMsJv-XzrWg/how-to-use-last-evaluated-key-in-execute-statement-request + # - https://stackoverflow.com/questions/71438439/aws-dynamodb-executestatement-pagination + class ExecuteStatement + attr_reader :client, :statement, :parameters, :options + + def initialize(client, statement, parameters, options) + @client = client + @statement = statement + @parameters = parameters + @options = options.symbolize_keys.slice(:consistent_read) + end + + def call + request = { + statement: @statement, + parameters: @parameters, + consistent_read: @options[:consistent_read], + } + + response = client.execute_statement(request) + + unless response.next_token + return response_to_items(response) + end + + Enumerator.new do |yielder| + yielder.yield(response_to_items(response)) + + while response.next_token + request[:next_token] = response.next_token + response = client.execute_statement(request) + yielder.yield(response_to_items(response)) + end + end + end + + private + + def response_to_items(response) + response.items.map(&:symbolize_keys) + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/filter_expression_convertor.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/filter_expression_convertor.rb new file mode 100644 index 000000000..7698dcb7d --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/filter_expression_convertor.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + class FilterExpressionConvertor + attr_reader :expression, :name_placeholders, :value_placeholders + + def initialize(conditions, name_placeholders, value_placeholders, name_placeholder_sequence, value_placeholder_sequence) + @conditions = conditions + @name_placeholders = name_placeholders.dup + @value_placeholders = value_placeholders.dup + @name_placeholder_sequence = name_placeholder_sequence + @value_placeholder_sequence = value_placeholder_sequence + + build + end + + private + + def build + clauses = @conditions.map do |name, attribute_conditions| + attribute_conditions.map do |operator, value| + name_or_placeholder = name_or_placeholder_for(name) + + case operator + when :eq + "#{name_or_placeholder} = #{value_placeholder_for(value)}" + when :ne + "#{name_or_placeholder} <> #{value_placeholder_for(value)}" + when :gt + "#{name_or_placeholder} > #{value_placeholder_for(value)}" + when :lt + "#{name_or_placeholder} < #{value_placeholder_for(value)}" + when :gte + "#{name_or_placeholder} >= #{value_placeholder_for(value)}" + when :lte + "#{name_or_placeholder} <= #{value_placeholder_for(value)}" + when :between + "#{name_or_placeholder} BETWEEN #{value_placeholder_for(value[0])} AND #{value_placeholder_for(value[1])}" + when :begins_with + "begins_with (#{name_or_placeholder}, #{value_placeholder_for(value)})" + when :in + list = value.map(&method(:value_placeholder_for)).join(' , ') + "#{name_or_placeholder} IN (#{list})" + when :contains + "contains (#{name_or_placeholder}, #{value_placeholder_for(value)})" + when :not_contains + "NOT contains (#{name_or_placeholder}, #{value_placeholder_for(value)})" + when :null + "attribute_not_exists (#{name_or_placeholder})" + when :not_null + "attribute_exists (#{name_or_placeholder})" + end + end + end.flatten + + @expression = clauses.join(' AND ') + end + + def name_or_placeholder_for(name) + return name unless name.upcase.in?(Dynamoid::AdapterPlugin::AwsSdkV3::RESERVED_WORDS) + + 
placeholder = @name_placeholder_sequence.call + @name_placeholders[placeholder] = name + placeholder + end + + def value_placeholder_for(value) + placeholder = @value_placeholder_sequence.call + @value_placeholders[placeholder] = value + placeholder + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/item_updater.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/item_updater.rb new file mode 100644 index 000000000..b9b6a248e --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/item_updater.rb @@ -0,0 +1,122 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + # Mimics behavior of the yielded object on DynamoDB's update_item API (high level). + class ItemUpdater + ADD = 'ADD' + DELETE = 'DELETE' + PUT = 'PUT' + + attr_reader :table, :key, :range_key + + def initialize(table, key, range_key = nil) + @table = table + @key = key + @range_key = range_key + @additions = {} + @deletions = {} + @updates = {} + end + + # + # Adds the given values to the values already stored in the corresponding columns. + # The column must contain a Set or a number. + # + # @param [Hash] values keys of the hash are the columns to update, values + # are the values to add. values must be a Set, Array, or Numeric + # + def add(values) + @additions.merge!(sanitize_attributes(values)) + end + + # + # Removes values from the sets of the given columns + # + # @param [Hash|Symbol|String] values keys of the hash are the columns, values are Arrays/Sets of items + # to remove + # + def delete(values) + if values.is_a?(Hash) + @deletions.merge!(sanitize_attributes(values)) + else + @deletions.merge!(values.to_s => nil) + end + end + + # + # Replaces the values of one or more attributes + # + def set(values) + values_sanitized = sanitize_attributes(values) + + if Dynamoid.config.store_attribute_with_nil_value + @updates.merge!(values_sanitized) + else + # delete explicitly attributes if assigned nil value and configured + # to not store nil values + values_to_update = values_sanitized.reject { |_, v| v.nil? } + values_to_delete = values_sanitized.select { |_, v| v.nil? } + + @updates.merge!(values_to_update) + @deletions.merge!(values_to_delete) + end + end + + # + # Returns an AttributeUpdates hash suitable for passing to the V2 Client API + # + def attribute_updates + result = {} + + @additions.each do |k, v| + result[k] = { + action: ADD, + value: v + } + end + + @deletions.each do |k, v| + result[k] = { + action: DELETE + } + result[k][:value] = v unless v.nil? + end + + @updates.each do |k, v| + result[k] = { + action: PUT, + value: v + } + end + + result + end + + private + + # Keep in sync with AwsSdkV3.sanitize_item. + # + # The only difference is that to update item we need to track whether + # attribute value is nil or not. + def sanitize_attributes(attributes) + # rubocop:disable Lint/DuplicateBranch + attributes.transform_values do |v| + if v.is_a?(Hash) + v.stringify_keys + elsif v.is_a?(Set) && v.empty? + nil + elsif v.is_a?(String) && v.empty? 
+ nil + else + v + end + end + # rubocop:enable Lint/DuplicateBranch + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/backoff.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/backoff.rb new file mode 100644 index 000000000..0311730b5 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/backoff.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + module Middleware + class Backoff + def initialize(next_chain) + @next_chain = next_chain + @backoff = Dynamoid.config.backoff ? Dynamoid.config.build_backoff : nil + end + + def call(request) + response = @next_chain.call(request) + @backoff.call if @backoff + + response + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/limit.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/limit.rb new file mode 100644 index 000000000..667ffc250 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/limit.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + module Middleware + class Limit + def initialize(next_chain, record_limit: nil, scan_limit: nil) + @next_chain = next_chain + + @record_limit = record_limit + @scan_limit = scan_limit + + @record_count = 0 + @scan_count = 0 + end + + def call(request) + # Adjust the limit down if the remaining record and/or scan limit are + # lower to obey limits. We can assume the difference won't be + # negative due to break statements below but choose smaller limit + # which is why we have 2 separate if statements. + # + # NOTE: Adjusting based on record_limit can cause many HTTP requests + # being made. We may want to change this behavior, but it affects + # filtering on data with potentially large gaps. + # + # Example: + # User.where('created_at.gte' => 1.day.ago).record_limit(1000) + # Records 1-999 User's that fit criteria + # Records 1000-2000 Users's that do not fit criteria + # Record 2001 fits criteria + # + # The underlying implementation will have 1 page for records 1-999 + # then will request with limit 1 for records 1000-2000 (making 1000 + # requests of limit 1) until hit record 2001. 
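+ #
+ # For example, with record_limit 1000, a page size (request[:limit]) of
+ # 100 and 950 records already counted, the next request's :limit is
+ # lowered to 50 so the record limit is not overshot.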
+ if request[:limit] && @record_limit && @record_limit - @record_count < request[:limit] + request[:limit] = @record_limit - @record_count + end + if request[:limit] && @scan_limit && @scan_limit - @scan_count < request[:limit] + request[:limit] = @scan_limit - @scan_count + end + + response = @next_chain.call(request) + + @record_count += response.count + throw :stop_pagination if @record_limit && @record_count >= @record_limit + + @scan_count += response.scanned_count + throw :stop_pagination if @scan_limit && @scan_count >= @scan_limit + + response + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/start_key.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/start_key.rb new file mode 100644 index 000000000..491464f75 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/middleware/start_key.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + module Middleware + class StartKey + def initialize(next_chain) + @next_chain = next_chain + end + + def call(request) + response = @next_chain.call(request) + + if response.last_evaluated_key + request[:exclusive_start_key] = response.last_evaluated_key + else + throw :stop_pagination + end + + response + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/projection_expression_convertor.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/projection_expression_convertor.rb new file mode 100644 index 000000000..1b71f1c4c --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/projection_expression_convertor.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + class ProjectionExpressionConvertor + attr_reader :expression, :name_placeholders + + def initialize(names, name_placeholders, name_placeholder_sequence) + @names = names + @name_placeholders = name_placeholders.dup + @name_placeholder_sequence = name_placeholder_sequence + + build + end + + private + + def build + return if @names.nil? || @names.empty? 
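+ # e.g. a :project option of [:created_at, :status] becomes the expression
+ # "created_at , #_a1" (the exact placeholder depends on the shared
+ # sequence), since STATUS is a DynamoDB reserved word; the mapping is
+ # recorded in @name_placeholders.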
+ + clauses = @names.map do |name| + if name.upcase.in?(Dynamoid::AdapterPlugin::AwsSdkV3::RESERVED_WORDS) + placeholder = @name_placeholder_sequence.call + @name_placeholders[placeholder] = name + placeholder + else + name.to_s + end + end + + @expression = clauses.join(' , ') + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/query.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/query.rb new file mode 100644 index 000000000..25cf8835a --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/query.rb @@ -0,0 +1,117 @@ +# frozen_string_literal: true + +require_relative 'middleware/backoff' +require_relative 'middleware/limit' +require_relative 'middleware/start_key' +require_relative 'filter_expression_convertor' +require_relative 'projection_expression_convertor' + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + class Query + OPTIONS_KEYS = %i[ + consistent_read scan_index_forward select index_name batch_size + exclusive_start_key record_limit scan_limit project + ].freeze + + attr_reader :client, :table, :options, :conditions + + def initialize(client, table, key_conditions, non_key_conditions, options) + @client = client + @table = table + + @key_conditions = key_conditions + @non_key_conditions = non_key_conditions + @options = options.slice(*OPTIONS_KEYS) + end + + def call + request = build_request + + Enumerator.new do |yielder| + api_call = lambda do |req| + client.query(req).tap do |response| + yielder << response + end + end + + middlewares = Middleware::Backoff.new( + Middleware::StartKey.new( + Middleware::Limit.new(api_call, record_limit: record_limit, scan_limit: scan_limit) + ) + ) + + catch :stop_pagination do + loop do + middlewares.call(request) + end + end + end + end + + private + + def build_request + # expressions + name_placeholder = +'#_a0' + value_placeholder = +':_a0' + + name_placeholder_sequence = -> { name_placeholder.next!.dup } + value_placeholder_sequence = -> { value_placeholder.next!.dup } + + name_placeholders = {} + value_placeholders = {} + + # Deal with various limits and batching + batch_size = options[:batch_size] + limit = [record_limit, scan_limit, batch_size].compact.min + + # key condition expression + convertor = FilterExpressionConvertor.new(@key_conditions, name_placeholders, value_placeholders, name_placeholder_sequence, value_placeholder_sequence) + key_condition_expression = convertor.expression + value_placeholders = convertor.value_placeholders + name_placeholders = convertor.name_placeholders + + # filter expression + convertor = FilterExpressionConvertor.new(@non_key_conditions, name_placeholders, value_placeholders, name_placeholder_sequence, value_placeholder_sequence) + filter_expression = convertor.expression + value_placeholders = convertor.value_placeholders + name_placeholders = convertor.name_placeholders + + # projection expression + convertor = ProjectionExpressionConvertor.new(options[:project], name_placeholders, name_placeholder_sequence) + projection_expression = convertor.expression + name_placeholders = convertor.name_placeholders + + request = options.slice( + :consistent_read, + :scan_index_forward, + :select, + :index_name, + :exclusive_start_key + ).compact + + request[:table_name] = table.name + request[:limit] = limit if limit + request[:key_condition_expression] = key_condition_expression if key_condition_expression.present? + request[:filter_expression] = filter_expression if filter_expression.present? 
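+ # Placeholders collected by the convertors above are passed alongside the
+ # expressions, e.g. { ':_a1' => 'Alice' } for values and
+ # { '#_a1' => 'name' } for attribute names that are DynamoDB reserved words.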
+ request[:expression_attribute_values] = value_placeholders if value_placeholders.present? + request[:expression_attribute_names] = name_placeholders if name_placeholders.present? + request[:projection_expression] = projection_expression if projection_expression.present? + + request + end + + def record_limit + options[:record_limit] + end + + def scan_limit + options[:scan_limit] + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/scan.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/scan.rb new file mode 100644 index 000000000..d4b1d7ad8 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/scan.rb @@ -0,0 +1,102 @@ +# frozen_string_literal: true + +require_relative 'middleware/backoff' +require_relative 'middleware/limit' +require_relative 'middleware/start_key' +require_relative 'filter_expression_convertor' +require_relative 'projection_expression_convertor' + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + class Scan + attr_reader :client, :table, :conditions, :options + + def initialize(client, table, conditions = {}, options = {}) + @client = client + @table = table + @conditions = conditions + @options = options + end + + def call + request = build_request + + Enumerator.new do |yielder| + api_call = lambda do |req| + client.scan(req).tap do |response| + yielder << response + end + end + + middlewares = Middleware::Backoff.new( + Middleware::StartKey.new( + Middleware::Limit.new(api_call, record_limit: record_limit, scan_limit: scan_limit) + ) + ) + + catch :stop_pagination do + loop do + middlewares.call(request) + end + end + end + end + + private + + def build_request + # expressions + name_placeholder = +'#_a0' + value_placeholder = +':_a0' + + name_placeholder_sequence = -> { name_placeholder.next!.dup } + value_placeholder_sequence = -> { value_placeholder.next!.dup } + + name_placeholders = {} + value_placeholders = {} + + # Deal with various limits and batching + batch_size = options[:batch_size] + limit = [record_limit, scan_limit, batch_size].compact.min + + # filter expression + convertor = FilterExpressionConvertor.new(conditions, name_placeholders, value_placeholders, name_placeholder_sequence, value_placeholder_sequence) + filter_expression = convertor.expression + value_placeholders = convertor.value_placeholders + name_placeholders = convertor.name_placeholders + + # projection expression + convertor = ProjectionExpressionConvertor.new(options[:project], name_placeholders, name_placeholder_sequence) + projection_expression = convertor.expression + name_placeholders = convertor.name_placeholders + + request = options.slice( + :consistent_read, + :exclusive_start_key, + :select, + :index_name + ).compact + + request[:table_name] = table.name + request[:limit] = limit if limit + request[:filter_expression] = filter_expression if filter_expression.present? + request[:expression_attribute_values] = value_placeholders if value_placeholders.present? + request[:expression_attribute_names] = name_placeholders if name_placeholders.present? + request[:projection_expression] = projection_expression if projection_expression.present? 
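+ # At this point the assembled request looks roughly like (values are
+ # illustrative):
+ #   { table_name: 'dynamoid_users', limit: 100,
+ #     filter_expression: '#_a1 = :_a1',
+ #     expression_attribute_names: { '#_a1' => 'status' },
+ #     expression_attribute_values: { ':_a1' => 'active' } }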
+ + request + end + + def record_limit + options[:record_limit] + end + + def scan_limit + options[:scan_limit] + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/table.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/table.rb new file mode 100644 index 000000000..605a70400 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/table.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + # Represents a table. Exposes data from the "DescribeTable" API call, and also + # provides methods for coercing values to the proper types based on the table's schema data + class Table + attr_reader :schema + + # + # @param [Hash] schema Data returns from a "DescribeTable" call + # + def initialize(schema) + @schema = schema[:table] + end + + def range_key + @range_key ||= schema[:key_schema].find { |d| d[:key_type] == RANGE_KEY }.try(:attribute_name) + end + + def range_type + range_type ||= schema[:attribute_definitions].find do |d| + d[:attribute_name] == range_key + end.try(:fetch, :attribute_type, nil) + end + + def hash_key + @hash_key ||= schema[:key_schema].find { |d| d[:key_type] == HASH_KEY }.try(:attribute_name).to_sym + end + + # + # Returns the API type (e.g. "N", "S") for the given column, if the schema defines it, + # nil otherwise + # + def col_type(col) + col = col.to_s + col_def = schema[:attribute_definitions].find { |d| d[:attribute_name] == col.to_s } + col_def && col_def[:attribute_type] + end + + def item_count + schema[:item_count] + end + + def name + schema[:table_name] + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/transact.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/transact.rb new file mode 100644 index 000000000..a17bf1048 --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/transact.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +# Prepare all the actions of the transaction for sending to the AWS SDK. +module Dynamoid + module AdapterPlugin + class AwsSdkV3 + class Transact + attr_reader :client + + def initialize(client) + @client = client + end + + # Perform all of the item actions in a single transaction. 
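+ #
+ # A rough usage sketch, assuming +put_action+ and +delete_action+ respond
+ # to #to_h with transact_item hashes:
+ #
+ #   Transact.new(client).transact_write_items([put_action, delete_action])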
+ # + # @param [Array] items of type Dynamoid::Transaction::Action or + # any other object whose to_h is a transact_item hash + # + def transact_write_items(items) + transact_items = items.map(&:to_h) + params = { + transact_items: transact_items, + return_consumed_capacity: 'TOTAL', + return_item_collection_metrics: 'SIZE' + } + client.transact_write_items(params) # returns this + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/until_past_table_status.rb b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/until_past_table_status.rb new file mode 100644 index 000000000..7056eeded --- /dev/null +++ b/dynamoid/lib/dynamoid/adapter_plugin/aws_sdk_v3/until_past_table_status.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module AdapterPlugin + class AwsSdkV3 + class UntilPastTableStatus + attr_reader :client, :table_name, :status + + def initialize(client, table_name, status = :creating) + @client = client + @table_name = table_name + @status = status + end + + def call + counter = 0 + resp = nil + begin + check = { again: true } + while check[:again] + sleep Dynamoid::Config.sync_retry_wait_seconds + resp = client.describe_table(table_name: table_name) + check = check_table_status?(counter, resp, status) + Dynamoid.logger.info "Checked table status for #{table_name} (check #{check.inspect})" + counter += 1 + end + # If you issue a DescribeTable request immediately after a CreateTable + # request, DynamoDB might return a ResourceNotFoundException. + # This is because DescribeTable uses an eventually consistent query, + # and the metadata for your table might not be available at that moment. + # Wait for a few seconds, and then try the DescribeTable request again. + # See: http://docs.aws.amazon.com/sdkforruby/api/Aws/DynamoDB/Client.html#describe_table-instance_method + rescue Aws::DynamoDB::Errors::ResourceNotFoundException => e + case status + when :creating + if counter >= Dynamoid::Config.sync_retry_max_times + Dynamoid.logger.warn "Waiting on table metadata for #{table_name} (check #{counter})" + retry # start over at first line of begin, does not reset counter + else + Dynamoid.logger.error "Exhausted max retries (Dynamoid::Config.sync_retry_max_times) waiting on table metadata for #{table_name} (check #{counter})" + raise e + end + else + # When deleting a table, "not found" is the goal. 
+ Dynamoid.logger.info "Checked table status for #{table_name}: Not Found (check #{check.inspect})" + end + end + end + + private + + def check_table_status?(counter, resp, expect_status) + status = PARSE_TABLE_STATUS.call(resp) + again = counter < Dynamoid::Config.sync_retry_max_times && + status == TABLE_STATUSES[expect_status] + { again: again, status: status, counter: counter } + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/application_time_zone.rb b/dynamoid/lib/dynamoid/application_time_zone.rb new file mode 100644 index 000000000..57fd19bba --- /dev/null +++ b/dynamoid/lib/dynamoid/application_time_zone.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module ApplicationTimeZone + def self.at(value) + case Dynamoid::Config.application_timezone + when :utc + ActiveSupport::TimeZone['UTC'].at(value).to_datetime + when :local + Time.at(value).to_datetime + when String + ActiveSupport::TimeZone[Dynamoid::Config.application_timezone].at(value).to_datetime + end + end + + def self.utc_offset + case Dynamoid::Config.application_timezone + when :utc + 0 + when :local + Time.now.utc_offset + when String + ActiveSupport::TimeZone[Dynamoid::Config.application_timezone].now.utc_offset + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations.rb b/dynamoid/lib/dynamoid/associations.rb new file mode 100644 index 000000000..8491c22fb --- /dev/null +++ b/dynamoid/lib/dynamoid/associations.rb @@ -0,0 +1,275 @@ +# frozen_string_literal: true + +require 'dynamoid/associations/association' +require 'dynamoid/associations/single_association' +require 'dynamoid/associations/many_association' +require 'dynamoid/associations/has_many' +require 'dynamoid/associations/belongs_to' +require 'dynamoid/associations/has_one' +require 'dynamoid/associations/has_and_belongs_to_many' + +module Dynamoid + # Connects models together through the magic of associations. We enjoy four different kinds of associations presently: + # * belongs_to + # * has_and_belongs_to_many + # * has_many + # * has_one + module Associations + extend ActiveSupport::Concern + + # Create the association tracking attribute and initialize it to an empty hash. + included do + class_attribute :associations, instance_accessor: false + + self.associations = {} + end + + module ClassMethods + # Declare a +has_many+ association for this document. 
+ # + # class Category + # include Dynamoid::Document + # + # has_many :posts + # end + # + # Association is an enumerable collection and supports following addition + # operations: + # + # * +create+ + # * +create!+ + # * +destroy_all+ + # * +delete_all+ + # * +delete+ + # * +<<+ + # * +where+ + # * +all+ + # * +empty?+ + # * +size+ + # + # When a name of an associated class doesn't match an association name a + # class name should be specified explicitly either with +class+ or + # +class_name+ option: + # + # has_many :labels, class: Tag + # has_many :labels, class_name: 'Tag' + # + # When associated class has own +belongs_to+ association to + # the current class and the name doesn't match a name of the current + # class this name can be specified with +inverse_of+ option: + # + # class Post + # include Dynamoid::Document + # + # belongs_to :item, class_name: 'Tag' + # end + # + # class Tag + # include Dynamoid::Document + # + # has_many :posts, inverse_of: :item + # end + # + # @param name [Symbol] the name of the association + # @param options [Hash] options to pass to the association constructor + # @option options [Class] :class the target class of the has_many association; that is, the belongs_to class + # @option options [String] :class_name the name of the target class of the association; that is, the name of the belongs_to class + # @option options [Symbol] :inverse_of the name of the association on the target class; that is, if the class has a belongs_to association, the name of that association + # + # @since 0.2.0 + def has_many(name, options = {}) + association(:has_many, name, options) + end + + # Declare a +has_one+ association for this document. + # + # class Image + # include Dynamoid::Document + # + # has_one :post + # end + # + # Association supports following operations: + # + # * +create+ + # * +create!+ + # * +delete+ + # + # When a name of an associated class doesn't match an association name a + # class name should be specified explicitly either with +class+ or + # +class_name+ option: + # + # has_one :item, class: Post + # has_one :item, class_name: 'Post' + # + # When associated class has own +belong_to+ association to the current + # class and the name doesn't match a name of the current class this name + # can be specified with +inverse_of+ option: + # + # class Post + # include Dynamoid::Document + # + # belongs_to :logo, class_name: 'Image' + # end + # + # class Image + # include Dynamoid::Document + # + # has_one :post, inverse_of: :logo + # end + # + # @param name [Symbol] the name of the association + # @param options [Hash] options to pass to the association constructor + # @option options [Class] :class the target class of the has_one association; that is, the belongs_to class + # @option options [String] :class_name the name of the target class of the association; that is, the name of the belongs_to class + # @option options [Symbol] :inverse_of the name of the association on the target class; that is, if the class has a belongs_to association, the name of that association + # + # @since 0.2.0 + def has_one(name, options = {}) + association(:has_one, name, options) + end + + # Declare a +belongs_to+ association for this document. 
+ # + # class Post + # include Dynamoid::Document + # + # belongs_to :categories + # end + # + # Association supports following operations: + # + # * +create+ + # * +create!+ + # * +delete+ + # + # When a name of an associated class doesn't match an association name a + # class name should be specified explicitly either with +class+ or + # +class_name+ option: + # + # belongs_to :item, class: Post + # belongs_to :item, class_name: 'Post' + # + # When associated class has own +has_many+ or +has_one+ association to + # the current class and the name doesn't match a name of the current + # class this name can be specified with +inverse_of+ option: + # + # class Category + # include Dynamoid::Document + # + # has_many :items, class_name: 'Post' + # end + # + # class Post + # include Dynamoid::Document + # + # belongs_to :categories, inverse_of: :items + # end + # + # By default a hash key attribute name is +id+. If an associated class + # uses another name for a hash key attribute it should be specified in + # the +belongs_to+ association: + # + # belongs_to :categories, foreign_key: :uuid + # + # @param name [Symbol] the name of the association + # @param options [Hash] options to pass to the association constructor + # @option options [Class] :class the target class of the has_one association; that is, the has_many or has_one class + # @option options [String] :class_name the name of the target class of the association; that is, the name of the has_many or has_one class + # @option options [Symbol] :inverse_of the name of the association on the target class; that is, if the class has a has_many or has_one association, the name of that association + # @option options [Symbol] :foreign_key the name of a hash key attribute in the target class + # + # @since 0.2.0 + def belongs_to(name, options = {}) + association(:belongs_to, name, options) + end + + # Declare a +has_and_belongs_to_many+ association for this document. 
+ # + # class Post + # include Dynamoid::Document + # + # has_and_belongs_to_many :tags + # end + # + # Association is an enumerable collection and supports following addition + # operations: + # + # * +create+ + # * +create!+ + # * +destroy_all+ + # * +delete_all+ + # * +delete+ + # * +<<+ + # * +where+ + # * +all+ + # * +empty?+ + # * +size+ + # + # When a name of an associated class doesn't match an association name a + # class name should be specified explicitly either with +class+ or + # +class_name+ option: + # + # has_and_belongs_to_many :labels, class: Tag + # has_and_belongs_to_many :labels, class_name: 'Tag' + # + # When associated class has own +has_and_belongs_to_many+ association to + # the current class and the name doesn't match a name of the current + # class this name can be specified with +inverse_of+ option: + # + # class Tag + # include Dynamoid::Document + # + # has_and_belongs_to_many :items, class_name: 'Post' + # end + # + # class Post + # include Dynamoid::Document + # + # has_and_belongs_to_many :tags, inverse_of: :items + # end + # + # @param name [Symbol] the name of the association + # @param options [Hash] options to pass to the association constructor + # @option options [Class] :class the target class of the has_and_belongs_to_many association; that is, the belongs_to class + # @option options [String] :class_name the name of the target class of the association; that is, the name of the belongs_to class + # @option options [Symbol] :inverse_of the name of the association on the target class; that is, if the class has a belongs_to association, the name of that association + # + # @since 0.2.0 + def has_and_belongs_to_many(name, options = {}) + association(:has_and_belongs_to_many, name, options) + end + + private + + # create getters and setters for an association. + # + # @param type [Symbol] the type (:has_one, :has_many, :has_and_belongs_to_many, :belongs_to) of the association + # @param name [Symbol] the name of the association + # @param options [Hash] options to pass to the association constructor; see above for all valid options + # + # @since 0.2.0 + def association(type, name, options = {}) + # Declare document field. + # In simple case it's equivalent to + # field "#{name}_ids".to_sym, :set + assoc = Dynamoid::Associations.const_get(type.to_s.camelcase).new(nil, name, options) + field_name = assoc.declaration_field_name + field_type = assoc.declaration_field_type + + field field_name.to_sym, field_type + + associations[name] = options.merge(type: type) + + define_method(name) do + @associations[:"#{name}_ids"] ||= Dynamoid::Associations.const_get(type.to_s.camelcase).new(self, name, options) + end + + define_method(:"#{name}=") do |objects| + @associations[:"#{name}_ids"] ||= Dynamoid::Associations.const_get(type.to_s.camelcase).new(self, name, options) + @associations[:"#{name}_ids"].setter(objects) + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations/association.rb b/dynamoid/lib/dynamoid/associations/association.rb new file mode 100644 index 000000000..19f627a37 --- /dev/null +++ b/dynamoid/lib/dynamoid/associations/association.rb @@ -0,0 +1,137 @@ +# frozen_string_literal: true + +module Dynamoid + # The base association module which all associations include. Every association has two very important components: the source and + # the target. The source is the object which is calling the association information. It always has the target_ids inside of an attribute on itself. 
+ # The target is the object which is referencing by this association. + # @private + module Associations + # @private + module Association + attr_accessor :name, :options, :source, :loaded + + # Create a new association. + # + # @param [Class] source the source record of the association; that is, the record that you already have + # @param [Symbol] name the name of the association + # @param [Hash] options optional parameters for the association + # @option options [Class] :class the target class of the association; that is, the class to which the association objects belong + # @option options [Symbol] :class_name the name of the target class of the association; only this or Class is necessary + # @option options [Symbol] :inverse_of the name of the association on the target class + # @option options [Symbol] :foreign_key the name of the field for belongs_to association + # + # @return [Dynamoid::Association] the actual association instance itself + # + # @since 0.2.0 + def initialize(source, name, options) + @name = name + @options = options + @source = source + @loaded = false + end + + def loaded? + @loaded + end + + def find_target; end + + def target + unless loaded? + @target = find_target + @loaded = true + end + + @target + end + + def reset + @target = nil + @loaded = false + end + + def declaration_field_name + "#{name}_ids" + end + + def declaration_field_type + :set + end + + def disassociate_source + Array(target).each do |target_entry| + target_entry.send(target_association).disassociate(source.hash_key) if target_entry && target_association + end + end + + private + + # The target class name, either inferred through the association's name or specified in options. + # + # @since 0.2.0 + def target_class_name + options[:class_name] || name.to_s.classify + end + + # The target class, either inferred through the association's name or specified in options. + # + # @since 0.2.0 + def target_class + options[:class] || target_class_name.constantize + end + + # The target attribute: that is, the attribute on each object of the association that should reference the source. + # + # @since 0.2.0 + def target_attribute + # In simple case it's equivalent to + # "#{target_association}_ids".to_sym if target_association + if target_association + target_options = target_class.associations[target_association] + assoc = Dynamoid::Associations.const_get(target_options[:type].to_s.camelcase).new(nil, target_association, target_options) + assoc.send(:source_attribute) + end + end + + # The ids in the target association. + # + # @since 0.2.0 + def target_ids + target.send(target_attribute) || Set.new + end + + # The ids in the target association. + # + # @since 0.2.0 + def source_class + source.class + end + + # The source's association attribute: the name of the association with _ids afterwards, like "users_ids". + # + # @since 0.2.0 + def source_attribute + declaration_field_name.to_sym + end + + # The ids in the source association. + # + # @since 0.2.0 + def source_ids + # handle case when we store scalar value instead of collection (when foreign_key option is specified) + Array(source.send(source_attribute)).compact.to_set || Set.new + end + + # Create a new instance of the target class without trying to add it to the association. This creates a base, that caller can update before setting or adding it. 
+ # + # @param attributes [Hash] attribute values for the new object + # + # @return [Dynamoid::Document] the newly-created object + # + # @since 1.1.1 + def build(attributes = {}) + target_class.build(attributes) + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations/belongs_to.rb b/dynamoid/lib/dynamoid/associations/belongs_to.rb new file mode 100644 index 000000000..52c455565 --- /dev/null +++ b/dynamoid/lib/dynamoid/associations/belongs_to.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +module Dynamoid + # The belongs_to association. For belongs_to, we reference only a single target instead of multiple records; that target is the + # object to which the association object is associated. + module Associations + # @private + class BelongsTo + include SingleAssociation + + def declaration_field_name + options[:foreign_key] || "#{name}_ids" + end + + def declaration_field_type + if options[:foreign_key] + target_class.attributes[target_class.hash_key][:type] + else + :set + end + end + + # Override default implementation + # to handle case when we store id as scalar value, not as collection + def associate(hash_key) + target.send(target_association).disassociate(source.hash_key) if target && target_association + + if options[:foreign_key] + source.update_attribute(source_attribute, hash_key) + else + source.update_attribute(source_attribute, Set[hash_key]) + end + end + + private + + # Find the target association, either has_many or has_one. Uses either options[:inverse_of] or the source class name and default parsing to + # return the most likely name for the target association. + # + # @since 0.2.0 + def target_association + name = options[:inverse_of] || source.class.to_s.underscore.pluralize.to_sym + if target_class.associations.dig(name, :type) == :has_many + return name + end + + name = options[:inverse_of] || source.class.to_s.underscore.to_sym + if target_class.associations.dig(name, :type) == :has_one + return name # rubocop:disable Style/RedundantReturn + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations/has_and_belongs_to_many.rb b/dynamoid/lib/dynamoid/associations/has_and_belongs_to_many.rb new file mode 100644 index 000000000..ce57b0e8b --- /dev/null +++ b/dynamoid/lib/dynamoid/associations/has_and_belongs_to_many.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +module Dynamoid + # The has and belongs to many association. + module Associations + # @private + class HasAndBelongsToMany + include ManyAssociation + + private + + # Find the target association, always another :has_and_belongs_to_many association. Uses either options[:inverse_of] or the source class name + # and default parsing to return the most likely name for the target association. + # + # @since 0.2.0 + def target_association + key_name = options[:inverse_of] || source.class.to_s.pluralize.underscore.to_sym + guess = target_class.associations[key_name] + return nil if guess.nil? || guess[:type] != :has_and_belongs_to_many + + key_name + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations/has_many.rb b/dynamoid/lib/dynamoid/associations/has_many.rb new file mode 100644 index 000000000..b5068c1e9 --- /dev/null +++ b/dynamoid/lib/dynamoid/associations/has_many.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +module Dynamoid + # The has_many association. + module Associations + # @private + class HasMany + include ManyAssociation + + private + + # Find the target association, always a :belongs_to association. 
Uses either options[:inverse_of] or the source class name + # and default parsing to return the most likely name for the target association. + # + # @since 0.2.0 + def target_association + key_name = options[:inverse_of] || source.class.to_s.singularize.underscore.to_sym + guess = target_class.associations[key_name] + return nil if guess.nil? || guess[:type] != :belongs_to + + key_name + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations/has_one.rb b/dynamoid/lib/dynamoid/associations/has_one.rb new file mode 100644 index 000000000..18c38e829 --- /dev/null +++ b/dynamoid/lib/dynamoid/associations/has_one.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +module Dynamoid + # The HasOne association. + module Associations + # @private + class HasOne + include Association + include SingleAssociation + + private + + # Find the target association, always a :belongs_to association. Uses either options[:inverse_of] or the source class name + # and default parsing to return the most likely name for the target association. + # + # @since 0.2.0 + def target_association + key_name = options[:inverse_of] || source.class.to_s.singularize.underscore.to_sym + guess = target_class.associations[key_name] + return nil if guess.nil? || guess[:type] != :belongs_to + + key_name + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations/many_association.rb b/dynamoid/lib/dynamoid/associations/many_association.rb new file mode 100644 index 000000000..407ec32e7 --- /dev/null +++ b/dynamoid/lib/dynamoid/associations/many_association.rb @@ -0,0 +1,250 @@ +# frozen_string_literal: true + +module Dynamoid + module Associations + module ManyAssociation + include Association + + attr_accessor :query + + def initialize(*args) + @query = {} + super + end + + include Enumerable + + # @private + # Delegate methods to the records the association represents. + delegate :first, :last, :empty?, :size, :class, to: :records + + # The records associated to the source. + # + # @return the association records; depending on which association this is, either a single instance or an array + # + # @private + # @since 0.2.0 + def find_target + return [] if source_ids.empty? + + Array(target_class.find(source_ids.to_a, raise_error: false)) + end + + # @private + def records + if query.empty? + target + else + results_with_query(target) + end + end + + # Alias convenience methods for the associations. + alias all records + alias count size + alias nil? empty? + + # Delegate include? to the records. + def include?(object) + records.include?(object) + end + + # Delete an object or array of objects from the association. + # + # tag.posts.delete(post) + # tag.posts.delete([post1, post2, post3]) + # + # This removes their records from the association field on the source, + # and attempts to remove the source from the target association if it is + # detected to exist. + # + # It saves both models immediately - the source model and the target one + # so any not saved changes will be saved as well. + # + # @param object [Dynamoid::Document|Array] model (or array of models) to remove from the association + # @return [Dynamoid::Document|Array] the deleted model + # @since 0.2.0 + def delete(object) + disassociate(Array(object).collect(&:hash_key)) + if target_association + Array(object).each { |obj| obj.send(target_association).disassociate(source.hash_key) } + end + object + end + + # Add an object or array of objects to an association. 
+ # + # tag.posts << post + # tag.posts << [post1, post2, post3] + # + # This preserves the current records in the association (if any) and adds + # the object to the target association if it is detected to exist. + # + # It saves both models immediately - the source model and the target one + # so any not saved changes will be saved as well. + # + # @param object [Dynamoid::Document|Array] model (or array of models) to add to the association + # @return [Dynamoid::Document] the added model + # @since 0.2.0 + def <<(object) + associate(Array(object).collect(&:hash_key)) + + if target_association + Array(object).each { |obj| obj.send(target_association).associate(source.hash_key) } + end + + object + end + + # Replace an association with object or array of objects. This removes all of the existing associated records and replaces them with + # the passed object(s), and associates the target association if it is detected to exist. + # + # @param [Dynamoid::Document] object the object (or array of objects) to add to the association + # + # @return [Dynamoid::Document|Array] the added object + # + # @private + # @since 0.2.0 + def setter(object) + target.each { |o| delete(o) } + self << object + object + end + + # Create a new instance of the target class, persist it and add directly + # to the association. + # + # tag.posts.create!(title: 'foo') + # + # Several models can be created at once when an array of attributes + # specified: + # + # tag.posts.create!([{ title: 'foo' }, {title: 'bar'} ]) + # + # If the creation fails an exception will be raised. + # + # @param attributes [Hash] attribute values for the new object + # @return [Dynamoid::Document|Array] the newly-created object + # @since 0.2.0 + def create!(attributes = {}) + self << target_class.create!(attributes) + end + + # Create a new instance of the target class, persist it and add directly + # to the association. + # + # tag.posts.create(title: 'foo') + # + # Several models can be created at once when an array of attributes + # specified: + # + # tag.posts.create([{ title: 'foo' }, {title: 'bar'} ]) + # + # @param attributes [Hash] attribute values for the new object + # @return [Dynamoid::Document|Array] the newly-created object + # @since 0.2.0 + def create(attributes = {}) + self << target_class.create(attributes) + end + + # Create a new instance of the target class and add it directly to the association. If the create fails an exception will be raised. + # + # @return [Dynamoid::Document] the newly-created object + # + # @private + # @since 0.2.0 + def each(&block) + records.each(&block) + end + + # Destroys all members of the association and removes them from the + # association. + # + # tag.posts.destroy_all + # + # @since 0.2.0 + def destroy_all + objs = target + source.update_attribute(source_attribute, nil) + objs.each(&:destroy) + end + + # Deletes all members of the association and removes them from the + # association. + # + # tag.posts.delete_all + # + # @since 0.2.0 + def delete_all + objs = target + source.update_attribute(source_attribute, nil) + objs.each(&:delete) + end + + # Naive association filtering. + # + # tag.posts.where(title: 'foo') + # + # It loads lazily all the associated models and checks provided + # conditions. That's why only equality conditions can be specified. + # + # @param args [Hash] A hash of attributes; each must match every returned object's attribute exactly. 
+ # @return [Dynamoid::Association] the association this method was called on (for chaining purposes) + # @since 0.2.0 + def where(args) + filtered = clone + filtered.query = query.clone + args.each { |k, v| filtered.query[k] = v } + filtered + end + + # Is this array equal to the association's records? + # + # @return [Boolean] true/false + # + # @since 0.2.0 + def ==(other) + records == Array(other) + end + + # Delegate methods we don't find directly to the records array. + # + # @private + # @since 0.2.0 + def method_missing(method, *args) + if records.respond_to?(method) + records.send(method, *args) + else + super + end + end + + # @private + def associate(hash_key) + source.update_attribute(source_attribute, source_ids.merge(Array(hash_key))) + end + + # @private + def disassociate(hash_key) + source.update_attribute(source_attribute, source_ids - Array(hash_key)) + end + + private + + # If a query exists, filter all existing results based on that query. + # + # @param [Array] results the raw results for the association + # + # @return [Array] the filtered results for the query + # + # @since 0.2.0 + def results_with_query(results) + results.find_all do |result| + query.all? do |attribute, value| + result.send(attribute) == value + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/associations/single_association.rb b/dynamoid/lib/dynamoid/associations/single_association.rb new file mode 100644 index 000000000..80a5a5798 --- /dev/null +++ b/dynamoid/lib/dynamoid/associations/single_association.rb @@ -0,0 +1,140 @@ +# frozen_string_literal: true + +module Dynamoid + module Associations + module SingleAssociation + include Association + + delegate :class, to: :target + + # @private + def setter(object) + if object.nil? + delete + return + end + + associate(object.hash_key) + self.target = object + object.send(target_association).associate(source.hash_key) if target_association + object + end + + # Delete a model from the association. + # + # post.logo.delete # => nil + # + # Saves both models immediately - a source model and a target one so any + # unsaved changes will be saved. Doesn't delete an associated model from + # DynamoDB. + def delete + disassociate_source + disassociate + target + end + + # Create a new instance of the target class, persist it and associate. + # + # post.logo.create!(hight: 50, width: 90) + # + # If the creation fails an exception will be raised. + # + # @param attributes [Hash] attributes of a model to create + # @return [Dynamoid::Document] created model + def create!(attributes = {}) + setter(target_class.create!(attributes)) + end + + # Create a new instance of the target class, persist it and associate. + # + # post.logo.create(hight: 50, width: 90) + # + # @param attributes [Hash] attributes of a model to create + # @return [Dynamoid::Document] created model + def create(attributes = {}) + setter(target_class.create(attributes)) + end + + # Is this object equal to the association's target? + # + # @return [Boolean] true/false + # + # @since 0.2.0 + def ==(other) + target == other + end + + if ::RUBY_VERSION < '2.7' + # Delegate methods we don't find directly to the target. + # + # @private + # @since 0.2.0 + def method_missing(method, *args, &block) + if target.respond_to?(method) + target.send(method, *args, &block) + else + super + end + end + else + # Delegate methods we don't find directly to the target. 
+ # + # @private + # @since 0.2.0 + def method_missing(method, *args, **kwargs, &block) + if target.respond_to?(method) + target.send(method, *args, **kwargs, &block) + else + super + end + end + end + + # @private + def respond_to_missing?(method_name, include_private = false) + target.respond_to?(method_name, include_private) || super + end + + # @private + def nil? + target.nil? + end + + # @private + def empty? + # This is needed to that ActiveSupport's #blank? and #present? + # methods work as expected for SingleAssociations. + target.nil? + end + + # @private + def associate(hash_key) + disassociate_source + source.update_attribute(source_attribute, Set[hash_key]) + end + + # @private + def disassociate(_hash_key = nil) + source.update_attribute(source_attribute, nil) + end + + private + + # Find the target of the has_one association. + # + # @return [Dynamoid::Document] the found target (or nil if nothing) + # + # @since 0.2.0 + def find_target + return if source_ids.empty? + + target_class.find(source_ids.first, raise_error: false) + end + + def target=(object) + @target = object + @loaded = true + end + end + end +end diff --git a/dynamoid/lib/dynamoid/components.rb b/dynamoid/lib/dynamoid/components.rb new file mode 100644 index 000000000..b34129dbf --- /dev/null +++ b/dynamoid/lib/dynamoid/components.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +module Dynamoid + # All modules that a Document is composed of are defined in this + # module, to keep the document class from getting too cluttered. + # @private + module Components + extend ActiveSupport::Concern + + included do + extend ActiveModel::Translation + extend ActiveModel::Callbacks + + define_model_callbacks :create, :save, :destroy, :update + define_model_callbacks :initialize, :find, :touch, only: :after + + before_save :set_expires_field + after_initialize :set_inheritance_field + end + + include ActiveModel::AttributeMethods # Actually it will be inclided in Dirty module again + include ActiveModel::Conversion + include ActiveModel::MassAssignmentSecurity if defined?(ActiveModel::MassAssignmentSecurity) + include ActiveModel::Naming + include ActiveModel::Observing if defined?(ActiveModel::Observing) + include ActiveModel::Serializers::JSON + include ActiveModel::Serializers::Xml if defined?(ActiveModel::Serializers::Xml) + include Dynamoid::Persistence + include Dynamoid::Loadable + # Dirty module should be included after Persistence and Loadable + # because it overrides some methods declared in these modules + include Dynamoid::Dirty + include Dynamoid::Fields + include Dynamoid::Indexes + include Dynamoid::Finders + include Dynamoid::Associations + include Dynamoid::Criteria + include Dynamoid::Validations + include Dynamoid::IdentityMap + end +end diff --git a/dynamoid/lib/dynamoid/config.rb b/dynamoid/lib/dynamoid/config.rb new file mode 100644 index 000000000..34e2a7108 --- /dev/null +++ b/dynamoid/lib/dynamoid/config.rb @@ -0,0 +1,101 @@ +# frozen_string_literal: true + +require 'uri' +require 'logger' +require 'dynamoid/config/options' +require 'dynamoid/config/backoff_strategies/constant_backoff' +require 'dynamoid/config/backoff_strategies/exponential_backoff' + +module Dynamoid + # Contains all the basic configuration information required for Dynamoid: both sensible defaults and required fields. + # @private + module Config + # @since 3.3.1 + DEFAULT_NAMESPACE = if defined?(Rails) + klass = Rails.application.class + app_name = Rails::VERSION::MAJOR >= 6 ? 
klass.module_parent_name : klass.parent_name + "dynamoid_#{app_name}_#{Rails.env}" + else + 'dynamoid' + end + + extend self + + extend Options + include ActiveModel::Observing if defined?(ActiveModel::Observing) + + # All the default options. + option :adapter, default: 'aws_sdk_v3' + option :namespace, default: DEFAULT_NAMESPACE + option :access_key, default: nil + option :secret_key, default: nil + option :credentials, default: nil + option :region, default: nil + option :batch_size, default: 100 + option :capacity_mode, default: nil + option :read_capacity, default: 100 + option :write_capacity, default: 20 + option :warn_on_scan, default: true + option :endpoint, default: nil + option :identity_map, default: false + option :timestamps, default: true + option :sync_retry_max_times, default: 60 # a bit over 2 minutes + option :sync_retry_wait_seconds, default: 2 + option :convert_big_decimal, default: false + option :store_attribute_with_nil_value, default: false # keep or ignore attribute with nil value at saving + option :models_dir, default: './app/models' # perhaps you keep your dynamoid models in a different directory? + option :application_timezone, default: :utc # available values - :utc, :local, time zone name like "Hawaii" + option :dynamodb_timezone, default: :utc # available values - :utc, :local, time zone name like "Hawaii" + option :store_datetime_as_string, default: false # store Time fields in ISO 8601 string format + option :store_date_as_string, default: false # store Date fields in ISO 8601 string format + option :store_boolean_as_native, default: true + option :backoff, default: nil # callable object to handle exceeding of table throughput limit + option :backoff_strategies, default: { + constant: BackoffStrategies::ConstantBackoff, + exponential: BackoffStrategies::ExponentialBackoff + } + option :log_formatter, default: nil + option :http_continue_timeout, default: nil # specify if you'd like to overwrite Aws Configure - default: 1 + option :http_idle_timeout, default: nil # - default: 5 + option :http_open_timeout, default: nil # - default: 15 + option :http_read_timeout, default: nil # - default: 60 + option :create_table_on_save, default: true + + # The default logger for Dynamoid: either the Rails logger or just stdout. + # + # @since 0.2.0 + def default_logger + defined?(Rails) && Rails.respond_to?(:logger) ? Rails.logger : ::Logger.new($stdout) + end + + # Returns the assigned logger instance. + # + # @since 0.2.0 + def logger + @logger ||= default_logger + end + + # If you want to, set the logger manually to any output you'd like. Or pass false or nil to disable logging entirely. 
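+ #
+ # A small usage sketch (the call sites are illustrative; only the
+ # false/nil/true handling mirrors the method body below):
+ #
+ #   Dynamoid::Config.logger = Logger.new($stderr) # any object responding to #info
+ #   Dynamoid::Config.logger = nil                 # silences logging (Logger.new(nil))
+ #   Dynamoid::Config.logger = true                # restores the default logger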
+ # + # @since 0.2.0 + def logger=(logger) + case logger + when false, nil then @logger = ::Logger.new(nil) + when true then @logger = default_logger + else + @logger = logger if logger.respond_to?(:info) + end + end + + def build_backoff + if backoff.is_a?(Hash) + name = backoff.keys[0] + args = backoff.values[0] + + backoff_strategies[name].call(args) + else + backoff_strategies[backoff].call + end + end + end +end diff --git a/dynamoid/lib/dynamoid/config/backoff_strategies/constant_backoff.rb b/dynamoid/lib/dynamoid/config/backoff_strategies/constant_backoff.rb new file mode 100644 index 000000000..782c8b9bb --- /dev/null +++ b/dynamoid/lib/dynamoid/config/backoff_strategies/constant_backoff.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +module Dynamoid + module Config + # @private + module BackoffStrategies + class ConstantBackoff + def self.call(sec = 1) + -> { sleep sec } + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/config/backoff_strategies/exponential_backoff.rb b/dynamoid/lib/dynamoid/config/backoff_strategies/exponential_backoff.rb new file mode 100644 index 000000000..8b9755e80 --- /dev/null +++ b/dynamoid/lib/dynamoid/config/backoff_strategies/exponential_backoff.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module Dynamoid + module Config + # @private + module BackoffStrategies + # Truncated binary exponential backoff algorithm + # See https://en.wikipedia.org/wiki/Exponential_backoff + class ExponentialBackoff + def self.call(opts = {}) + opts = { base_backoff: 0.5, ceiling: 3 }.merge(opts) + base_backoff = opts[:base_backoff] + ceiling = opts[:ceiling] + + times = 1 + + lambda do + power = [times - 1, ceiling - 1].min + backoff = base_backoff * (2**power) + sleep backoff + + times += 1 + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/config/options.rb b/dynamoid/lib/dynamoid/config/options.rb new file mode 100644 index 000000000..3b9825553 --- /dev/null +++ b/dynamoid/lib/dynamoid/config/options.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +# Shamelessly stolen from Mongoid! +module Dynamoid + module Config + # Encapsulates logic for setting options. + # @private + module Options + # Get the defaults or initialize a new empty hash. + # + # @example Get the defaults. + # options.defaults + # + # @return [ Hash ] The default options. + # + # @since 0.2.0 + def defaults + @defaults ||= {} + end + + # Define a configuration option with a default. + # + # @example Define the option. + # Options.option(:persist_in_safe_mode, :default => false) + # + # @param [ Symbol ] name The name of the configuration option. + # @param [ Hash ] options Extras for the option. + # + # @option options [ Object ] :default The default value. + # + # @since 0.2.0 + def option(name, options = {}) + defaults[name] = settings[name] = options[:default] + + class_eval <<-RUBY, __FILE__, __LINE__ + 1 + def #{name} # def endpoint + settings[#{name.inspect}] # settings["endpoint"] + end # end + + def #{name}=(value) # def endpoint=(value) + settings[#{name.inspect}] = value # settings["endpoint"] = value + end # end + + def #{name}? # def endpoint? + #{name} # endpoint + end # end + + def reset_#{name} # def reset_endpoint + settings[#{name.inspect}] = defaults[#{name.inspect}] # settings["endpoint"] = defaults["endpoint"] + end # end + RUBY + end + + # Reset the configuration options to the defaults. + # + # @example Reset the configuration options. + # config.reset + # + # @return [ Hash ] The defaults. 
+ # + # @since 0.2.0 + def reset + settings.replace(defaults) + end + + # Get the settings or initialize a new empty hash. + # + # @example Get the settings. + # options.settings + # + # @return [ Hash ] The setting options. + # + # @since 0.2.0 + def settings + @settings ||= {} + end + end + end +end diff --git a/dynamoid/lib/dynamoid/criteria.rb b/dynamoid/lib/dynamoid/criteria.rb new file mode 100644 index 000000000..76c022948 --- /dev/null +++ b/dynamoid/lib/dynamoid/criteria.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +require 'dynamoid/criteria/chain' + +module Dynamoid + # Allows classes to be queried by where, all, first, and each and return criteria chains. + module Criteria + extend ActiveSupport::Concern + + # @private + module ClassMethods + %i[ + where consistent all first last delete_all destroy_all each record_limit + scan_limit batch start scan_index_forward find_by_pages project pluck + ].each do |name| + # Return a criteria chain in response to a method that will begin or end a chain. For more information, + # see Dynamoid::Criteria::Chain. + # + # @since 0.2.0 + define_method(name) do |*args, &blk| + # Don't use keywork arguments delegating (with **kw). It works in + # different way in different Ruby versions: <= 2.6, 2.7, 3.0 and in some + # future 3.x versions. Providing that there are no downstream methods + # with keyword arguments in Chain. + # + # https://eregon.me/blog/2019/11/10/the-delegation-challenge-of-ruby27.html + + chain = Dynamoid::Criteria::Chain.new(self) + chain.send(name, *args, &blk) + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/criteria/chain.rb b/dynamoid/lib/dynamoid/criteria/chain.rb new file mode 100644 index 000000000..9d46a9341 --- /dev/null +++ b/dynamoid/lib/dynamoid/criteria/chain.rb @@ -0,0 +1,762 @@ +# frozen_string_literal: true + +require_relative 'key_fields_detector' +require_relative 'nonexistent_fields_detector' +require_relative 'where_conditions' + +module Dynamoid + module Criteria + # The criteria chain is equivalent to an ActiveRecord relation (and realistically I should change the name from + # chain to relation). It is a chainable object that builds up a query and eventually executes it by a Query or Scan. + class Chain + attr_reader :source, :consistent_read, :key_fields_detector + + include Enumerable + + ALLOWED_FIELD_OPERATORS = Set.new( + %w[ + eq ne gt lt gte lte between begins_with in contains not_contains null not_null + ] + ).freeze + + # Create a new criteria chain. + # + # @param [Class] source the class upon which the ultimate query will be performed. + def initialize(source) + @where_conditions = WhereConditions.new + @source = source + @consistent_read = false + @scan_index_forward = true + + # we should re-initialize keys detector every time we change @where_conditions + @key_fields_detector = KeyFieldsDetector.new(@where_conditions, @source) + end + + # Returns a chain which is a result of filtering current chain with the specified conditions. + # + # It accepts conditions in the form of a hash. + # + # Post.where(links_count: 2) + # + # A key could be either string or symbol. + # + # In order to express conditions other than equality predicates could be used. 
+ # Predicate should be added to an attribute name to form a key +'created_at.gt' => Date.yesterday+ + # + # Currently supported following predicates: + # - +gt+ - greater than + # - +gte+ - greater or equal + # - +lt+ - less than + # - +lte+ - less or equal + # - +ne+ - not equal + # - +between+ - an attribute value is greater than the first value and less than the second value + # - +in+ - check an attribute in a list of values + # - +begins_with+ - check for a prefix in string + # - +contains+ - check substring or value in a set or array + # - +not_contains+ - check for absence of substring or a value in set or array + # - +null+ - attribute doesn't exists in an item + # - +not_null+ - attribute exists in an item + # + # All the predicates match operators supported by DynamoDB's + # {ComparisonOperator}[https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Condition.html#DDB-Type-Condition-ComparisonOperator] + # + # Post.where('size.gt' => 1000) + # Post.where('size.gte' => 1000) + # Post.where('size.lt' => 35000) + # Post.where('size.lte' => 35000) + # Post.where('author.ne' => 'John Doe') + # Post.where('created_at.between' => [Time.now - 3600, Time.now]) + # Post.where('category.in' => ['tech', 'fashion']) + # Post.where('title.begins_with' => 'How long') + # Post.where('tags.contains' => 'Ruby') + # Post.where('tags.not_contains' => 'Ruby on Rails') + # Post.where('legacy_attribute.null' => true) + # Post.where('optional_attribute.not_null' => true) + # + # There are some limitations for a sort key. Only following predicates + # are supported - +gt+, +gte+, +lt+, +lte+, +between+, +begins_with+. + # + # +where+ without argument will return the current chain. + # + # Multiple calls can be chained together and conditions will be merged: + # + # Post.where('size.gt' => 1000).where('title' => 'some title') + # + # It's equivalent to: + # + # Post.where('size.gt' => 1000, 'title' => 'some title') + # + # But only one condition can be specified for a certain attribute. The + # last specified condition will override all the others. Only condition + # 'size.lt' => 200 will be used in following examples: + # + # Post.where('size.gt' => 100, 'size.lt' => 200) + # Post.where('size.gt' => 100).where('size.lt' => 200) + # + # Internally +where+ performs either +Scan+ or +Query+ operation. + # + # @return [Dynamoid::Criteria::Chain] + # @since 0.2.0 + def where(args) + detector = NonexistentFieldsDetector.new(args, @source) + if detector.found? + Dynamoid.logger.warn(detector.warning_message) + end + + @where_conditions.update(args.symbolize_keys) + + # we should re-initialize keys detector every time we change @where_conditions + @key_fields_detector = KeyFieldsDetector.new(@where_conditions, @source, forced_index_name: @forced_index_name) + + self + end + + # Turns on strongly consistent reads. + # + # By default reads are eventually consistent. + # + # Post.where('size.gt' => 1000).consistent + # + # @return [Dynamoid::Criteria::Chain] + def consistent + @consistent_read = true + self + end + + # Returns all the records matching the criteria. + # + # Since +where+ and most of the other methods return a +Chain+ + # the only way to get a result as a collection is to call the +all+ + # method. 
It returns +Enumerator+ which could be used directly or + # transformed into +Array+ + # + # Post.all # => Enumerator + # Post.where(links_count: 2).all # => Enumerator + # Post.where(links_count: 2).all.to_a # => Array + # + # When the result set is too large DynamoDB divides it into separate + # pages. While an enumerator iterates over the result models each page + # is loaded lazily. So even an extra large result set can be loaded and + # processed with considerably small memory footprint and throughput + # consumption. + # + # @return [Enumerator::Lazy] + # @since 0.2.0 + def all + records + end + + # Returns the actual number of items in a table matching the criteria. + # + # Post.where(links_count: 2).count + # + # Internally it uses either `Scan` or `Query` DynamoDB's operation so it + # costs like all the matching items were read from a table. + # + # The only difference is that items are read by DynemoDB but not actually + # loaded on the client side. DynamoDB returns only count of items after + # filtering. + # + # @return [Integer] + def count + if @key_fields_detector.key_present? + count_via_query + else + count_via_scan + end + end + + # Returns the first item matching the criteria. + # + # Post.where(links_count: 2).first + # + # Applies `record_limit(1)` to ensure only a single record is fetched + # when no non-key conditions are present and `scan_limit(1)` when no + # conditions are present at all. + # + # If used without criteria it just returns the first item of some + # arbitrary order. + # + # Post.first + # + # @return [Model|nil] + def first(*args) + n = args.first || 1 + + return dup.scan_limit(n).to_a.first(*args) if @where_conditions.empty? + return super if @key_fields_detector.non_key_present? + + dup.record_limit(n).to_a.first(*args) + end + + # Returns the last item matching the criteria. + # + # Post.where(links_count: 2).last + # + # DynamoDB doesn't support ordering by some arbitrary attribute except a + # sort key. So this method is mostly useful during development and + # testing. + # + # If used without criteria it just returns the last item of some arbitrary order. + # + # Post.last + # + # It isn't efficient from the performance point of view as far as it reads and + # loads all the filtered items from DynamoDB. + # + # @return [Model|nil] + def last + all.to_a.last + end + + # Deletes all the items matching the criteria. + # + # Post.where(links_count: 2).delete_all + # + # If called without criteria then it deletes all the items in a table. + # + # Post.delete_all + # + # It loads all the items either with +Scan+ or +Query+ operation and + # deletes them in batch with +BatchWriteItem+ operation. +BatchWriteItem+ + # is limited by request size and items count so it's quite possible the + # deletion will require several +BatchWriteItem+ calls. + def delete_all + ids = [] + ranges = [] + + if @key_fields_detector.key_present? + Dynamoid.adapter.query(source.table_name, query_key_conditions, query_non_key_conditions, query_options).flat_map { |i| i }.collect do |hash| + ids << hash[source.hash_key.to_sym] + ranges << hash[source.range_key.to_sym] if source.range_key + end + else + Dynamoid.adapter.scan(source.table_name, scan_conditions, scan_options).flat_map { |i| i }.collect do |hash| + ids << hash[source.hash_key.to_sym] + ranges << hash[source.range_key.to_sym] if source.range_key + end + end + + Dynamoid.adapter.delete(source.table_name, ids, range_key: ranges.presence) + end + alias destroy_all delete_all + + # Set the record limit. 
+ # + # The record limit is the limit of evaluated items returned by the + # +Query+ or +Scan+. In other words it's how many items should be + # returned in response. + # + # Post.where(links_count: 2).record_limit(1000) # => 1000 models + # Post.record_limit(1000) # => 1000 models + # + # It could be very inefficient in terms of HTTP requests in pathological + # cases. DynamoDB doesn't support out of the box the limits for items + # count after filtering. So it's possible to make a lot of HTTP requests + # to find items matching criteria and skip not matching. It means that + # the cost (read capacity units) is unpredictable. + # + # Because of such issues with performance and cost it's mostly useful in + # development and testing. + # + # When called without criteria it works like +scan_limit+. + # + # @return [Dynamoid::Criteria::Chain] + def record_limit(limit) + @record_limit = limit + self + end + + # Set the scan limit. + # + # The scan limit is the limit of records that DynamoDB will internally + # read with +Query+ or +Scan+. It's different from the record limit as + # with filtering DynamoDB may look at N scanned items but return 0 + # items if none passes the filter. So it can return less items than was + # specified with the limit. + # + # Post.where(links_count: 2).scan_limit(1000) # => 850 models + # Post.scan_limit(1000) # => 1000 models + # + # By contrast with +record_limit+ the cost (read capacity units) and + # performance is predictable. + # + # When called without criteria it works like +record_limit+. + # + # @return [Dynamoid::Criteria::Chain] + def scan_limit(limit) + @scan_limit = limit + self + end + + # Set the batch size. + # + # The batch size is a number of items which will be lazily loaded one by one. + # When the batch size is set then items will be loaded batch by batch of + # the specified size instead of relying on the default paging mechanism + # of DynamoDB. + # + # Post.where(links_count: 2).batch(1000).all.each do |post| + # # process a post + # end + # + # It's useful to limit memory usage or throughput consumption + # + # @return [Dynamoid::Criteria::Chain] + def batch(batch_size) + @batch_size = batch_size + self + end + + # Set the start item. + # + # When the start item is set the items will be loaded starting right + # after the specified item. + # + # Post.where(links_count: 2).start(post) + # + # It can be used to implement an own pagination mechanism. + # + # Post.where(author_id: author_id).start(last_post).scan_limit(50) + # + # The specified start item will not be returned back in a result set. + # + # Actually it doesn't need all the item attributes to start - an item may + # have only the primary key attributes (partition and sort key if it's + # declared). + # + # Post.where(links_count: 2).start(Post.new(id: id)) + # + # It also supports a +Hash+ argument with the keys attributes - a + # partition key and a sort key (if it's declared). + # + # Post.where(links_count: 2).start(id: id) + # + # @return [Dynamoid::Criteria::Chain] + def start(start) + @start = start + self + end + + # Reverse the sort order. + # + # By default the sort order is ascending (by the sort key value). Set a + # +false+ value to reverse the order. + # + # Post.where(id: id, 'views_count.gt' => 1000).scan_index_forward(false) + # + # It works only for queries with a partition key condition e.g. +id: + # 'some-id'+ which internally performs +Query+ operation. 
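+ #
+ # For example, assuming a table whose sort key is a timestamp (the
+ # attribute names here are illustrative), the newest items come first:
+ #
+ #   Post.where(author_id: author_id).scan_index_forward(false).record_limit(10)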
+ # + # @return [Dynamoid::Criteria::Chain] + def scan_index_forward(scan_index_forward) + @scan_index_forward = scan_index_forward + self + end + + # Force the index name to use for queries. + # + # By default allows the library to select the most appropriate index. + # Sometimes you have more than one index which will fulfill your query's + # needs. When this case occurs you may want to force an order. This occurs + # when you are searching by hash key, but not specifying a range key. + # + # class Comment + # include Dynamoid::Document + # + # table key: :post_id + # range_key :author_id + # + # field :post_date, :datetime + # + # global_secondary_index name: :time_sorted_comments, hash_key: :post_id, range_key: post_date, projected_attributes: :all + # end + # + # + # Comment.where(post_id: id).with_index(:time_sorted_comments).scan_index_forward(false) + # + # @return [Dynamoid::Criteria::Chain] + def with_index(index_name) + raise Dynamoid::Errors::InvalidIndex, "Unknown index #{index_name}" unless @source.find_index_by_name(index_name) + + @forced_index_name = index_name + @key_fields_detector = KeyFieldsDetector.new(@where_conditions, @source, forced_index_name: index_name) + self + end + + # Allows to use the results of a search as an enumerable over the results + # found. + # + # Post.each do |post| + # end + # + # Post.all.each do |post| + # end + # + # Post.where(links_count: 2).each do |post| + # end + # + # It works similar to the +all+ method so results are loaded lazily. + # + # @since 0.2.0 + def each(&block) + records.each(&block) + end + + # Iterates over the pages returned by DynamoDB. + # + # DynamoDB has its own paging machanism and divides a large result set + # into separate pages. The +find_by_pages+ method provides access to + # these native DynamoDB pages. + # + # The pages are loaded lazily. + # + # Post.where('views_count.gt' => 1000).find_by_pages do |posts, options| + # # process posts + # end + # + # It passes as block argument an +Array+ of models and a Hash with options. + # + # Options +Hash+ contains only one option +:last_evaluated_key+. The last + # evaluated key is a Hash with key attributes of the last item processed by + # DynamoDB. It can be used to resume querying using the +start+ method. + # + # posts, options = Post.where('views_count.gt' => 1000).find_by_pages.first + # last_key = options[:last_evaluated_key] + # + # # ... + # + # Post.where('views_count.gt' => 1000).start(last_key).find_by_pages do |posts, options| + # end + # + # If it's called without a block then it returns an +Enumerator+. + # + # enum = Post.where('views_count.gt' => 1000).find_by_pages + # + # enum.each do |posts, options| + # # process posts + # end + # + # @return [Enumerator::Lazy] + def find_by_pages(&block) + pages.each(&block) + end + + # Select only specified fields. + # + # It takes one or more field names and returns a collection of models with only + # these fields set. + # + # Post.where('views_count.gt' => 1000).project(:title) + # Post.where('views_count.gt' => 1000).project(:title, :created_at) + # Post.project(:id) + # + # It can be used to avoid loading large field values and to decrease a + # memory footprint. + # + # @return [Dynamoid::Criteria::Chain] + def project(*fields) + @project = fields.map(&:to_sym) + self + end + + # Select only specified fields. + # + # It takes one or more field names and returns an array of either values + # or arrays of values. 
+ # + # Post.pluck(:id) # => ['1', '2'] + # Post.pluck(:title, :title) # => [['1', 'Title #1'], ['2', 'Title#2']] + # + # Post.where('views_count.gt' => 1000).pluck(:title) + # + # There are some differences between +pluck+ and +project+. +pluck+ + # - doesn't instantiate models + # - it isn't chainable and returns +Array+ instead of +Chain+ + # + # It deserializes values if a field type isn't supported by DynamoDB natively. + # + # It can be used to avoid loading large field values and to decrease a + # memory footprint. + # + # @return [Array] + def pluck(*args) + fields = args.map(&:to_sym) + + # `project` has a side effect - it sets `@project` instance variable. + # So use a duplicate to not pollute original chain. + scope = dup + scope.project(*fields) + + if fields.many? + scope.items.map do |item| + fields.map { |key| Undumping.undump_field(item[key], source.attributes[key]) } + end.to_a + else + key = fields.first + scope.items.map { |item| Undumping.undump_field(item[key], source.attributes[key]) }.to_a + end + end + + private + + # The actual records referenced by the association. + # + # @return [Enumerator] an iterator of the found records. + # + # @since 0.2.0 + def records + pages.lazy.flat_map { |items, _| items } + end + + # Raw items like they are stored before type casting + def items + raw_pages.lazy.flat_map { |items, _| items } + end + protected :items + + # Arrays of records, sized based on the actual pages produced by DynamoDB + # + # @return [Enumerator] an iterator of the found records. + # + # @since 3.1.0 + def pages + raw_pages.lazy.map do |items, options| + models = items.map { |i| source.from_database(i) } + models.each { |m| m.run_callbacks :find } + [models, options] + end.each + end + + # Pages of items before type casting + def raw_pages + if @key_fields_detector.key_present? + raw_pages_via_query + else + issue_scan_warning if Dynamoid::Config.warn_on_scan && !@where_conditions.empty? + raw_pages_via_scan + end + end + + # If the query matches an index, we'll query the associated table to find results. + # + # @return [Enumerator] an iterator of the found pages. An array of records + # + # @since 3.1.0 + def raw_pages_via_query + Enumerator.new do |y| + Dynamoid.adapter.query(source.table_name, query_key_conditions, query_non_key_conditions, query_options).each do |items, metadata| + options = metadata.slice(:last_evaluated_key) + + y.yield items, options + end + end + end + + # If the query does not match an index, we'll manually scan the associated table to find results. + # + # @return [Enumerator] an iterator of the found pages. An array of records + # + # @since 3.1.0 + def raw_pages_via_scan + Enumerator.new do |y| + Dynamoid.adapter.scan(source.table_name, scan_conditions, scan_options).each do |items, metadata| + options = metadata.slice(:last_evaluated_key) + + y.yield items, options + end + end + end + + def issue_scan_warning + Dynamoid.logger.warn 'Queries without an index are forced to use scan and are generally much slower than indexed queries!' 
+ Dynamoid.logger.warn "You can index this query by adding index declaration to #{source.to_s.underscore}.rb:" + Dynamoid.logger.warn "* global_secondary_index hash_key: 'some-name', range_key: 'some-another-name'" + Dynamoid.logger.warn "* local_secondary_index range_key: 'some-name'" + Dynamoid.logger.warn "Not indexed attributes: #{@where_conditions.keys.sort.collect { |name| ":#{name}" }.join(', ')}" + end + + def count_via_query + Dynamoid.adapter.query_count(source.table_name, query_key_conditions, query_non_key_conditions, query_options) + end + + def count_via_scan + Dynamoid.adapter.scan_count(source.table_name, scan_conditions, scan_options) + end + + def field_condition(key, value_before_type_casting) + name, operator = key.to_s.split('.') + value = type_cast_condition_parameter(name, value_before_type_casting) + operator ||= 'eq' + + unless operator.in? ALLOWED_FIELD_OPERATORS + raise Dynamoid::Errors::Error, "Unsupported operator #{operator} in #{key}" + end + + condition = + case operator + # NULL/NOT_NULL operators don't have parameters + # So { null: true } means NULL check and { null: false } means NOT_NULL one + # The same logic is used for { not_null: BOOL } + when 'null' + value ? [:null, nil] : [:not_null, nil] + when 'not_null' + value ? [:not_null, nil] : [:null, nil] + else + [operator.to_sym, value] + end + + [name.to_sym, condition] + end + + def query_key_conditions + opts = {} + + # Add hash key + # TODO: always have hash key in @where_conditions? + _, condition = field_condition(@key_fields_detector.hash_key, @where_conditions[@key_fields_detector.hash_key]) + opts[@key_fields_detector.hash_key] = [condition] + + # Add range key + if @key_fields_detector.range_key + if @where_conditions[@key_fields_detector.range_key].present? + _, condition = field_condition(@key_fields_detector.range_key, @where_conditions[@key_fields_detector.range_key]) + opts[@key_fields_detector.range_key] = [condition] + end + + @where_conditions.keys.select { |k| k.to_s =~ /^#{@key_fields_detector.range_key}\./ }.each do |key| + name, condition = field_condition(key, @where_conditions[key]) + opts[name] ||= [] + opts[name] << condition + end + end + + opts + end + + def query_non_key_conditions + opts = {} + + # Honor STI and :type field if it presents + if @source.attributes.key?(@source.inheritance_field) && + @key_fields_detector.hash_key.to_sym != @source.inheritance_field.to_sym + @where_conditions.update(sti_condition) + end + + # TODO: Separate key conditions and non-key conditions properly: + # only =, >, >=, <, <=, between and begins_with + # could be used for sort key in KeyConditionExpression + keys = (@where_conditions.keys.map(&:to_sym) - [@key_fields_detector.hash_key.to_sym, @key_fields_detector.range_key.try(:to_sym)]) + .reject { |k, _| k.to_s =~ /^#{@key_fields_detector.range_key}\./ } + keys.each do |key| + name, condition = field_condition(key, @where_conditions[key]) + opts[name] ||= [] + opts[name] << condition + end + + opts + end + + # TODO: casting should be operator aware + # e.g. 
for NULL operator value should be boolean + # and isn't related to an attribute own type + def type_cast_condition_parameter(key, value) + return value if %i[array set].include?(source.attributes[key.to_sym][:type]) + + if [true, false].include?(value) # Support argument for null/not_null operators + value + elsif !value.respond_to?(:to_ary) + options = source.attributes[key.to_sym] + value_casted = TypeCasting.cast_field(value, options) + Dumping.dump_field(value_casted, options) + else + value.to_ary.map do |el| + options = source.attributes[key.to_sym] + value_casted = TypeCasting.cast_field(el, options) + Dumping.dump_field(value_casted, options) + end + end + end + + # Start key needs to be set up based on the index utilized + # If using a secondary index then we must include the index's composite key + # as well as the tables composite key. + def start_key + return @start if @start.is_a?(Hash) + + hash_key = @key_fields_detector.hash_key || source.hash_key + range_key = @key_fields_detector.range_key || source.range_key + + key = {} + key[hash_key] = type_cast_condition_parameter(hash_key, @start.send(hash_key)) + if range_key + key[range_key] = type_cast_condition_parameter(range_key, @start.send(range_key)) + end + # Add table composite keys if they differ from secondary index used composite key + if hash_key != source.hash_key + key[source.hash_key] = type_cast_condition_parameter(source.hash_key, @start.hash_key) + end + if source.range_key && range_key != source.range_key + key[source.range_key] = type_cast_condition_parameter(source.range_key, @start.range_value) + end + key + end + + def query_options + opts = {} + # Don't specify select = ALL_ATTRIBUTES option explicitly because it's + # already a default value of Select statement. Explicite Select value + # conflicts with AttributesToGet statement (project option). + opts[:index_name] = @key_fields_detector.index_name if @key_fields_detector.index_name + opts[:record_limit] = @record_limit if @record_limit + opts[:scan_limit] = @scan_limit if @scan_limit + opts[:batch_size] = @batch_size if @batch_size + opts[:exclusive_start_key] = start_key if @start + opts[:scan_index_forward] = @scan_index_forward + opts[:project] = @project + opts[:consistent_read] = true if @consistent_read + opts + end + + def scan_conditions + # Honor STI and :type field if it presents + if sti_condition + @where_conditions.update(sti_condition) + end + + {}.tap do |opts| + @where_conditions.keys.map(&:to_sym).each do |key| + name, condition = field_condition(key, @where_conditions[key]) + opts[name] ||= [] + opts[name] << condition + end + end + end + + def scan_options + opts = {} + opts[:index_name] = @key_fields_detector.index_name if @key_fields_detector.index_name + opts[:record_limit] = @record_limit if @record_limit + opts[:scan_limit] = @scan_limit if @scan_limit + opts[:batch_size] = @batch_size if @batch_size + opts[:exclusive_start_key] = start_key if @start + opts[:consistent_read] = true if @consistent_read + opts[:project] = @project + opts + end + + # TODO: return Array, not String + def sti_condition + condition = {} + type = @source.inheritance_field + + if @source.attributes.key?(type) && !@source.abstract_class? 
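+ # Limit results to this class and all of its descendants by adding an
+ # IN condition on the inheritance field with their STI names.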
+ sti_names = @source.deep_subclasses.map(&:sti_name) << @source.sti_name + condition[:"#{type}.in"] = sti_names + end + + condition + end + end + end +end diff --git a/dynamoid/lib/dynamoid/criteria/key_fields_detector.rb b/dynamoid/lib/dynamoid/criteria/key_fields_detector.rb new file mode 100644 index 000000000..e86e71a10 --- /dev/null +++ b/dynamoid/lib/dynamoid/criteria/key_fields_detector.rb @@ -0,0 +1,150 @@ +# frozen_string_literal: true + +module Dynamoid + module Criteria + # @private + class KeyFieldsDetector + class Query + def initialize(where_conditions) + @where_conditions = where_conditions + @fields_with_operator = where_conditions.keys.map(&:to_s) + @fields = where_conditions.keys.map(&:to_s).map { |s| s.split('.').first } + end + + def contain_only?(field_names) + (@fields - field_names.map(&:to_s)).blank? + end + + def contain?(field_name) + @fields.include?(field_name.to_s) + end + + def contain_with_eq_operator?(field_name) + @fields_with_operator.include?(field_name.to_s) + end + end + + def initialize(where_conditions, source, forced_index_name: nil) + @source = source + @query = Query.new(where_conditions) + @forced_index_name = forced_index_name + @result = find_keys_in_query + end + + def non_key_present? + !@query.contain_only?([hash_key, range_key].compact) + end + + def key_present? + @result.present? && @query.contain_with_eq_operator?(hash_key) + end + + def hash_key + @result && @result[:hash_key] + end + + def range_key + @result && @result[:range_key] + end + + def index_name + @result && @result[:index_name] + end + + private + + def find_keys_in_query + return match_forced_index if @forced_index_name + + match_table_and_sort_key || + match_local_secondary_index || + match_global_secondary_index_and_sort_key || + match_table || + match_global_secondary_index + end + + # Use table's default range key + def match_table_and_sort_key + return unless @query.contain_with_eq_operator?(@source.hash_key) + return unless @source.range_key + + if @query.contain?(@source.range_key) + { + hash_key: @source.hash_key, + range_key: @source.range_key + } + end + end + + # See if can use any local secondary index range key + # Chooses the first LSI found that can be utilized for the query + def match_local_secondary_index + return unless @query.contain_with_eq_operator?(@source.hash_key) + + lsi = @source.local_secondary_indexes.values.find do |i| + @query.contain?(i.range_key) + end + + if lsi.present? + { + hash_key: @source.hash_key, + range_key: lsi.range_key, + index_name: lsi.name, + } + end + end + + # See if can use any global secondary index + # Chooses the first GSI found that can be utilized for the query + # GSI with range key involved into query conditions has higher priority + # But only do so if projects ALL attributes otherwise we won't + # get back full data + def match_global_secondary_index_and_sort_key + gsi = @source.global_secondary_indexes.values.find do |i| + @query.contain_with_eq_operator?(i.hash_key) && i.projected_attributes == :all && + @query.contain?(i.range_key) + end + + if gsi.present? + { + hash_key: gsi.hash_key, + range_key: gsi.range_key, + index_name: gsi.name, + } + end + end + + def match_table + return unless @query.contain_with_eq_operator?(@source.hash_key) + + { + hash_key: @source.hash_key, + } + end + + def match_global_secondary_index + gsi = @source.global_secondary_indexes.values.find do |i| + @query.contain_with_eq_operator?(i.hash_key) && i.projected_attributes == :all + end + + if gsi.present? 
+ { + hash_key: gsi.hash_key, + range_key: gsi.range_key, + index_name: gsi.name, + } + end + end + + def match_forced_index + idx = @source.find_index_by_name(@forced_index_name) + + { + hash_key: idx.hash_key, + range_key: idx.range_key, + index_name: idx.name, + } + end + end + end +end diff --git a/dynamoid/lib/dynamoid/criteria/nonexistent_fields_detector.rb b/dynamoid/lib/dynamoid/criteria/nonexistent_fields_detector.rb new file mode 100644 index 000000000..fbda05b7a --- /dev/null +++ b/dynamoid/lib/dynamoid/criteria/nonexistent_fields_detector.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +module Dynamoid + module Criteria + # @private + class NonexistentFieldsDetector + def initialize(conditions, source) + @conditions = conditions + @source = source + @nonexistent_fields = nonexistent_fields + end + + def found? + @nonexistent_fields.present? + end + + def warning_message + return unless found? + + fields_list = @nonexistent_fields.map { |s| "`#{s}`" }.join(', ') + count = @nonexistent_fields.size + + 'where conditions contain nonexistent ' \ + "field #{'name'.pluralize(count)} #{fields_list}" + end + + private + + def nonexistent_fields + fields_from_conditions - fields_existent + end + + def fields_from_conditions + @conditions.keys.map { |s| s.to_s.split('.')[0].to_sym } + end + + def fields_existent + @source.attributes.keys.map(&:to_sym) + end + end + end +end diff --git a/dynamoid/lib/dynamoid/criteria/where_conditions.rb b/dynamoid/lib/dynamoid/criteria/where_conditions.rb new file mode 100644 index 000000000..52baadd70 --- /dev/null +++ b/dynamoid/lib/dynamoid/criteria/where_conditions.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module Dynamoid + module Criteria + # @private + class WhereConditions + def initialize + @conditions = [] + end + + def update(hash) + @conditions << hash.symbolize_keys + end + + def keys + @conditions.flat_map(&:keys) + end + + def empty? + @conditions.empty? + end + + def [](key) + hash = @conditions.find { |h| h.key?(key) } + hash[key] if hash + end + end + end +end diff --git a/dynamoid/lib/dynamoid/dirty.rb b/dynamoid/lib/dynamoid/dirty.rb new file mode 100644 index 000000000..5be07d819 --- /dev/null +++ b/dynamoid/lib/dynamoid/dirty.rb @@ -0,0 +1,271 @@ +# frozen_string_literal: true + +module Dynamoid + # Support interface of Rails' ActiveModel::Dirty module + # + # The reason why not just include ActiveModel::Dirty - + # ActiveModel::Dirty conflicts either with @attributes or + # #attributes in different Rails versions. + # + # Separate implementation (or copy-pasting) is the best way to + # avoid endless monkey-patching + # + # Documentation: + # https://api.rubyonrails.org/v4.2/classes/ActiveModel/Dirty.html + module Dirty + extend ActiveSupport::Concern + include ActiveModel::AttributeMethods + + included do + attribute_method_suffix '_changed?', '_change', '_will_change!', '_was' + attribute_method_suffix '_previously_changed?', '_previous_change' + attribute_method_affix prefix: 'restore_', suffix: '!' 
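+ # The declarations above give every attribute dirty-tracking helpers such
+ # as name_changed?, name_change, name_will_change!, name_was,
+ # name_previously_changed?, name_previous_change and restore_name!
+ # (using a hypothetical +name+ attribute as the example).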
+ end + + # @private + module ClassMethods + def update_fields(*) + super.tap do |model| + model.clear_changes_information if model + end + end + + def upsert(*) + super.tap do |model| + model.clear_changes_information if model + end + end + + def from_database(*) + super.tap(&:clear_changes_information) + end + end + + # @private + def save(*) + super.tap do |status| + changes_applied if status + end + end + + # @private + def save!(*) + super.tap do + changes_applied + end + end + + # @private + def update(*) + super.tap do + clear_changes_information + end + end + + # @private + def update!(*) + super.tap do + clear_changes_information + end + end + + # @private + def reload(*) + super.tap do + clear_changes_information + end + end + + # Returns +true+ if any attribute have unsaved changes, +false+ otherwise. + # + # person.changed? # => false + # person.name = 'Bob' + # person.changed? # => true + # + # @return [true|false] + def changed? + changed_attributes.present? + end + + # Returns an array with names of the attributes with unsaved changes. + # + # person = Person.new + # person.changed # => [] + # person.name = 'Bob' + # person.changed # => ["name"] + # + # @return [Array[String]] + def changed + changed_attributes.keys + end + + # Returns a hash of changed attributes indicating their original + # and new values like attr => [original value, new value]. + # + # person.changes # => {} + # person.name = 'Bob' + # person.changes # => { "name" => ["Bill", "Bob"] } + # + # @return [ActiveSupport::HashWithIndifferentAccess] + def changes + ActiveSupport::HashWithIndifferentAccess[changed.map { |name| [name, attribute_change(name)] }] + end + + # Returns a hash of attributes that were changed before the model was saved. + # + # person.name # => "Bob" + # person.name = 'Robert' + # person.save + # person.previous_changes # => {"name" => ["Bob", "Robert"]} + # + # @return [ActiveSupport::HashWithIndifferentAccess] + def previous_changes + @previously_changed ||= ActiveSupport::HashWithIndifferentAccess.new + end + + # Returns a hash of the attributes with unsaved changes indicating their original + # values like attr => original value. + # + # person.name # => "Bob" + # person.name = 'Robert' + # person.changed_attributes # => {"name" => "Bob"} + # + # @return [ActiveSupport::HashWithIndifferentAccess] + def changed_attributes + @changed_attributes ||= ActiveSupport::HashWithIndifferentAccess.new + end + + # Clear all dirty data: current changes and previous changes. + def clear_changes_information + @previously_changed = ActiveSupport::HashWithIndifferentAccess.new + @changed_attributes = ActiveSupport::HashWithIndifferentAccess.new + end + + # Clears dirty data and moves +changes+ to +previous_changes+. + def changes_applied + @previously_changed = changes + @changed_attributes = ActiveSupport::HashWithIndifferentAccess.new + end + + # Remove changes information for the provided attributes. + # + # @param attributes [Array[String]] - a list of attributes to clear changes for + def clear_attribute_changes(names) + attributes_changed_by_setter.except!(*names) + end + + # Handle *_changed? for +method_missing+. 
+ # + # person.attribute_changed?(:name) # => true + # person.attribute_changed?(:name, from: 'Alice') + # person.attribute_changed?(:name, to: 'Bob') + # person.attribute_changed?(:name, from: 'Alice', to: 'Bod') + # + # @private + # @param name [Symbol] attribute name + # @param options [Hash] conditions on +from+ and +to+ value (optional) + # @option options [Symbol] :from previous attribute value + # @option options [Symbol] :to current attribute value + def attribute_changed?(name, options = {}) + result = changes_include?(name) + result &&= options[:to] == read_attribute(name) if options.key?(:to) + result &&= options[:from] == changed_attributes[name] if options.key?(:from) + result + end + + # Handle *_was for +method_missing+. + # + # person = Person.create(name: 'Alice') + # person.name = 'Bob' + # person.attribute_was(:name) # => "Alice" + # + # @private + # @param name [Symbol] attribute name + def attribute_was(name) + attribute_changed?(name) ? changed_attributes[name] : read_attribute(name) + end + + # Restore all previous data of the provided attributes. + # + # @param attributes [Array[Symbol]] a list of attribute names + def restore_attributes(names = changed) + names.each { |name| restore_attribute! name } + end + + # Handles *_previously_changed? for +method_missing+. + # + # person = Person.create(name: 'Alice') + # person.name = 'Bob' + # person.save + # person.attribute_changed?(:name) # => true + # + # @private + # @param name [Symbol] attribute name + # @return [true|false] + def attribute_previously_changed?(name) + previous_changes_include?(name) + end + + # Handles *_previous_change for +method_missing+. + # + # person = Person.create(name: 'Alice') + # person.name = 'Bob' + # person.save + # person.attribute_previously_changed(:name) # => ["Alice", "Bob"] + # + # @private + # @param name [Symbol] + # @return [Array] + def attribute_previous_change(name) + previous_changes[name] if attribute_previously_changed?(name) + end + + private + + def changes_include?(name) + attributes_changed_by_setter.include?(name) + end + alias attribute_changed_by_setter? changes_include? + + # Handle *_change for +method_missing+. + def attribute_change(name) + [changed_attributes[name], read_attribute(name)] if attribute_changed?(name) + end + + # Handle *_will_change! for +method_missing+. + def attribute_will_change!(name) + return if attribute_changed?(name) + + begin + value = read_attribute(name) + value = value.clone if value.duplicable? + rescue TypeError, NoMethodError + end + + set_attribute_was(name, value) + end + + # Handle restore_*! for +method_missing+. + def restore_attribute!(name) + if attribute_changed?(name) + write_attribute(name, changed_attributes[name]) + clear_attribute_changes([name]) + end + end + + # Returns +true+ if name were changed before the model was saved, + # +false+ otherwise. + def previous_changes_include?(name) + previous_changes.include?(name) + end + + # This is necessary because `changed_attributes` might be overridden in + # other implemntations (e.g. 
in `ActiveRecord`) + alias attributes_changed_by_setter changed_attributes + + # Force an attribute to have a particular "before" value + def set_attribute_was(name, old_value) + attributes_changed_by_setter[name] = old_value + end + end +end diff --git a/dynamoid/lib/dynamoid/document.rb b/dynamoid/lib/dynamoid/document.rb new file mode 100644 index 000000000..8591e7d0b --- /dev/null +++ b/dynamoid/lib/dynamoid/document.rb @@ -0,0 +1,339 @@ +# frozen_string_literal: true + +module Dynamoid + # This is the base module for all domain objects that need to be persisted to + # the database as documents. + module Document + extend ActiveSupport::Concern + include Dynamoid::Components + + included do + class_attribute :options, :read_only_attributes, :base_class, instance_accessor: false + self.options = {} + self.read_only_attributes = [] + self.base_class = self + + Dynamoid.included_models << self unless Dynamoid.included_models.include? self + end + + module ClassMethods + def attr_readonly(*read_only_attributes) + self.read_only_attributes.concat read_only_attributes.map(&:to_s) + end + + # Returns the read capacity for this table. + # + # @return [Integer] read capacity units + # @since 0.4.0 + def read_capacity + options[:read_capacity] || Dynamoid::Config.read_capacity + end + + # Returns the write_capacity for this table. + # + # @return [Integer] write capacity units + # @since 0.4.0 + def write_capacity + options[:write_capacity] || Dynamoid::Config.write_capacity + end + + # Returns the billing (capacity) mode for this table. + # + # Could be either +provisioned+ or +on_demand+. + # + # @return [Symbol] + def capacity_mode + options[:capacity_mode] || Dynamoid::Config.capacity_mode + end + + # Returns the field name used to support STI for this table. + # + # Default field name is +type+ but it can be overrided in the +table+ + # method call. + # + # User.inheritance_field # => :type + def inheritance_field + options[:inheritance_field] || :type + end + + # Returns the hash key field name for this class. + # + # By default +id+ field is used. But it can be overriden in the +table+ + # method call. + # + # User.hash_key # => :id + # + # @return [Symbol] a hash key name + # @since 0.4.0 + def hash_key + options[:key] || :id + end + + # Return the count of items for this class. + # + # It returns approximate value based on DynamoDB statistic. DynamoDB + # updates it periodically so the value can be no accurate. + # + # It's a reletively cheap operation and doesn't read all the items in a + # table. It makes just one HTTP request to DynamoDB. + # + # @return [Integer] items count in a table + # @since 0.6.1 + def count + Dynamoid.adapter.count(table_name) + end + + # Initialize a new object. + # + # User.build(name: 'A') + # + # Initialize an object and pass it into a block to set other attributes. + # + # User.build(name: 'A') do |u| + # u.age = 21 + # end + # + # The only difference between +build+ and +new+ methods is that +build+ + # supports STI (Single table inheritance) and looks at the inheritance + # field. So it can build a model of actual class. 
For instance: + # + # class Employee + # include Dynamoid::Document + # + # field :type + # field :name + # end + # + # class Manager < Employee + # end + # + # Employee.build(name: 'Alice', type: 'Manager') # => # + # + # @param attrs [Hash] Attributes with which to create the document + # @param block [Proc] Block to process a document after initialization + # @return [Dynamoid::Document] the new document + # @since 0.2.0 + def build(attrs = {}, &block) + choose_right_class(attrs).new(attrs, &block) + end + + # Does this model exist in a table? + # + # User.exists?('713') # => true + # + # If a range key is declared it should be specified in the following way: + # + # User.exists?([['713', 'range-key-value']]) # => true + # + # It's possible to check existence of several models at once: + # + # User.exists?(['713', '714', '715']) + # + # Or in case when a range key is declared: + # + # User.exists?( + # [ + # ['713', 'range-key-value-1'], + # ['714', 'range-key-value-2'], + # ['715', 'range-key-value-3'] + # ] + # ) + # + # It's also possible to specify models not with primary key but with + # conditions on the attributes (in the +where+ method style): + # + # User.exists?(age: 20, 'created_at.gt': Time.now - 1.day) + # + # @param id_or_conditions [String|Array[String]|Array[Array]|Hash] the primary id of the model, a list of primary ids or a hash with the options to filter from. + # @return [true|false] + # @since 0.2.0 + def exists?(id_or_conditions = {}) + case id_or_conditions + when Hash then where(id_or_conditions).count >= 1 + else + begin + find(id_or_conditions) + true + rescue Dynamoid::Errors::RecordNotFound + false + end + end + end + + attr_accessor :abstract_class + + def abstract_class? + defined?(@abstract_class) && @abstract_class == true + end + + def sti_name + name + end + + def sti_class_for(type_name) + type_name.constantize + rescue NameError + raise Errors::SubclassNotFound, "STI subclass does not found. Subclass: '#{type_name}'" + end + + # @private + def deep_subclasses + subclasses + subclasses.map(&:deep_subclasses).flatten + end + + # @private + def choose_right_class(attrs) + attrs[inheritance_field] ? sti_class_for(attrs[inheritance_field]) : self + end + end + + # Initialize a new object. + # + # User.new(name: 'A') + # + # Initialize an object and pass it into a block to set other attributes. + # + # User.new(name: 'A') do |u| + # u.age = 21 + # end + # + # @param attrs [Hash] Attributes with which to create the document + # @param block [Proc] Block to process a document after initialization + # @return [Dynamoid::Document] the new document + # + # @since 0.2.0 + def initialize(attrs = {}, &block) + run_callbacks :initialize do + @new_record = true + @attributes ||= {} + @associations ||= {} + @attributes_before_type_cast ||= {} + + attrs_with_defaults = self.class.attributes.each_with_object({}) do |(attribute, options), res| + if attrs.key?(attribute) + res[attribute] = attrs[attribute] + elsif options.key?(:default) + res[attribute] = evaluate_default_value(options[:default]) + end + end + + attrs_virtual = attrs.slice(*(attrs.keys - self.class.attributes.keys)) + + load(attrs_with_defaults.merge(attrs_virtual)) + + if block + yield(self) + end + end + end + + # Check equality of two models. + # + # A model is equal to another model only if their primary keys (hash key + # and optionally range key) are equal. + # + # @return [true|false] + # @since 0.2.0 + def ==(other) + if self.class.identity_map_on? + super + else + return false if other.nil? 
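+ # Outside the identity map, equality is decided purely by primary key:
+ # both objects must be Dynamoid documents with the same hash key and range value.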
+ + other.is_a?(Dynamoid::Document) && hash_key == other.hash_key && range_value == other.range_value + end + end + + # Check equality of two models. + # + # Works exactly like +==+ does. + # + # @return [true|false] + def eql?(other) + self == other + end + + # Generate an Integer hash value for this model. + # + # Hash value is based on primary key. So models can be used safely as a + # +Hash+ keys. + # + # @return [Integer] + def hash + [hash_key, range_value].hash + end + + # Return a model's hash key value. + # + # @since 0.4.0 + def hash_key + self[self.class.hash_key.to_sym] + end + + # Assign a model's hash key value, regardless of what it might be called to + # the object. + # + # @since 0.4.0 + def hash_key=(value) + self[self.class.hash_key.to_sym] = value + end + + # Return a model's range key value. + # + # Returns +nil+ if a range key isn't declared for a model. + def range_value + if self.class.range_key + self[self.class.range_key.to_sym] + end + end + + # Assign a model's range key value. + def range_value=(value) + if self.class.range_key + self[self.class.range_key.to_sym] = value + end + end + + def inspect + # attributes order is: + # - partition key + # - sort key + # - user defined attributes + # - timestamps - created_at/updated_at + names = [self.class.hash_key] + names << self.class.range_key if self.class.range_key + names += self.class.attributes.keys - names - %i[created_at updated_at] + names << :created_at if self.class.attributes.key?(:created_at) + names << :updated_at if self.class.attributes.key?(:updated_at) + + inspection = names.map do |name| + value = read_attribute(name) + "#{name}: #{value.inspect}" + end.join(', ') + + "#<#{self.class.name} #{inspection}>" + end + + private + + def dumped_range_value + Dumping.dump_field(range_value, self.class.attributes[self.class.range_key]) + end + + # Evaluates the default value given, this is used by undump + # when determining the value of the default given for a field options. + # + # @param val [Object] the attribute's default value + def evaluate_default_value(val) + if val.respond_to?(:call) + val.call + elsif val.duplicable? + val.dup + else + val + end + end + end +end + +ActiveSupport.run_load_hooks(:dynamoid, Dynamoid::Document) diff --git a/dynamoid/lib/dynamoid/dumping.rb b/dynamoid/lib/dynamoid/dumping.rb new file mode 100644 index 000000000..5965469bd --- /dev/null +++ b/dynamoid/lib/dynamoid/dumping.rb @@ -0,0 +1,314 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module Dumping + def self.dump_attributes(attributes, attributes_options) + {}.tap do |h| + attributes.each do |attribute, value| + h[attribute] = dump_field(value, attributes_options[attribute]) + end + end + end + + def self.dump_field(value, options) + return nil if value.nil? + + dumper = find_dumper(options) + + if dumper.nil? + raise ArgumentError, "Unknown type #{options[:type]}" + end + + dumper.process(value) + end + + def self.find_dumper(options) + dumper_class = case options[:type] + when :string then StringDumper + when :integer then IntegerDumper + when :number then NumberDumper + when :set then SetDumper + when :array then ArrayDumper + when :map then MapDumper + when :datetime then DateTimeDumper + when :date then DateDumper + when :serialized then SerializedDumper + when :raw then RawDumper + when :boolean then BooleanDumper + when :binary then BinaryDumper + when Class then CustomTypeDumper + end + + if dumper_class.present? 
+ dumper_class.new(options) + end + end + + module DeepSanitizeHelper + extend self + + def deep_sanitize(value) + case value + when Hash + sanitize_hash(value).transform_values { |v| deep_sanitize(v) } + when Array + sanitize_array(value).map { |v| deep_sanitize(v) } + else + value + end + end + + private + + def sanitize_hash(hash) + hash.transform_values { |v| invalid_value?(v) ? nil : v } + end + + def sanitize_array(array) + array.map { |v| invalid_value?(v) ? nil : v } + end + + def invalid_value?(value) + (value.is_a?(Set) || value.is_a?(String)) && value.empty? + end + end + + class Base + def initialize(options) + @options = options + end + + def process(value) + value + end + end + + # string -> string + class StringDumper < Base + end + + # integer -> number + class IntegerDumper < Base + end + + # number -> number + class NumberDumper < Base + end + + # set -> set + class SetDumper < Base + ALLOWED_TYPES = %i[string integer number date datetime serialized].freeze + + def process(set) + if @options.key?(:of) + process_typed_collection(set) + else + set + end + end + + private + + def process_typed_collection(set) + if allowed_type? + dumper = Dumping.find_dumper(element_options) + result = set.map { |el| dumper.process(el) } + + if element_type == :string + result.reject!(&:empty?) + end + + result.to_set + else + raise ArgumentError, "Set element type #{element_type} isn't supported" + end + end + + def allowed_type? + ALLOWED_TYPES.include?(element_type) || element_type.is_a?(Class) + end + + def element_type + if @options[:of].is_a?(Hash) + @options[:of].keys.first + else + @options[:of] + end + end + + def element_options + if @options[:of].is_a?(Hash) + @options[:of][element_type].dup.tap do |options| + options[:type] = element_type + end + else + { type: element_type } + end + end + end + + # array -> array + class ArrayDumper < Base + ALLOWED_TYPES = %i[string integer number date datetime serialized].freeze + + def process(array) + if @options.key?(:of) + process_typed_collection(array) + else + array + end + end + + private + + def process_typed_collection(array) + if allowed_type? + dumper = Dumping.find_dumper(element_options) + result = array.map { |el| dumper.process(el) } + + if element_type == :string + result.reject!(&:empty?) + end + + result + else + raise ArgumentError, "Array element type #{element_type} isn't supported" + end + end + + def allowed_type? + ALLOWED_TYPES.include?(element_type) || element_type.is_a?(Class) + end + + def element_type + if @options[:of].is_a?(Hash) + @options[:of].keys.first + else + @options[:of] + end + end + + def element_options + if @options[:of].is_a?(Hash) + @options[:of][element_type].dup.tap do |options| + options[:type] = element_type + end + else + { type: element_type } + end + end + end + + # hash -> map + class MapDumper < Base + def process(value) + DeepSanitizeHelper.deep_sanitize(value) + end + end + + # datetime -> integer/string + class DateTimeDumper < Base + def process(value) + value.nil? ? nil : format_datetime(value, @options) + end + + private + + def format_datetime(value, options) + use_string_format = if options[:store_as_string].nil? 
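+ # Editor's note: a small illustration, not part of the original change set, of the typed
+ # collection handling above; the field declaration is hypothetical.
+ #
+ #   # field :tags, :set, of: :string
+ #   Dynamoid::Dumping.dump_field(Set['ruby', ''], { type: :set, of: :string })
+ #   # => #<Set: {"ruby"}> - empty strings are rejected before the set is written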
+ Dynamoid.config.store_datetime_as_string + else + options[:store_as_string] + end + + if use_string_format + value_in_time_zone = Dynamoid::DynamodbTimeZone.in_time_zone(value) + value_in_time_zone.iso8601 + else + unless value.respond_to?(:to_i) && value.respond_to?(:nsec) + value = value.to_time + end + BigDecimal(format('%d.%09d', value.to_i, value.nsec)) + end + end + end + + # date -> integer/string + class DateDumper < Base + def process(value) + value.nil? ? nil : format_date(value, @options) + end + + private + + def format_date(value, options) + use_string_format = if options[:store_as_string].nil? + Dynamoid.config.store_date_as_string + else + options[:store_as_string] + end + + if use_string_format + value.to_date.iso8601 + else + (value.to_date - Dynamoid::Persistence::UNIX_EPOCH_DATE).to_i + end + end + end + + # any standard Ruby object -> self + class RawDumper < Base + def process(value) + DeepSanitizeHelper.deep_sanitize(value) + end + end + + # object -> string + class SerializedDumper < Base + def process(value) + @options[:serializer] ? @options[:serializer].dump(value) : value.to_yaml + end + end + + # True/False -> True/False/string + class BooleanDumper < Base + def process(value) + unless value.nil? + store_as_boolean = if @options[:store_as_native_boolean].nil? + Dynamoid.config.store_boolean_as_native + else + @options[:store_as_native_boolean] + end + if store_as_boolean + !!value + else + value.to_s[0] # => "f" or "t" + end + end + end + end + + # string -> string + class BinaryDumper < Base + def process(value) + Base64.strict_encode64(value) + end + end + + # any object -> string + class CustomTypeDumper < Base + def process(value) + field_class = @options[:type] + + if value.respond_to?(:dynamoid_dump) + value.dynamoid_dump + elsif field_class.respond_to?(:dynamoid_dump) + field_class.dynamoid_dump(value) + else + raise ArgumentError, "Neither #{field_class} nor #{value} supports serialization for Dynamoid." + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/dynamodb_time_zone.rb b/dynamoid/lib/dynamoid/dynamodb_time_zone.rb new file mode 100644 index 000000000..689862dfc --- /dev/null +++ b/dynamoid/lib/dynamoid/dynamodb_time_zone.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module DynamodbTimeZone + def self.in_time_zone(value) + case Dynamoid::Config.dynamodb_timezone + when :utc + value.utc.to_datetime + when :local + value.getlocal.to_datetime + else + value.in_time_zone(Dynamoid::Config.dynamodb_timezone).to_datetime + end + end + end +end diff --git a/dynamoid/lib/dynamoid/errors.rb b/dynamoid/lib/dynamoid/errors.rb new file mode 100644 index 000000000..6d65ecd6d --- /dev/null +++ b/dynamoid/lib/dynamoid/errors.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module Dynamoid + # All the errors specific to Dynamoid. The goal is to mimic ActiveRecord. + module Errors + # Generic Dynamoid error + class Error < StandardError; end + + class MissingHashKey < Error; end + class MissingRangeKey < Error; end + + class MissingIndex < Error; end + + # InvalidIndex is raised when an invalid index is specified, for example if + # specified key attribute(s) or projected attributes do not exist. + class InvalidIndex < Error + def initialize(item) + if item.is_a? 
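+ # Editor's note: a quick sketch, not part of the original change set, of the two boolean
+ # storage modes handled by BooleanDumper above.
+ #
+ #   Dynamoid::Dumping.dump_field(true,  { type: :boolean, store_as_native_boolean: false }) # => "t"
+ #   Dynamoid::Dumping.dump_field(false, { type: :boolean, store_as_native_boolean: true })  # => false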
String + super(item) + else + super("Validation failed: #{item.errors.full_messages.join(', ')}") + end + end + end + + class RecordNotDestroyed < Error + attr_reader :record + + def initialize(record) + super('Failed to destroy item') + @record = record + end + end + + # This class is intended to be private to Dynamoid. + class ConditionalCheckFailedException < Error + attr_reader :inner_exception + + def initialize(inner) + super + @inner_exception = inner + end + end + + class RecordNotUnique < ConditionalCheckFailedException + attr_reader :original_exception + + def initialize(original_exception, record) + super("Attempted to write record #{record} when its key already exists") + @original_exception = original_exception + end + end + + class StaleObjectError < ConditionalCheckFailedException + attr_reader :record, :attempted_action + + def initialize(record, attempted_action) + super("Attempted to #{attempted_action} a stale object #{record}") + @record = record + @attempted_action = attempted_action + end + end + + class RecordNotFound < Error + end + + class DocumentNotValid < Error + attr_reader :document + + def initialize(document) + super("Validation failed: #{document.errors.full_messages.join(', ')}") + @document = document + end + end + + class InvalidQuery < Error; end + + class UnsupportedKeyType < Error; end + + class UnknownAttribute < Error; end + + class SubclassNotFound < Error; end + end +end diff --git a/dynamoid/lib/dynamoid/fields.rb b/dynamoid/lib/dynamoid/fields.rb new file mode 100644 index 000000000..4b09bace6 --- /dev/null +++ b/dynamoid/lib/dynamoid/fields.rb @@ -0,0 +1,384 @@ +# frozen_string_literal: true + +require 'dynamoid/fields/declare' + +module Dynamoid + # All fields on a Dynamoid::Document must be explicitly defined -- if you have fields in the database that are not + # specified with field, then they will be ignored. + module Fields + extend ActiveSupport::Concern + + # Initialize the attributes we know the class has, in addition to our magic attributes: id, created_at, and updated_at. + included do + class_attribute :attributes, instance_accessor: false + class_attribute :range_key + + self.attributes = {} + + # Timestamp fields could be disabled later in `table` method call. + # So let's declare them here and remove them later if it will be necessary + field :created_at, :datetime if Dynamoid::Config.timestamps + field :updated_at, :datetime if Dynamoid::Config.timestamps + + field :id # Default primary key + end + + module ClassMethods + # Specify a field for a document. + # + # class User + # include Dynamoid::Document + # + # field :last_name + # field :age, :integer + # field :last_sign_in, :datetime + # end + # + # Its type determines how it is coerced when read in and out of the + # data store. You can specify +string+, +integer+, +number+, +set+, +array+, + # +map+, +datetime+, +date+, +serialized+, +raw+, +boolean+ and +binary+ + # or specify a class that defines a serialization strategy. + # + # By default field type is +string+. + # + # Set can store elements of the same type only (it's a limitation of + # DynamoDB itself). If a set should store elements only of some particular + # type then +of+ option should be specified: + # + # field :hobbies, :set, of: :string + # + # Only +string+, +integer+, +number+, +date+, +datetime+ and +serialized+ + # element types are supported. 
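+ # Editor's note: a hypothetical example, not part of the original change set, of how the
+ # error classes above are typically rescued; the User model and id are assumptions.
+ #
+ #   begin
+ #     User.find('missing-id')
+ #   rescue Dynamoid::Errors::RecordNotFound
+ #     # handle the missing record
+ #   end
+ #
+ #   user.save! # may raise Dynamoid::Errors::DocumentNotValid when validation fails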
+ # + # Element type can have its own options - they should be specified in the + # form of a +Hash+: + # + # field :hobbies, :set, of: { serialized: { serializer: JSON } } + # + # Array can contain elements of different types but it supports the same + # +of+ option to convert all the provided elements to the declared type. + # + # field :rates, :array, of: :number + # + # By default +date+ and +datetime+ fields are stored as integer values. + # The format can be changed to string with the option +store_as_string+: + # + # field :published_on, :datetime, store_as_string: true + # + # A boolean field by default is stored as a string +t+ or +f+. But DynamoDB + # supports the boolean type natively. In order to switch to the native + # boolean type the option +store_as_native_boolean+ should be specified: + # + # field :active, :boolean, store_as_native_boolean: true + # + # If you specify the +serialized+ type a value will be serialized to a + # string in YAML format by default. A custom way to serialize a value to a + # string can be specified with the +serializer+ option. A custom serializer + # should have +dump+ and +load+ methods. + # + # If you specify a class for the field type, Dynamoid will serialize using + # the +dynamoid_dump+ method and load using the +dynamoid_load+ method. + # + # The default field type is +string+. + # + # A field can have a default value. It's assigned when initializing a model + # if no value is specified: + # + # field :age, :integer, default: 1 + # + # If a default value should be recalculated every time, it can be + # specified as a callable object (it should implement a +call+ method, + # e.g. a +Proc+ object): + # + # field :date_of_birth, :date, default: -> { Date.today } + # + # For every field Dynamoid creates several methods: + # + # * getter + # * setter + # * predicate +?+ to check whether a value is set + # * +_before_type_cast+ to get the original field value before it was type cast + # + # It works in the following way: + # + # class User + # include Dynamoid::Document + # + # field :age, :integer + # end + # + # user = User.new + # user.age # => nil + # user.age? # => false + # + # user.age = 20 + # user.age? # => true + # + # user.age = '21' + # user.age # => 21 - integer + # user.age_before_type_cast # => '21' - string + # + # There is also an +alias+ option which allows using another name for a + # field: + # + # class User + # include Dynamoid::Document + # + # field :firstName, :string, alias: :first_name + # end + # + # user = User.new(firstName: 'Michael') + # user.firstName # Michael + # user.first_name # Michael + # + # @param name [Symbol] name of the field + # @param type [Symbol] type of the field (optional) + # @param options [Hash] any additional options for the field type (optional) + # + # @since 0.2.0 + def field(name, type = :string, options = {}) + if type == :float + Dynamoid.logger.warn("Field type :float, which you declared for '#{name}', is deprecated in favor of :number.") + type = :number + end + + Dynamoid::Fields::Declare.new(self, name, type, options).call + end + + # Declare a table range key. + # + # class User + # include Dynamoid::Document + # + # range :last_name + # end + # + # By default a range key is a string.
In order to use any other type it + # should be specified as a second argument: + # + # range :age, :integer + # + # Type options can be specified as well: + # + # range :date_of_birth, :date, store_as_string: true + # + # @param name [Symbol] a range key attribute name + # @param type [Symbol] a range key type (optional) + # @param options [Hash] type options (optional) + def range(name, type = :string, options = {}) + field(name, type, options) + self.range_key = name + end + + # Set table level properties. + # + # There are some sensible defaults: + # + # * table name is based on a model class e.g. +users+ for the +User+ class + # * hash key name - +id+ by default + # * hash key type - +string+ by default + # * generating timestamp fields +created_at+ and +updated_at+ + # * billing mode and read/write capacity units + # + # The +table+ method can be used to override the defaults: + # + # class User + # include Dynamoid::Document + # + # table name: :customers, key: :uuid + # end + # + # The hash key field is declared by default and its type is a string. If + # another type is needed the field should be declared explicitly: + # + # class User + # include Dynamoid::Document + # + # field :id, :integer + # end + # + # @param options [Hash] options to override default table settings + # @option options [Symbol] :name name of a table + # @option options [Symbol] :key name of a hash key attribute + # @option options [Symbol] :inheritance_field name of an attribute used for STI + # @option options [Symbol] :capacity_mode table billing mode - either +provisioned+ or +on_demand+ + # @option options [Integer] :write_capacity table write capacity units + # @option options [Integer] :read_capacity table read capacity units + # @option options [true|false] :timestamps whether to generate +created_at+ and +updated_at+ fields or not + # @option options [Hash] :expires set up a table TTL; should have the following structure +{ field: <attribute name>, after: <seconds> }+ + # + # @since 0.4.0 + def table(options) + self.options = options + + # a default 'id' column is created when Dynamoid::Document is included + unless attributes.key? hash_key + remove_field :id + field(hash_key) + end + + # The created_at/updated_at fields are declared in the `included` callback first. + # At that moment the only known setting is `Dynamoid::Config.timestamps`. + # Now `options[:timestamps]` may override the global setting for a model. + # So we need to make the decision again and declare the fields or roll back their declaration. + # + # Do not replace with `#timestamps_enabled?`. + if options[:timestamps] && !Dynamoid::Config.timestamps + # The fields weren't declared in the `included` callback because they are disabled globally + field :created_at, :datetime + field :updated_at, :datetime + elsif options[:timestamps] == false && Dynamoid::Config.timestamps + # The fields were declared in the `included` callback but they are disabled for this table + remove_field :created_at + remove_field :updated_at + end + end + + # Remove a field declaration. + # + # Removes a field from the list of fields and removes all the methods + # generated for that field. + # + # @param field [Symbol] a field name + def remove_field(field) + field = field.to_sym + attributes.delete(field) || raise('No such field') + + # Dirty API + undefine_attribute_methods + define_attribute_methods attributes.keys + + generated_methods.module_eval do + remove_method field + remove_method :"#{field}=" + remove_method :"#{field}?"
+ remove_method :"#{field}_before_type_cast" + end + end + + # @private + def timestamps_enabled? + options[:timestamps] || (options[:timestamps].nil? && Dynamoid::Config.timestamps) + end + + # @private + def generated_methods + @generated_methods ||= Module.new.tap do |mod| + include(mod) + end + end + end + + # You can access the attributes of an object directly on its attributes method, which is by default an empty hash. + attr_accessor :attributes + alias raw_attributes attributes + + # Write an attribute on the object. + # + # user.age = 20 + # user.write_attribute(:age, 21) + # user.age # => 21 + # + # Also marks the previous value as dirty. + # + # @param name [Symbol] the name of the field + # @param value [Object] the value to assign to that field + # @return [Dynamoid::Document] self + # + # @since 0.2.0 + def write_attribute(name, value) + name = name.to_sym + old_value = read_attribute(name) + + unless attribute_is_present_on_model?(name) + raise Dynamoid::Errors::UnknownAttribute, "Attribute #{name} is not part of the model" + end + + if association = @associations[name] + association.reset + end + + @attributes_before_type_cast[name] = value + + value_casted = TypeCasting.cast_field(value, self.class.attributes[name]) + attribute_will_change!(name) if old_value != value_casted # Dirty API + + attributes[name] = value_casted + self + end + alias []= write_attribute + + # Read an attribute from an object. + # + # user.age = 20 + # user.read_attribute(:age) # => 20 + # + # @param name [Symbol] the name of the field + # @return attribute value + # @since 0.2.0 + def read_attribute(name) + attributes[name.to_sym] + end + alias [] read_attribute + + # Return attributes values before type casting. + # + # user = User.new + # user.age = '21' + # user.age # => 21 + # + # user.attributes_before_type_cast # => { age: '21' } + # + # @return [Hash] original attribute values + def attributes_before_type_cast + @attributes_before_type_cast + end + + # Return the value of the attribute identified by name before type casting. + # + # user = User.new + # user.age = '21' + # user.age # => 21 + # + # user.read_attribute_before_type_cast(:age) # => '21' + # + # @param name [Symbol] attribute name + # @return original attribute value + def read_attribute_before_type_cast(name) + return nil unless name.respond_to?(:to_sym) + + @attributes_before_type_cast[name.to_sym] + end + + private + + def set_expires_field + options = self.class.options[:expires] + + if options.present? + name = options[:field] + seconds = options[:after] + + if self[name].blank? + send(:"#{name}=", Time.now.to_i + seconds) + end + end + end + + def set_inheritance_field + # actually it does only following logic: + # self.type ||= self.class.sti_name if self.class.attributes[:type] + return if self.class.abstract_class? + + type = self.class.inheritance_field + if self.class.attributes[type] && send(type).nil? 
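+ # Editor's note: a hedged sketch, not part of the original change set, of the +expires+
+ # table option consumed by +set_expires_field+ above; the Session model and values are
+ # hypothetical.
+ #
+ #   class Session
+ #     include Dynamoid::Document
+ #
+ #     table expires: { field: :ttl, after: 30 * 60 }
+ #     field :ttl, :integer
+ #   end
+ #
+ #   # When +ttl+ is blank it is filled with Time.now.to_i + 1800 automatically.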
+ send(:"#{type}=", self.class.sti_name) + end + end + + def attribute_is_present_on_model?(attribute_name) + setter = :"#{attribute_name}=" + respond_to?(setter) + end + end +end diff --git a/dynamoid/lib/dynamoid/fields/declare.rb b/dynamoid/lib/dynamoid/fields/declare.rb new file mode 100644 index 000000000..e8aaae42e --- /dev/null +++ b/dynamoid/lib/dynamoid/fields/declare.rb @@ -0,0 +1,86 @@ +# frozen_string_literal: true + +module Dynamoid + module Fields + # @private + class Declare + def initialize(source, name, type, options) + @source = source + @name = name.to_sym + @type = type + @options = options + end + + def call + # Register new field metadata + @source.attributes = @source.attributes.merge( + @name => { type: @type }.merge(@options) + ) + + # Should be called before `define_attribute_methods` method because it + # defines an attribute getter itself + warn_about_method_overriding + + # Dirty API + @source.define_attribute_method(@name) + + # Generate getters and setters as well as other helper methods + generate_instance_methods + + # If alias name specified - generate the same instance methods + if @options[:alias] + generate_instance_methods_for_alias + end + end + + private + + def warn_about_method_overriding + warn_if_method_exists(@name) + warn_if_method_exists("#{@name}=") + warn_if_method_exists("#{@name}?") + warn_if_method_exists("#{@name}_before_type_cast?") + end + + def generate_instance_methods + # only local variable is visible in `module_eval` block + name = @name + + @source.generated_methods.module_eval do + define_method(name) { read_attribute(name) } + define_method(:"#{name}?") do + value = read_attribute(name) + case value + when true then true + when false, nil then false + else + !value.nil? + end + end + define_method(:"#{name}=") { |value| write_attribute(name, value) } + define_method(:"#{name}_before_type_cast") { read_attribute_before_type_cast(name) } + end + end + + def generate_instance_methods_for_alias + # only local variable is visible in `module_eval` block + name = @name + + alias_name = @options[:alias].to_sym + + @source.generated_methods.module_eval do + alias_method alias_name, name + alias_method :"#{alias_name}=", :"#{name}=" + alias_method :"#{alias_name}?", :"#{name}?" + alias_method :"#{alias_name}_before_type_cast", :"#{name}_before_type_cast" + end + end + + def warn_if_method_exists(method) + if @source.instance_methods.include?(method.to_sym) + Dynamoid.logger.warn("Method #{method} generated for the field #{@name} overrides already existing method") + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/finders.rb b/dynamoid/lib/dynamoid/finders.rb new file mode 100644 index 000000000..ec75a8201 --- /dev/null +++ b/dynamoid/lib/dynamoid/finders.rb @@ -0,0 +1,313 @@ +# frozen_string_literal: true + +module Dynamoid + # This module defines the finder methods that hang off the document at the + # class level, like find, find_by_id, and the method_missing style finders. + module Finders + extend ActiveSupport::Concern + + module ClassMethods + # Find one or many objects, specified by one id or an array of ids. + # + # By default it raises +RecordNotFound+ exception if at least one model + # isn't found. This behavior can be changed with +raise_error+ option. If + # specified +raise_error: false+ option then +find+ will not raise the + # exception. + # + # When a document schema includes range key it always should be specified + # in +find+ method call. 
In case it's missing +MissingRangeKey+ exception + # will be raised. + # + # Please note that +find+ doesn't preserve order of models in result when + # passes multiple ids. + # + # Supported following options: + # * +consistent_read+ + # * +range_key+ + # * +raise_error+ + # + # @param ids [String|Array] hash key or an array of hash keys + # @param options [Hash] + # @return [Dynamoid::Document] one object or an array of objects, depending on whether the input was an array or not + # + # @example Find by partition key + # Document.find(101) + # + # @example Find by partition key and sort key + # Document.find(101, range_key: 'archived') + # + # @example Find several documents by partition key + # Document.find(101, 102, 103) + # Document.find([101, 102, 103]) + # + # @example Find several documents by partition key and sort key + # Document.find([[101, 'archived'], [102, 'new'], [103, 'deleted']]) + # + # @example Perform strong consistent reads + # Document.find(101, consistent_read: true) + # Document.find(101, 102, 103, consistent_read: true) + # Document.find(101, range_key: 'archived', consistent_read: true) + # + # @since 0.2.0 + def find(*ids, **options) + if ids.size == 1 && !ids[0].is_a?(Array) + _find_by_id(ids[0], options.reverse_merge(raise_error: true)) + else + _find_all(ids.flatten(1), options.reverse_merge(raise_error: true)) + end + end + + # Find several models at once. + # + # Returns objects found by the given array of ids, either hash keys, or + # hash/range key combinations using +BatchGetItem+. + # + # Returns empty array if no results found. + # + # Uses backoff specified by +Dynamoid::Config.backoff+ config option. + # + # @param ids [Array] array of primary keys + # @param options [Hash] + # @option options [true|false] :consistent_read + # @option options [true|false] :raise_error + # + # @example + # # Find all the user with hash key + # User.find_all(['1', '2', '3']) + # + # # Find all the tweets using hash key and range key with consistent read + # Tweet.find_all([['1', 'red'], ['1', 'green']], consistent_read: true) + def find_all(ids, options = {}) + ActiveSupport::Deprecation.warn('[Dynamoid] .find_all is deprecated! Call .find instead of') + + _find_all(ids, options) + end + + # Find one object directly by primary key. + # + # @param id [String] the id of the object to find + # @param options [Hash] + # @option options [true|false] :consistent_read + # @option options [true|false] :raise_error + # @option options [Scalar value] :range_key + # @return [Dynamoid::Document] the found object, or nil if nothing was found + # + # @example Find by partition key + # Document.find_by_id(101) + # + # @example Find by partition key and sort key + # Document.find_by_id(101, range_key: 'archived') + # + # @since 0.2.0 + def find_by_id(id, options = {}) + ActiveSupport::Deprecation.warn('[Dynamoid] .find_by_id is deprecated! Call .find instead of', caller[1..-1]) + + _find_by_id(id, options) + end + + # @private + def _find_all(ids, options = {}) + raise Errors::MissingRangeKey if range_key && ids.any? { |_pk, sk| sk.nil? 
} + + if range_key + ids = ids.map do |pk, sk| + sk_casted = TypeCasting.cast_field(sk, attributes[range_key]) + sk_dumped = Dumping.dump_field(sk_casted, attributes[range_key]) + + [pk, sk_dumped] + end + end + + read_options = options.slice(:consistent_read) + + items = if Dynamoid.config.backoff + items = [] + backoff = nil + Dynamoid.adapter.read(table_name, ids, read_options) do |hash, has_unprocessed_items| + items += hash[table_name] + + if has_unprocessed_items + backoff ||= Dynamoid.config.build_backoff + backoff.call + else + backoff = nil + end + end + items + else + items = Dynamoid.adapter.read(table_name, ids, read_options) + items ? items[table_name] : [] + end + + if items.size == ids.size || !options[:raise_error] + models = items ? items.map { |i| from_database(i) } : [] + models.each { |m| m.run_callbacks :find } + models + else + ids_list = range_key ? ids.map { |pk, sk| "(#{pk},#{sk})" } : ids.map(&:to_s) + message = "Couldn't find all #{name.pluralize} with primary keys [#{ids_list.join(', ')}] " + message += "(found #{items.size} results, but was looking for #{ids.size})" + raise Errors::RecordNotFound, message + end + end + + # @private + def _find_by_id(id, options = {}) + raise Errors::MissingRangeKey if range_key && options[:range_key].nil? + + if range_key + key = options[:range_key] + key_casted = TypeCasting.cast_field(key, attributes[range_key]) + key_dumped = Dumping.dump_field(key_casted, attributes[range_key]) + + options[:range_key] = key_dumped + end + + if item = Dynamoid.adapter.read(table_name, id, options.slice(:range_key, :consistent_read)) + model = from_database(item) + model.run_callbacks :find + model + elsif options[:raise_error] + primary_key = range_key ? "(#{id},#{options[:range_key]})" : id + message = "Couldn't find #{name} with primary key #{primary_key}" + raise Errors::RecordNotFound, message + end + end + + # Find one object directly by hash and range keys. + # + # @param hash_key [Scalar value] hash key of the object to find + # @param range_key [Scalar value] range key of the object to find + # + def find_by_composite_key(hash_key, range_key, options = {}) + ActiveSupport::Deprecation.warn('[Dynamoid] .find_by_composite_key is deprecated! Call .find instead of') + + _find_by_id(hash_key, options.merge(range_key: range_key)) + end + + # Find all objects by hash and range keys. + # + # @example find all ChamberTypes whose level is greater than 1 + # class ChamberType + # include Dynamoid::Document + # field :chamber_type, :string + # range :level, :integer + # table :key => :chamber_type + # end + # + # ChamberType.find_all_by_composite_key('DustVault', range_greater_than: 1) + # + # @param [String] hash_key of the objects to find + # @param [Hash] options the options for the range key + # @option options [Range] :range_value find the range key within this range + # @option options [Number] :range_greater_than find range keys greater than this + # @option options [Number] :range_less_than find range keys less than this + # @option options [Number] :range_gte find range keys greater than or equal to this + # @option options [Number] :range_lte find range keys less than or equal to this + # + # @return [Array] an array of all matching items + def find_all_by_composite_key(hash_key, options = {}) + ActiveSupport::Deprecation.warn('[Dynamoid] .find_all_composite_key is deprecated! 
Call .where instead of') + + Dynamoid.adapter.query(table_name, options.merge(hash_value: hash_key)).flat_map { |i| i }.collect do |item| + from_database(item) + end + end + + # Find all objects by using local secondary or global secondary index + # + # @example + # class User + # include Dynamoid::Document + # + # table :key => :email + # global_secondary_index hash_key: :age, range_key: :rank + # + # field :email, :string + # field :age, :integer + # field :gender, :string + # field :rank :number + # end + # + # # NOTE: the first param and the second param are both hashes, + # # so curly braces must be used on first hash param if sending both params + # User.find_all_by_secondary_index({ age: 5 }, range: { "rank.lte": 10 }) + # + # @param hash [Hash] conditions for the hash key e.g. +{ age: 5 }+ + # @param options [Hash] conditions on range key e.g. +{ "rank.lte": 10 }, query filter, projected keys, scan_index_forward etc. + # @return [Array] an array of all matching items + def find_all_by_secondary_index(hash, options = {}) + ActiveSupport::Deprecation.warn('[Dynamoid] .find_all_by_secondary_index is deprecated! Call .where instead of') + + range = options[:range] || {} + hash_key_field, hash_key_value = hash.first + range_key_field, range_key_value = range.first + + if range_key_field + range_key_field = range_key_field.to_s + range_key_op = 'eq' + if range_key_field.include?('.') + range_key_field, range_key_op = range_key_field.split('.', 2) + end + end + + # Find the index + index = find_index(hash_key_field, range_key_field) + raise Dynamoid::Errors::MissingIndex, "attempted to find #{[hash_key_field, range_key_field]}" if index.nil? + + # Query + query_key_conditions = {} + query_key_conditions[hash_key_field.to_sym] = [[:eq, hash_key_value]] + if range_key_field + query_key_conditions[range_key_field.to_sym] = [[range_key_op.to_sym, range_key_value]] + end + + query_non_key_conditions = options + .except(*Dynamoid::AdapterPlugin::AwsSdkV3::Query::OPTIONS_KEYS) + .except(:range) + .symbolize_keys + + query_options = options.slice(*Dynamoid::AdapterPlugin::AwsSdkV3::Query::OPTIONS_KEYS) + query_options[:index_name] = index.name + + Dynamoid.adapter.query(table_name, query_key_conditions, query_non_key_conditions, query_options) + .flat_map { |i| i } + .map { |item| from_database(item) } + end + + # Find using exciting method_missing finders attributes. Uses criteria + # chains under the hood to accomplish this neatness. + # + # @example find a user by a first name + # User.find_by_first_name('Josh') + # + # @example find all users by first and last name + # User.find_all_by_first_name_and_last_name('Josh', 'Symonds') + # + # @return [Dynamoid::Document|Array] the found object, or an array of found objects if all was somewhere in the method + # + # @private + # @since 0.2.0 + def method_missing(method, *args) + # Cannot use Symbol#start_with? because it was introduced in Ruby 2.7, but we support Ruby >= 2.3 + if method.to_s.start_with?('find') + ActiveSupport::Deprecation.warn("[Dynamoid] .#{method} is deprecated! 
Call .where instead of") + + finder = method.to_s.split('_by_').first + attributes = method.to_s.split('_by_').last.split('_and_') + + chain = Dynamoid::Criteria::Chain.new(self) + chain = chain.where({}.tap { |h| attributes.each_with_index { |attr, index| h[attr.to_sym] = args[index] } }) + + if finder.include?('all') + chain.all + else + chain.first + end + else + super + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/identity_map.rb b/dynamoid/lib/dynamoid/identity_map.rb new file mode 100644 index 000000000..9ec3b52da --- /dev/null +++ b/dynamoid/lib/dynamoid/identity_map.rb @@ -0,0 +1,91 @@ +# frozen_string_literal: true + +module Dynamoid + module IdentityMap + extend ActiveSupport::Concern + + def self.clear + Dynamoid.included_models.each { |m| m.identity_map.clear } + end + + module ClassMethods + def identity_map + @identity_map ||= {} + end + + # @private + def from_database(attrs = {}) + return super if identity_map_off? + + key = identity_map_key(attrs) + document = identity_map[key] + + if document.nil? + document = super + identity_map[key] = document + else + document.load(attrs) + end + + document + end + + # @private + def find_by_id(id, options = {}) + return super if identity_map_off? + + key = id.to_s + + if range_key = options[:range_key] + key += "::#{range_key}" + end + + identity_map[key] || super + end + + # @private + def identity_map_key(attrs) + key = attrs[hash_key].to_s + key += "::#{attrs[range_key]}" if range_key + key + end + + def identity_map_on? + Dynamoid::Config.identity_map + end + + def identity_map_off? + !identity_map_on? + end + end + + def identity_map + self.class.identity_map + end + + # @private + def save(*args) + return super if self.class.identity_map_off? + + if result = super + identity_map[identity_map_key] = self + end + result + end + + # @private + def delete + return super if self.class.identity_map_off? + + identity_map.delete(identity_map_key) + super + end + + # @private + def identity_map_key + key = hash_key.to_s + key += "::#{range_value}" if self.class.range_key + key + end + end +end diff --git a/dynamoid/lib/dynamoid/indexes.rb b/dynamoid/lib/dynamoid/indexes.rb new file mode 100644 index 000000000..7155a0aa2 --- /dev/null +++ b/dynamoid/lib/dynamoid/indexes.rb @@ -0,0 +1,332 @@ +# frozen_string_literal: true + +module Dynamoid + module Indexes + extend ActiveSupport::Concern + + # @private + # @see https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.CoreComponents.html#HowItWorks.CoreComponents.PrimaryKey + # Types allowed in indexes + PERMITTED_KEY_DYNAMODB_TYPES = %i[ + string + binary + number + ].freeze + + included do + class_attribute :local_secondary_indexes, instance_accessor: false + class_attribute :global_secondary_indexes, instance_accessor: false + self.local_secondary_indexes = {} + self.global_secondary_indexes = {} + end + + module ClassMethods + # Defines a Global Secondary index on a table. Keys can be specified as + # hash-only, or hash & range. 
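+ # Editor's note: a hedged usage sketch, not part of the original change set, for the
+ # identity map above; the User model is hypothetical and the flag is off by default.
+ #
+ #   Dynamoid.configure { |config| config.identity_map = true }
+ #
+ #   a = User.find('1')
+ #   b = User.find('1')
+ #   a.equal?(b) # => true - both calls return the same cached instance
+ #
+ #   Dynamoid::IdentityMap.clear # usually done once per request, see Middleware::IdentityMap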
+ # + # class Post + # include Dynamoid::Document + # + # field :category + # + # global_secondary_index hash_key: :category + # end + # + # The full example with all the options being specified: + # + # global_secondary_index hash_key: :category, + # range_key: :created_at, + # name: 'posts_category_created_at_index', + # projected_attributes: :all, + # read_capacity: 100, + # write_capacity: 20 + # + # Global secondary index should be declared after fields for mentioned + # hash key and optional range key are declared (with method +field+) + # + # The only mandatory option is +hash_key+. Raises + # +Dynamoid::Errors::InvalidIndex+ exception if passed incorrect + # options. + # + # @param [Hash] options the options to pass for this table + # @option options [Symbol] name the name for the index; this still gets + # namespaced. If not specified, will use a default name. + # @option options [Symbol] hash_key the index hash key column. + # @option options [Symbol] range_key the index range key column (if + # applicable). + # @option options [Symbol, Array] projected_attributes table + # attributes to project for this index. Can be +:keys_only+, +:all+ + # or an array of included fields. If not specified, defaults to + # +:keys_only+. + # @option options [Integer] read_capacity set the read capacity for the + # index; does not work on existing indexes. + # @option options [Integer] write_capacity set the write capacity for + # the index; does not work on existing indexes. + def global_secondary_index(options = {}) + unless options.present? + raise Dynamoid::Errors::InvalidIndex, 'empty index definition' + end + + unless options[:hash_key].present? + raise Dynamoid::Errors::InvalidIndex, 'A global secondary index requires a :hash_key to be specified' + end + + index_opts = { + read_capacity: Dynamoid::Config.read_capacity, + write_capacity: Dynamoid::Config.write_capacity + }.merge(options) + + index_opts[:dynamoid_class] = self + index_opts[:type] = :global_secondary + + index = Dynamoid::Indexes::Index.new(index_opts) + gsi_key = index_key(options[:hash_key], options[:range_key]) + global_secondary_indexes[gsi_key] = index + self + end + + # Defines a local secondary index on a table. Will use the same primary + # hash key as the table. + # + # class Comment + # include Dynamoid::Document + # + # table hash_key: :post_id + # range :created_at, :datetime + # field :author_id + # + # local_secondary_index range_key: :author_id + # end + # + # The full example with all the options being specified: + # + # local_secondary_index range_key: :created_at, + # name: 'posts_created_at_index', + # projected_attributes: :all + # + # Local secondary index should be declared after fields for mentioned + # hash key and optional range key are declared (with method +field+) as + # well as after +table+ method call. + # + # The only mandatory option is +range_key+. Raises + # +Dynamoid::Errors::InvalidIndex+ exception if passed incorrect + # options. + # + # @param [Hash] options options to pass for this index. + # @option options [Symbol] name the name for the index; this still gets + # namespaced. If not specified, a name is automatically generated. + # @option options [Symbol] range_key the range key column for the index. + # @option options [Symbol, Array] projected_attributes table + # attributes to project for this index. Can be +:keys_only+, +:all+ + # or an array of included fields. If not specified, defaults to + # +:keys_only+. + def local_secondary_index(options = {}) + unless options.present? 
+ raise Dynamoid::Errors::InvalidIndex, 'empty index definition' + end + + primary_hash_key = hash_key + primary_range_key = range_key + index_range_key = options[:range_key] + + unless index_range_key.present? + raise Dynamoid::Errors::InvalidIndex, 'A local secondary index ' \ + 'requires a :range_key to be specified' + end + + if primary_range_key.present? && index_range_key == primary_range_key + raise Dynamoid::Errors::InvalidIndex, 'A local secondary index ' \ + 'must use a different :range_key than the primary key' + end + + index_opts = options.merge( + dynamoid_class: self, + type: :local_secondary, + hash_key: primary_hash_key + ) + + index = Dynamoid::Indexes::Index.new(index_opts) + key = index_key(primary_hash_key, index_range_key) + local_secondary_indexes[key] = index + self + end + + # Returns an index by its hash key and optional range key. + # + # It works only for indexes without explicit name declared. + # + # @param hash [scalar] the hash key used to declare an index + # @param range [scalar] the range key used to declare an index (optional) + # @return [Dynamoid::Indexes::Index, nil] index object or nil if it isn't found + def find_index(hash, range = nil) + indexes[index_key(hash, range)] + end + + # Returns an index by its name + # + # @param name [string, symbol] the name of the index to lookup + # @return [Dynamoid::Indexes::Index, nil] index object or nil if it isn't found + def find_index_by_name(name) + string_name = name.to_s + indexes.each_value.detect { |i| i.name.to_s == string_name } + end + + # Returns true iff the provided hash[,range] key combo is a local + # secondary index. + # + # @param [Symbol] hash hash key name. + # @param [Symbol] range range key name. + # @return [Boolean] true iff provided keys correspond to a local + # secondary index. + def is_local_secondary_index?(hash, range = nil) + local_secondary_indexes[index_key(hash, range)].present? + end + + # Returns true iff the provided hash[,range] key combo is a global + # secondary index. + # + # @param [Symbol] hash hash key name. + # @param [Symbol] range range key name. + # @return [Boolean] true iff provided keys correspond to a global + # secondary index. + def is_global_secondary_index?(hash, range = nil) + global_secondary_indexes[index_key(hash, range)].present? + end + + # Generates a convenient lookup key name for a hash/range index. + # Should normally not be used directly. + # + # @param [Symbol] hash hash key name. + # @param [Symbol] range range key name. + # @return [String] returns "hash" if hash only, "hash_range" otherwise. + def index_key(hash, range = nil) + name = hash.to_s + name += "_#{range}" if range.present? + name + end + + # Generates a default index name. + # + # @param [Symbol] hash hash key name. + # @param [Symbol] range range key name. + # @return [String] index name of the form "table_name_index_index_key". + def index_name(hash, range = nil) + "#{table_name}_index_#{index_key(hash, range)}" + end + + # Convenience method to return all indexes on the table. + # + # @return [Hash] the combined hash of global and local + # secondary indexes. + def indexes + local_secondary_indexes.merge(global_secondary_indexes) + end + + # Returns an array of hash keys for all the declared Glocal Secondary + # Indexes. + # + # @return [Array[String]] array of hash keys + def indexed_hash_keys + global_secondary_indexes.map do |_name, index| + index.hash_key.to_s + end + end + end + + # Represents the attributes of a DynamoDB index. 
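+ # Editor's note: a short sketch, not part of the original change set, of the index lookup
+ # helpers above; the Post model is hypothetical.
+ #
+ #   class Post
+ #     include Dynamoid::Document
+ #     field :category
+ #     global_secondary_index hash_key: :category
+ #   end
+ #
+ #   Post.index_key(:category)                  # => "category"
+ #   Post.index_name(:category)                 # => "#{Post.table_name}_index_category"
+ #   Post.find_index(:category)                 # => the Dynamoid::Indexes::Index declared above
+ #   Post.is_global_secondary_index?(:category) # => true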
+ class Index + include ActiveModel::Validations + + PROJECTION_TYPES = %i[keys_only all].to_set + DEFAULT_PROJECTION_TYPE = :keys_only + + attr_accessor :name, :dynamoid_class, :type, :hash_key, :range_key, + :hash_key_schema, :range_key_schema, :projected_attributes, + :read_capacity, :write_capacity + + validate do + validate_index_type + validate_hash_key + validate_range_key + validate_projected_attributes + end + + def initialize(attrs = {}) + unless attrs[:dynamoid_class].present? + raise Dynamoid::Errors::InvalidIndex, ':dynamoid_class is required' + end + + @dynamoid_class = attrs[:dynamoid_class] + @type = attrs[:type] + @hash_key = attrs[:hash_key] + @range_key = attrs[:range_key] + @name = attrs[:name] || @dynamoid_class.index_name(@hash_key, @range_key) + @projected_attributes = + attrs[:projected_attributes] || DEFAULT_PROJECTION_TYPE + @read_capacity = attrs[:read_capacity] + @write_capacity = attrs[:write_capacity] + + raise Dynamoid::Errors::InvalidIndex, self unless valid? + end + + # Convenience method to determine the projection type for an index. + # Projection types are: :keys_only, :all, :include. + # + # @return [Symbol] the projection type. + def projection_type + if @projected_attributes.is_a? Array + :include + else + @projected_attributes + end + end + + private + + def validate_projected_attributes + unless @projected_attributes.is_a?(Array) || + PROJECTION_TYPES.include?(@projected_attributes) + errors.add(:projected_attributes, 'Invalid projected attributes specified.') + end + end + + def validate_index_type + unless @type.present? && + %i[local_secondary global_secondary].include?(@type) + errors.add(:type, 'Invalid index :type specified') + end + end + + def validate_hash_key + validate_index_key(:hash_key, @hash_key) + end + + def validate_range_key + validate_index_key(:range_key, @range_key) + end + + def validate_index_key(key_param, key_val) + return if key_val.blank? + + key_field_attributes = @dynamoid_class.attributes[key_val] + if key_field_attributes.blank? + errors.add(key_param, "No such field #{key_val} defined on table") + return + end + + key_dynamodb_type = dynamodb_type(key_field_attributes[:type], key_field_attributes) + if PERMITTED_KEY_DYNAMODB_TYPES.include?(key_dynamodb_type) + send(:"#{key_param}_schema=", { key_val => key_dynamodb_type }) + else + errors.add(key_param, "Index :#{key_param} is not a valid key type") + end + end + + def dynamodb_type(field_type, options) + PrimaryKeyTypeMapping.dynamodb_type(field_type, options) + rescue Errors::UnsupportedKeyType + field_type + end + end + end +end diff --git a/dynamoid/lib/dynamoid/loadable.rb b/dynamoid/lib/dynamoid/loadable.rb new file mode 100644 index 000000000..2fbe285a9 --- /dev/null +++ b/dynamoid/lib/dynamoid/loadable.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Dynamoid + module Loadable + extend ActiveSupport::Concern + + def load(attrs) + attrs.each do |key, value| + send(:"#{key}=", value) if respond_to?(:"#{key}=") + end + + self + end + + # Reload an object from the database -- if you suspect the object has changed in the data store and you need those + # changes to be reflected immediately, you would call this method. This is a consistent read. 
+ # + # @return [Dynamoid::Document] self + # + # @since 0.2.0 + def reload + options = { consistent_read: true } + + if self.class.range_key + options[:range_key] = range_value + end + + self.attributes = self.class.find(hash_key, **options).attributes + + @associations.each_value(&:reset) + @new_record = false + + self + end + end +end diff --git a/dynamoid/lib/dynamoid/log/formatter.rb b/dynamoid/lib/dynamoid/log/formatter.rb new file mode 100644 index 000000000..3b7f284e8 --- /dev/null +++ b/dynamoid/lib/dynamoid/log/formatter.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +module Dynamoid + module Log + # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/Log/Formatter.html + # https://docs.aws.amazon.com/sdk-for-ruby/v2/api/Seahorse/Client/Response.html + # https://aws.amazon.com/ru/blogs/developer/logging-requests/ + module Formatter + class Debug + def format(response) + bold = "\x1b[1m" + color = "\x1b[34m" + reset = "\x1b[0m" + + [ + response.context.operation.name, + "#{bold}#{color}\nRequest:\n#{reset}#{bold}", + JSON.pretty_generate(JSON.parse(response.context.http_request.body.string)), + "#{bold}#{color}\nResponse:\n#{reset}#{bold}", + JSON.pretty_generate(JSON.parse(response.context.http_response.body.string)), + reset + ].join("\n") + end + end + + class Compact + def format(response) + bold = "\x1b[1m" + reset = "\x1b[0m" + + [ + response.context.operation.name, + bold, + response.context.http_request.body.string, + reset + ].join(' ') + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/middleware/identity_map.rb b/dynamoid/lib/dynamoid/middleware/identity_map.rb new file mode 100644 index 000000000..4638a382e --- /dev/null +++ b/dynamoid/lib/dynamoid/middleware/identity_map.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module Middleware + class IdentityMap + def initialize(app) + @app = app + end + + def call(env) + Dynamoid::IdentityMap.clear + @app.call(env) + ensure + Dynamoid::IdentityMap.clear + end + end + end +end diff --git a/dynamoid/lib/dynamoid/persistence.rb b/dynamoid/lib/dynamoid/persistence.rb new file mode 100644 index 000000000..6d5d9c934 --- /dev/null +++ b/dynamoid/lib/dynamoid/persistence.rb @@ -0,0 +1,937 @@ +# frozen_string_literal: true + +require 'bigdecimal' +require 'securerandom' +require 'yaml' + +require 'dynamoid/persistence/import' +require 'dynamoid/persistence/update_fields' +require 'dynamoid/persistence/upsert' +require 'dynamoid/persistence/save' +require 'dynamoid/persistence/inc' +require 'dynamoid/persistence/update_validations' + +# encoding: utf-8 +module Dynamoid + # Persistence is responsible for dumping objects to and marshalling objects from the data store. It tries to reserialize + # values to be of the same type as when they were passed in, based on the fields in the class. + module Persistence + extend ActiveSupport::Concern + + attr_accessor :new_record, :destroyed + alias new_record? new_record + alias destroyed? destroyed + + # @private + UNIX_EPOCH_DATE = Date.new(1970, 1, 1).freeze + + module ClassMethods + def table_name + table_base_name = options[:name] || base_class.name.split('::').last.downcase.pluralize + + @table_name ||= [Dynamoid::Config.namespace.to_s, table_base_name].reject(&:empty?).join('_') + end + + # Create a table. + # + # Uses a configuration specified in a model class (with the +table+ + # method) e.g. table name, schema (hash and range keys), global and local + # secondary indexes, billing mode and write/read capacity. 
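+ # Editor's note: a hedged configuration sketch, not part of the original change set; the
+ # +log_formatter+ option name and the Rails middleware wiring are assumptions based on the
+ # classes defined above.
+ #
+ #   Dynamoid.configure do |config|
+ #     config.log_formatter = Dynamoid::Log::Formatter::Compact.new
+ #   end
+ #
+ #   # In a Rails app the identity map can be cleared per request with:
+ #   # config.middleware.use Dynamoid::Middleware::IdentityMap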
+ # + # For instance, here + # + # class User + # include Dynamoid::Document + # + # table key: :uuid + # range :last_name + # + # field :first_name + # field :last_name + # end + # + # User.create_table + # + # The +create_table+ method call will create a table +dynamoid_users+ with + # hash key +uuid+ and range key +last_name+, the DynamoDB default billing mode and + # the Dynamoid default read/write capacity units (100/20). + # + # All the configuration can be overridden with the +options+ argument. + # + # User.create_table(table_name: 'users', read_capacity: 200, write_capacity: 40) + # + # Dynamoid creates a table synchronously by default. DynamoDB table + # creation is an asynchronous operation and a client should wait until the + # table status changes to +ACTIVE+ and the table becomes available. That's + # why Dynamoid polls the table status and returns only when the + # table becomes available. + # + # Polling is configured with the +Dynamoid::Config.sync_retry_max_times+ and + # +Dynamoid::Config.sync_retry_wait_seconds+ configuration options. If + # table creation takes more time than the configured waiting time then + # Dynamoid stops polling and returns +true+. + # + # To keep the behaviour asynchronous and not wait until the + # table is created, the +sync: false+ option should be specified. + # + # User.create_table(sync: false) + # + # Subsequent method calls for the same table will be ignored. + # + # @param options [Hash] + # + # @option options [Symbol] :table_name name of the table + # @option options [Symbol] :id hash key name of the table + # @option options [Symbol] :hash_key_type Dynamoid type of the hash key - +:string+, +:integer+ or any other scalar type + # @option options [Hash] :range_key a Hash with range key name and type in format +{ <name> => <type> }+ e.g.
+{ last_name: :string }+ + # @option options [String] :billing_mode billing mode of a table - either +PROVISIONED+ (default) or +PAY_PER_REQUEST+ (for On-Demand Mode) + # @option options [Integer] :read_capacity read capacity units for the table; does not work on existing tables and is ignored when billing mode is +PAY_PER_REQUEST+ + # @option options [Integer] :write_capacity write capacity units for the table; does not work on existing tables and is ignored when billing mode is +PAY_PER_REQUEST+ + # @option options [Hash] :local_secondary_indexes + # @option options [Hash] :global_secondary_indexes + # @option options [true|false] :sync specifies should the method call be synchronous and wait until a table is completely created + # + # @return [true|false] Whether a table created successfully + # @since 0.4.0 + def create_table(options = {}) + range_key_hash = if range_key + { range_key => PrimaryKeyTypeMapping.dynamodb_type(attributes[range_key][:type], attributes[range_key]) } + end + + options = { + id: hash_key, + table_name: table_name, + billing_mode: capacity_mode, + write_capacity: write_capacity, + read_capacity: read_capacity, + range_key: range_key_hash, + hash_key_type: PrimaryKeyTypeMapping.dynamodb_type(attributes[hash_key][:type], attributes[hash_key]), + local_secondary_indexes: local_secondary_indexes.values, + global_secondary_indexes: global_secondary_indexes.values + }.merge(options) + + created_successfuly = Dynamoid.adapter.create_table(options[:table_name], options[:id], options) + + if created_successfuly && self.options[:expires] + attribute = self.options[:expires][:field] + Dynamoid.adapter.update_time_to_live(options[:table_name], attribute) + end + + self + end + + # Deletes the table for the model. + # + # Dynamoid deletes a table asynchronously and doesn't wait until a table + # is deleted completely. + # + # Subsequent method calls for the same table will be ignored. + # @return [Model class] self + def delete_table + Dynamoid.adapter.delete_table(table_name) + self + end + + # @private + def from_database(attrs = {}) + klass = choose_right_class(attrs) + attrs_undumped = Undumping.undump_attributes(attrs, klass.attributes) + klass.new(attrs_undumped).tap { |r| r.new_record = false } + end + + # Create several models at once. + # + # users = User.import([{ name: 'a' }, { name: 'b' }]) + # + # +import+ is a relatively low-level method and bypasses some + # mechanisms like callbacks and validation. + # + # It sets timestamp fields +created_at+ and +updated_at+ if they are + # blank. It sets a hash key field as well if it's blank. It expects that + # the hash key field is +string+ and sets a random UUID value if the field + # value is blank. All the field values are type casted to the declared + # types. + # + # It works efficiently and uses the `BatchWriteItem` operation. In order + # to cope with throttling it uses a backoff strategy if it's specified with + # `Dynamoid::Config.backoff` configuration option. + # + # Because of the nature of DynamoDB and its limits only 25 models can be + # saved at once. So multiple HTTP requests can be sent to DynamoDB. + # + # @param array_of_attributes [Array] + # @return [Array] Created models + def import(array_of_attributes) + Import.call(self, array_of_attributes) + end + + # Create a model. + # + # Initializes a new model and immediately saves it to DynamoDB. + # + # User.create(first_name: 'Mark', last_name: 'Tyler') + # + # Accepts both Hash and Array of Hashes and can create several models. 
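+ # Editor's note: a small sketch, not part of the original change set, of the +import+
+ # method documented above; the User model is hypothetical.
+ #
+ #   users = User.import([{ name: 'Alice' }, { name: 'Bob' }])
+ #   users.size     # => 2
+ #   users.first.id # => a randomly generated UUID, since the hash key was blank
+ #   # Validations and callbacks are bypassed; BatchWriteItem is used under the hood.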
+ # + # User.create([{ first_name: 'Alice' }, { first_name: 'Bob' }]) + # + # Creates a model and pass it into a block to set other attributes. + # + # User.create(first_name: 'Mark') do |u| + # u.age = 21 + # end + # + # Validates model and runs callbacks. + # + # @param attrs [Hash|Array[Hash]] Attributes of the models + # @param block [Proc] Block to process a document after initialization + # @return [Dynamoid::Document] The created document + # @since 0.2.0 + def create(attrs = {}, &block) + if attrs.is_a?(Array) + attrs.map { |attr| create(attr, &block) } + else + build(attrs, &block).tap(&:save) + end + end + + # Create a model. + # + # Initializes a new object and immediately saves it to the Dynamoid. + # Raises an exception +Dynamoid::Errors::DocumentNotValid+ if validation + # failed. Accepts both Hash and Array of Hashes and can create several + # models. + # + # @param attrs [Hash|Array[Hash]] Attributes with which to create the object. + # @param block [Proc] Block to process a document after initialization + # @return [Dynamoid::Document] The created document + # @since 0.2.0 + def create!(attrs = {}, &block) + if attrs.is_a?(Array) + attrs.map { |attr| create!(attr, &block) } + else + build(attrs, &block).tap(&:save!) + end + end + + # Update document with provided attributes. + # + # Instantiates document and saves changes. Runs validations and + # callbacks. Don't save changes if validation fails. + # + # User.update('1', age: 26) + # + # If range key is declared for a model it should be passed as well: + # + # User.update('1', 'Tylor', age: 26) + # + # @param hash_key [Scalar value] hash key + # @param range_key_value [Scalar value] range key (optional) + # @param attrs [Hash] + # @return [Dynamoid::Document] Updated document + def update(hash_key, range_key_value = nil, attrs) + model = find(hash_key, range_key: range_key_value, consistent_read: true) + model.update_attributes(attrs) + model + end + + # Update document with provided attributes. + # + # Instantiates document and saves changes. Runs validations and + # callbacks. + # + # User.update!('1', age: 26) + # + # If range key is declared for a model it should be passed as well: + # + # User.update('1', 'Tylor', age: 26) + # + # Raises +Dynamoid::Errors::DocumentNotValid+ exception if validation fails. + # + # @param hash_key [Scalar value] hash key + # @param range_key_value [Scalar value] range key (optional) + # @param attrs [Hash] + # @return [Dynamoid::Document] Updated document + def update!(hash_key, range_key_value = nil, attrs) + model = find(hash_key, range_key: range_key_value, consistent_read: true) + model.update_attributes!(attrs) + model + end + + # Update document. + # + # Doesn't run validations and callbacks. + # + # User.update_fields('1', age: 26) + # + # If range key is declared for a model it should be passed as well: + # + # User.update_fields('1', 'Tylor', age: 26) + # + # Can make a conditional update so a document will be updated only if it + # meets the specified conditions. Conditions can be specified as a +Hash+ + # with +:if+ key: + # + # User.update_fields('1', { age: 26 }, { if: { version: 1 } }) + # + # Here +User+ model has an integer +version+ field and the document will + # be updated only if the +version+ attribute currently has value 1. + # + # If a document with specified hash and range keys doesn't exist or + # conditions were specified and failed the method call returns +nil+. + # + # To check if some attribute (or attributes) isn't stored in a DynamoDB + # item (e.g. 
it wasn't set explicitly) there is another condition - + # +unless_exists+: + # + # user = User.create(name: 'Tylor') + # User.update_fields(user.id, { age: 18 }, { unless_exists: [:age] }) + # + # +update_fields+ uses the +UpdateItem+ operation so it saves changes and + # loads an updated document back with one HTTP request. + # + # Raises a +Dynamoid::Errors::UnknownAttribute+ exception if any of the + # attributes is not on the model + # + # @param hash_key_value [Scalar value] hash key + # @param range_key_value [Scalar value] range key (optional) + # @param attrs [Hash] + # @param conditions [Hash] (optional) + # @return [Dynamoid::Document|nil] Updated document + def update_fields(hash_key_value, range_key_value = nil, attrs = {}, conditions = {}) + optional_params = [range_key_value, attrs, conditions].compact + if optional_params.first.is_a?(Hash) + range_key_value = nil + attrs, conditions = optional_params[0..1] + else + range_key_value = optional_params.first + attrs, conditions = optional_params[1..2] + end + + UpdateFields.call(self, + partition_key: hash_key_value, + sort_key: range_key_value, + attributes: attrs, + conditions: conditions) + end + + # Update an existing document or create a new one. + # + # If a document with specified hash and range keys doesn't exist it + # creates a new document with specified attributes. Doesn't run + # validations and callbacks. + # + # User.upsert('1', age: 26) + # + # If range key is declared for a model it should be passed as well: + # + # User.upsert('1', 'Tylor', age: 26) + # + # Can make a conditional update so a document will be updated only if it + # meets the specified conditions. Conditions can be specified as a +Hash+ + # with +:if+ key: + # + # User.upsert('1', { age: 26 }, { if: { version: 1 } }) + # + # Here +User+ model has an integer +version+ field and the document will + # be updated only if the +version+ attribute currently has value 1. + # + # To check if some attribute (or attributes) isn't stored in a DynamoDB + # item (e.g. it wasn't set explicitly) there is another condition - + # +unless_exists+: + # + # user = User.create(name: 'Tylor') + # User.upsert(user.id, { age: 18 }, { unless_exists: [:age] }) + # + # If conditions were specified and failed the method call returns +nil+. + # + # +upsert+ uses the +UpdateItem+ operation so it saves changes and loads + # an updated document back with one HTTP request. + # + # Raises a +Dynamoid::Errors::UnknownAttribute+ exception if any of the + # attributes is not on the model + # + # @param hash_key_value [Scalar value] hash key + # @param range_key_value [Scalar value] range key (optional) + # @param attrs [Hash] + # @param conditions [Hash] (optional) + # @return [Dynamoid::Document|nil] Updated document + def upsert(hash_key_value, range_key_value = nil, attrs = {}, conditions = {}) + optional_params = [range_key_value, attrs, conditions].compact + if optional_params.first.is_a?(Hash) + range_key_value = nil + attrs, conditions = optional_params[0..1] + else + range_key_value = optional_params.first + attrs, conditions = optional_params[1..2] + end + + Upsert.call(self, + partition_key: hash_key_value, + sort_key: range_key_value, + attributes: attrs, + conditions: conditions) + end + + # Increase a numeric field by specified value. + # + # User.inc('1', age: 2) + # + # Can update several fields at once. 
+ # + # User.inc('1', age: 2, version: 1) + # + # If range key is declared for a model it should be passed as well: + # + # User.inc('1', 'Tylor', age: 2) + # + # It's an atomic operation, so it does not interfere with other write + # requests. + # + # Uses the efficient low-level +UpdateItem+ operation and makes only one HTTP + # request. + # + # Doesn't run validations and callbacks. Doesn't update +created_at+ and + # +updated_at+ either. + # + # When the `:touch` option is passed the timestamp columns are updated. If + # attribute names are passed, they are updated along with the updated_at + # attribute: + # + # User.inc('1', age: 2, touch: true) + # User.inc('1', age: 2, touch: :viewed_at) + # User.inc('1', age: 2, touch: [:viewed_at, :accessed_at]) + # + # @param hash_key_value [Scalar value] hash key + # @param range_key_value [Scalar value] range key (optional) + # @param counters [Hash] value to increase by + # @option counters [true | Symbol | Array[Symbol]] :touch to update the updated_at attribute and optionally the specified ones + # @return [Model class] self + def inc(hash_key_value, range_key_value = nil, counters) + Inc.call(self, hash_key_value, range_key_value, counters) + self + end + end + + # Update document timestamps. + # + # Sets the +updated_at+ attribute to the current DateTime. + # + # post.touch + # + # Can update other fields in addition with the same timestamp if their + # names are passed as arguments. + # + # user.touch(:last_login_at, :viewed_at) + # + # A specific time value can be saved instead of the current one: + # + # user.touch(time: 1.hour.ago) + # + # No validation is performed and only the +after_touch+ callback is called. + # + # The method must be used on a persisted object, otherwise + # +Dynamoid::Errors::Error+ will be raised. + # + # @param names [*Symbol] a list of attribute names to update (optional) + # @param time [Time] datetime value that can be used instead of the current time (optional) + # @return [Dynamoid::Document] self + def touch(*names, time: nil) + if new_record? + raise Dynamoid::Errors::Error, 'cannot touch on a new or destroyed record object' + end + + time_to_assign = time || DateTime.now + + self.updated_at = time_to_assign + names.each do |name| + attributes[name] = time_to_assign + end + + attribute_names = names.map(&:to_sym) + [:updated_at] + attributes_with_values = attributes.slice(*attribute_names) + + run_callbacks :touch do + self.class.update_fields(hash_key, range_value, attributes_with_values) + clear_attribute_changes(attribute_names.map(&:to_s)) + end + + self + end + + # Is this object persisted in DynamoDB? + # + # user = User.new + # user.persisted? # => false + # + # user.save + # user.persisted? # => true + # + # @return [true|false] + # @since 0.2.0 + def persisted? + !(new_record? || @destroyed) + end + + # Create new model or persist changes. + # + # Runs validations and callbacks. Returns +true+ if saving is successful + # and +false+ otherwise. + # + # user = User.new + # user.save # => true + # + # user.age = 26 + # user.save # => true + # + # Validation can be skipped with the +validate: false+ option: + # + # user = User.new(age: -1) + # user.save(validate: false) # => true + # + # +save+ by default sets the timestamp attributes - +created_at+ and + # +updated_at+ when it creates a new model, and updates the +updated_at+ + # attribute when it updates an existing one. 
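+ # + # For example (assuming timestamps are enabled for the model): + # + # user = User.create(name: 'Mark') # sets created_at and updated_at + # user.age = 27 + # user.save # refreshes updated_at only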
+ # + # Updating the +updated_at+ attribute when saving an existing model can be skipped with + # the +touch: false+ option: + # + # user.save(touch: false) + # + # If a model is new and the hash key (+id+ by default) is not assigned yet, + # it is implicitly assigned a random UUID value. + # + # If the +lock_version+ attribute is declared it will be incremented. If it's blank then it will be initialized with 1. + # + # A +save+ method call raises a +Dynamoid::Errors::RecordNotUnique+ exception + # if the primary key (hash key + optional range key) already exists in the + # table. + # + # A +save+ method call raises a +Dynamoid::Errors::StaleObjectError+ exception + # if there is a +lock_version+ attribute and the document in the table was + # already changed concurrently and +lock_version+ was consequently + # increased. + # + # When a table is not created yet, the first +save+ method call will create + # it. This is useful in a test environment to avoid explicit table + # creation. + # + # @param options [Hash] (optional) + # @option options [true|false] :validate validate a model or not - +true+ by default (optional) + # @option options [true|false] :touch update timestamp fields or not - +true+ by default (optional) + # @return [true|false] Whether saving was successful or not + # @since 0.2.0 + def save(options = {}) + if Dynamoid.config.create_table_on_save + self.class.create_table(sync: true) + end + + create_or_update = new_record? ? :create : :update + + run_callbacks(:save) do + run_callbacks(create_or_update) do + Save.call(self, touch: options[:touch]) + end + end + end + + # Update multiple attributes at once, saving the object once the updates + # are complete. Returns +true+ if saving is successful and +false+ + # otherwise. + # + # user.update_attributes(age: 27, last_name: 'Tylor') + # + # Raises a +Dynamoid::Errors::UnknownAttribute+ exception if any of the + # attributes is not on the model. + # + # @param attributes [Hash] a hash of attributes to update + # @return [true|false] Whether updating was successful or not + # @since 0.2.0 + def update_attributes(attributes) + attributes.each { |attribute, value| write_attribute(attribute, value) } + save + end + + # Update multiple attributes at once, saving the object once the updates + # are complete. + # + # user.update_attributes!(age: 27, last_name: 'Tylor') + # + # Raises a +Dynamoid::Errors::DocumentNotValid+ exception if validation + # fails. + # + # Raises a +Dynamoid::Errors::UnknownAttribute+ exception if any of the + # attributes is not on the model. + # + # @param attributes [Hash] a hash of attributes to update + def update_attributes!(attributes) + attributes.each { |attribute, value| write_attribute(attribute, value) } + save! + end + + # Update a single attribute, saving the object afterwards. + # + # Returns +true+ if saving is successful and +false+ otherwise. + # + # user.update_attribute(:last_name, 'Tylor') + # + # Validation is skipped. + # + # Raises a +Dynamoid::Errors::UnknownAttribute+ exception if the + # attribute is not on the model. + # + # @param attribute [Symbol] attribute name to update + # @param value [Object] the value to assign to it + # @return [Dynamoid::Document] self + # + # @since 0.2.0 + def update_attribute(attribute, value) + # final implementation is in the Dynamoid::Validation module + write_attribute(attribute, value) + save + self + end + + # Update a model. + # + # Doesn't run validation. Runs only +update+ callbacks. Reloads all attribute values. 
+ # + # Accepts a mandatory block in order to specify operations which will modify + # attributes. Supports the following operations: +add+, +delete+ and +set+. + # + # Operation +add+ just adds a value for numeric attributes and joins + # collections if the attribute is a collection (one of +array+, +set+ or + # +map+). + # + # user.update! do |t| + # t.add(age: 1, followers_count: 5) + # t.add(hobbies: ['skiing', 'climbing']) + # end + # + # Operation +delete+ is applied to collection attribute types and + # subtracts one collection from another. + # + # user.update! do |t| + # t.delete(hobbies: ['skiing']) + # end + # + # Operation +set+ just changes an attribute value: + # + # user.update! do |t| + # t.set(age: 21) + # end + # + # All the operations work like the +ADD+, +DELETE+ and +PUT+ actions supported + # by the +AttributeUpdates+ + # {parameter}[https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.AttributeUpdates.html] + # of the +UpdateItem+ operation. + # + # It's an atomic operation. So adding or deleting elements in a collection + # or incrementing or decrementing a numeric field is atomic and does not + # interfere with other write requests. + # + # Can update a model conditionally: + # + # user.update!(if: { age: 20 }) do |t| + # t.add(age: 1) + # end + # + # To check if some attribute (or attributes) isn't stored in a DynamoDB + # item (e.g. it wasn't set explicitly) there is another condition - + # +unless_exists+: + # + # user = User.create(name: 'Tylor') + # user.update!(unless_exists: [:age]) do |t| + # t.set(age: 18) + # end + # + # If a document doesn't meet the conditions it raises a + # +Dynamoid::Errors::StaleObjectError+ exception. + # + # It will increment the +lock_version+ attribute if a table has the column, + # but will not check it. Thus, a concurrent +save+ call will never cause an + # +update!+ to fail, but an +update!+ may cause a concurrent +save+ to + # fail. + # + # @param conditions [Hash] Conditions on model attributes to make a conditional update (optional) + # @return [Dynamoid::Document] self + def update!(conditions = {}) + run_callbacks(:update) do + options = {} + if range_key + value = read_attribute(range_key) + attribute_options = self.class.attributes[range_key] + options[:range_key] = Dumping.dump_field(value, attribute_options) + end + + begin + table_name = self.class.table_name + update_item_options = options.merge(conditions: conditions) + + new_attrs = Dynamoid.adapter.update_item(table_name, hash_key, update_item_options) do |t| + t.add(lock_version: 1) if self.class.attributes[:lock_version] + + if self.class.timestamps_enabled? + time_now = DateTime.now.in_time_zone(Time.zone) + time_now_dumped = Dumping.dump_field(time_now, self.class.attributes[:updated_at]) + t.set(updated_at: time_now_dumped) + end + + yield t + end + load(Undumping.undump_attributes(new_attrs, self.class.attributes)) + rescue Dynamoid::Errors::ConditionalCheckFailedException + raise Dynamoid::Errors::StaleObjectError.new(self, 'update') + end + end + + self + end + + # Update a model. + # + # Doesn't run validation. Runs only +update+ callbacks. Reloads all attribute values. + # + # Accepts a mandatory block in order to specify operations which will modify + # attributes. Supports the following operations: +add+, +delete+ and +set+. + # + # Operation +add+ just adds a value for numeric attributes and joins + # collections if the attribute is a collection (one of +array+, +set+ or + # +map+). 
+ # + # user.update do |t| + # t.add(age: 1, followers_count: 5) + # t.add(hobbies: ['skiing', 'climbing']) + # end + # + # Operation +delete+ is applied to collection attribute types and + # subtracts one collection from another. + # + # user.update do |t| + # t.delete(hobbies: ['skiing']) + # end + # + # If it's applied to a scalar attribute then the item's attribute is + # removed entirely: + # + # user.update do |t| + # t.delete(age: nil) + # end + # + # or even without a value at all: + # + # user.update do |t| + # t.delete(:age) + # end + # + # Operation +set+ just changes an attribute value: + # + # user.update do |t| + # t.set(age: 21) + # end + # + # All the operations work like the +ADD+, +DELETE+ and +PUT+ actions supported + # by the +AttributeUpdates+ + # {parameter}[https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.AttributeUpdates.html] + # of the +UpdateItem+ operation. + # + # Can update a model conditionally: + # + # user.update(if: { age: 20 }) do |t| + # t.add(age: 1) + # end + # + # To check if some attribute (or attributes) isn't stored in a DynamoDB + # item (e.g. it wasn't set explicitly) there is another condition - + # +unless_exists+: + # + # user = User.create(name: 'Tylor') + # user.update(unless_exists: [:age]) do |t| + # t.set(age: 18) + # end + # + # If a document doesn't meet the conditions it just returns +false+. Otherwise it returns +true+. + # + # It will increment the +lock_version+ attribute if a table has the column, + # but will not check it. Thus, a concurrent +save+ call will never cause an + # +update!+ to fail, but an +update!+ may cause a concurrent +save+ to + # fail. + # + # @param conditions [Hash] Conditions on model attributes to make a conditional update (optional) + # @return [true|false] - whether conditions are met and updating is successful + def update(conditions = {}, &block) + update!(conditions, &block) + true + rescue Dynamoid::Errors::StaleObjectError + false + end + + # Change numeric attribute value. + # + # Initializes the attribute to zero if +nil+ and adds the specified value (1 by + # default). Only makes sense for number-based attributes. + # + # user.increment(:followers_count) + # user.increment(:followers_count, 2) + # + # @param attribute [Symbol] attribute name + # @param by [Numeric] value to add (optional) + # @return [Dynamoid::Document] self + def increment(attribute, by = 1) + self[attribute] ||= 0 + self[attribute] += by + self + end + + # Change numeric attribute value and save a model. + # + # Initializes the attribute to zero if +nil+ and adds the specified value (1 by + # default). Only makes sense for number-based attributes. + # + # user.increment!(:followers_count) + # user.increment!(:followers_count, 2) + # + # Only `attribute` is saved. The model itself is not saved. So any other + # modified attributes will still be dirty. Validations and callbacks are + # skipped. + # + # When the `:touch` option is passed the timestamp columns are updated. 
If + # attribute names are passed, they are updated along with the updated_at + # attribute: + # + # user.increment!(:followers_count, touch: true) + # user.increment!(:followers_count, touch: :viewed_at) + # user.increment!(:followers_count, touch: [:viewed_at, :accessed_at]) + # + # @param attribute [Symbol] attribute name + # @param by [Numeric] value to add (optional) + # @param touch [true | Symbol | Array[Symbol]] to update the updated_at attribute and optionally the specified ones + # @return [Dynamoid::Document] self + def increment!(attribute, by = 1, touch: nil) + increment(attribute, by) + change = read_attribute(attribute) - (attribute_was(attribute) || 0) + + run_callbacks :touch do + self.class.inc(hash_key, range_value, attribute => change, touch: touch) + clear_attribute_changes(attribute) + end + + self + end + + # Change numeric attribute value. + # + # Initializes the attribute to zero if +nil+ and subtracts the specified value + # (1 by default). Only makes sense for number-based attributes. + # + # user.decrement(:followers_count) + # user.decrement(:followers_count, 2) + # + # @param attribute [Symbol] attribute name + # @param by [Numeric] value to subtract (optional) + # @return [Dynamoid::Document] self + def decrement(attribute, by = 1) + increment(attribute, -by) + end + + # Change numeric attribute value and save a model. + # + # Initializes the attribute to zero if +nil+ and subtracts the specified value + # (1 by default). Only makes sense for number-based attributes. + # + # user.decrement!(:followers_count) + # user.decrement!(:followers_count, 2) + # + # Only `attribute` is saved. The model itself is not saved. So any other + # modified attributes will still be dirty. Validations and callbacks are + # skipped. + # + # When the `:touch` option is passed the timestamp columns are updated. If + # attribute names are passed, they are updated along with the updated_at + # attribute: + # + # user.decrement!(:followers_count, touch: true) + # user.decrement!(:followers_count, touch: :viewed_at) + # user.decrement!(:followers_count, touch: [:viewed_at, :accessed_at]) + # + # @param attribute [Symbol] attribute name + # @param by [Numeric] value to subtract (optional) + # @param touch [true | Symbol | Array[Symbol]] to update the updated_at attribute and optionally the specified ones + # @return [Dynamoid::Document] self + def decrement!(attribute, by = 1, touch: nil) + increment!(attribute, -by, touch: touch) + end + + # Delete a model. + # + # Runs callbacks. + # + # Supports optimistic locking with the +lock_version+ attribute and doesn't + # delete a model if it's already changed. + # + # Returns +self+ if deleted successfully and +false+ otherwise. + # + # @return [Dynamoid::Document|false] whether deleted successfully + # @since 0.2.0 + def destroy + ret = run_callbacks(:destroy) do + delete + end + + @destroyed = true + + ret == false ? false : self + end + + # Delete a model. + # + # Runs callbacks. + # + # Supports optimistic locking with the +lock_version+ attribute and doesn't + # delete a model if it's already changed. + # + # Raises a +Dynamoid::Errors::RecordNotDestroyed+ exception if deleting the model + # failed. + def destroy! + destroy || (raise Dynamoid::Errors::RecordNotDestroyed, self) + end + + # Delete a model. + # + # Supports optimistic locking with the +lock_version+ attribute and doesn't + # delete a model if it's already changed. + # + # Raises a +Dynamoid::Errors::StaleObjectError+ exception if it cannot delete the + # model. 
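+ # + # A minimal usage sketch: + # + # user = User.create(name: 'Josh') + # user.delete # removes the item; unlike #destroy, no callbacks are run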
+ # + # @return [Dynamoid::Document] self + # @since 0.2.0 + def delete + options = range_key ? { range_key: Dumping.dump_field(read_attribute(range_key), self.class.attributes[range_key]) } : {} + + # Add an optimistic locking check if the lock_version column exists + if self.class.attributes[:lock_version] + conditions = { if: {} } + conditions[:if][:lock_version] = + if changes[:lock_version].nil? + lock_version + else + changes[:lock_version][0] + end + options[:conditions] = conditions + end + + @destroyed = true + + Dynamoid.adapter.delete(self.class.table_name, hash_key, options) + + self.class.associations.each_key do |name| + send(name).disassociate_source + end + + self + rescue Dynamoid::Errors::ConditionalCheckFailedException + raise Dynamoid::Errors::StaleObjectError.new(self, 'delete') + end + end +end diff --git a/dynamoid/lib/dynamoid/persistence/import.rb b/dynamoid/lib/dynamoid/persistence/import.rb new file mode 100644 index 000000000..4d0f01c98 --- /dev/null +++ b/dynamoid/lib/dynamoid/persistence/import.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +require 'securerandom' + +module Dynamoid + module Persistence + # @private + class Import + def self.call(model_class, array_of_attributes) + new(model_class, array_of_attributes).call + end + + def initialize(model_class, array_of_attributes) + @model_class = model_class + @array_of_attributes = array_of_attributes + end + + def call + models = @array_of_attributes.map(&method(:build_model)) + + unless Dynamoid.config.backoff + import(models) + else + import_with_backoff(models) + end + + models.each do |m| + m.new_record = false + m.clear_changes_information + end + models + end + + private + + def build_model(attributes) + attrs = attributes.symbolize_keys + + if @model_class.timestamps_enabled? + time_now = DateTime.now.in_time_zone(Time.zone) + attrs[:created_at] ||= time_now + attrs[:updated_at] ||= time_now + end + + @model_class.build(attrs).tap do |model| + model.hash_key = SecureRandom.uuid if model.hash_key.blank? 
+ end + end + + def import_with_backoff(models) + backoff = nil + table_name = @model_class.table_name + items = array_of_dumped_attributes(models) + + Dynamoid.adapter.batch_write_item(table_name, items) do |has_unprocessed_items| + if has_unprocessed_items + backoff ||= Dynamoid.config.build_backoff + backoff.call + else + backoff = nil + end + end + end + + def import(models) + Dynamoid.adapter.batch_write_item(@model_class.table_name, array_of_dumped_attributes(models)) + end + + def array_of_dumped_attributes(models) + models.map do |m| + Dumping.dump_attributes(m.attributes, @model_class.attributes) + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/persistence/inc.rb b/dynamoid/lib/dynamoid/persistence/inc.rb new file mode 100644 index 000000000..7b6b48102 --- /dev/null +++ b/dynamoid/lib/dynamoid/persistence/inc.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +module Dynamoid + module Persistence + # @private + class Inc + def self.call(model_class, hash_key, range_key = nil, counters) + new(model_class, hash_key, range_key, counters).call + end + + # rubocop:disable Style/OptionalArguments + def initialize(model_class, hash_key, range_key = nil, counters) + @model_class = model_class + @hash_key = hash_key + @range_key = range_key + @counters = counters + end + # rubocop:enable Style/OptionalArguments + + def call + touch = @counters.delete(:touch) + + Dynamoid.adapter.update_item(@model_class.table_name, @hash_key, update_item_options) do |t| + @counters.each do |name, value| + t.add(name => cast_and_dump_attribute_value(name, value)) + end + + if touch + value = DateTime.now.in_time_zone(Time.zone) + + timestamp_attributes_to_touch(touch).each do |name| + t.set(name => cast_and_dump_attribute_value(name, value)) + end + end + end + end + + private + + def update_item_options + if @model_class.range_key + range_key_options = @model_class.attributes[@model_class.range_key] + value_casted = TypeCasting.cast_field(@range_key, range_key_options) + value_dumped = Dumping.dump_field(value_casted, range_key_options) + { range_key: value_dumped } + else + {} + end + end + + def cast_and_dump_attribute_value(name, value) + value_casted = TypeCasting.cast_field(value, @model_class.attributes[name]) + Dumping.dump_field(value_casted, @model_class.attributes[name]) + end + + def timestamp_attributes_to_touch(touch) + return [] unless touch + + names = [] + names << :updated_at if @model_class.timestamps_enabled? + names += Array.wrap(touch) if touch != true + names + end + end + end +end diff --git a/dynamoid/lib/dynamoid/persistence/save.rb b/dynamoid/lib/dynamoid/persistence/save.rb new file mode 100644 index 000000000..787f9ca0a --- /dev/null +++ b/dynamoid/lib/dynamoid/persistence/save.rb @@ -0,0 +1,107 @@ +# frozen_string_literal: true + +module Dynamoid + module Persistence + # @private + class Save + def self.call(model, **options) + new(model, **options).call + end + + def initialize(model, touch: nil) + @model = model + @touch = touch # touch=false means explicit disabling of updating the `updated_at` attribute + end + + def call + @model.hash_key = SecureRandom.uuid if @model.hash_key.blank? + + return true unless @model.changed? + + @model.created_at ||= DateTime.now.in_time_zone(Time.zone) if @model.class.timestamps_enabled? + + if @model.class.timestamps_enabled? && !@model.updated_at_changed? && !(@touch == false && @model.persisted?) 
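+ # Refresh updated_at unless it was already set manually or touching was explicitly disabled with touch: false on a persisted model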
+ @model.updated_at = DateTime.now.in_time_zone(Time.zone) + end + + # Add an optimistic locking check if the lock_version column exists + if @model.class.attributes[:lock_version] + @model.lock_version = (@model.lock_version || 0) + 1 + end + + if @model.new_record? + attributes_dumped = Dumping.dump_attributes(@model.attributes, @model.class.attributes) + Dynamoid.adapter.write(@model.class.table_name, attributes_dumped, conditions_for_write) + else + attributes_to_persist = @model.attributes.slice(*@model.changed.map(&:to_sym)) + + Dynamoid.adapter.update_item(@model.class.table_name, @model.hash_key, options_to_update_item) do |t| + attributes_to_persist.each do |name, value| + value_dumped = Dumping.dump_field(value, @model.class.attributes[name]) + t.set(name => value_dumped) + end + end + end + + @model.new_record = false + true + rescue Dynamoid::Errors::ConditionalCheckFailedException => e + if @model.new_record? + raise Dynamoid::Errors::RecordNotUnique.new(e, @model) + else + raise Dynamoid::Errors::StaleObjectError.new(@model, 'persist') + end + end + + private + + # Should be called after incrementing `lock_version` attribute + def conditions_for_write + conditions = {} + + # Add an 'exists' check to prevent overwriting existing records with new ones + if @model.new_record? + conditions[:unless_exists] = [@model.class.hash_key] + if @model.range_key + conditions[:unless_exists] << @model.range_key + end + end + + # Add an optimistic locking check if the lock_version column exists + # Uses the original lock_version value from Dirty API + # in case user changed 'lock_version' manually + if @model.class.attributes[:lock_version] && (@model.changes[:lock_version][0]) + conditions[:if] ||= {} + conditions[:if][:lock_version] = @model.changes[:lock_version][0] + end + + conditions + end + + def options_to_update_item + options = {} + + if @model.class.range_key + value_dumped = Dumping.dump_field(@model.range_value, @model.class.attributes[@model.class.range_key]) + options[:range_key] = value_dumped + end + + conditions = {} + conditions[:if] ||= {} + conditions[:if][@model.class.hash_key] = @model.hash_key + + # Add an optimistic locking check if the lock_version column exists + # Uses the original lock_version value from Dirty API + # in case user changed 'lock_version' manually + if @model.class.attributes[:lock_version] && (@model.changes[:lock_version][0]) + conditions[:if] ||= {} + conditions[:if][:lock_version] = @model.changes[:lock_version][0] + end + + options[:conditions] = conditions + + options + end + end + end +end diff --git a/dynamoid/lib/dynamoid/persistence/update_fields.rb b/dynamoid/lib/dynamoid/persistence/update_fields.rb new file mode 100644 index 000000000..203965946 --- /dev/null +++ b/dynamoid/lib/dynamoid/persistence/update_fields.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +module Dynamoid + module Persistence + # @private + class UpdateFields + def self.call(*args, **options) + new(*args, **options).call + end + + def initialize(model_class, partition_key:, sort_key:, attributes:, conditions:) + @model_class = model_class + @partition_key = partition_key + @sort_key = sort_key + @attributes = attributes.symbolize_keys + @conditions = conditions + end + + def call + UpdateValidations.validate_attributes_exist(@model_class, @attributes) + + if @model_class.timestamps_enabled? 
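+ # Set updated_at only when the caller has not provided it explicitly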
+ @attributes[:updated_at] ||= DateTime.now.in_time_zone(Time.zone) + end + + raw_attributes = update_item + @model_class.new(undump_attributes(raw_attributes)) + rescue Dynamoid::Errors::ConditionalCheckFailedException + end + + private + + def update_item + Dynamoid.adapter.update_item(@model_class.table_name, @partition_key, options_to_update_item) do |t| + @attributes.each do |k, v| + value_casted = TypeCasting.cast_field(v, @model_class.attributes[k]) + value_dumped = Dumping.dump_field(value_casted, @model_class.attributes[k]) + t.set(k => value_dumped) + end + end + end + + def undump_attributes(attributes) + Undumping.undump_attributes(attributes, @model_class.attributes) + end + + def options_to_update_item + options = {} + + if @model_class.range_key + value_casted = TypeCasting.cast_field(@sort_key, @model_class.attributes[@model_class.range_key]) + value_dumped = Dumping.dump_field(value_casted, @model_class.attributes[@model_class.range_key]) + options[:range_key] = value_dumped + end + + conditions = @conditions.deep_dup + conditions[:if] ||= {} + conditions[:if][@model_class.hash_key] = @partition_key + options[:conditions] = conditions + + options + end + end + end +end diff --git a/dynamoid/lib/dynamoid/persistence/update_validations.rb b/dynamoid/lib/dynamoid/persistence/update_validations.rb new file mode 100644 index 000000000..b9b663c1b --- /dev/null +++ b/dynamoid/lib/dynamoid/persistence/update_validations.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +module Dynamoid + module Persistence + # @private + module UpdateValidations + def self.validate_attributes_exist(model_class, attributes) + model_attributes = model_class.attributes.keys + + attributes.each_key do |attr_name| + unless model_attributes.include?(attr_name) + raise Dynamoid::Errors::UnknownAttribute, "Attribute #{attr_name} does not exist in #{model_class}" + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/persistence/upsert.rb b/dynamoid/lib/dynamoid/persistence/upsert.rb new file mode 100644 index 000000000..20fa825bf --- /dev/null +++ b/dynamoid/lib/dynamoid/persistence/upsert.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +module Dynamoid + module Persistence + # @private + class Upsert + def self.call(*args, **options) + new(*args, **options).call + end + + def initialize(model_class, partition_key:, sort_key:, attributes:, conditions:) + @model_class = model_class + @partition_key = partition_key + @sort_key = sort_key + @attributes = attributes.symbolize_keys + @conditions = conditions + end + + def call + UpdateValidations.validate_attributes_exist(@model_class, @attributes) + + if @model_class.timestamps_enabled? 
+ @attributes[:updated_at] ||= DateTime.now.in_time_zone(Time.zone) + end + + raw_attributes = update_item + @model_class.new(undump_attributes(raw_attributes)) + rescue Dynamoid::Errors::ConditionalCheckFailedException + end + + private + + def update_item + Dynamoid.adapter.update_item(@model_class.table_name, @partition_key, options_to_update_item) do |t| + @attributes.each do |k, v| + value_casted = TypeCasting.cast_field(v, @model_class.attributes[k]) + value_dumped = Dumping.dump_field(value_casted, @model_class.attributes[k]) + + t.set(k => value_dumped) + end + end + end + + def options_to_update_item + options = {} + + if @model_class.range_key + value_casted = TypeCasting.cast_field(@sort_key, @model_class.attributes[@model_class.range_key]) + value_dumped = Dumping.dump_field(value_casted, @model_class.attributes[@model_class.range_key]) + options[:range_key] = value_dumped + end + + options[:conditions] = @conditions + options + end + + def undump_attributes(raw_attributes) + Undumping.undump_attributes(raw_attributes, @model_class.attributes) + end + end + end +end diff --git a/dynamoid/lib/dynamoid/primary_key_type_mapping.rb b/dynamoid/lib/dynamoid/primary_key_type_mapping.rb new file mode 100644 index 000000000..054ccae56 --- /dev/null +++ b/dynamoid/lib/dynamoid/primary_key_type_mapping.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + class PrimaryKeyTypeMapping + def self.dynamodb_type(type, options) + if type.is_a?(Class) + type = type.respond_to?(:dynamoid_field_type) ? type.dynamoid_field_type : :string + end + + case type + when :string, :serialized + :string + when :integer, :number + :number + when :datetime + string_format = if options[:store_as_string].nil? + Dynamoid::Config.store_datetime_as_string + else + options[:store_as_string] + end + string_format ? :string : :number + when :date + string_format = if options[:store_as_string].nil? + Dynamoid::Config.store_date_as_string + else + options[:store_as_string] + end + string_format ? :string : :number + else + raise Errors::UnsupportedKeyType, "#{type} cannot be used as a type of table key attribute" + end + end + end +end diff --git a/dynamoid/lib/dynamoid/railtie.rb b/dynamoid/lib/dynamoid/railtie.rb new file mode 100644 index 000000000..029cec326 --- /dev/null +++ b/dynamoid/lib/dynamoid/railtie.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +if defined? Rails + + module Dynamoid + # @private + class Railtie < Rails::Railtie + rake_tasks do + Dir[File.join(File.dirname(__FILE__), 'tasks/*.rake')].each { |f| load f } + end + end + end + +end diff --git a/dynamoid/lib/dynamoid/tasks.rb b/dynamoid/lib/dynamoid/tasks.rb new file mode 100644 index 000000000..1b7389145 --- /dev/null +++ b/dynamoid/lib/dynamoid/tasks.rb @@ -0,0 +1,3 @@ +# frozen_string_literal: true + +load 'dynamoid/tasks/database.rake' diff --git a/dynamoid/lib/dynamoid/tasks/database.rake b/dynamoid/lib/dynamoid/tasks/database.rake new file mode 100644 index 000000000..61b378fbd --- /dev/null +++ b/dynamoid/lib/dynamoid/tasks/database.rake @@ -0,0 +1,41 @@ +# frozen_string_literal: true + +require 'dynamoid' +require 'dynamoid/tasks/database' + +namespace :dynamoid do + desc 'Creates DynamoDB tables, one for each of your Dynamoid models - does not modify pre-existing tables' + task create_tables: :environment do + # Load models so Dynamoid will be able to discover tables expected. + Dir[File.join(Dynamoid::Config.models_dir, '**/*.rb')].sort.each { |file| require file } + if Dynamoid.included_models.any? 
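+ # Report every table as either newly created or already existing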
+ tables = Dynamoid::Tasks::Database.create_tables + result = tables[:created].map { |c| "#{c} created" } + tables[:existing].map { |e| "#{e} already exists" } + result.sort.each { |r| puts r } + else + puts 'Dynamoid models are not loaded, or you have no Dynamoid models.' + end + end + + desc 'Tests if the DynamoDB instance can be contacted using your configuration' + task ping: :environment do + success = false + failure_reason = nil + + begin + Dynamoid::Tasks::Database.ping + success = true + rescue StandardError => e + failure_reason = e.message + end + + msg = "Connection to DynamoDB #{success ? 'OK' : 'FAILED'}" + msg += if Dynamoid.config.endpoint + " at local endpoint '#{Dynamoid.config.endpoint}'" + else + ' at remote AWS endpoint' + end + msg += ", reason being '#{failure_reason}'" unless success + puts msg + end +end diff --git a/dynamoid/lib/dynamoid/tasks/database.rb b/dynamoid/lib/dynamoid/tasks/database.rb new file mode 100644 index 000000000..19f4904bf --- /dev/null +++ b/dynamoid/lib/dynamoid/tasks/database.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module Tasks + module Database + module_function + + # Create any new tables for the models. Existing tables are not + # modified. + def create_tables + results = { created: [], existing: [] } + # We can't quite rely on Dynamoid.included_models alone, we need to select only viable models + Dynamoid.included_models.reject { |m| m.base_class.try(:name).blank? }.uniq(&:table_name).each do |model| + if Dynamoid.adapter.list_tables.include? model.table_name + results[:existing] << model.table_name + else + model.create_table(sync: true) + results[:created] << model.table_name + end + end + results + end + + # Is the DynamoDB reachable? + def ping + Dynamoid.adapter.list_tables + true + end + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write.rb b/dynamoid/lib/dynamoid/transaction_write.rb new file mode 100644 index 000000000..f0c441a2b --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write.rb @@ -0,0 +1,101 @@ +# frozen_string_literal: true + +require 'dynamoid/transaction_write/action' +require 'dynamoid/transaction_write/create' +require 'dynamoid/transaction_write/delete' +require 'dynamoid/transaction_write/destroy' +require 'dynamoid/transaction_write/update_upsert' +require 'dynamoid/transaction_write/update' +require 'dynamoid/transaction_write/upsert' + +module Dynamoid + class TransactionWrite + attr_accessor :action_inputs, :models + + def initialize(_options = {}) + @action_inputs = [] + @models = [] + end + + def self.execute(options = {}) + transaction = new(options) + yield(transaction) + transaction.commit + end + + def commit + return unless @action_inputs.present? # nothing to commit + + Dynamoid.adapter.transact_write_items(@action_inputs) + models.each { |model| model.new_record = false } + end + + def save!(model, options = {}) + save(model, options.reverse_merge(raise_validation_error: true)) + end + + def save(model, options = {}) + model.new_record? ? create(model, {}, options) : update(model, {}, options) + end + + def create!(model_or_model_class, attributes = {}, options = {}, &block) + create(model_or_model_class, attributes, options.reverse_merge(raise_validation_error: true), &block) + end + + def create(model_or_model_class, attributes = {}, options = {}, &block) + add_action_and_validate Dynamoid::TransactionWrite::Create.new(model_or_model_class, attributes, options, &block) + end + + # upsert! 
does not exist because upserting instances that can raise validation errors is not officially supported + + def upsert(model_or_model_class, attributes = {}, options = {}, &block) + add_action_and_validate Dynamoid::TransactionWrite::Upsert.new(model_or_model_class, attributes, options, &block) + end + + def update!(model_or_model_class, attributes = {}, options = {}, &block) + update(model_or_model_class, attributes, options.reverse_merge(raise_validation_error: true), &block) + end + + def update(model_or_model_class, attributes = {}, options = {}, &block) + add_action_and_validate Dynamoid::TransactionWrite::Update.new(model_or_model_class, attributes, options, &block) + end + + def delete(model_or_model_class, key_or_attributes = {}, options = {}) + add_action_and_validate Dynamoid::TransactionWrite::Delete.new(model_or_model_class, key_or_attributes, options) + end + + def destroy!(model_or_model_class, key_or_attributes = {}, options = {}) + destroy(model_or_model_class, key_or_attributes, options.reverse_merge(raise_validation_error: true)) + end + + def destroy(model_or_model_class, key_or_attributes = {}, options = {}) + add_action_and_validate Dynamoid::TransactionWrite::Destroy.new(model_or_model_class, key_or_attributes, options) + end + + private + + # validates unless validations are skipped + # runs callbacks unless callbacks are skipped + # raise validation error or returns false if not valid + # otherwise adds hash of action to list in preparation for committing + def add_action_and_validate(action) + if !action.skip_validation? && !action.valid? + raise Dynamoid::Errors::DocumentNotValid, action.model if action.raise_validation_error? + + return false + end + + if action.skip_callbacks? + @action_inputs << action.to_h + else + action.run_callbacks do + @action_inputs << action.to_h + end + end + action.changes_applied # action has been processed and added to queue so mark as applied + models << action.model if action.model + + action.model || true # return model if it exists + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write/action.rb b/dynamoid/lib/dynamoid/transaction_write/action.rb new file mode 100644 index 000000000..d550afff7 --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write/action.rb @@ -0,0 +1,144 @@ +# frozen_string_literal: true + +module Dynamoid + class TransactionWrite + class Action + VALID_OPTIONS = %i[skip_callbacks skip_validation raise_validation_error skip_existence_check].freeze + attr_accessor :model, :attributes, :options, :additions, :deletions, :removals + + def initialize(model_or_model_class, attributes = {}, options = {}) + if model_or_model_class.is_a?(Dynamoid::Document) + self.model = model_or_model_class + else + @model_class = model_or_model_class + end + self.attributes = attributes + self.options = options || {} + self.additions = {} + self.deletions = {} + self.removals = [] + invalid_keys = self.options.keys - VALID_OPTIONS + raise ArgumentError, "Invalid options found: '#{invalid_keys}'" if invalid_keys.present? + + yield(self) if block_given? + end + + def model_class + model&.class || @model_class + end + + # returns hash_key from the model if it exists or first element from id + def hash_key + return model.hash_key if model + + attributes[model_class.hash_key] + end + + # returns range_key from the model if it exists or second element from id, or nil + def range_key + return nil unless model_class.range_key? 
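+ # Prefer the value from the given model; otherwise fall back to the attributes hash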
+ return model.attributes[model_class.range_key] if model + + attributes[model_class.range_key] + end + + # sets a value in the attributes + def set(values) + attributes.merge!(values) + end + + # increments a number or adds to a set, starts at 0 or [] if it doesn't yet exist + def add(values) + additions.merge!(values) + end + + # deletes a value or values from a set type or simply sets a field to nil + def delete(field_or_values) + if field_or_values.is_a?(Hash) + deletions.merge!(field_or_values) + else + # adds to array of fields for use in REMOVE update expression + removals << field_or_values + end + end + + def find_from_attributes(model_or_model_class, attributes) + model_class = model_or_model_class.is_a?(Dynamoid::Document) ? model_or_model_class.class : model_or_model_class + if attributes.is_a?(Hash) + raise Dynamoid::Errors::MissingHashKey unless attributes[model_class.hash_key].present? + + model_class.find(attributes[model_class.hash_key], + range_key: model_class.range_key? ? attributes[model_class.range_key] : nil, + consistent_read: true) + else + model_class.find(attributes, consistent_read: true) + end + end + + def skip_callbacks? + !!options[:skip_callbacks] + end + + def skip_validation? + !!options[:skip_validation] + end + + def valid? + model&.valid? + end + + def raise_validation_error? + !!options[:raise_validation_error] + end + + def run_callbacks + yield if block_given? + end + + def changes_applied + !!model&.changes_applied + end + + def add_timestamps(attributes, skip_created_at: false) + return attributes if options[:skip_timestamps] || !model_class&.timestamps_enabled? + + result = attributes.clone + timestamp = DateTime.now.in_time_zone(Time.zone) + result[:created_at] ||= timestamp unless skip_created_at + result[:updated_at] ||= timestamp + result + end + + def touch_model_timestamps(skip_created_at: false) + return if !model || options[:skip_timestamps] || !model_class.timestamps_enabled? + + timestamp = DateTime.now.in_time_zone(Time.zone) + model.updated_at = timestamp + model.created_at ||= timestamp unless skip_created_at + end + + def write_attributes_to_model + return unless model && attributes.present? + + attributes.each { |attribute, value| model.write_attribute(attribute, value) } + end + + # copied from the protected method in AwsSdkV3 + def sanitize_item(attributes) + config_value = Dynamoid.config.store_attribute_with_nil_value + store_attribute_with_nil_value = config_value.nil? ? false : !!config_value + + attributes.reject do |_, v| + ((v.is_a?(Set) || v.is_a?(String)) && v.empty?) || + (!store_attribute_with_nil_value && v.nil?) + end.transform_values do |v| + v.is_a?(Hash) ? 
v.stringify_keys : v + end + end + + def to_h + raise 'override me' + end + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write/create.rb b/dynamoid/lib/dynamoid/transaction_write/create.rb new file mode 100644 index 000000000..607bbd0f7 --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write/create.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require_relative 'action' + +module Dynamoid + class TransactionWrite + class Create < Action + # model is created if not otherwise specified so callbacks can be run + def initialize(model_or_model_class, attributes = {}, options = {}) + model = if model_or_model_class.is_a?(Dynamoid::Document) + model_or_model_class + else + model_or_model_class.new(attributes) + end + super(model, attributes, options) + + # don't initialize hash key here, callbacks haven't run yet + end + + def run_callbacks + # model always exists + model.run_callbacks(:save) do + model.run_callbacks(:create) do + model.run_callbacks(:validate) do + yield if block_given? + end + end + end + end + + def to_h + model.hash_key = SecureRandom.uuid if model.hash_key.blank? + write_attributes_to_model + touch_model_timestamps + # model always exists + item = Dynamoid::Dumping.dump_attributes(model.attributes, model_class.attributes) + + condition = "attribute_not_exists(#{model_class.hash_key})" + condition += " and attribute_not_exists(#{model_class.range_key})" if model_class.range_key? # needed? + result = { + put: { + item: sanitize_item(item), + table_name: model_class.table_name, + } + } + result[:put][:condition_expression] = condition unless options[:skip_existence_check] + + result + end + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write/delete.rb b/dynamoid/lib/dynamoid/transaction_write/delete.rb new file mode 100644 index 000000000..ae191b008 --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write/delete.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require_relative 'action' + +module Dynamoid + class TransactionWrite + class Delete < Action + # a model is not needed since callbacks are not run + def initialize(model_or_model_class, key_or_attributes = {}, options = {}) + options = options.reverse_merge(skip_validation: true) + super(model_or_model_class, {}, options) + self.attributes = if key_or_attributes.is_a?(Hash) + key_or_attributes + else + { + model_class.hash_key => key_or_attributes + } + end + raise Dynamoid::Errors::MissingHashKey unless hash_key.present? + raise Dynamoid::Errors::MissingRangeKey unless !model_class.range_key? || range_key.present? + end + + def to_h + key = { model_class.hash_key => hash_key } + key[model_class.range_key] = range_key if model_class.range_key? 
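+ # Build the Delete entry that #commit passes to Dynamoid.adapter.transact_write_items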
+ { + delete: { + key: key, + table_name: model_class.table_name + } + } + end + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write/destroy.rb b/dynamoid/lib/dynamoid/transaction_write/destroy.rb new file mode 100644 index 000000000..a401f3ea3 --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write/destroy.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require_relative 'action' + +module Dynamoid + class TransactionWrite + class Destroy < Action + # model is found if not otherwise specified so callbacks can be run + def initialize(model_or_model_class, key_or_attributes = {}, options = {}) + model = if model_or_model_class.is_a?(Dynamoid::Document) + model_or_model_class + else + find_from_attributes(model_or_model_class, key_or_attributes) + end + super(model, {}, options) + raise Dynamoid::Errors::MissingHashKey unless hash_key.present? + raise Dynamoid::Errors::MissingRangeKey unless !model_class.range_key? || range_key.present? + end + + def run_callbacks + model.run_callbacks(:destroy) do + yield if block_given? + end + end + + # destroy defaults to not using validation + def skip_validation? + options[:skip_validation] != false + end + + def to_h + model.destroyed = true + key = { model_class.hash_key => hash_key } + key[model_class.range_key] = range_key if model_class.range_key? + # force fast fail i.e. won't retry if record is missing + condition_expression = "attribute_exists(#{model_class.hash_key})" + condition_expression += " and attribute_exists(#{model_class.range_key})" if model_class.range_key? # needed? + result = { + delete: { + key: key, + table_name: model_class.table_name + } + } + result[:delete][:condition_expression] = condition_expression unless options[:skip_existence_check] + result + end + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write/update.rb b/dynamoid/lib/dynamoid/transaction_write/update.rb new file mode 100644 index 000000000..b312cef88 --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write/update.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +require_relative 'action' + +module Dynamoid + class TransactionWrite + class Update < UpdateUpsert + # model is found if not otherwise specified so callbacks can be run + def initialize(model_or_model_class, attributes, options = {}) + model = if model_or_model_class.is_a?(Dynamoid::Document) + model_or_model_class + else + find_from_attributes(model_or_model_class, attributes) + end + super(model, attributes, options) + end + + def run_callbacks + unless model + yield if block_given? + return + end + model.run_callbacks(:save) do + model.run_callbacks(:update) do + model.run_callbacks(:validate) do + yield if block_given? + end + end + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write/update_upsert.rb b/dynamoid/lib/dynamoid/transaction_write/update_upsert.rb new file mode 100644 index 000000000..4349f4523 --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write/update_upsert.rb @@ -0,0 +1,120 @@ +# frozen_string_literal: true + +require_relative 'action' + +module Dynamoid + class TransactionWrite + class UpdateUpsert < Action + def initialize(model_or_model_class, attributes = {}, options = {}, &block) + super(model_or_model_class, attributes, options, &block) + + write_attributes_to_model + end + + # shared by Update and Upsert + def to_h + if model + # model.hash_key = SecureRandom.uuid if model.hash_key.blank? 
+ touch_model_timestamps(skip_created_at: true) + changes = model.changes.map { |k, v| [k.to_sym, v[1]] }.to_h # hash of dirty attributes + else + changes = attributes.clone || {} + # changes[model_class.hash_key] = SecureRandom.uuid + changes = add_timestamps(changes, skip_created_at: true) + end + changes.delete(model_class.hash_key) # can't update id! + changes.delete(model_class.range_key) if model_class.range_key? + item = Dynamoid::Dumping.dump_attributes(changes, model_class.attributes) + + # set 'key' that is used to look up record for updating + key = { model_class.hash_key => hash_key } + key[model_class.range_key] = range_key if model_class.range_key? + + # e.g. "SET #updated_at = :updated_at ADD record_count :i" + item_keys = item.keys + update_expression = "SET #{item_keys.each_with_index.map { |_k, i| "#_n#{i} = :_s#{i}" }.join(', ')}" + + # e.g. {":updated_at" => 1645453.234, ":i" => 1} + expression_attribute_values = item_keys.each_with_index.map { |k, i| [":_s#{i}", item[k]] }.to_h + expression_attribute_names = {} + + update_expression = set_additions(expression_attribute_values, update_expression) + update_expression = set_deletions(expression_attribute_values, update_expression) + expression_attribute_names, update_expression = set_removals(expression_attribute_names, update_expression) + + # only alias names for fields in models, other values such as for ADD do not have them + # e.g. {"#updated_at" => "updated_at"} + # attribute_keys_in_model = item_keys.intersection(model_class.attributes.keys) + # expression_attribute_names = attribute_keys_in_model.map{|k| ["##{k}","#{k}"]}.to_h + expression_attribute_names.merge!(item_keys.each_with_index.map { |k, i| ["#_n#{i}", k.to_s] }.to_h) + + condition_expression = "attribute_exists(#{model_class.hash_key})" # fail if record is missing + condition_expression += " and attribute_exists(#{model_class.range_key})" if model_class.range_key? # needed? + + result = { + update: { + key: key, + table_name: model_class.table_name, + update_expression: update_expression, + expression_attribute_values: expression_attribute_values + } + } + result[:update][:expression_attribute_names] = expression_attribute_names if expression_attribute_names.present? + result[:update][:condition_expression] = condition_expression unless options[:skip_existence_check] + + result + end + + private + + # adds all of the ADD statements to the update_expression and returns it + def set_additions(expression_attribute_values, update_expression) + return update_expression unless additions.present? + + # ADD statements can be used to increment a counter: + # txn.update!(UserCount, "UserCount#Red", {}, options: {add: {record_count: 1}}) + add_keys = additions.keys + update_expression += " ADD #{add_keys.each_with_index.map { |k, i| "#{k} :_a#{i}" }.join(', ')}" + # convert any enumerables into sets + add_values = additions.transform_values do |v| + if !v.is_a?(Set) && v.is_a?(Enumerable) + Set.new(v) + else + v + end + end + add_keys.each_with_index { |k, i| expression_attribute_values[":_a#{i}"] = add_values[k] } + update_expression + end + + # adds all of the DELETE statements to the update_expression and returns it + def set_deletions(expression_attribute_values, update_expression) + return update_expression unless deletions.present? 
+ + delete_keys = deletions.keys + update_expression += " DELETE #{delete_keys.each_with_index.map { |k, i| "#{k} :_d#{i}" }.join(', ')}" + # values must be sets + delete_values = deletions.transform_values do |v| + if v.is_a?(Set) + v + else + Set.new(v.is_a?(Enumerable) ? v : [v]) + end + end + delete_keys.each_with_index { |k, i| expression_attribute_values[":_d#{i}"] = delete_values[k] } + update_expression + end + + # adds all of the removals as a REMOVE clause + def set_removals(expression_attribute_names, update_expression) + return expression_attribute_names, update_expression unless removals.present? + + update_expression += " REMOVE #{removals.each_with_index.map { |_k, i| "#_r#{i}" }.join(', ')}" + expression_attribute_names = expression_attribute_names.merge( + removals.each_with_index.map { |k, i| ["#_r#{i}", k.to_s] }.to_h + ) + [expression_attribute_names, update_expression] + end + end + end +end diff --git a/dynamoid/lib/dynamoid/transaction_write/upsert.rb b/dynamoid/lib/dynamoid/transaction_write/upsert.rb new file mode 100644 index 000000000..94243a6fc --- /dev/null +++ b/dynamoid/lib/dynamoid/transaction_write/upsert.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require_relative 'action' + +module Dynamoid + class TransactionWrite + class Upsert < UpdateUpsert + # Upsert is just like Update buts skips the existence check so a new record can be created when it's missing. + # No callbacks. + def initialize(model_or_model_class, attributes, options = {}) + options = options.reverse_merge(skip_existence_check: true) + super(model_or_model_class, attributes, options) + raise Dynamoid::Errors::MissingHashKey unless hash_key.present? + raise Dynamoid::Errors::MissingRangeKey unless !model_class.range_key? || range_key.present? + end + + # no callbacks are run + + # only run validations if a model has been provided + def valid? + model ? model.valid? : true + end + end + end +end diff --git a/dynamoid/lib/dynamoid/type_casting.rb b/dynamoid/lib/dynamoid/type_casting.rb new file mode 100644 index 000000000..7217e2747 --- /dev/null +++ b/dynamoid/lib/dynamoid/type_casting.rb @@ -0,0 +1,302 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module TypeCasting + def self.cast_attributes(attributes, attributes_options) + {}.tap do |h| + attributes.symbolize_keys.each do |attribute, value| + h[attribute] = cast_field(value, attributes_options[attribute]) + end + end + end + + def self.cast_field(value, options) + return value if options.nil? + return nil if value.nil? + + type_caster = find_type_caster(options) + if type_caster.nil? + raise ArgumentError, "Unknown type #{options[:type]}" + end + + type_caster.process(value) + end + + def self.find_type_caster(options) + type_caster_class = case options[:type] + when :string then StringTypeCaster + when :integer then IntegerTypeCaster + when :number then NumberTypeCaster + when :set then SetTypeCaster + when :array then ArrayTypeCaster + when :map then MapTypeCaster + when :datetime then DateTimeTypeCaster + when :date then DateTypeCaster + when :raw then RawTypeCaster + when :serialized then SerializedTypeCaster + when :boolean then BooleanTypeCaster + when :binary then BinaryTypeCaster + when Class then CustomTypeCaster + end + + if type_caster_class.present? 
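+ # Instantiate the caster with the field options (e.g. :of for typed sets and arrays)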
+ type_caster_class.new(options) + end + end + + class Base + def initialize(options) + @options = options + end + + def process(value) + value + end + end + + class StringTypeCaster < Base + def process(value) + case value + when true + 't' + when false + 'f' + when String + value.dup + else + value.to_s + end + end + end + + class IntegerTypeCaster < Base + def process(value) + # rubocop:disable Lint/DuplicateBranch + if value == true + 1 + elsif value == false + 0 + elsif value.is_a?(String) && value.blank? + nil + elsif value.is_a?(Float) && !value.finite? + nil + elsif !value.respond_to?(:to_i) + nil + else + value.to_i + end + # rubocop:enable Lint/DuplicateBranch + end + end + + class NumberTypeCaster < Base + def process(value) + # rubocop:disable Lint/DuplicateBranch + if value == true + 1 + elsif value == false + 0 + elsif value.is_a?(Symbol) + value.to_s.to_d + elsif value.is_a?(String) && value.blank? + nil + elsif value.is_a?(Float) && !value.finite? + nil + elsif !value.respond_to?(:to_d) + nil + else + value.to_d + end + # rubocop:enable Lint/DuplicateBranch + end + end + + class SetTypeCaster < Base + def process(value) + set = type_cast_to_set(value) + + if set.present? && @options[:of].present? + process_typed_set(set) + else + set + end + end + + private + + def type_cast_to_set(value) + if value.is_a?(Set) + value.dup + elsif value.respond_to?(:to_set) + value.to_set + end + end + + def process_typed_set(set) + type_caster = TypeCasting.find_type_caster(element_options) + + if type_caster.nil? + raise ArgumentError, "Set element type #{element_type} isn't supported" + end + + set.to_set { |el| type_caster.process(el) } + end + + def element_type + if @options[:of].is_a?(Hash) + @options[:of].keys.first + else + @options[:of] + end + end + + def element_options + if @options[:of].is_a?(Hash) + @options[:of][element_type].dup.tap do |options| + options[:type] = element_type + end + else + { type: element_type } + end + end + end + + class ArrayTypeCaster < Base + def process(value) + array = type_cast_to_array(value) + + if array.present? && @options[:of].present? + process_typed_array(array) + else + array + end + end + + private + + def type_cast_to_array(value) + if value.is_a?(Array) + value.dup + elsif value.respond_to?(:to_a) + value.to_a + end + end + + def process_typed_array(array) + type_caster = TypeCasting.find_type_caster(element_options) + + if type_caster.nil? + raise ArgumentError, "Set element type #{element_type} isn't supported" + end + + array.map { |el| type_caster.process(el) } + end + + def element_type + if @options[:of].is_a?(Hash) + @options[:of].keys.first + else + @options[:of] + end + end + + def element_options + if @options[:of].is_a?(Hash) + @options[:of][element_type].dup.tap do |options| + options[:type] = element_type + end + else + { type: element_type } + end + end + end + + class MapTypeCaster < Base + def process(value) + return nil if value.nil? + + if value.is_a? Hash + value + elsif value.respond_to? :to_hash + value.to_hash + elsif value.respond_to? 
:to_h + value.to_h + end + end + end + + class DateTimeTypeCaster < Base + def process(value) + if !value.respond_to?(:to_datetime) + nil + elsif value.is_a?(String) + dt = begin + DateTime.parse(value) + rescue StandardError + nil + end + if dt + seconds = string_utc_offset(value) || ApplicationTimeZone.utc_offset + offset = seconds_to_offset(seconds) + DateTime.new(dt.year, dt.mon, dt.mday, dt.hour, dt.min, dt.sec, offset) + end + else + value.to_datetime + end + end + + private + + def string_utc_offset(string) + Date._parse(string)[:offset] + end + + # 3600 -> "+01:00" + def seconds_to_offset(seconds) + ActiveSupport::TimeZone.seconds_to_utc_offset(seconds) + end + end + + class DateTypeCaster < Base + def process(value) + if value.respond_to?(:to_date) + begin + value.to_date + rescue StandardError + nil + end + end + end + end + + class RawTypeCaster < Base + end + + class SerializedTypeCaster < Base + end + + class BooleanTypeCaster < Base + def process(value) + if value == '' + nil + else + ![false, 'false', 'FALSE', 0, '0', 'f', 'F', 'off', 'OFF'].include? value + end + end + end + + class BinaryTypeCaster < Base + def process(value) + if value.is_a? String + value.dup + else + value.to_s + end + end + end + + class CustomTypeCaster < Base + end + end +end diff --git a/dynamoid/lib/dynamoid/undumping.rb b/dynamoid/lib/dynamoid/undumping.rb new file mode 100644 index 000000000..76e7a114f --- /dev/null +++ b/dynamoid/lib/dynamoid/undumping.rb @@ -0,0 +1,303 @@ +# frozen_string_literal: true + +module Dynamoid + # @private + module Undumping + def self.undump_attributes(attributes, attributes_options) + {}.tap do |h| + # ignore existing attributes not declared in document class + attributes.symbolize_keys + .select { |attribute| attributes_options.key?(attribute) } + .each do |attribute, value| + h[attribute] = undump_field(value, attributes_options[attribute]) + end + end + end + + def self.undump_field(value, options) + return nil if value.nil? + + undumper = find_undumper(options) + + if undumper.nil? + raise ArgumentError, "Unknown type #{options[:type]}" + end + + undumper.process(value) + end + + def self.find_undumper(options) + undumper_class = case options[:type] + when :string then StringUndumper + when :integer then IntegerUndumper + when :number then NumberUndumper + when :set then SetUndumper + when :array then ArrayUndumper + when :map then MapUndumper + when :datetime then DateTimeUndumper + when :date then DateUndumper + when :raw then RawUndumper + when :serialized then SerializedUndumper + when :boolean then BooleanUndumper + when :binary then BinaryUndumper + when Class then CustomTypeUndumper + end + + if undumper_class.present? 
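+ # As with casting, the undumper receives the field options; a custom field class falls through to CustomTypeUndumper, which calls the class's .dynamoid_load.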
+ undumper_class.new(options) + end + end + + module UndumpHashHelper + extend self + + def undump_hash(hash) + {}.tap do |h| + hash.each { |key, value| h[key.to_sym] = undump_hash_value(value) } + end + end + + private + + def undump_hash_value(val) + case val + when BigDecimal + if Dynamoid::Config.convert_big_decimal + val.to_f + else + val + end + when Hash + undump_hash(val) + when Array + val.map { |v| undump_hash_value(v) } + else + val + end + end + end + + class Base + def initialize(options) + @options = options + end + + def process(value) + value + end + end + + class StringUndumper < Base + end + + class IntegerUndumper < Base + def process(value) + value.to_i + end + end + + class NumberUndumper < Base + end + + class SetUndumper < Base + ALLOWED_TYPES = %i[string integer number date datetime serialized].freeze + + def process(set) + if @options.key?(:of) + process_typed_collection(set) + else + set.is_a?(Set) ? set : Set.new(set) + end + end + + private + + def process_typed_collection(set) + if allowed_type? + undumper = Undumping.find_undumper(element_options) + set.to_set { |el| undumper.process(el) } + else + raise ArgumentError, "Set element type #{element_type} isn't supported" + end + end + + def allowed_type? + ALLOWED_TYPES.include?(element_type) || element_type.is_a?(Class) + end + + def element_type + if @options[:of].is_a?(Hash) + @options[:of].keys.first + else + @options[:of] + end + end + + def element_options + if @options[:of].is_a?(Hash) + @options[:of][element_type].dup.tap do |options| + options[:type] = element_type + end + else + { type: element_type } + end + end + end + + class ArrayUndumper < Base + ALLOWED_TYPES = %i[string integer number date datetime serialized].freeze + + def process(array) + if @options.key?(:of) + process_typed_collection(array) + else + array.is_a?(Array) ? array : Array(array) + end + end + + private + + def process_typed_collection(array) + if allowed_type? + undumper = Undumping.find_undumper(element_options) + array.map { |el| undumper.process(el) } + else + raise ArgumentError, "Array element type #{element_type} isn't supported" + end + end + + def allowed_type? + ALLOWED_TYPES.include?(element_type) || element_type.is_a?(Class) + end + + def element_type + if @options[:of].is_a?(Hash) + @options[:of].keys.first + else + @options[:of] + end + end + + def element_options + if @options[:of].is_a?(Hash) + @options[:of][element_type].dup.tap do |options| + options[:type] = element_type + end + else + { type: element_type } + end + end + end + + class MapUndumper < Base + def process(value) + UndumpHashHelper.undump_hash(value) + end + end + + class DateTimeUndumper < Base + def process(value) + return value if value.is_a?(Date) || value.is_a?(DateTime) || value.is_a?(Time) + + use_string_format = if @options[:store_as_string].nil? + Dynamoid.config.store_datetime_as_string + else + @options[:store_as_string] + end + value = DateTime.iso8601(value).to_time.to_i if use_string_format + ApplicationTimeZone.at(value) + end + end + + class DateUndumper < Base + def process(value) + use_string_format = if @options[:store_as_string].nil? 
+ Dynamoid.config.store_date_as_string + else + @options[:store_as_string] + end + + if use_string_format + Date.iso8601(value) + else + Dynamoid::Persistence::UNIX_EPOCH_DATE + value.to_i + end + end + end + + class RawUndumper < Base + def process(value) + if value.is_a?(Hash) + UndumpHashHelper.undump_hash(value) + else + value + end + end + end + + class SerializedUndumper < Base + # We must use YAML.safe_load in Ruby 3.1 to handle serialized Set class + minimum_ruby_version = ->(version) { Gem::Version.new(RUBY_VERSION) >= Gem::Version.new(version) } + # Once we drop support for Rubies older than 2.6 we can remove this conditional (with major version bump)! + # YAML_SAFE_LOAD = minimum_ruby_version.call("2.6") + # But we don't want to change behavior for Ruby <= 3.0 that has been using the gem, without a major version bump + YAML_SAFE_LOAD = minimum_ruby_version.call('3.1') + + def process(value) + if @options[:serializer] + @options[:serializer].load(value) + elsif YAML_SAFE_LOAD + # The classes listed in permitted classes are added to the default set of "safe loadable" classes. + # TrueClass + # FalseClass + # NilClass + # Integer + # Float + # String + # Array + # Hash + YAML.safe_load(value, permitted_classes: [Symbol, Set, Date, Time, DateTime]) + else + YAML.load(value) + end + end + end + + class BooleanUndumper < Base + STRING_VALUES = %w[t f].freeze + + def process(value) + store_as_boolean = if @options[:store_as_native_boolean].nil? + Dynamoid.config.store_boolean_as_native + else + @options[:store_as_native_boolean] + end + if store_as_boolean + !!value + elsif STRING_VALUES.include?(value) + value == 't' + else + raise ArgumentError, 'Boolean column neither true nor false' + end + end + end + + class BinaryUndumper < Base + def process(value) + Base64.strict_decode64(value) + end + end + + class CustomTypeUndumper < Base + def process(value) + field_class = @options[:type] + + unless field_class.respond_to?(:dynamoid_load) + raise ArgumentError, "#{field_class} does not support serialization for Dynamoid." + end + + field_class.dynamoid_load(value) + end + end + end +end diff --git a/dynamoid/lib/dynamoid/validations.rb b/dynamoid/lib/dynamoid/validations.rb new file mode 100644 index 000000000..1fe21cebe --- /dev/null +++ b/dynamoid/lib/dynamoid/validations.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +module Dynamoid + # Provide ActiveModel validations to Dynamoid documents. + module Validations + extend ActiveSupport::Concern + + include ActiveModel::Validations + include ActiveModel::Validations::Callbacks + + # Override save to provide validation support. + # + # @private + # @since 0.2.0 + def save(options = {}) + options.reverse_merge!(validate: true) + return false if options[:validate] && !valid? + + super + end + + # Is this object valid? + # + # @since 0.2.0 + def valid?(context = nil) + context ||= (new_record? ? :create : :update) + super(context) + end + + # Raise an error unless this object is valid. + # + # @private + # @since 0.2.0 + def save! + raise Dynamoid::Errors::DocumentNotValid, self unless valid? + + save(validate: false) + self + end + + def update_attribute(attribute, value) + write_attribute(attribute, value) + save(validate: false) + self + end + + module ClassMethods + # Override validates_presence_of to handle false values as present. 
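+ # ActiveModel's standard presence validator treats `false` as blank, so required boolean fields would never validate; the PresenceValidator below counts an explicit false as present.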
+ # + # @since 1.1.1 + def validates_presence_of(*attr_names) + validates_with PresenceValidator, _merge_attributes(attr_names) + end + + # Validates that the specified attributes are present (false or not blank). + class PresenceValidator < ActiveModel::EachValidator + # Validate the record for the record and value. + def validate_each(record, attr_name, value) + # Use keyword argument `options` because it was a Hash in Rails < 6.1 + # and became a keyword argument in 6.1. This way it works in both + # cases. + record.errors.add(attr_name, :blank, **options) if not_present?(value) + end + + private + + # Check whether a value is not present. + def not_present?(value) + value.blank? && value != false + end + end + end + end +end diff --git a/dynamoid/lib/dynamoid/version.rb b/dynamoid/lib/dynamoid/version.rb new file mode 100644 index 000000000..92a11547d --- /dev/null +++ b/dynamoid/lib/dynamoid/version.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +module Dynamoid + VERSION = '3.10.0' +end diff --git a/dynamoid/spec/app/models/address.rb b/dynamoid/spec/app/models/address.rb new file mode 100644 index 000000000..99d250cf3 --- /dev/null +++ b/dynamoid/spec/app/models/address.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class Address + include Dynamoid::Document + + field :city + field :options, :serialized + field :deliverable, :boolean + field :latitude, :number + field :config, :raw + field :registered_on, :date + + field :lock_version, :integer # Provides Optimistic Locking + + def zip_code=(_zip_code) + self.city = 'Chicago' + end +end diff --git a/dynamoid/spec/app/models/bar.rb b/dynamoid/spec/app/models/bar.rb new file mode 100644 index 000000000..28d8ef412 --- /dev/null +++ b/dynamoid/spec/app/models/bar.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +class Bar + include Dynamoid::Document + + table name: :bar, + key: :bar_id, + read_capacity: 200, + write_capacity: 200 + + range :visited_at, :datetime + field :name + field :visited_at, :integer + + validates_presence_of :name, :visited_at + + global_secondary_index hash_key: :name, range_key: :visited_at +end diff --git a/dynamoid/spec/app/models/cadillac.rb b/dynamoid/spec/app/models/cadillac.rb new file mode 100644 index 000000000..3d23397b7 --- /dev/null +++ b/dynamoid/spec/app/models/cadillac.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative 'car' + +class Cadillac < Car +end diff --git a/dynamoid/spec/app/models/camel_case.rb b/dynamoid/spec/app/models/camel_case.rb new file mode 100644 index 000000000..d197e04ed --- /dev/null +++ b/dynamoid/spec/app/models/camel_case.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +class CamelCase + include Dynamoid::Document + + field :color + + belongs_to :magazine + has_many :users + has_one :sponsor + has_and_belongs_to_many :subscriptions + + before_create :doing_before_create + after_create :doing_after_create + before_update :doing_before_update + after_update :doing_after_update + + private + + def doing_before_create + true + end + + def doing_after_create + true + end + + def doing_before_update + true + end + + def doing_after_update + true + end +end diff --git a/dynamoid/spec/app/models/car.rb b/dynamoid/spec/app/models/car.rb new file mode 100644 index 000000000..1697bd1d1 --- /dev/null +++ b/dynamoid/spec/app/models/car.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require_relative 'vehicle' + +class Car < Vehicle + field :power_locks, :boolean +end diff --git a/dynamoid/spec/app/models/magazine.rb 
b/dynamoid/spec/app/models/magazine.rb new file mode 100644 index 000000000..bc5349998 --- /dev/null +++ b/dynamoid/spec/app/models/magazine.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +class Magazine + include Dynamoid::Document + table key: :title + + field :title + field :size, :number + + has_many :subscriptions + has_many :camel_cases + has_one :sponsor + + belongs_to :owner, class_name: 'User', inverse_of: :books + + def publish(advertisements:, free_issue: false) + result = advertisements * (free_issue ? 2 : 1) + result = yield(result) if block_given? + result + end +end diff --git a/dynamoid/spec/app/models/message.rb b/dynamoid/spec/app/models/message.rb new file mode 100644 index 000000000..d50aa79c3 --- /dev/null +++ b/dynamoid/spec/app/models/message.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +class Message + include Dynamoid::Document + + table name: :messages, key: :message_id, read_capacity: 200, write_capacity: 200 + + range :time, :datetime + + field :text +end diff --git a/dynamoid/spec/app/models/nuclear_submarine.rb b/dynamoid/spec/app/models/nuclear_submarine.rb new file mode 100644 index 000000000..4796eb720 --- /dev/null +++ b/dynamoid/spec/app/models/nuclear_submarine.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative 'vehicle' +class NuclearSubmarine < Vehicle + field :torpedoes, :integer +end diff --git a/dynamoid/spec/app/models/post.rb b/dynamoid/spec/app/models/post.rb new file mode 100644 index 000000000..8bf37b69e --- /dev/null +++ b/dynamoid/spec/app/models/post.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class Post + include Dynamoid::Document + + table name: :posts, key: :post_id, read_capacity: 200, write_capacity: 200 + + range :posted_at, :datetime + + field :body + field :length + field :name + + local_secondary_index range_key: :name + global_secondary_index hash_key: :name, range_key: :posted_at + global_secondary_index hash_key: :length +end diff --git a/dynamoid/spec/app/models/sponsor.rb b/dynamoid/spec/app/models/sponsor.rb new file mode 100644 index 000000000..5dde1ee92 --- /dev/null +++ b/dynamoid/spec/app/models/sponsor.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +class Sponsor + include Dynamoid::Document + + belongs_to :magazine + has_many :subscriptions + + belongs_to :camel_case +end diff --git a/dynamoid/spec/app/models/subscription.rb b/dynamoid/spec/app/models/subscription.rb new file mode 100644 index 000000000..fa2529d1b --- /dev/null +++ b/dynamoid/spec/app/models/subscription.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +class Subscription + include Dynamoid::Document + + field :length, :integer + + belongs_to :magazine + has_and_belongs_to_many :users + + belongs_to :customer, class_name: 'User', inverse_of: :monthly + + has_and_belongs_to_many :camel_cases +end diff --git a/dynamoid/spec/app/models/tweet.rb b/dynamoid/spec/app/models/tweet.rb new file mode 100644 index 000000000..e1e6e151d --- /dev/null +++ b/dynamoid/spec/app/models/tweet.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +class Tweet + include Dynamoid::Document + + table name: :twitters, key: :tweet_id, read_capacity: 200, write_capacity: 200 + + range :group, :string + + field :msg + field :count, :integer + field :tags, :set + field :user_name +end diff --git a/dynamoid/spec/app/models/user.rb b/dynamoid/spec/app/models/user.rb new file mode 100644 index 000000000..6f17a69bd --- /dev/null +++ b/dynamoid/spec/app/models/user.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +class User + 
include Dynamoid::Document + + field :name + field :email + field :password + field :admin, :boolean + field :last_logged_in_at, :datetime + + field :favorite_colors, :serialized + field :todo_list, :array + + has_and_belongs_to_many :subscriptions + + has_many :books, class_name: 'Magazine', inverse_of: :owner + has_one :monthly, class_name: 'Subscription', inverse_of: :customer + + has_and_belongs_to_many :followers, class_name: 'User', inverse_of: :following + has_and_belongs_to_many :following, class_name: 'User', inverse_of: :followers + + belongs_to :camel_case +end diff --git a/dynamoid/spec/app/models/vehicle.rb b/dynamoid/spec/app/models/vehicle.rb new file mode 100644 index 000000000..376657015 --- /dev/null +++ b/dynamoid/spec/app/models/vehicle.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +class Vehicle + include Dynamoid::Document + + field :type + + field :description +end diff --git a/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3/create_table_spec.rb b/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3/create_table_spec.rb new file mode 100644 index 000000000..ca253a6ca --- /dev/null +++ b/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3/create_table_spec.rb @@ -0,0 +1,220 @@ +# frozen_string_literal: true + +require 'dynamoid/adapter_plugin/aws_sdk_v3/create_table' +require 'spec_helper' + +describe Dynamoid::AdapterPlugin::AwsSdkV3::CreateTable do + let(:client) { double('client') } + let(:options) do + { + hash_key_type: :string, + billing_mode: :provisioned, + read_capacity: 50, + write_capacity: 10 + } + end + let(:response) { double('response', table_description: table_description) } + let(:table_description) { double('table_description', table_status: 'ACTIVE') } + + describe 'call' do + context 'table properties' do + it 'has the correct table name' do + expect(client).to receive(:create_table) + .with(hash_including(table_name: :dogs)) + .and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + + it 'defines the billing mode' do + expect(client).to receive(:create_table) + .with(hash_including(billing_mode: 'PROVISIONED')) + .and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + + it 'defines the read and write capacity' do + expect(client).to receive(:create_table) + .with(hash_including(provisioned_throughput: { read_capacity_units: 50, write_capacity_units: 10 })) + .and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + + context 'on demand' do + let(:options) do + { billing_mode: :on_demand } + end + + it 'defines the billing mode as PAY_PER_REQUEST' do + expect(client).to receive(:create_table) + .with(hash_including(billing_mode: 'PAY_PER_REQUEST')) + .and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + + it 'does not define read and write capacity' do + expect(client).to receive(:create_table) + .with(hash_excluding(:provisioned_throughput)) + .and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + end + end + + context 'key schema' do + it 'defines a simple primary key' do + expect(client).to receive(:create_table) + .with(hash_including(key_schema: [hash_including(attribute_name: 'id', key_type: 'HASH')])) + + described_class.new(client, :dogs, :id, options).call + end + + it 'defines the primary key attribute' do + expect(client).to receive(:create_table) + .with(hash_including(attribute_definitions: [hash_including(attribute_name: 'id', attribute_type: 'S')])) + .and_return(response) + + 
described_class.new(client, :dogs, :id, options).call + end + + it 'defines a composite primary key' do + expect(client).to receive(:create_table) + .with( + hash_including( + key_schema: [ + hash_including(attribute_name: 'id', key_type: 'HASH'), + hash_including(attribute_name: 'name', key_type: 'RANGE') + ] + ) + ).and_return(response) + + described_class.new(client, :dogs, :id, range_key: { name: :string }).call + end + + it 'defines the composite key attributes' do + expect(client).to receive(:create_table) + .with( + hash_including( + attribute_definitions: [ + hash_including(attribute_name: 'id', attribute_type: 'S'), + hash_including(attribute_name: 'name', attribute_type: 'S') + ] + ) + ).and_return(response) + + described_class.new(client, :dogs, :id, hash_key_type: :string, range_key: { name: :string }).call + end + end + + context 'local secondary index' do + let(:options) do + super().merge(local_secondary_indexes: [index]) + end + + let(:index) do + double('index', name: 'local', projection_type: :all, type: :local_secondary, + hash_key_schema: hash_key_schema, range_key_schema: range_key_schema) + end + + let(:hash_key_schema) do + { type: :string } + end + + let(:range_key_schema) do + { id: :string } + end + + it 'defines the index' do + expect(client).to receive(:create_table) + .with( + hash_including( + local_secondary_indexes: [ + hash_including( + index_name: 'local', key_schema: [ + hash_including(attribute_name: 'type', key_type: 'HASH'), + hash_including(attribute_name: 'id', key_type: 'RANGE') + ] + ) + ] + ) + ).and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + end + + context 'global secondary index' do + let(:options) do + super().merge(global_secondary_indexes: [index]) + end + + let(:index) do + double('index', name: 'global', projection_type: :all, type: :global_secondary, + hash_key_schema: hash_key_schema, range_key_schema: range_key_schema, + read_capacity: 20, write_capacity: 5) + end + + let(:hash_key_schema) do + { type: :string } + end + + let(:range_key_schema) do + { id: :string } + end + + it 'defines the index' do + expect(client).to receive(:create_table) + .with( + hash_including( + global_secondary_indexes: [ + hash_including( + index_name: 'global', key_schema: [ + hash_including(attribute_name: 'type', key_type: 'HASH'), + hash_including(attribute_name: 'id', key_type: 'RANGE') + ] + ) + ] + ) + ).and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + + it 'defines the provisioned capacity' do + expect(client).to receive(:create_table) + .with( + hash_including( + global_secondary_indexes: [ + hash_including(provisioned_throughput: { read_capacity_units: 20, write_capacity_units: 5 }) + ] + ) + ).and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + + context 'on demand' do + let(:options) do + super().merge(billing_mode: :on_demand) + end + + it 'does not define a capacity' do + expect(client).to receive(:create_table) + .with( + hash_including( + global_secondary_indexes: [ + hash_excluding(provisioned_throughput: { read_capacity_units: 20, write_capacity_units: 5 }) + ] + ) + ).and_return(response) + + described_class.new(client, :dogs, :id, options).call + end + end + end + end +end diff --git a/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3/until_past_table_status_spec.rb b/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3/until_past_table_status_spec.rb new file mode 100644 index 000000000..7c0c4dce9 --- /dev/null +++ 
b/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3/until_past_table_status_spec.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'dynamoid/adapter_plugin/aws_sdk_v3' + +describe Dynamoid::AdapterPlugin::AwsSdkV3::UntilPastTableStatus do + describe 'call' do + context 'table creation' do + let(:client) { double('client') } + let(:response_creating) { double('response#creating', table: creating_table) } + let(:response_active) { double('response#active', table: active_table) } + let(:creating_table) { double('creating_table', table_status: 'CREATING') } + let(:active_table) { double('active_table', table_status: 'ACTIVE') } + + it 'waits until the table is created', config: { sync_retry_max_times: 60 } do + expect(client).to receive(:describe_table) + .with(table_name: :dogs).exactly(3).times + .and_return(response_creating, response_creating, response_active) + + described_class.new(client, :dogs, :creating).call + end + + it 'stops after exceeding the Dynamoid.config.sync_retry_max_times attempts limit', + config: { sync_retry_max_times: 5 } do + expect(client).to receive(:describe_table) + .exactly(6).times + .and_return(*[response_creating] * 6) + + described_class.new(client, :dogs, :creating).call + end + + it 'uses :sync_retry_wait_seconds to delay attempts', + config: { sync_retry_wait_seconds: 2, sync_retry_max_times: 3 } do + service = described_class.new(client, :dogs, :creating) + allow(client).to receive(:describe_table).and_return(response_creating).exactly(4).times + expect(service).to receive(:sleep).with(2).exactly(4).times + + service.call + end + end + end +end diff --git a/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3_spec.rb b/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3_spec.rb new file mode 100644 index 000000000..cc7b7bfd3 --- /dev/null +++ b/dynamoid/spec/dynamoid/adapter_plugin/aws_sdk_v3_spec.rb @@ -0,0 +1,1390 @@ +# frozen_string_literal: true + +require 'dynamoid/adapter_plugin/aws_sdk_v3' +require 'spec_helper' + +describe Dynamoid::AdapterPlugin::AwsSdkV3 do + # + # These let() definitions create tables "dynamoid_tests_TestTable" and return the + # name of the table. + # + # Name => Constructor args + { + 1 => [:id], + 2 => [:id], + 3 => [:id, { range_key: { range: :number } }], + 4 => [:id, { range_key: { range: :number } }], + 5 => [:id, { read_capacity: 10_000, write_capacity: 1000 }] + }.each do |n, args| + name = "dynamoid_tests_TestTable#{n}" + let(:"test_table#{n}") do + Dynamoid.adapter.create_table(name, *args) + name + end + end + + # + # Test limit controls in queries and scans + # + # Since query and scan have different interfaces, including this shared example + # requires some inputs. The internal aspects will configure request parameters and + # the Dynamoid adapter call correctly.
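+ # Throughout these examples record_limit caps how many matching items are returned, scan_limit caps how many items are examined before filtering, and batch_size sets the page size of each request.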
+ # + # @param [Symbol] request_type the name of the request, either :query or :scan + # + shared_examples 'correctly handling limits' do |request_type| + before do + @request_type = request_type + end + + def query_key_conditions + { id: [[:eq, '1']] } + end + + def dynamo_request(table_name, conditions = {}, options = {}) + if @request_type == :query + Dynamoid.adapter.query(table_name, query_key_conditions, conditions, options).flat_map { |i| i } + else + Dynamoid.adapter.scan(table_name, conditions, options).flat_map { |i| i } + end + end + + context 'multiple name entities' do + before do + (1..4).each do |i| + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Josh', range: i.to_f) + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Pascal', range: (i + 4).to_f) + end + end + + it 'returns correct records' do + expect(dynamo_request(test_table3).count).to eq(8) + end + + it 'returns correct record limit' do + expect(dynamo_request(test_table3, {}, { record_limit: 1 }).count).to eq(1) + expect(dynamo_request(test_table3, {}, { record_limit: 3 }).count).to eq(3) + end + + it 'returns correct batch' do + # Receives 8 times for each item and 1 more for empty page + expect(Dynamoid.adapter.client).to receive(request_type).exactly(9).times.and_call_original + expect(dynamo_request(test_table3, {}, { batch_size: 1 }).count).to eq(8) + end + + it 'returns correct batch and paginates in batches' do + expect(Dynamoid.adapter.client).to receive(request_type).exactly(3).times.and_call_original + expect(dynamo_request(test_table3, {}, { batch_size: 3 }).count).to eq(8) + end + + it 'returns correct record limit and batch' do + expect(dynamo_request(test_table3, {}, { record_limit: 1, batch_size: 1 }).count).to eq(1) + end + + it 'returns correct record limit with filter' do + expect( + dynamo_request(test_table3, { name: [[:eq, 'Josh']] }, { record_limit: 1 }).count + ).to eq(1) + end + + it 'obeys correct scan limit with filter' do + expect(Dynamoid.adapter.client).to receive(request_type).once.and_call_original + expect( + dynamo_request(test_table3, { name: [[:eq, 'Josh']] }, { scan_limit: 2 }).count + ).to eq(2) + end + + it 'obeys correct scan limit over record limit with filter' do + expect(Dynamoid.adapter.client).to receive(request_type).once.and_call_original + expect( + dynamo_request( + test_table3, + { name: [[:eq, 'Josh']] }, + { + scan_limit: 2, + record_limit: 10 # Won't be able to return more than 2 due to scan limit + } + ).count + ).to eq(2) + end + + it 'obeys correct scan limit with filter with some return' do + expect(Dynamoid.adapter.client).to receive(request_type).once.and_call_original + expect( + dynamo_request(test_table3, { name: [[:eq, 'Pascal']] }, { scan_limit: 5 }).count + ).to eq(1) + end + + it 'obeys correct scan limit and batch size with filter with some return' do + expect(Dynamoid.adapter.client).to receive(request_type).twice.and_call_original + expect( + dynamo_request( + test_table3, + { name: [[:eq, 'Josh']] }, + { + scan_limit: 3, + batch_size: 2 # This would force batching of size 2 for potential of 4 results! + } + ).count + ).to eq(3) + end + + it 'obeys correct scan limit with filter and batching for some return' do + expect(Dynamoid.adapter.client).to receive(request_type).exactly(5).times.and_call_original + # We should paginate through 5 responses each of size 1 (batch) and + # only scan through 5 records at most which with our given filter + # should return 1 result since first 4 are Josh and last is Pascal. 
+ expect( + dynamo_request( + test_table3, + { name: [[:eq, 'Pascal']] }, + { + batch_size: 1, + scan_limit: 5, + record_limit: 3 + } + ).count + ).to eq(1) + end + + it 'obeys correct record limit with filter, batching, and scan limit' do + expect(Dynamoid.adapter.client).to receive(request_type).exactly(6).times.and_call_original + # We should paginate through 6 responses each of size 1 (batch) and + # only scan through 6 records at most which with our given filter + # should return 2 results, and hit record limit before scan limit. + expect( + dynamo_request( + test_table3, + { name: [[:eq, 'Pascal']] }, + { + batch_size: 1, + scan_limit: 10, + record_limit: 2 + } + ).count + ).to eq(2) + end + end + + # + # Tests that even with large records we are paginating to pull more data + # even if we hit response data size limits + # + context 'large records still returns as much data' do + before do + # 64 of these items will exceed the 1MB result record_limit thus query won't return all results on first loop + # We use :age since :range won't work for filtering in queries + 200.times do |i| + Dynamoid.adapter.put_item( + test_table3, + id: '1', + range: i.to_f, + age: i.to_f, + data: 'A' * 1024 * 16 + ) + end + end + + it 'returns correct for limits and scan limit' do + expect(dynamo_request(test_table3, {}, { scan_limit: 100 }).count).to eq(100) + end + + it 'returns correct for scan limit with filtering' do + # Not sure why there is difference but :query will do 1 page and see 100 records and filter out 10 + # while :scan will do 2 pages and see 64 records on first page similar to the 1MB return limit + # and then look at 36 records and find 10 on the second page. + pages = request_type == :query ? 1 : 2 + expect(Dynamoid.adapter.client).to receive(request_type).exactly(pages).times.and_call_original + expect( + dynamo_request(test_table3, { age: [[:gte, 90.0]] }, { scan_limit: 100 }).count + ).to eq(10) + end + + it 'returns correct for record limit' do + expect(Dynamoid.adapter.client).to receive(request_type).twice.and_call_original + expect( + dynamo_request(test_table3, { age: [[:gte, 5.0]] }, { record_limit: 100 }).count + ).to eq(100) + end + + it 'returns correct record limit with filtering' do + expect( + dynamo_request(test_table3, { age: [[:gte, 133.0]] }, { record_limit: 100 }).count + ).to eq(67) + end + + it 'returns correct with batching' do + # Since we hit the data size limit 3 times, so we must make 4 requests + # which is limitation of DynamoDB and therefore batch limit is + # restricted by this limitation as well! + expect(Dynamoid.adapter.client).to receive(request_type).exactly(4).times.and_call_original + expect(dynamo_request(test_table3, {}, { batch_size: 100 }).count).to eq(200) + end + + it 'returns correct with batching and record limit beyond data size limit' do + # Since we hit limit once, we need to make sure the second request only + # requests for as many as we have left for our record limit. 
+ expect(Dynamoid.adapter.client).to receive(request_type).twice.and_call_original + expect( + dynamo_request(test_table3, {}, { record_limit: 83, batch_size: 100 }).count + ).to eq(83) + end + + it 'returns correct with batching and record limit' do + expect(Dynamoid.adapter.client).to receive(request_type).exactly(11).times.and_call_original + # Since we do age >= 5.0 we lose the first 5 results so we make 11 paginated requests + expect( + dynamo_request( + test_table3, + { age: [[:gte, 5.0]] }, + { + record_limit: 100, + batch_size: 10 + } + ).count + ).to eq(100) + end + end + + it 'correctly limits edge case of record and scan counts approaching limits' do + (1..4).each do |i| + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Josh', range: i.to_f) + end + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Pascal', range: 5.0) + (6..10).each do |i| + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Josh', range: i.to_f) + end + + expect(Dynamoid.adapter.client).to receive(request_type).twice.and_call_original + # In faulty code, the record limit would adjust limit to 2 thus on second page + # we would get the 5th Josh (range value 6.0) whereas correct implementation would + # adjust limit to 1 since can only scan 1 more record therefore would see Pascal + # and not go to next valid record. + expect( + dynamo_request( + test_table3, + { name: [[:eq, 'Josh']] }, + { + batch_size: 4, + scan_limit: 5, # Scan limit would adjust requested limit to 1 + record_limit: 6 # Record limit would adjust requested limit to 2 + } + ).count + ).to eq(4) + end + end + + # + # Tests adapter against ranged tables + # + shared_examples 'range queries' do + before do + Dynamoid.adapter.put_item(test_table3, id: '1', range: 1.0) + Dynamoid.adapter.put_item(test_table3, id: '1', range: 3.0) + end + + it 'performs query on a table with a range and selects items in a range' do + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:between, [0.0, 3.0]]] }).to_a).to eq [[[{ id: '1', range: BigDecimal('1') }, { id: '1', range: BigDecimal('3') }], { last_evaluated_key: nil }]] + end + + it 'performs query on a table with a range and selects items in a range with :select option' do + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:between, [0.0, 3.0]]] }, {}, { select: 'ALL_ATTRIBUTES' }).to_a).to eq [[[{ id: '1', range: BigDecimal('1') }, { id: '1', range: BigDecimal('3') }], { last_evaluated_key: nil }]] + end + + it 'performs query on a table with a range and selects items greater than' do + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:gt, 1.0]] }).to_a).to eq [[[{ id: '1', range: BigDecimal('3') }], { last_evaluated_key: nil }]] + end + + it 'performs query on a table with a range and selects items less than' do + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:lt, 2.0]] }).to_a).to eq [[[{ id: '1', range: BigDecimal('1') }], { last_evaluated_key: nil }]] + end + + it 'performs query on a table with a range and selects items gte' do + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:gte, 1.0]] }).to_a).to eq [[[{ id: '1', range: BigDecimal('1') }, { id: '1', range: BigDecimal('3') }], { last_evaluated_key: nil }]] + end + + it 'performs query on a table with a range and selects items lte' do + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:lte, 3.0]] }).to_a).to eq [[[{ id: '1', range: BigDecimal('1') }, { id: '1', range: BigDecimal('3') }], { 
last_evaluated_key: nil }]] + end + + it 'performs query on a table and returns items obeying the record limit' do + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:gt, 0.0]] }, {}, { record_limit: 1 }).flat_map { |i| i }.count).to eq(1) + end + + it 'performs query on a table with a range and selects all items' do + 200.times { |i| Dynamoid.adapter.put_item(test_table3, id: '1', range: i.to_f, data: 'A' * 1024 * 16) } + # 64 of these items will exceed the 1MB result limit, thus query won't return all results on the first loop + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']], range: [[:gte, 0.0]] }).flat_map { |i| i }.count).to eq(200) + end + end + + # + # Tests scan_index_forward flag behavior on range queries + # + shared_examples 'correct ordering' do + before do + Dynamoid.adapter.put_item(test_table4, id: '1', order: 1, range: 1.0) + Dynamoid.adapter.put_item(test_table4, id: '1', order: 2, range: 2.0) + Dynamoid.adapter.put_item(test_table4, id: '1', order: 3, range: 3.0) + Dynamoid.adapter.put_item(test_table4, id: '1', order: 4, range: 4.0) + Dynamoid.adapter.put_item(test_table4, id: '1', order: 5, range: 5.0) + Dynamoid.adapter.put_item(test_table4, id: '1', order: 6, range: 6.0) + end + + it 'performs query on a table with a range and returns items in the correct order, scan_index_forward true' do + query = Dynamoid.adapter.query(test_table4, { id: [[:eq, '1']], range: [[:gt, 0]] }, {}, { scan_index_forward: true }).flat_map { |i| i }.to_a + expect(query[0]).to eq(id: '1', order: 1, range: BigDecimal('1')) + expect(query[1]).to eq(id: '1', order: 2, range: BigDecimal('2')) + expect(query[2]).to eq(id: '1', order: 3, range: BigDecimal('3')) + expect(query[3]).to eq(id: '1', order: 4, range: BigDecimal('4')) + expect(query[4]).to eq(id: '1', order: 5, range: BigDecimal('5')) + expect(query[5]).to eq(id: '1', order: 6, range: BigDecimal('6')) + end + + it 'performs query on a table with a range and returns items in the correct order, scan_index_forward false' do + query = Dynamoid.adapter.query(test_table4, { id: [[:eq, '1']], range: [[:gt, 0]] }, {}, { scan_index_forward: false }).flat_map { |i| i }.to_a + expect(query[5]).to eq(id: '1', order: 1, range: BigDecimal('1')) + expect(query[4]).to eq(id: '1', order: 2, range: BigDecimal('2')) + expect(query[3]).to eq(id: '1', order: 3, range: BigDecimal('3')) + expect(query[2]).to eq(id: '1', order: 4, range: BigDecimal('4')) + expect(query[1]).to eq(id: '1', order: 5, range: BigDecimal('5')) + expect(query[0]).to eq(id: '1', order: 6, range: BigDecimal('6')) + end + end + + describe '#batch_get_item' do + let(:table) { "#{Dynamoid::Config.namespace}_table" } + let(:table_another) { "#{Dynamoid::Config.namespace}_table_another" } + let(:table_with_composite_key) { "#{Dynamoid::Config.namespace}_table_with_composite_key" } + + before do + Dynamoid.adapter.create_table(table, :id) + Dynamoid.adapter.create_table(table_another, :id) + Dynamoid.adapter.create_table(table_with_composite_key, :id, range_key: { age: :number }) + end + + after do + Dynamoid.adapter.delete_table(table) + Dynamoid.adapter.delete_table(table_another) + Dynamoid.adapter.delete_table(table_with_composite_key) + end + + it 'passes options to underlying BatchGet call' do + pending 'at the moment passing the options to underlying batch get is not supported' + + expect_any_instance_of(Aws::DynamoDB::Client).to receive(:batch_get_item).with(request_items: { test_table1 => { keys: [{ 'id' =>
'1' }, { 'id' => '2' }], consistent_read: true } }).and_call_original + described_class.batch_get_item({ test_table1 => %w[1 2] }, consistent_read: true) + end + + it 'loads multiple items at once' do + Dynamoid.adapter.put_item(table, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(table, id: '2', name: 'Justin') + + results = Dynamoid.adapter.batch_get_item(table => %w[1 2]) + expect(results).to eq( + { + table => [ + { id: '1', name: 'Josh' }, + { id: '2', name: 'Justin' }, + ] + } + ) + end + + it 'loads items from multiple tables' do + Dynamoid.adapter.put_item(table, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(table_another, id: '2', name: 'Justin') + + results = Dynamoid.adapter.batch_get_item(table => ['1'], table_another => ['2']) + expect(results).to eq( + { + table => [ + { id: '1', name: 'Josh' } + ], + table_another => [ + { id: '2', name: 'Justin' } + ] + } + ) + end + + it 'performs BatchGetItem API call' do + expect(Dynamoid.adapter.client).to receive(:batch_get_item).and_call_original + Dynamoid.adapter.batch_get_item(table => ['1']) + end + + it 'accepts [] as an ids list' do + results = Dynamoid.adapter.batch_get_item(table => []) + expect(results).to eq(table => []) + end + + it 'accepts {} as table_names_with_ids argument' do + results = Dynamoid.adapter.batch_get_item({}) + expect(results).to eq({}) + end + + it 'accepts table name as String and as Symbol' do + Dynamoid.adapter.put_item(table, id: '1', name: 'Josh') + + results = Dynamoid.adapter.batch_get_item(table.to_s => ['1']) + expect(results).to eq(table => [{ id: '1', name: 'Josh' }]) + + results = Dynamoid.adapter.batch_get_item(table.to_sym => ['1']) + expect(results).to eq(table => [{ id: '1', name: 'Josh' }]) + end + + context 'when simple key' do + it 'accepts one id passed as singular value' do + Dynamoid.adapter.put_item(table, id: '1', name: 'Josh') + + results = Dynamoid.adapter.batch_get_item(table => '1') + expect(results).to eq(table => [{ id: '1', name: 'Josh' }]) + end + + it 'accepts one id passed as array' do + Dynamoid.adapter.put_item(table, id: '1', name: 'Josh') + + results = Dynamoid.adapter.batch_get_item(table => ['1']) + expect(results).to eq(table => [{ id: '1', name: 'Josh' }]) + end + + it 'accepts multiple ids' do + Dynamoid.adapter.put_item(table, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(table, id: '2', name: 'Justin') + + results = Dynamoid.adapter.batch_get_item(table => %w[1 2]) + expect(results).to eq( + { + table => [ + { id: '1', name: 'Josh' }, + { id: '2', name: 'Justin' }, + ] + } + ) + end + end + + context 'when composite primary key' do + it 'accepts one id passed as singular value' do + skip 'It is not supported and needed yet' + + Dynamoid.adapter.put_item(table_with_composite_key, id: '1', age: 29, name: 'Josh') + + results = Dynamoid.adapter.batch_get_item(table_with_composite_key => ['1', 29]) + expect(results).to eq(table_with_composite_key => [{ id: '1', age: 29, name: 'Josh' }]) + end + + it 'accepts one id passed as array' do + Dynamoid.adapter.put_item(table_with_composite_key, id: '1', age: 29, name: 'Josh') + + results = Dynamoid.adapter.batch_get_item(table_with_composite_key => [['1', 29]]) + expect(results).to eq(table_with_composite_key => [{ id: '1', age: 29, name: 'Josh' }]) + end + + it 'accepts multiple ids' do + Dynamoid.adapter.put_item(table_with_composite_key, id: '1', age: 29, name: 'Josh') + Dynamoid.adapter.put_item(table_with_composite_key, id: '2', age: 16, name: 'Justin') + + results = 
Dynamoid.adapter.batch_get_item(table_with_composite_key => [['1', 29], ['2', 16]]) + + expect(results).to match( + { + table_with_composite_key => contain_exactly( + { id: '1', age: BigDecimal('29'), name: 'Josh' }, + { id: '2', age: BigDecimal('16'), name: 'Justin' }, + ) + } + ) + end + end + + it 'can load any number of items (even more than 100)' do + ids = (1..101).map(&:to_s) + + ids.each do |id| + Dynamoid.adapter.put_item(table, id: id) + end + + results = Dynamoid.adapter.batch_get_item(table => ids) + items = results[table] + + expect(items.size).to eq 101 + end + + it 'loads unprocessed items for a table without a range key' do + # BatchGetItem has the following limitations: + # * up to 100 items at once + # * up to 16 MB at once + # * one item size up to 400 KB (common limitation) + # + # To reach the limits we will write as much data as possible + # and then read it back + # + # 100 * 400 KB = ~40 MB + # 40 MB / 16 MB ~ 3 + # So we expect BatchGetItem to be called 3 times + # + # '9' is an experimentally found value + # it includes length('id' + 'text') + some undocumented overhead (1-100 bytes) + + ids = (1..100).map(&:to_s) + + ids.each do |id| + text = '#' * (400.kilobytes - 9) + Dynamoid.adapter.put_item(table, id: id, text: text) + end + + expect(Dynamoid.adapter.client).to receive(:batch_get_item) + .exactly(3) + .times.and_call_original + + results = Dynamoid.adapter.batch_get_item(table => ids) + items = results[table] + + expect(items.size).to eq 100 + expect(items.map { |h| h[:id] }).to match_array(ids) + end + + it 'loads unprocessed items for a table with a range key' do + # BatchGetItem has the following limitations: + # * up to 100 items at once + # * up to 16 MB at once + # * one item size up to 400 KB (common limitation) + # + # To reach the limits we will write as much data as possible + # and then read it back + # + # 100 * 400 KB = ~40 MB + # 40 MB / 16 MB ~ 3 + # So we expect BatchGetItem to be called 3 times + # + # '15' is an experimentally found value + # it includes the size of ('id' + 'age') + some undocumented overhead + + ids = (1..100).map { |id| [id.to_s, id] } + + ids.each do |id, age| + text = '#' * (400.kilobytes - 15) + Dynamoid.adapter.put_item(table_with_composite_key, id: id, age: age, name: text) + end + + expect(Dynamoid.adapter.client).to receive(:batch_get_item) + .exactly(3) + .times.and_call_original + + results = Dynamoid.adapter.batch_get_item(table_with_composite_key => ids) + items = results[table_with_composite_key] + + expect(items.size).to eq(100) + expect(items.map { |h| [h[:id], h[:age]] }).to match_array(ids) + end + + context 'when called with block' do + it 'returns nil' do + Dynamoid.adapter.put_item(table, id: '1') + results = Dynamoid.adapter.batch_get_item(table => '1') { |batch, _| batch } + + expect(results).to be_nil + end + + it 'calls the block for each loaded batch of items' do + ids = (1..110).map(&:to_s) + + ids.each do |id| + Dynamoid.adapter.put_item(table, id: id) + end + + batches = [] + Dynamoid.adapter.batch_get_item(table => ids) do |batch| + batches << batch + end + + # expect only 2 batches: 1-100 and 101-110 + expect(batches.size).to eq 2 + batch1, batch2 = batches + + expect(batch1.keys).to eq [table] + expect(batch1[table].size).to eq 100 + + expect(batch2.keys).to eq [table] + expect(batch2[table].size).to eq 10 + + actual_ids = (batch1[table] + batch2[table]).map { |h| h[:id] } + expect(actual_ids).to match_array(ids) + end + + it 'passes as block arguments flag if there are unprocessed items for each batch' do
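+ # The boolean yielded to the block reports whether that BatchGetItem call left unprocessed keys behind (i.e. another request is still needed).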
+ # It should be enough to exceed limit of 16 MB per call + # 50 * 400KB = ~20 MB + # 9 bytes = length('id' + 'text') + some not documented overhead (1-100 bytes) + + ids = (1..50).map(&:to_s) + + ids.each do |id| + text = '#' * (400.kilobytes - 9) + Dynamoid.adapter.put_item(table, id: id, text: text) + end + + complete_statuses = [] + Dynamoid.adapter.batch_get_item(table => ids) do |_, not_completed| + complete_statuses << not_completed + end + + expect(complete_statuses).to eq [true, false] + end + end + end + + context 'without a preexisting table' do + # CreateTable and DeleteTable + it 'performs CreateTable and DeleteTable' do + table = Dynamoid.adapter.create_table('CreateTable', :id, range_key: { created_at: :number }) + + expect(Dynamoid.adapter.list_tables).to include 'CreateTable' + + Dynamoid.adapter.delete_table('CreateTable') + end + + it 'creates table synchronously' do + table = Dynamoid.adapter.create_table('snakes', :id, sync: true) + + expect(Dynamoid.adapter.list_tables).to include 'snakes' + + Dynamoid.adapter.delete_table('snakes') + end + + it 'deletes table synchronously' do + table = Dynamoid.adapter.create_table('snakes', :id, sync: true) + expect(Dynamoid.adapter.list_tables).to include 'snakes' + + Dynamoid.adapter.delete_table('snakes', sync: true) + expect(Dynamoid.adapter.list_tables).not_to include 'snakes' + end + + describe 'create table with secondary index' do + let(:doc_class) do + Class.new do + include Dynamoid::Document + range :range, :number + field :range2 + field :hash2 + end + end + + it 'creates table with local_secondary_index' do + # setup + doc_class.table(name: 'table_lsi', key: :id) + doc_class.local_secondary_index( + range_key: :range2 + ) + + Dynamoid.adapter.create_table( + 'table_lsi', + :id, + local_secondary_indexes: doc_class.local_secondary_indexes.values, + range_key: { range: :number } + ) + + # execute + resp = Dynamoid.adapter.client.describe_table(table_name: 'table_lsi') + data = resp.data + lsi = data.table.local_secondary_indexes.first + + # test + expect(Dynamoid::AdapterPlugin::AwsSdkV3::PARSE_TABLE_STATUS.call(resp)).to eq(Dynamoid::AdapterPlugin::AwsSdkV3::TABLE_STATUSES[:active]) + expect(lsi.index_name).to eql 'dynamoid_tests_table_lsi_index_id_range2' + expect(lsi.key_schema.map(&:to_hash)).to eql [ + { attribute_name: 'id', key_type: 'HASH' }, + { attribute_name: 'range2', key_type: 'RANGE' } + ] + expect(lsi.projection.to_hash).to eql(projection_type: 'KEYS_ONLY') + end + + it 'creates table with global_secondary_index' do + # setup + doc_class.table(name: 'table_gsi', key: :id) + doc_class.global_secondary_index( + hash_key: :hash2, + range_key: :range2, + write_capacity: 10, + read_capacity: 20 + ) + Dynamoid.adapter.create_table( + 'table_gsi', + :id, + global_secondary_indexes: doc_class.global_secondary_indexes.values, + range_key: { range: :number } + ) + + # execute + resp = Dynamoid.adapter.client.describe_table(table_name: 'table_gsi') + data = resp.data + gsi = data.table.global_secondary_indexes.first + + # test + expect(Dynamoid::AdapterPlugin::AwsSdkV3::PARSE_TABLE_STATUS.call(resp)).to eq(Dynamoid::AdapterPlugin::AwsSdkV3::TABLE_STATUSES[:active]) + expect(gsi.index_name).to eql 'dynamoid_tests_table_gsi_index_hash2_range2' + expect(gsi.key_schema.map(&:to_hash)).to eql [ + { attribute_name: 'hash2', key_type: 'HASH' }, + { attribute_name: 'range2', key_type: 'RANGE' } + ] + expect(gsi.projection.to_hash).to eql(projection_type: 'KEYS_ONLY') + 
expect(gsi.provisioned_throughput.write_capacity_units).to eql 10 + expect(gsi.provisioned_throughput.read_capacity_units).to eql 20 + end + end + end + + context 'with a preexisting table' do + # GetItem, PutItem and DeleteItem + it 'passes options to underlying GetItem call' do + expect(Dynamoid.adapter.client).to receive(:get_item).with(hash_including(consistent_read: true)).and_call_original + expect(Dynamoid.adapter.get_item(test_table1, '1', consistent_read: true)).to be_nil + end + + it 'performs GetItem for an item that does not exist' do + expect(Dynamoid.adapter.get_item(test_table1, '1')).to be_nil + end + + it 'performs GetItem for an item that does exist' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to eq(name: 'Josh', id: '1') + + Dynamoid.adapter.delete_item(test_table1, '1') + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to be_nil + end + + it 'performs GetItem for an item that does exist with a range key' do + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Josh', range: 2.0) + + expect(Dynamoid.adapter.get_item(test_table3, '1', range_key: 2.0)).to eq(name: 'Josh', id: '1', range: 2.0) + + Dynamoid.adapter.delete_item(test_table3, '1', range_key: 2.0) + + expect(Dynamoid.adapter.get_item(test_table3, '1', range_key: 2.0)).to be_nil + end + + it 'performs DeleteItem for an item that does not exist' do + Dynamoid.adapter.delete_item(test_table1, '1') + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to be_nil + end + + it 'performs PutItem for an item that does not exist' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to eq(id: '1', name: 'Josh') + end + + # BatchDeleteItem + it 'performs BatchDeleteItem with singular keys' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table2, id: '1', name: 'Justin') + + Dynamoid.adapter.batch_delete_item(test_table1 => ['1'], test_table2 => ['1']) + + results = Dynamoid.adapter.batch_get_item(test_table1 => '1', test_table2 => '1') + expect(results.size).to eq 2 + + expect(results[test_table1]).to be_blank + expect(results[test_table2]).to be_blank + end + + it 'performs BatchDeleteItem with multiple keys' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Justin') + + Dynamoid.adapter.batch_delete_item(test_table1 => %w[1 2]) + + results = Dynamoid.adapter.batch_get_item(test_table1 => %w[1 2]) + + expect(results.size).to eq 1 + expect(results[test_table1]).to be_blank + end + + it 'performs BatchDeleteItem with one ranged key' do + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Josh', range: 1.0) + Dynamoid.adapter.put_item(test_table3, id: '2', name: 'Justin', range: 2.0) + + Dynamoid.adapter.batch_delete_item(test_table3 => [['1', 1.0]]) + results = Dynamoid.adapter.batch_get_item(test_table3 => [['1', 1.0]]) + + expect(results.size).to eq 1 + expect(results[test_table3]).to be_blank + end + + it 'performs BatchDeleteItem with multiple ranged keys' do + Dynamoid.adapter.put_item(test_table3, id: '1', name: 'Josh', range: 1.0) + Dynamoid.adapter.put_item(test_table3, id: '2', name: 'Justin', range: 2.0) + + Dynamoid.adapter.batch_delete_item(test_table3 => [['1', 1.0], ['2', 2.0]]) + results = Dynamoid.adapter.batch_get_item(test_table3 => [['1', 1.0], ['2', 2.0]]) + + expect(results.size).to eq 1 + expect(results[test_table3]).to 
be_blank + end + + it 'performs BatchDeleteItem with more than 25 items' do + (25 + 1).times do |i| + Dynamoid.adapter.put_item(test_table1, id: i.to_s) + end + + expect(Dynamoid.adapter.client).to receive(:batch_write_item) + .twice.and_call_original + Dynamoid.adapter.batch_delete_item(test_table1 => (0..25).map(&:to_s)) + + results = Dynamoid.adapter.scan(test_table1).flat_map { |i| i } + expect(results.to_a.size).to eq 0 + end + + it 'performs BatchDeleteItem with more than 25 items and different tables' do + 13.times do |i| + Dynamoid.adapter.put_item(test_table1, id: i.to_s) + Dynamoid.adapter.put_item(test_table2, id: i.to_s) + end + + expect(Dynamoid.adapter.client).to receive(:batch_write_item) + .twice.and_call_original + Dynamoid.adapter.batch_delete_item( + test_table1 => (0..12).map(&:to_s), + test_table2 => (0..12).map(&:to_s) + ) + + results = Dynamoid.adapter.scan(test_table1).flat_map { |i| i } + expect(results.to_a.size).to eq 0 + + results = Dynamoid.adapter.scan(test_table2).flat_map { |i| i } + expect(results.to_a.size).to eq 0 + end + + describe '#batch_write_item' do + it 'creates several items at once' do + Dynamoid.adapter.batch_write_item(test_table3, [ + { id: '1', range: 1.0 }, + { id: '2', range: 2.0 }, + { id: '3', range: 3.0 } + ]) + + results = Dynamoid.adapter.scan(test_table3) + expect(results.to_a.first).to match [ + contain_exactly( + { id: '1', range: 1.0 }, + { id: '2', range: 2.0 }, + { id: '3', range: 3.0 } + ), + { last_evaluated_key: nil } + ] + end + + it 'performs BatchWriteItem with more than 25 items' do + items = (1..26).map { |i| { id: i.to_s } } + + expect(Dynamoid.adapter.client).to receive(:batch_write_item) + .twice.and_call_original + + Dynamoid.adapter.batch_write_item(test_table1, items) + end + + it 'writes unprocessed items' do + # batch_write_item has the following limitations: + # * up to 25 items at once + # * up to 16 MB at once + # + # dynamodb-local ignores provisioned throughput settings + # so we cannot emulate unprocessed items - let's stub + + ids = (1..3).map(&:to_s) + items = ids.map { |id| { id: id } } + + records = [] + responses = [ + double('response 1', unprocessed_items: { test_table1 => [ + double(put_request: double(item: { id: '2' })), + double(put_request: double(item: { id: '3' })) + ] }), + double('response 2', unprocessed_items: { test_table1 => [ + double(put_request: double(item: { id: '3' })) + ] }), + double('response 3', unprocessed_items: nil) + ] + allow(Dynamoid.adapter.client).to receive(:batch_write_item) do |args| + records << args[:request_items][test_table1].map { |h| h[:put_request][:item] } + responses.shift + end + + Dynamoid.adapter.batch_write_item(test_table1, items) + expect(records).to eq( + [ + [{ id: '1' }, { id: '2' }, { id: '3' }], + [{ id: '2' }, { id: '3' }], + [{ id: '3' }] + ] + ) + end + + context 'optional block passed' do + it 'passes a flag as block argument indicating whether each batch had unprocessed items' do + # dynamodb-local ignores provisioned throughput settings + # so we cannot emulate unprocessed items - let's stub + + responses = [ + double('response 1', unprocessed_items: { test_table1 => [ + double(put_request: double(item: { id: '25' })) # fail + ] }), + double('response 2', unprocessed_items: nil), # success + double('response 3', unprocessed_items: { test_table1 => [ + double(put_request: double(item: { id: '25' })) # fail + ] }), + double('response 4', unprocessed_items: nil) # success + ] + allow(Dynamoid.adapter.client).to
receive(:batch_write_item).and_return(*responses) + + args = [] + items = (1..50).map(&:to_s).map { |id| { id: id } } # the limit is 25 items at once + Dynamoid.adapter.batch_write_item(test_table1, items) do |has_unprocessed_items| + args << has_unprocessed_items + end + expect(args).to eq [true, false, true, false] + end + end + end + + # ListTables + it 'performs ListTables' do + # Force creation of the tables + test_table1; test_table2; test_table3; test_table4 + + expect(Dynamoid.adapter.list_tables).to include test_table1 + expect(Dynamoid.adapter.list_tables).to include test_table2 + end + + context 'when calling ListTables with more than 200 tables' do + let!(:count_before) { Dynamoid.adapter.list_tables.size } + + before do + 201.times do |n| + Dynamoid.adapter.create_table("dynamoid_tests_ALotOfTables#{n}", [:id]) + end + end + + after do + 201.times do |n| + Dynamoid.adapter.delete_table("dynamoid_tests_ALotOfTables#{n}") + end + end + + it 'automatically pages through all results' do + expect(Dynamoid.adapter.list_tables).to include 'dynamoid_tests_ALotOfTables44' + expect(Dynamoid.adapter.list_tables).to include 'dynamoid_tests_ALotOfTables200' + expect(Dynamoid.adapter.list_tables.size).to eq 201 + count_before + end + end + + # Query + it 'performs query on a table and returns items' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + expect(Dynamoid.adapter.query(test_table1, { id: [[:eq, '1']] }).first).to eq([[id: '1', name: 'Josh'], { last_evaluated_key: nil }]) + end + + it 'performs query on a table and returns items if there are multiple items' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Justin') + + expect(Dynamoid.adapter.query(test_table1, { id: [[:eq, '1']] }).first).to eq([[id: '1', name: 'Josh'], { last_evaluated_key: nil }]) + end + + context 'backoff is specified' do + before do + @old_backoff = Dynamoid.config.backoff + @old_backoff_strategies = Dynamoid.config.backoff_strategies.dup + + @counter = 0 + Dynamoid.config.backoff_strategies[:simple] = ->(_) { -> { @counter += 1 } } + Dynamoid.config.backoff = { simple: nil } + end + + after do + Dynamoid.config.backoff = @old_backoff + Dynamoid.config.backoff_strategies = @old_backoff_strategies + end + + it 'uses specified backoff' do + Dynamoid.adapter.put_item(test_table3, id: '1', range: 1) + Dynamoid.adapter.put_item(test_table3, id: '1', range: 2) + + expect(Dynamoid.adapter.query(test_table3, { id: [[:eq, '1']] }, {}, { batch_size: 1 }).flat_map { |i| i }.count).to eq 2 + expect(@counter).to eq 2 + end + end + + it_behaves_like 'range queries' + + describe 'query' do + include_examples 'correctly handling limits', :query + end + + # Scan + it 'performs scan on a table and returns items' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + expect(Dynamoid.adapter.scan(test_table1, name: { eq: 'Josh' }).to_a).to eq [[[{ id: '1', name: 'Josh' }], { last_evaluated_key: nil }]] + end + + it 'performs scan on a table and returns items if there are multiple items but only one match' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Justin') + + expect(Dynamoid.adapter.scan(test_table1, name: { eq: 'Josh' }).to_a).to eq [[[{ id: '1', name: 'Josh' }], { last_evaluated_key: nil }]] + end + + it 'performs scan on a table and returns multiple items if there are multiple matches' do + Dynamoid.adapter.put_item(test_table1, id: '1', 
name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Josh') + + expect( + Dynamoid.adapter.scan(test_table1, name: { eq: 'Josh' }).to_a + ).to match( + [ + [ + contain_exactly({ name: 'Josh', id: '2' }, { name: 'Josh', id: '1' }), + { last_evaluated_key: nil } + ] + ] + ) + end + + it 'performs scan on a table and returns all items if no criteria are specified' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Josh') + + expect(Dynamoid.adapter.scan(test_table1, {}).flat_map { |i| i }).to include({ name: 'Josh', id: '2' }, name: 'Josh', id: '1') + end + + it 'performs scan on a table and returns correct limit' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '3', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '4', name: 'Josh') + + expect(Dynamoid.adapter.scan(test_table1, {}, record_limit: 1).flat_map { |i| i }.count).to eq(1) + end + + it 'performs scan on a table and returns correct batch' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '3', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '4', name: 'Josh') + + expect(Dynamoid.adapter.scan(test_table1, {}, batch_size: 1).flat_map { |i| i }.count).to eq(4) + end + + it 'performs scan on a table and returns correct limit and batch' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '3', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '4', name: 'Josh') + + expect(Dynamoid.adapter.scan(test_table1, {}, record_limit: 1, batch_size: 1).flat_map { |i| i }.count).to eq(1) + end + + context 'backoff is specified' do + before do + @old_backoff = Dynamoid.config.backoff + @old_backoff_strategies = Dynamoid.config.backoff_strategies.dup + + @counter = 0 + Dynamoid.config.backoff_strategies[:simple] = ->(_) { -> { @counter += 1 } } + Dynamoid.config.backoff = { simple: nil } + end + + after do + Dynamoid.config.backoff = @old_backoff + Dynamoid.config.backoff_strategies = @old_backoff_strategies + end + + it 'uses specified backoff' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '3', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '4', name: 'Josh') + + expect(Dynamoid.adapter.scan(test_table1, {}, batch_size: 1).flat_map { |i| i }.count).to eq 4 + expect(@counter).to eq 4 + end + end + + describe 'scans' do + it_behaves_like 'correctly handling limits', :scan + end + + # Truncate + it 'performs truncate on an existing table' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + Dynamoid.adapter.put_item(test_table1, id: '2', name: 'Pascal') + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to eq(name: 'Josh', id: '1') + expect(Dynamoid.adapter.get_item(test_table1, '2')).to eq(name: 'Pascal', id: '2') + + Dynamoid.adapter.truncate(test_table1) + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to be_nil + expect(Dynamoid.adapter.get_item(test_table1, '2')).to be_nil + end + + it 'performs truncate on an existing table with a range key' do + Dynamoid.adapter.put_item(test_table3, id: '1', 
name: 'Josh', range: 1.0) + Dynamoid.adapter.put_item(test_table3, id: '2', name: 'Justin', range: 2.0) + + Dynamoid.adapter.truncate(test_table3) + + expect(Dynamoid.adapter.get_item(test_table3, '1', range_key: 1.0)).to be_nil + expect(Dynamoid.adapter.get_item(test_table3, '2', range_key: 2.0)).to be_nil + end + + it_behaves_like 'correct ordering' + end + + # DescribeTable + + # UpdateItem + describe '#update_item' do + it 'updates an existing item' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + Dynamoid.adapter.update_item(test_table1, '1') do |t| + t.set(name: 'Justin') + end + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to eq(name: 'Justin', id: '1') + end + + it 'creates a new item' do + expect(Dynamoid.adapter.get_item(test_table1, '1')).to be_nil + + Dynamoid.adapter.update_item(test_table1, '1') do |t| + t.set(name: 'Justin') + end + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to eq(name: 'Justin', id: '1') + end + + context 'for attribute values' do + it 'adds attribute values' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + Dynamoid.adapter.update_item(test_table1, '1') do |t| + t.add(age: 1, followers_count: 5) + t.add(hobbies: %w[skying climbing].to_set) + end + + expected_attributes = { + age: 1, + followers_count: 5, + hobbies: %w[skying climbing].to_set + } + expect(Dynamoid.adapter.get_item(test_table1, '1')).to include(expected_attributes) + end + + it 'deletes attribute values' do + Dynamoid.adapter.put_item(test_table1, id: '1', hobbies: %w[skying climbing].to_set) + + Dynamoid.adapter.update_item(test_table1, '1') do |t| + t.delete(hobbies: ['skying'].to_set) + end + + expected_attributes = { hobbies: ['climbing'].to_set } + expect(Dynamoid.adapter.get_item(test_table1, '1')).to include(expected_attributes) + end + + it 'deletes attributes' do + Dynamoid.adapter.put_item(test_table1, id: '1', hobbies: %w[skying climbing].to_set, category_id: 1) + + Dynamoid.adapter.update_item(test_table1, '1') do |t| + t.delete(hobbies: nil) + t.delete(:category_id) + end + + expect(Dynamoid.adapter.get_item(test_table1, '1')).not_to include(:hobbies, :category_id) + end + + it 'sets attribute values' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + Dynamoid.adapter.update_item(test_table1, '1') do |t| + t.set(age: 21) + end + + expected_attributes = { age: 21 } + expect(Dynamoid.adapter.get_item(test_table1, '1')).to include(expected_attributes) + end + end + + context 'updates item conditionally' do + it 'raises Exception if condition fails' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh', age: 17) + + expect do + Dynamoid.adapter.update_item(test_table1, '1', conditions: { if: { age: 18 } }) do |t| + t.set(email: 'justin@example.com') + end + end.to raise_error(Dynamoid::Errors::ConditionalCheckFailedException) + + excluded_attributes = { email: 'justin@example.com' } + expect(Dynamoid.adapter.get_item(test_table1, '1')).not_to include(excluded_attributes) + end + + it 'updates item if condition succeeds' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh', age: 18) + + Dynamoid.adapter.update_item(test_table1, '1', conditions: { if: { age: 18 } }) do |t| + t.set(email: 'justin@example.com') + end + + expected_attributes = { email: 'justin@example.com' } + expect(Dynamoid.adapter.get_item(test_table1, '1')).to include(expected_attributes) + end + end + end + + # UpdateTable + + describe 'update_time_to_live' do + let(:table_name) { 
"#{Dynamoid::Config.namespace}_table_with_expiration" } + + before do + Dynamoid.adapter.create_table(table_name, :id) + end + + after do + Dynamoid.adapter.delete_table(table_name) + end + + it 'calls UpdateTimeToLive' do + allow(Dynamoid.adapter.client).to receive(:update_time_to_live).and_call_original + Dynamoid.adapter.update_time_to_live(table_name, :ttl) + expect(Dynamoid.adapter.client).to have_received(:update_time_to_live) + .with( + table_name: table_name, + time_to_live_specification: { + attribute_name: :ttl, + enabled: true, + } + ) + end + + it 'updates a table schema' do + Dynamoid.adapter.update_time_to_live(table_name, :ttl) + + response = Dynamoid.adapter.client.describe_time_to_live(table_name: table_name) + expect(response.time_to_live_description.time_to_live_status).to eq 'ENABLED' + expect(response.time_to_live_description.attribute_name).to eq 'ttl' + end + end + + describe '#execute' do + it 'executes a PartiQL query' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + Dynamoid.adapter.execute("UPDATE #{test_table1} SET name = 'Mike' WHERE id = '1'") + + item = Dynamoid.adapter.get_item(test_table1, '1') + expect(item[:name]).to eql 'Mike' + end + + it 'returns items for SELECT statement' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + items = Dynamoid.adapter.execute("SELECT * FROM #{test_table1}") + expect(items.size).to eql 1 + expect(items).to eql [{ id: '1', name: 'Josh' }] + end + + it 'returns [] for statements other than SELECT' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + response = Dynamoid.adapter.execute("UPDATE #{test_table1} SET name = 'Mike' WHERE id = '1'") + expect(response).to eql [] + + response = Dynamoid.adapter.execute("INSERT INTO #{test_table1} VALUE { 'id': '2' }") + expect(response).to eql [] + + response = Dynamoid.adapter.execute("DELETE FROM #{test_table1} WHERE id = '1'") + expect(response).to eql [] + end + + it 'accepts bind parameters as array of values' do + Dynamoid.adapter.put_item(test_table1, id: '1', name: 'Josh') + + Dynamoid.adapter.execute("UPDATE #{test_table1} SET name = 'Mike' WHERE id = ?", ['1']) + + item = Dynamoid.adapter.get_item(test_table1, '1') + expect(item[:name]).to eql 'Mike' + end + + it 'returns [] when WHERE condition evaluated to false' do + expect(Dynamoid.adapter.scan_count(test_table1)).to eql 0 + + response = Dynamoid.adapter.execute("SELECT * FROM #{test_table1} WHERE id = '1'") + expect(response.to_a).to eql [] + + response = Dynamoid.adapter.execute("UPDATE #{test_table1} SET name = 'Mike' WHERE id = '1'") + expect(response.to_a).to eql [] + + response = Dynamoid.adapter.execute("DELETE FROM #{test_table1} WHERE id = '1'") + expect(response.to_a).to eql [] + end + + it 'accepts :consistent_read option' do + expect(Dynamoid.adapter.client).to receive(:execute_statement) + .with(including(consistent_read: true)) + .and_call_original + + Dynamoid.adapter.execute("SELECT * FROM #{test_table1} WHERE id = '1'", [], consistent_read: true) + + expect(Dynamoid.adapter.client).to receive(:execute_statement) + .with(including(consistent_read: false)) + .and_call_original + + Dynamoid.adapter.execute("SELECT * FROM #{test_table1} WHERE id = '1'", [], consistent_read: false) + end + + it 'loads lazily all the pages of a paginated result' do + next_token = double('next-token') + obj1 = { 'attribute1' => 1 } + obj2 = { 'attribute2' => 2 } + obj3 = { 'attribute3' => 3 } + obj4 = { 'attribute4' => 4 } + response1 = double('response-1', next_token: 
next_token, items: [obj1, obj2]) + response2 = double('response-2', next_token: nil, items: [obj3, obj4]) + + expect(Dynamoid.adapter.client).to receive(:execute_statement) + .and_return(response1, response2) + + items = Dynamoid.adapter.execute('PartiQL statement') + expect(items).to be_a(Enumerator::Lazy) + expect(items.to_a).to eql [ + { attribute1: 1 }, + { attribute2: 2 }, + { attribute3: 3 }, + { attribute4: 4 } + ] + end + end + + # connection_config + describe '#connection_config' do + subject { described_class.new.connection_config } + + before do + Dynamoid.configure.http_open_timeout = 30 + end + + it 'returns only non-nil options' do + expect(subject.keys).to contain_exactly(:endpoint, :log_formatter, :log_level, :logger, :http_open_timeout) + expect(subject[:http_open_timeout]).to eq 30 + end + end +end diff --git a/dynamoid/spec/dynamoid/adapter_spec.rb b/dynamoid/spec/dynamoid/adapter_spec.rb new file mode 100644 index 000000000..12c5302c4 --- /dev/null +++ b/dynamoid/spec/dynamoid/adapter_spec.rb @@ -0,0 +1,184 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Adapter do + subject { described_class.new } + + def test_table + 'dynamoid_tests_TestTable' + end + let(:single_id) { '123' } + let(:many_ids) { %w[1 2] } + + { + 1 => [:id], + 2 => [:id], + 3 => [:id, { range_key: { range: :number } }], + 4 => [:id, { range_key: { range: :number } }] + }.each do |n, args| + name = "dynamoid_tests_TestTable#{n}" + let(:"test_table#{n}") do + Dynamoid.adapter.create_table(name, *args) + name + end + end + + describe 'connection management' do + it 'does not auto-establish a connection' do + expect_any_instance_of(described_class.adapter_plugin_class).not_to receive(:connect!) + subject + end + + it 'establishes a connection when adapter is requested' do + expect_any_instance_of(described_class.adapter_plugin_class).to receive(:connect!).and_call_original + subject.adapter + end + + it 'reuses a connection' do + expect_any_instance_of(described_class.adapter_plugin_class).to receive(:connect!).once.and_call_original + subject.adapter + subject.adapter + end + end + + describe 'caching tables' do + it 'caches list of tables' do + expect(subject).to receive(:list_tables).once.and_call_original + subject.create_table('test_table', 'key') + subject.tables + subject.tables + end + + it 'maintains table cache when creating a table' do + # cache + subject.tables + + expect(subject).not_to receive(:list_tables) + subject.create_table('test_table', 'key') + expect(subject.tables).to include('test_table') + end + + it 'clears cached list via #clear_cache!' do + subject.create_table('test_table', 'key') + subject.clear_cache!
+ expect(subject).to receive(:list_tables).and_call_original + subject.tables + end + end + + it 'raises NoMethodError if we try a method that is not on the child' do + expect { subject.foobar }.to raise_error(NoMethodError) + end + + it 'writes through the adapter' do + expect(subject).to receive(:put_item).with(test_table, { id: single_id }, nil).and_return(true) + subject.write(test_table, id: single_id) + end + + describe '#read' do + it 'reads through the adapter for one ID' do + expect(subject).to receive(:get_item).with(test_table, single_id, {}).and_return(true) + subject.read(test_table, single_id) + end + + it 'reads through the adapter for many IDs' do + expect(subject).to receive(:batch_get_item).with({ test_table => many_ids }, {}).and_return(true) + subject.read(test_table, many_ids) + end + + it 'reads through the adapter for one ID and a range key' do + allow(subject).to receive(:get_item).and_return(true) + subject.read(test_table, single_id, range_key: 'boot') + expect(subject).to have_received(:get_item).with(test_table, single_id, { range_key: 'boot' }) + end + end + + describe '#create_table' do + let(:table_name) { "#{Dynamoid::Config.namespace}_create_table_test" } + + after do + Dynamoid.adapter.delete_table(table_name) + end + + it 'does not try to create table if it is already in cache' do + expect(Dynamoid.adapter.client).to receive(:create_table).once + .and_call_original + + 3.times { Dynamoid.adapter.create_table(table_name, :id, sync: true) } + end + + it 'returns true if table created' do + actual = Dynamoid.adapter.create_table(table_name, :id, sync: true) + expect(actual).to be true + end + + it 'returns false if table was created earlier' do + Dynamoid.adapter.create_table(table_name, :id, sync: true) + + actual = Dynamoid.adapter.create_table(table_name, :id, sync: true) + expect(actual).to be false + end + end + + describe '#delete' do + it 'deletes through the adapter for one ID' do + Dynamoid.adapter.put_item(test_table1, id: '1') + Dynamoid.adapter.put_item(test_table1, id: '2') + + expect do + subject.delete(test_table1, '1') + end.to change { + Dynamoid.adapter.scan(test_table1).flat_map { |i| i }.to_a.size + }.from(2).to(1) + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to be_nil + end + + it 'deletes through the adapter for many IDs' do + Dynamoid.adapter.put_item(test_table1, id: '1') + Dynamoid.adapter.put_item(test_table1, id: '2') + Dynamoid.adapter.put_item(test_table1, id: '3') + + expect do + subject.delete(test_table1, %w[1 2]) + end.to change { + Dynamoid.adapter.scan(test_table1).flat_map { |i| i }.to_a.size + }.from(3).to(1) + + expect(Dynamoid.adapter.get_item(test_table1, '1')).to be_nil + expect(Dynamoid.adapter.get_item(test_table1, '2')).to be_nil + end + + it 'deletes through the adapter for one ID and a range key' do + Dynamoid.adapter.put_item(test_table3, id: '1', range: 1.0) + Dynamoid.adapter.put_item(test_table3, id: '2', range: 2.0) + + expect do + subject.delete(test_table3, '1', range_key: 1.0) + end.to change { + Dynamoid.adapter.scan(test_table3).flat_map { |i| i }.to_a.size + }.from(2).to(1) + + expect(Dynamoid.adapter.get_item(test_table3, '1', range_key: 1.0)).to be_nil + end + + it 'deletes through the adapter for many IDs and a range key' do + Dynamoid.adapter.put_item(test_table3, id: '1', range: 1.0) + Dynamoid.adapter.put_item(test_table3, id: '1', range: 2.0) + Dynamoid.adapter.put_item(test_table3, id: '2', range: 1.0) + Dynamoid.adapter.put_item(test_table3, id: '2', range: 2.0) + + 
expect(subject).to receive(:batch_delete_item).and_call_original + + expect do + subject.delete(test_table3, %w[1 2], range_key: 1.0) + end.to change { + Dynamoid.adapter.scan(test_table3).flat_map { |i| i }.to_a.size + }.from(4).to(2) + + expect(Dynamoid.adapter.get_item(test_table3, '1', range_key: 1.0)).to be_nil + expect(Dynamoid.adapter.get_item(test_table3, '2', range_key: 1.0)).to be_nil + end + end +end diff --git a/dynamoid/spec/dynamoid/associations/association_spec.rb b/dynamoid/spec/dynamoid/associations/association_spec.rb new file mode 100644 index 000000000..4a328cb59 --- /dev/null +++ b/dynamoid/spec/dynamoid/associations/association_spec.rb @@ -0,0 +1,184 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Associations::Association do + let(:subscription) { Subscription.create } + let(:magazine) { Magazine.create } + + before do + Subscription.create_table + Magazine.create_table + end + + it 'returns an empty array if there are no associations' do + expect(magazine.subscriptions).to be_empty + end + + it 'adds an item to an association' do + magazine.subscriptions << subscription + + expect(magazine.subscriptions.size).to eq 1 + expect(magazine.subscriptions).to include(subscription) + end + + it 'deletes an item from an association' do + magazine.subscriptions << subscription + + magazine.subscriptions.delete(subscription) + expect(magazine.subscriptions.size).to eq 0 + end + + it 'creates an item from an association' do + subscription = magazine.subscriptions.create + + expect(subscription.class).to eq Subscription + expect(magazine.subscriptions.size).to eq 1 + expect(magazine.subscriptions).to include subscription + end + + it 'returns the number of items in the association' do + magazine.subscriptions.create + expect(magazine.subscriptions.size).to eq 1 + + second_subscription = magazine.subscriptions.create + expect(magazine.subscriptions.size).to eq 2 + + magazine.subscriptions.delete(second_subscription) + expect(magazine.subscriptions.size).to eq 1 + + magazine.subscriptions = [] + expect(magazine.subscriptions.size).to eq 0 + end + + it 'assigns directly via the equals operator' do + magazine.subscriptions = [subscription] + + expect(magazine.subscriptions).to eq [subscription] + end + + it 'assigns directly via the equals operator and reflects to the target association' do + magazine.subscriptions = [subscription] + + expect(subscription.magazine).to eq magazine + end + + it 'does not assign reflection association if the reflection association does not exist' do + sponsor = Sponsor.create + + subscription = sponsor.subscriptions.create + expect(subscription).not_to respond_to :sponsor + expect(subscription).not_to respond_to :sponsors + expect(subscription).not_to respond_to :sponsors_ids + expect(subscription).not_to respond_to :sponsor_ids + end + + it 'deletes all items from the association' do + magazine.subscriptions << Subscription.create + magazine.subscriptions << Subscription.create + magazine.subscriptions << Subscription.create + + expect(magazine.subscriptions.size).to eq 3 + + magazine.subscriptions = nil + expect(magazine.subscriptions.size).to eq 0 + end + + it 'uses where inside an association and returns a result' do + included_subscription = magazine.subscriptions.create(length: 10) + unincluded_subscription = magazine.subscriptions.create(length: 8) + + expect(magazine.subscriptions.where(length: 10).all).to eq [included_subscription] + end + + it 'uses where inside an association and returns an empty set' do +
included_subscription = magazine.subscriptions.create(length: 10) + unincluded_subscription = magazine.subscriptions.create(length: 8) + + expect(magazine.subscriptions.where(length: 6).all).to be_empty + end + + it 'includes enumerable' do + subscription1 = magazine.subscriptions.create + subscription2 = magazine.subscriptions.create + subscription3 = magazine.subscriptions.create + + expect(magazine.subscriptions.collect(&:hash_key).sort).to eq [subscription1.hash_key, subscription2.hash_key, subscription3.hash_key].sort + end + + it 'works for camel-cased associations' do + expect(magazine.camel_cases.create.class).to eq CamelCase + end + + it 'destroys associations' do + expect(magazine.subscriptions).to receive(:target).and_return([subscription]) + expect(subscription).to receive(:destroy) + + magazine.subscriptions.destroy_all + end + + it 'deletes associations' do + expect(magazine.subscriptions).to receive(:target).and_return([subscription]) + expect(subscription).to receive(:delete) + + magazine.subscriptions.delete_all + end + + it 'replaces existing associations when using the setter' do + subscription1 = magazine.subscriptions.create + subscription2 = magazine.subscriptions.create + subscription3 = subscription + + expect(subscription1.reload.magazine_ids).to be_present + expect(subscription2.reload.magazine_ids).to be_present + + magazine.subscriptions = subscription3 + expect(magazine.subscriptions_ids).to eq Set[subscription3.hash_key] + + expect(subscription1.reload.magazine_ids).to be_blank + expect(subscription2.reload.magazine_ids).to be_blank + expect(subscription3.reload.magazine_ids).to eq Set[magazine.hash_key] + end + + it 'destroys all objects and removes them from the association' do + subscription1 = magazine.subscriptions.create + subscription2 = magazine.subscriptions.create + subscription3 = magazine.subscriptions.create + + magazine.subscriptions.destroy_all + + expect(magazine.subscriptions).to be_blank + expect(Subscription.all.to_a).to be_empty + end + + it 'deletes all objects and removes them from the association' do + subscription1 = magazine.subscriptions.create + subscription2 = magazine.subscriptions.create + subscription3 = magazine.subscriptions.create + + magazine.subscriptions.delete_all + + expect(magazine.subscriptions).to be_blank + expect(Subscription.all.to_a).to be_empty + end + + it 'delegates class to the association object' do + expect(magazine.sponsor.class).to eq nil.class + magazine.sponsor.create + expect(magazine.sponsor.class).to eq Sponsor + + expect(magazine.subscriptions.class).to eq Array + magazine.subscriptions.create + expect(magazine.subscriptions.class).to eq Array + end + + it 'loads association one time only' do + pending("FIXME: find_target doesn't exist anymore") + sponsor = magazine.sponsor.create + + expect(magazine.sponsor).to receive(:find_target).once.and_return(sponsor) + + magazine.sponsor.hash_key + magazine.sponsor.hash_key + end +end diff --git a/dynamoid/spec/dynamoid/associations/belongs_to_spec.rb b/dynamoid/spec/dynamoid/associations/belongs_to_spec.rb new file mode 100644 index 000000000..538462a6b --- /dev/null +++ b/dynamoid/spec/dynamoid/associations/belongs_to_spec.rb @@ -0,0 +1,345 @@ +# frozen_string_literal: true + +require 'active_support' +require 'active_support/core_ext/object' + +require 'spec_helper' + +describe Dynamoid::Associations::BelongsTo do + context 'has many' do + let(:subscription) { Subscription.create } + let(:camel_case) { CamelCase.create } + let(:magazine) {
subscription.magazine.create } + let(:user) { magazine.owner.create } + + it 'determines nil if it has no associated record' do + expect(subscription.magazine).to be_nil + end + + it 'determines target association correctly' do + expect(camel_case.magazine.send(:target_association)).to eq :camel_cases + end + + it 'delegates equality to its source record' do + expect(subscription.magazine).to eq magazine + expect(magazine.subscriptions).to include subscription + end + + it 'associates has_many automatically' do + expect(user.books.size).to eq 1 + expect(user.books).to include magazine + end + + context 'proxied behavior' do + let(:proxy) do + expect(magazine.subscriptions).to include(subscription) + subscription.magazine + end + + it 'forwards dynamoid methods' do + proxy.update_attribute(:size, 101) + + expect(Magazine.first.size).to eq 101 + end + + it 'forwards business-logic methods' do + expect(proxy.respond_to?(:publish)).to eq true + expect(proxy.publish(advertisements: 10)).to eq 10 + expect(proxy.publish(advertisements: 10, free_issue: true) { |c| c *= 42 }).to eq 840 + end + end + end + + context 'has one' do + let(:subscription) { Subscription.create } + let(:sponsor) { Sponsor.create } + let(:magazine) { sponsor.magazine.create } + let(:user) { subscription.customer.create } + + it 'considers an association nil/blank if it has no associated record' do + expect(sponsor.magazine).to be_nil + expect(sponsor.magazine).to be_blank + end + + it 'considers an association present if it has an associated record' do + sponsor.magazine.create + + expect(magazine.sponsor).to be_present + end + + it 'delegates equality to its source record' do + expect(sponsor.magazine).to eq magazine + end + + it 'associates has_one automatically' do + expect(magazine.sponsor).to eq sponsor + expect(user.monthly).to eq subscription + end + end + + describe 'assigning' do + context 'has many' do + let(:subscription) { Subscription.create } + + it 'associates model on this side' do + magazine = Magazine.create + subscription.magazine = magazine + + expect(subscription.magazine).to eq(magazine) + end + + it 'associates model on that side' do + magazine = Magazine.create + subscription.magazine = magazine + + expect(magazine.subscriptions.to_a).to eq([subscription]) + end + + it 're-associates model on this side' do + magazine_old = Magazine.create + magazine_new = Magazine.create + subscription.magazine = magazine_old + + expect do + subscription.magazine = magazine_new + end.to change { subscription.magazine.target }.from(magazine_old).to(magazine_new) + end + + it 're-associates model on that side' do + magazine_old = Magazine.create + magazine_new = Magazine.create + subscription.magazine = magazine_old + + expect do + subscription.magazine = magazine_new + end.to change { magazine_new.subscriptions.target }.from([]).to([subscription]) + end + + it 'deletes previous model from association' do + magazine_old = Magazine.create + magazine_new = Magazine.create + subscription.magazine = magazine_old + + expect do + subscription.magazine = magazine_new + end.to change { magazine_old.subscriptions.to_a }.from([subscription]).to([]) + end + + it 'stores the same object on this side' do + magazine = Magazine.create + subscription.magazine = magazine + + expect(subscription.magazine.target.object_id).to eq(magazine.object_id) + end + + it 'does not store the same object on that side' do + magazine = Magazine.create + subscription.magazine = magazine + + expect(magazine.subscriptions.target[0].object_id).not_to 
eq(subscription.object_id) + end + end + + context 'has one' do + let(:sponsor) { Sponsor.create } + + it 'associates model on this side' do + magazine = Magazine.create + sponsor.magazine = magazine + + expect(sponsor.magazine).to eq(magazine) + end + + it 'associates model on that side' do + magazine = Magazine.create + sponsor.magazine = magazine + + expect(magazine.sponsor).to eq(sponsor) + end + + it 're-associates model on this side' do + magazine_old = Magazine.create + magazine_new = Magazine.create + sponsor.magazine = magazine_old + + expect do + sponsor.magazine = magazine_new + end.to change { sponsor.magazine.target }.from(magazine_old).to(magazine_new) + end + + it 're-associates model on that side' do + magazine_old = Magazine.create + magazine_new = Magazine.create + sponsor.magazine = magazine_old + + expect do + sponsor.magazine = magazine_new + end.to change { magazine_new.sponsor.target }.from(nil).to(sponsor) + end + + it 'deletes previous model from association' do + magazine_old = Magazine.create + magazine_new = Magazine.create + sponsor.magazine = magazine_old + + expect do + sponsor.magazine = magazine_new + end.to change { magazine_old.sponsor.target }.from(sponsor).to(nil) + end + + it 'stores the same object on this side' do + magazine = Magazine.create + sponsor.magazine = magazine + + expect(sponsor.magazine.target.object_id).to eq(magazine.object_id) + end + + it 'does not store the same object on that side' do + magazine = Magazine.create + + sponsor.magazine = magazine + expect(magazine.sponsor.target.object_id).not_to eq(sponsor.object_id) + end + end + end + + context 'set to nil' do + it 'can be set to nil' do + subscription = Subscription.new + + expect { subscription.magazine = nil }.not_to raise_error + expect(subscription.magazine).to eq nil + + subscription.save! + expect(Subscription.find(subscription.id).magazine).to eq nil + end + + it 'overrides previous saved value' do + magazine = Magazine.create! + subscription = Subscription.create!(magazine: magazine) + + expect do + subscription.magazine = nil + subscription.save! + end.to change { + Subscription.find(subscription.id).magazine.target + }.from(magazine).to(nil) + end + + it 'updates association on the other side' do + magazine = Magazine.create! + subscription = Subscription.create!(magazine: magazine) + + expect do + subscription.magazine = nil + subscription.save!
+ end.to change { + Magazine.find(magazine.title).subscriptions.to_a + }.from([subscription]).to([]) + end + end + + describe '#delete' do + it 'clears association on this side' do + subscription = Subscription.create + magazine = subscription.magazine.create + + expect do + subscription.magazine.delete + end.to change { subscription.magazine.target }.from(magazine).to(nil) + end + + it 'persists changes on this side' do + subscription = Subscription.create + magazine = subscription.magazine.create + + expect do + subscription.magazine.delete + end.to change { Subscription.find(subscription.id).magazine.target }.from(magazine).to(nil) + end + + context 'has many' do + let(:subscription) { Subscription.create } + let!(:magazine) { subscription.magazine.create } + + it 'clears association on that side' do + expect do + subscription.magazine.delete + end.to change { magazine.subscriptions.target }.from([subscription]).to([]) + end + + it 'persists changes on that side' do + expect do + subscription.magazine.delete + end.to change { Magazine.find(magazine.title).subscriptions.target }.from([subscription]).to([]) + end + end + + context 'has one' do + let(:sponsor) { Sponsor.create } + let!(:magazine) { sponsor.magazine.create } + + it 'clears association on that side' do + expect do + sponsor.magazine.delete + end.to change { magazine.sponsor.target }.from(sponsor).to(nil) + end + + it 'persists changes on that side' do + expect do + sponsor.magazine.delete + end.to change { Magazine.find(magazine.title).sponsor.target }.from(sponsor).to(nil) + end + end + end + + describe 'foreign_key option' do + before do + @directory_class = directory_class = new_class(table_name: :directories) + + @file_class = file_class = new_class(table_name: :files) do + belongs_to :directory, class: directory_class, foreign_key: :directory_id + + def self.to_s + 'File' + end + end + + @directory_class.instance_eval do + has_many :files, class: file_class + + def self.to_s + 'Directory' + end + end + end + + it 'specifies field name' do + file = @file_class.new + expect(file.respond_to?(:directory_id)).to eq(true) + end + + it 'forces to store :id as a scalar value and not as collection' do + directory = @directory_class.create! + file = @file_class.new(directory: directory) + expect(file.directory_id).to eq(directory.id) + end + + it 'assigns and persists id correctly on this side of association' do + directory = @directory_class.create! + file = @file_class.create!(directory: directory) + + expect(@file_class.find(file.id).directory_id).to eq(directory.id) + expect(file.directory).to eq(directory) + expect(@file_class.find(file.id).directory).to eq(directory) + end + + it 'assigns and persists id correctly on the other side of association' do + directory = @directory_class.create! 
+ file = @file_class.create!(directory: directory) + + expect(directory.files.to_a).to eq [file] + expect(@directory_class.find(directory.id).files.to_a).to eq [file] + end + end +end diff --git a/dynamoid/spec/dynamoid/associations/has_and_belongs_to_many_spec.rb b/dynamoid/spec/dynamoid/associations/has_and_belongs_to_many_spec.rb new file mode 100644 index 000000000..aef0fd896 --- /dev/null +++ b/dynamoid/spec/dynamoid/associations/has_and_belongs_to_many_spec.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Associations::HasAndBelongsToMany do + let(:subscription) { Subscription.create } + let(:camel_case) { CamelCase.create } + + it 'determines equality from its records' do + user = subscription.users.create + + expect(subscription.users.size).to eq 1 + expect(subscription.users).to include user + end + + it 'determines target association correctly' do + expect(subscription.users.send(:target_association)).to eq :subscriptions + expect(camel_case.subscriptions.send(:target_association)).to eq :camel_cases + end + + it 'determines target attribute' do + expect(subscription.users.send(:target_attribute)).to eq :subscriptions_ids + end + + it 'associates has_and_belongs_to_many automatically' do + user = subscription.users.create + + expect(user.subscriptions.size).to eq 1 + expect(user.subscriptions).to include subscription + expect(subscription.users.size).to eq 1 + expect(subscription.users).to include user + + user = User.create + follower = user.followers.create + expect(follower.following).to include user + expect(user.followers).to include follower + end + + it 'disassociates has_and_belongs_to_many automatically' do + user = subscription.users.create + + subscription.users.delete(user) + expect(subscription.users.size).to eq 0 + expect(user.subscriptions.size).to eq 0 + end + + describe 'assigning' do + let(:subscription) { Subscription.create } + let(:user) { User.create } + + it 'associates model on this side' do + subscription.users << user + expect(subscription.users.to_a).to eq([user]) + end + + it 'associates model on that side' do + subscription.users << user + expect(user.subscriptions.to_a).to eq([subscription]) + end + end + + describe '#delete' do + it 'clears association on this side' do + subscription = Subscription.create + user = subscription.users.create + + expect do + subscription.users.delete(user) + end.to change { subscription.users.target }.from([user]).to([]) + end + + it 'persists changes on this side' do + subscription = Subscription.create + user = subscription.users.create + + expect do + subscription.users.delete(user) + end.to change { Subscription.find(subscription.id).users.target }.from([user]).to([]) + end + + context 'has and belongs to many' do + let(:subscription) { Subscription.create } + let!(:user) { subscription.users.create } + + it 'clears association on that side' do + expect do + subscription.users.delete(user) + end.to change { subscription.users.target }.from([user]).to([]) + end + + it 'persists changes on that side' do + expect do + subscription.users.delete(user) + end.to change { Subscription.find(subscription.id).users.target }.from([user]).to([]) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/associations/has_many_spec.rb b/dynamoid/spec/dynamoid/associations/has_many_spec.rb new file mode 100644 index 000000000..6004b9dd1 --- /dev/null +++ b/dynamoid/spec/dynamoid/associations/has_many_spec.rb @@ -0,0 +1,149 @@ +# frozen_string_literal: true + +require 'spec_helper' + 
+describe Dynamoid::Associations::HasMany do + let(:magazine) { Magazine.create } + let(:user) { User.create } + let(:camel_case) { CamelCase.create } + + it 'determines equality from its records' do + subscription = magazine.subscriptions.create + + expect(magazine.subscriptions).to eq subscription + end + + it 'determines target association correctly' do + expect(magazine.subscriptions.send(:target_association)).to eq :magazine + expect(user.books.send(:target_association)).to eq :owner + expect(camel_case.users.send(:target_association)).to eq :camel_case + end + + it 'determines target class correctly' do + expect(magazine.subscriptions.send(:target_class)).to eq Subscription + expect(user.books.send(:target_class)).to eq Magazine + end + + it 'determines target attribute' do + expect(magazine.subscriptions.send(:target_attribute)).to eq :magazine_ids + expect(user.books.send(:target_attribute)).to eq :owner_ids + end + + it 'associates belongs_to automatically' do + subscription = magazine.subscriptions.create + + expect(subscription.magazine).to eq magazine + + magazine = user.books.create + expect(magazine.owner).to eq user + end + + it 'has a where method to filter associates' do + red = magazine.camel_cases.create + red.color = 'red' + red.save + + blue = magazine.camel_cases.create + blue.color = 'blue' + blue.save + + expect(magazine.camel_cases.count).to eq 2 + expect(magazine.camel_cases.where(color: 'red').count).to eq 1 + end + + it 'is not modified by the where method' do + red = magazine.camel_cases.create + red.color = 'red' + red.save + + blue = magazine.camel_cases.create + blue.color = 'blue' + blue.save + + expect(magazine.camel_cases.where(color: 'red').count).to eq 1 + expect(magazine.camel_cases.where(color: 'yellow').count).to eq 0 + expect(magazine.camel_cases.count).to eq 2 + end + + describe 'assigning' do + let(:magazine) { Magazine.create } + let(:subscription) { Subscription.create } + + it 'associates model on this side' do + magazine.subscriptions << subscription + expect(magazine.subscriptions.to_a).to eq([subscription]) + end + + it 'associates model on that side' do + magazine.subscriptions << subscription + expect(subscription.magazine).to eq(magazine) + end + + it 're-associates new model on this side' do + magazine_old = Magazine.create + magazine_new = Magazine.create + magazine_old.subscriptions << subscription + + expect do + magazine_new.subscriptions << subscription + end.to change { magazine_new.subscriptions.to_a }.from([]).to([subscription]) + end + + it 're-associates new model on that side' do + magazine_old = Magazine.create + magazine_new = Magazine.create + magazine_old.subscriptions << subscription + + expect do + magazine_new.subscriptions << subscription + end.to change { subscription.magazine.target }.from(magazine_old).to(magazine_new) + end + + it 'deletes previous model from association' do + magazine_old = Magazine.create + magazine_new = Magazine.create + magazine_old.subscriptions << subscription + + expect do + magazine_new.subscriptions << subscription + end.to change { Magazine.find(magazine_old.title).subscriptions.to_a }.from([subscription]).to([]) + end + end + + describe '#delete' do + it 'clears association on this side' do + magazine = Magazine.create + subscription = magazine.subscriptions.create + + expect do + magazine.subscriptions.delete(subscription) + end.to change { magazine.subscriptions.target }.from([subscription]).to([]) + end + + it 'persists changes on this side' do + magazine = Magazine.create +
subscription = magazine.subscriptions.create + + expect do + magazine.subscriptions.delete(subscription) + end.to change { Magazine.find(magazine.title).subscriptions.target }.from([subscription]).to([]) + end + + context 'belongs to' do + let(:magazine) { Magazine.create } + let!(:subscription) { magazine.subscriptions.create } + + it 'clears association on that side' do + expect do + magazine.subscriptions.delete(subscription) + end.to change { magazine.subscriptions.target }.from([subscription]).to([]) + end + + it 'persists changes on that side' do + expect do + magazine.subscriptions.delete(subscription) + end.to change { Magazine.find(magazine.title).subscriptions.target }.from([subscription]).to([]) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/associations/has_one_spec.rb b/dynamoid/spec/dynamoid/associations/has_one_spec.rb new file mode 100644 index 000000000..ff11771ad --- /dev/null +++ b/dynamoid/spec/dynamoid/associations/has_one_spec.rb @@ -0,0 +1,191 @@ +# frozen_string_literal: true + +require 'active_support' +require 'active_support/core_ext/object' + +require 'spec_helper' + +describe Dynamoid::Associations::HasOne do + let(:magazine) { Magazine.create } + let(:user) { User.create } + let(:camel_case) { CamelCase.create } + + it 'considers an association nil/blank if it has no associated record' do + expect(magazine.sponsor).to be_nil + expect(magazine.sponsor).to be_blank + end + + it 'considers an association present if it has an associated record' do + magazine.sponsor.create + + expect(magazine.sponsor).to be_present + end + + it 'determines target association correctly' do + expect(camel_case.sponsor.send(:target_association)).to eq :camel_case + end + + it 'returns only one object when associated' do + magazine.sponsor.create + + expect(magazine.sponsor).not_to be_a Array + end + + it 'delegates equality to its source record' do + sponsor = magazine.sponsor.create + + expect(magazine.sponsor).to eq sponsor + end + + it 'is equal from its target record' do + sponsor = magazine.sponsor.create + + expect(magazine.sponsor).to eq sponsor + end + + it 'associates belongs_to automatically' do + sponsor = magazine.sponsor.create + expect(sponsor.magazine).to eq magazine + expect(magazine.sponsor).to eq sponsor + + subscription = user.monthly.create + expect(subscription.customer).to eq user + end + + describe 'assigning' do + context 'belongs to' do + let(:magazine) { Magazine.create } + + it 'associates model on this side' do + sponsor = Sponsor.create + magazine.sponsor = sponsor + + expect(magazine.sponsor).to eq(sponsor) + end + + it 'associates model on that side' do + sponsor = Sponsor.create + magazine.sponsor = sponsor + + expect(sponsor.magazine).to eq(magazine) + end + + it 're-associates model on this side' do + sponsor_old = Sponsor.create + sponsor_new = Sponsor.create + magazine.sponsor = sponsor_old + + expect do + magazine.sponsor = sponsor_new + end.to change { magazine.sponsor.target }.from(sponsor_old).to(sponsor_new) + end + + it 're-associates model on that side' do + sponsor_old = Sponsor.create + sponsor_new = Sponsor.create + + magazine.sponsor = sponsor_old + expect do + magazine.sponsor = sponsor_new + end.to change { sponsor_new.magazine.target }.from(nil).to(magazine) + end + + it 'deletes previous model from association' do + sponsor_old = Sponsor.create + sponsor_new = Sponsor.create + + magazine.sponsor = sponsor_old + expect do + magazine.sponsor = sponsor_new + end.to change { sponsor_old.magazine.target 
}.from(magazine).to(nil) + end + + it 'stores the same object on this side' do + sponsor = Sponsor.create + magazine.sponsor = sponsor + + expect(magazine.sponsor.target.object_id).to eq(sponsor.object_id) + end + + it 'does not store the same object on that side' do + sponsor = Sponsor.create! + magazine.sponsor = sponsor + + expect(sponsor.magazine.target.object_id).not_to eq(magazine.object_id) + end + end + end + + context 'set to nil' do + it 'can be set to nil' do + magazine = Magazine.create! + + expect { magazine.sponsor = nil }.not_to raise_error + expect(magazine.sponsor).to eq nil + + magazine.save! + expect(Magazine.find(magazine.title).sponsor).to eq nil + end + + it 'overrides previous saved value' do + sponsor = Sponsor.create! + magazine = Magazine.create!(sponsor: sponsor) + + expect do + magazine.sponsor = nil + magazine.save! + end.to change { + Magazine.find(magazine.title).sponsor.target + }.from(sponsor).to(nil) + end + + it 'updates association on the other side' do + sponsor = Sponsor.create! + magazine = Magazine.create!(sponsor: sponsor) + + expect do + magazine.sponsor = nil + magazine.save! + end.to change { + Sponsor.find(sponsor.id).magazine.target + }.from(magazine).to(nil) + end + end + + describe '#delete' do + it 'clears association on this side' do + magazine = Magazine.create + sponsor = magazine.sponsor.create + + expect do + magazine.sponsor.delete + end.to change { magazine.sponsor.target }.from(sponsor).to(nil) + end + + it 'persists changes on this side' do + magazine = Magazine.create + sponsor = magazine.sponsor.create + + expect do + magazine.sponsor.delete + end.to change { Magazine.find(magazine.title).sponsor.target }.from(sponsor).to(nil) + end + + context 'belongs to' do + let(:magazine) { Magazine.create } + let!(:sponsor) { magazine.sponsor.create } + + it 'clears association on that side' do + expect do + magazine.sponsor.delete + end.to change { sponsor.magazine.target }.from(magazine).to(nil) + end + + it 'persists changes on that side' do + expect do + magazine.sponsor.delete + end.to change { Sponsor.find(sponsor.id).magazine.target }.from(magazine).to(nil) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/associations_spec.rb b/dynamoid/spec/dynamoid/associations_spec.rb new file mode 100644 index 000000000..3df179086 --- /dev/null +++ b/dynamoid/spec/dynamoid/associations_spec.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Associations do + let(:magazine) { Magazine.create } + + it 'defines a getter' do + expect(magazine).to respond_to :subscriptions + end + + it 'defines a setter' do + expect(magazine).to respond_to :subscriptions= + end +end diff --git a/dynamoid/spec/dynamoid/before_type_cast_spec.rb b/dynamoid/spec/dynamoid/before_type_cast_spec.rb new file mode 100644 index 000000000..f164e2d38 --- /dev/null +++ b/dynamoid/spec/dynamoid/before_type_cast_spec.rb @@ -0,0 +1,114 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Before type cast' do + describe '#attributes_before_type_cast', config: { timestamps: false } do + let(:klass) do + new_class do + field :admin, :boolean + end + end + + it 'returns original attributes value' do + obj = klass.new(admin: 0) + + expect(obj.attributes_before_type_cast).to eql( + admin: 0, + ) + end + + it 'returns values for all the attributes even not assigned' do + klass_with_many_fields = new_class do + field :first_name + field :last_name + field :email + end + obj = 
klass_with_many_fields.new(first_name: 'John') + + expect(obj.attributes_before_type_cast).to eql( + first_name: 'John', + ) + end + + it 'returns original default value if field has default value' do + klass_with_default_value = new_class do + field :activated_on, :date, default: '2018-09-27' + end + obj = klass_with_default_value.new + + expect(obj.attributes_before_type_cast).to eql( + activated_on: '2018-09-27', + ) + end + + it 'returns an empty Hash if no field has a default value' do + obj = klass.new + + expect(obj.attributes_before_type_cast).to eql({}) + end + + it 'returns values loaded from the storage before type casting' do + obj = klass.create!(admin: false) + obj2 = klass.find(obj.id) + + expect(obj2.attributes_before_type_cast).to eql( + id: obj.id, + admin: false, + ) + end + end + + describe '#read_attribute_before_type_cast' do + let(:klass) do + new_class do + field :admin, :boolean + end + end + + it 'returns attribute original value' do + obj = klass.new(admin: 1) + + expect(obj.read_attribute_before_type_cast(:admin)).to eql(1) + end + + it 'accepts string as well as symbol argument' do + obj = klass.new(admin: 1) + + expect(obj.read_attribute_before_type_cast('admin')).to eql(1) + end + + it 'returns nil if there is no such attribute' do + obj = klass.new + + expect(obj.read_attribute_before_type_cast(:first_name)).to eql(nil) + end + end + + describe '#_before_type_cast' do + let(:klass) do + new_class do + field :first_name + field :last_name + field :admin, :boolean + end + end + + it 'exists for every model attribute' do + obj = klass.new + + expect(obj).to respond_to(:id) + expect(obj).to respond_to(:first_name_before_type_cast) + expect(obj).to respond_to(:last_name_before_type_cast) + expect(obj).to respond_to(:admin) + expect(obj).to respond_to(:created_at) + expect(obj).to respond_to(:updated_at) + end + + it 'returns attribute original value' do + obj = klass.new(admin: 0) + + expect(obj.admin_before_type_cast).to eql(0) + end + end +end diff --git a/dynamoid/spec/dynamoid/config/backoff_strategies/exponential_backoff_spec.rb b/dynamoid/spec/dynamoid/config/backoff_strategies/exponential_backoff_spec.rb new file mode 100644 index 000000000..c4bc3d9d0 --- /dev/null +++ b/dynamoid/spec/dynamoid/config/backoff_strategies/exponential_backoff_spec.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Dynamoid::Config::BackoffStrategies::ExponentialBackoff do + let(:base_backoff) { 1 } + let(:ceiling) { 5 } + let(:backoff) { described_class.call(base_backoff: base_backoff, ceiling: ceiling) } + + it 'sleeps the first time for the specified base backoff time' do + expect(described_class).to receive(:sleep).with(base_backoff) + backoff.call + end + + it 'sleeps for exponentially increasing time' do + seconds = [] + allow(described_class).to receive(:sleep) do |s| + seconds << s + end + + backoff.call + expect(seconds).to eq [base_backoff] + + backoff.call + expect(seconds).to eq [base_backoff, base_backoff * 2] + + backoff.call + expect(seconds).to eq [base_backoff, base_backoff * 2, base_backoff * 4] + + backoff.call + expect(seconds).to eq [base_backoff, base_backoff * 2, base_backoff * 4, base_backoff * 8] + end + + it 'stops increasing the time after the ceiling number of calls' do + seconds = [] + allow(described_class).to receive(:sleep) do |s| + seconds << s + end + + 6.times { backoff.call } + expect(seconds).to eq [ + base_backoff, + base_backoff * 2, + base_backoff * 4, + base_backoff * 8, + base_backoff * 16, + base_backoff * 16 + ] + end
+ + it 'can be called without parameters' do + backoff = nil + expect do + backoff = described_class.call + end.not_to raise_error + end + + it 'uses base backoff = 0.5 and ceiling = 3 by default' do + backoff = described_class.call + + seconds = [] + allow(described_class).to receive(:sleep) do |s| + seconds << s + end + + 4.times { backoff.call } + expect(seconds).to eq([ + 0.5, + 0.5 * 2, + 0.5 * 4, + 0.5 * 4 + ]) + end +end diff --git a/dynamoid/spec/dynamoid/config_spec.rb b/dynamoid/spec/dynamoid/config_spec.rb new file mode 100644 index 000000000..f361dd888 --- /dev/null +++ b/dynamoid/spec/dynamoid/config_spec.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Config do + describe 'credentials' do + let(:credentials_new) do + Aws::Credentials.new('your_access_key_id', 'your_secret_access_key') + end + + before do + @credentials_old = Dynamoid.config.credentials + Dynamoid.config.credentials = credentials_new + Dynamoid.adapter.connect! # clear cached client + end + + after do + Dynamoid.config.credentials = @credentials_old + Dynamoid.adapter.connect! # clear cached client + end + + it 'passes credentials to a client connection' do + credentials = Dynamoid.adapter.client.config.credentials + + expect(credentials.access_key_id).to eq 'your_access_key_id' + expect(credentials.secret_access_key).to eq 'your_secret_access_key' + end + end + + describe 'log_formatter' do + let(:log_formatter) { Aws::Log::Formatter.short } + let(:logger) { Logger.new(buffer) } + let(:buffer) { StringIO.new } + + before do + @log_formatter = Dynamoid.config.log_formatter + @logger = Dynamoid.config.logger + + Dynamoid.config.log_formatter = log_formatter + Dynamoid.config.logger = logger + Dynamoid.adapter.connect! # clear cached client + end + + after do + Dynamoid.config.log_formatter = @log_formatter + Dynamoid.config.logger = @logger + Dynamoid.adapter.connect! 
# clear cached client + end + + it 'changes logging format' do + new_class.create_table + expect(buffer.string).to match(/\[Aws::DynamoDB::Client 200 .+\] create_table \n/) + end + end +end diff --git a/dynamoid/spec/dynamoid/criteria/chain_spec.rb b/dynamoid/spec/dynamoid/criteria/chain_spec.rb new file mode 100644 index 000000000..d611ea9f8 --- /dev/null +++ b/dynamoid/spec/dynamoid/criteria/chain_spec.rb @@ -0,0 +1,2284 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Criteria::Chain do + let(:time) { DateTime.now } + let!(:user) { User.create(name: 'Josh', email: 'josh@joshsymonds.com', password: 'Test123') } + let(:chain) { described_class.new(User) } + + describe 'Query vs Scan' do + it 'Scans when query is empty' do + chain = described_class.new(Address) + chain = chain.where({}) + expect(chain).to receive(:raw_pages_via_scan).and_return([]) + chain.all + end + + it 'Queries when query is only ID' do + chain = described_class.new(Address) + chain = chain.where(id: 'test') + expect(chain).to receive(:raw_pages_via_query).and_return([]) + chain.all + end + + it 'Queries when query contains ID' do + chain = described_class.new(Address) + chain = chain.where(id: 'test', city: 'Bucharest') + expect(chain).to receive(:raw_pages_via_query).and_return([]) + chain.all + end + + it 'Scans when query includes keys that are neither a hash nor a range' do + chain = described_class.new(Address) + chain = chain.where(city: 'Bucharest') + expect(chain).to receive(:raw_pages_via_scan).and_return([]) + chain.all + end + + it 'Scans when query is only a range' do + chain = described_class.new(Tweet) + chain = chain.where(group: 'xx') + expect(chain).to receive(:raw_pages_via_scan).and_return([]) + chain.all + end + + it 'Scans when there is only not-equal operator for hash key' do + chain = described_class.new(Address) + chain = chain.where('id.in': ['test']) + expect(chain).to receive(:raw_pages_via_scan).and_return([]) + chain.all + end + end + + describe 'Limits' do + shared_examples 'correct handling chain limits' do |request_type| + let(:model) do + new_class do + range :age, :integer + field :name + end + end + + before do + @request_type = request_type + (1..10).each do |i| + model.create(id: '1', name: 'Josh', age: i) + model.create(id: '1', name: 'Pascal', age: i + 100) + end + end + + def request_params + return { id: '1' } if @request_type == :query + + {} + end + + it 'supports record_limit' do + expect(model.where(request_params.merge(name: 'Josh')).record_limit(1).count).to eq(1) + expect(model.where(request_params.merge(name: 'Josh')).record_limit(3).count).to eq(3) + end + + it 'supports scan_limit' do + expect(model.where(request_params.merge(name: 'Pascal')).scan_limit(1).count).to eq(0) + expect(model.where(request_params.merge(name: 'Pascal')).scan_limit(11).count).to eq(1) + end + + it 'supports batch' do + expect(model.where(request_params.merge(name: 'Josh')).batch(1).count).to eq(10) + expect(model.where(request_params.merge(name: 'Josh')).batch(3).count).to eq(10) + end + + it 'supports combined limits with batch size 1' do + # Scanning through 13 means it'll see 10 Josh objects and then + # 3 Pascal objects but it'll hit record_limit first with 2 objects + # so we'd only see 12 requests due to batching. 
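# (With batch size 1 every scanned item is a separate request: 10 non-matching
# Josh items plus 2 matching Pascal items makes 12 requests, so the scan_limit
# of 13 is never reached.)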
+ expect(Dynamoid.adapter.client).to receive(request_type).exactly(12).times.and_call_original + expect(model.where(request_params.merge(name: 'Pascal')) + .record_limit(2) + .scan_limit(13) + .batch(1).count).to eq(2) + end + + it 'supports combined limits with batch size other than 1' do + # Querying in batches of 3 so we'd see: + # 3 Josh, 3 Josh, 3 Josh, 1 Josh + 2 Pascal, 3 Pascal, 3 Pascal, 2 Pascal + # So total of 7 requests + expect(Dynamoid.adapter.client).to receive(request_type).exactly(7).times.and_call_original + expect(model.where(request_params.merge(name: 'Pascal')) + .record_limit(10) + .batch(3).count).to eq(10) + end + end + + describe 'Query' do + it_behaves_like 'correct handling chain limits', :query + end + + describe 'Scan' do + it_behaves_like 'correct handling chain limits', :scan + end + end + + describe 'Query with keys conditions' do + let(:model) do + new_class(partition_key: :name) do + range :age, :integer + end + end + + it 'supports eq' do + customer1 = model.create(name: 'Bob', age: 10) + customer2 = model.create(name: 'Bob', age: 30) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob', age: 10).all).to contain_exactly(customer1) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to be_nil + end + + it 'supports lt' do + customer1 = model.create(name: 'Bob', age: 5) + customer2 = model.create(name: 'Bob', age: 9) + customer3 = model.create(name: 'Bob', age: 12) + + expect(model.where(name: 'Bob', 'age.lt': 10).all).to contain_exactly(customer1, customer2) + end + + it 'supports gt' do + customer1 = model.create(name: 'Bob', age: 11) + customer2 = model.create(name: 'Bob', age: 12) + customer3 = model.create(name: 'Bob', age: 9) + + expect(model.where(name: 'Bob', 'age.gt': 10).all).to contain_exactly(customer1, customer2) + end + + it 'supports lte' do + customer1 = model.create(name: 'Bob', age: 5) + customer2 = model.create(name: 'Bob', age: 9) + customer3 = model.create(name: 'Bob', age: 12) + + expect(model.where(name: 'Bob', 'age.lte': 9).all).to contain_exactly(customer1, customer2) + end + + it 'supports gte' do + customer1 = model.create(name: 'Bob', age: 11) + customer2 = model.create(name: 'Bob', age: 12) + customer3 = model.create(name: 'Bob', age: 9) + + expect(model.where(name: 'Bob', 'age.gte': 11).all).to contain_exactly(customer1, customer2) + end + + it 'supports begins_with' do + model = new_class(partition_key: :name) do + range :job_title + end + + customer1 = model.create(name: 'Bob', job_title: 'Environmental Air Quality Consultant') + customer2 = model.create(name: 'Bob', job_title: 'Environmental Project Manager') + customer3 = model.create(name: 'Bob', job_title: 'Creative Consultant') + + expect(model.where(name: 'Bob', 'job_title.begins_with': 'Environmental').all) + .to contain_exactly(customer1, customer2) + end + + it 'supports between' do + customer1 = model.create(name: 'Bob', age: 10) + customer2 = model.create(name: 'Bob', age: 20) + customer3 = model.create(name: 'Bob', age: 30) + customer4 = model.create(name: 'Bob', age: 40) + + expect(model.where(name: 'Bob', 'age.between': [19, 31]).all).to contain_exactly(customer2, customer3) + end + + it 'supports multiple conditions for the same attribute' do + skip 'Aws::DynamoDB::Errors::ValidationException: KeyConditionExpressions must only contain one condition per key' + + 
customer1 = model.create(name: 'Bob', age: 10) + customer2 = model.create(name: 'Bob', age: 20) + customer3 = model.create(name: 'Bob', age: 30) + customer4 = model.create(name: 'Bob', age: 40) + + expect(model.where(name: 'Bob', 'age.gt': 19).where('age.lt': 31).all).to contain_exactly(customer2, customer3) + end + + it 'supports multiple conditions for the same attribute with the same operator' do + skip 'Aws::DynamoDB::Errors::ValidationException: KeyConditionExpressions must only contain one condition per key' + + customer1 = model.create(name: 'Bob', age: 10) + customer2 = model.create(name: 'Bob', age: 20) + customer3 = model.create(name: 'Bob', age: 30) + customer4 = model.create(name: 'Bob', age: 40) + + expect(model.where(name: 'Bob', 'age.gt': 31).where('age.gt': 19).all).to contain_exactly(customer4) + end + + it 'allows conditions with attribute names conflicting with DynamoDB reserved words' do + model = new_class do + range :size # SIZE is reserved word + end + + model.create_table + put_attributes(model.table_name, id: '1', size: 'c') + + documents = model.where(id: '1', size: 'c').to_a + expect(documents.map(&:id)).to eql ['1'] + end + + it 'raises error when operator is not supported' do + expect do + model.where(name: 'Bob', 'age.foo': 10).to_a + end.to raise_error(Dynamoid::Errors::Error, 'Unsupported operator foo in age.foo') + end + end + + # http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.QueryFilter.html + describe 'Query with non-keys conditions' do + let(:model) do + new_class do + table name: :customer, key: :name + range :last_name + field :age, :integer + end + end + + it 'supports eq' do + customer1 = model.create(name: 'a', last_name: 'a', age: 10) + customer2 = model.create(name: 'a', last_name: 'b', age: 30) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'a', age: 10).all).to contain_exactly(customer1) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to be_nil + expect(chain.key_fields_detector.index_name).to be_nil + end + + it 'supports eq for set' do + klass = new_class do + range :last_name + field :set, :set + end + + document1 = klass.create(id: '1', last_name: 'a', set: [1, 2].to_set) + document2 = klass.create(id: '1', last_name: 'b', set: [3, 4].to_set) + + chain = described_class.new(klass) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(id: '1', set: [1, 2].to_set).all).to contain_exactly(document1) + end + + it 'supports eq for array' do + klass = new_class do + range :last_name + field :array, :array + end + + document1 = klass.create(id: '1', last_name: 'a', array: [1, 2]) + document2 = klass.create(id: '1', last_name: 'b', array: [3, 4]) + + chain = described_class.new(klass) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(id: '1', array: [1, 2]).all).to contain_exactly(document1) + end + + it 'supports ne' do + customer1 = model.create(name: 'a', last_name: 'a', age: 5) + customer2 = model.create(name: 'a', last_name: 'b', age: 9) + + expect(model.where(name: 'a', 'age.ne': 9).all).to contain_exactly(customer1) + end + + it 'supports lt' do + customer1 = model.create(name: 'a', last_name: 'a', age: 5) + customer2 = model.create(name: 'a', last_name: 'b', age: 9) + customer3 = model.create(name: 'a', last_name: 'c', age: 12) + + expect(model.where(name: 'a', 'age.lt': 10).all).to 
contain_exactly(customer1, customer2) + end + + it 'supports gt' do + customer1 = model.create(name: 'a', last_name: 'a', age: 11) + customer2 = model.create(name: 'a', last_name: 'b', age: 12) + customer3 = model.create(name: 'a', last_name: 'c', age: 9) + + expect(model.where(name: 'a', 'age.gt': 10).all).to contain_exactly(customer1, customer2) + end + + it 'supports lte' do + customer1 = model.create(name: 'a', last_name: 'a', age: 5) + customer2 = model.create(name: 'a', last_name: 'b', age: 9) + customer3 = model.create(name: 'a', last_name: 'c', age: 12) + + expect(model.where(name: 'a', 'age.lte': 9).all).to contain_exactly(customer1, customer2) + end + + it 'supports gte' do + customer1 = model.create(name: 'a', last_name: 'a', age: 11) + customer2 = model.create(name: 'a', last_name: 'b', age: 12) + customer3 = model.create(name: 'a', last_name: 'c', age: 9) + + expect(model.where(name: 'a', 'age.gte': 11).all).to contain_exactly(customer1, customer2) + end + + it 'supports begins_with' do + model = new_class(partition_key: :name) do + range :last_name + field :job_title + end + + customer1 = model.create(name: 'a', last_name: 'a', job_title: 'Environmental Air Quality Consultant') + customer2 = model.create(name: 'a', last_name: 'b', job_title: 'Environmental Project Manager') + customer3 = model.create(name: 'a', last_name: 'c', job_title: 'Creative Consultant') + + expect(model.where(name: 'a', 'job_title.begins_with': 'Environmental').all) + .to contain_exactly(customer1, customer2) + end + + it 'supports between' do + customer1 = model.create(name: 'a', last_name: 'a', age: 10) + customer2 = model.create(name: 'a', last_name: 'b', age: 20) + customer3 = model.create(name: 'a', last_name: 'c', age: 30) + customer4 = model.create(name: 'a', last_name: 'd', age: 40) + + expect(model.where(name: 'a', 'age.between': [19, 31]).all).to contain_exactly(customer2, customer3) + end + + it 'supports in' do + customer1 = model.create(name: 'a', last_name: 'a', age: 10) + customer2 = model.create(name: 'a', last_name: 'b', age: 20) + customer3 = model.create(name: 'a', last_name: 'c', age: 30) + + expect(model.where(name: 'a', 'age.in': [10, 20]).all).to contain_exactly(customer1, customer2) + end + + it 'supports contains' do + model = new_class(partition_key: :name) do + range :last_name + field :job_title, :string + end + + customer1 = model.create(name: 'a', last_name: 'a', job_title: 'Environmental Air Quality Consultant') + customer2 = model.create(name: 'a', last_name: 'b', job_title: 'Environmental Project Manager') + customer3 = model.create(name: 'a', last_name: 'c', job_title: 'Creative Consultant') + + expect(model.where(name: 'a', 'job_title.contains': 'Consul').all) + .to contain_exactly(customer1, customer3) + end + + it 'supports not_contains' do + model = new_class(partition_key: :name) do + range :last_name + field :job_title, :string + end + + customer1 = model.create(name: 'a', last_name: 'a', job_title: 'Environmental Air Quality Consultant') + customer2 = model.create(name: 'a', last_name: 'b', job_title: 'Environmental Project Manager') + customer3 = model.create(name: 'a', last_name: 'c', job_title: 'Creative Consultant') + + expect(model.where(name: 'a', 'job_title.not_contains': 'Consul').all) + .to contain_exactly(customer2) + end + + it 'supports null' do + model.create_table + + put_attributes(model.table_name, name: 'a', last_name: 'aa', age: 1) + put_attributes(model.table_name, name: 'a', last_name: 'bb', age: 2) + put_attributes(model.table_name, name: 
'a', last_name: 'cc',) + + documents = model.where(name: 'a', 'age.null': true).to_a + expect(documents.map(&:last_name)).to contain_exactly('cc') + + documents = model.where(name: 'a', 'age.null': false).to_a + expect(documents.map(&:last_name)).to contain_exactly('aa', 'bb') + end + + it 'supports not_null' do + model.create_table + + put_attributes(model.table_name, name: 'a', last_name: 'aa', age: 1) + put_attributes(model.table_name, name: 'a', last_name: 'bb', age: 2) + put_attributes(model.table_name, name: 'a', last_name: 'cc',) + + documents = model.where(name: 'a', 'age.not_null': true).to_a + expect(documents.map(&:last_name)).to contain_exactly('aa', 'bb') + + documents = model.where('age.not_null': false).to_a + expect(documents.map(&:last_name)).to contain_exactly('cc') + end + + it 'supports multiple conditions for the same attribute' do + customer1 = model.create(name: 'a', last_name: 'a', age: 10) + customer2 = model.create(name: 'a', last_name: 'b', age: 20) + customer3 = model.create(name: 'a', last_name: 'c', age: 30) + customer4 = model.create(name: 'a', last_name: 'd', age: 40) + + expect(model.where(name: 'a', 'age.gt': 19, 'age.lt': 31).all).to contain_exactly(customer2, customer3) + end + + it 'supports multiple conditions for the same attribute with the same operator' do + customer1 = model.create(name: 'a', last_name: 'a', age: 10) + customer2 = model.create(name: 'a', last_name: 'b', age: 20) + customer3 = model.create(name: 'a', last_name: 'c', age: 30) + customer4 = model.create(name: 'a', last_name: 'd', age: 40) + + expect(model.where(name: 'a', 'age.gt': 31).where('age.gt': 19).all).to contain_exactly(customer4) + end + + it 'allows conditions with attribute names conflicting with DynamoDB reserved words' do + model = new_class do + # SCAN, SET and SIZE are reserved words + field :scan + field :set + field :size + end + + model.create_table + put_attributes(model.table_name, id: '1', scan: 'a', set: 'b', size: 'c') + + documents = model.where(id: '1', scan: 'a', set: 'b', size: 'c').to_a + expect(documents.map(&:id)).to eql ['1'] + end + + it 'raises error when operator is not supported' do + expect do + model.where(name: 'a', 'age.foo': 9).to_a + end.to raise_error(Dynamoid::Errors::Error, 'Unsupported operator foo in age.foo') + end + end + + # http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.ScanFilter.html + describe 'Scan conditions' do + let(:model) do + new_class do + field :age, :integer + field :job_title, :string + end + end + + it 'supports eq' do + customer1 = model.create(age: 10) + customer2 = model.create(age: 30) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_scan).and_call_original + expect(chain.where(age: 10).all).to contain_exactly(customer1) + expect(chain.key_fields_detector.hash_key).to be_nil + expect(chain.key_fields_detector.range_key).to be_nil + expect(chain.key_fields_detector.index_name).to be_nil + end + + it 'supports eq for set' do + klass = new_class do + field :set, :set + end + document1 = klass.create(set: %w[a b]) + document2 = klass.create(set: %w[b c]) + + expect(klass.where(set: %w[a b].to_set).all).to contain_exactly(document1) + end + + it 'supports eq for array' do + klass = new_class do + field :array, :array + end + document1 = klass.create(array: %w[a b]) + document2 = klass.create(array: %w[b c]) + + expect(klass.where(array: %w[a b]).all).to contain_exactly(document1) + end + + it 'supports ne' do + customer1 = model.create(age: 5) + 
customer2 = model.create(age: 9) + + expect(model.where('age.ne': 9).all).to contain_exactly(customer1) + end + + it 'supports lt' do + customer1 = model.create(age: 5) + customer2 = model.create(age: 9) + customer3 = model.create(age: 12) + + expect(model.where('age.lt': 10).all).to contain_exactly(customer1, customer2) + end + + it 'supports gt' do + customer1 = model.create(age: 11) + customer2 = model.create(age: 12) + customer3 = model.create(age: 9) + + expect(model.where('age.gt': 10).all).to contain_exactly(customer1, customer2) + end + + it 'supports lte' do + customer1 = model.create(age: 5) + customer2 = model.create(age: 9) + customer3 = model.create(age: 12) + + expect(model.where('age.lte': 9).all).to contain_exactly(customer1, customer2) + end + + it 'supports gte' do + customer1 = model.create(age: 11) + customer2 = model.create(age: 12) + customer3 = model.create(age: 9) + + expect(model.where('age.gte': 11).all).to contain_exactly(customer1, customer2) + end + + it 'supports begins_with' do + customer1 = model.create(job_title: 'Environmental Air Quality Consultant') + customer2 = model.create(job_title: 'Environmental Project Manager') + customer3 = model.create(job_title: 'Creative Consultant') + + expect(model.where('job_title.begins_with': 'Environmental').all) + .to contain_exactly(customer1, customer2) + end + + it 'supports between' do + customer1 = model.create(age: 10) + customer2 = model.create(age: 20) + customer3 = model.create(age: 30) + customer4 = model.create(age: 40) + + expect(model.where('age.between': [19, 31]).all).to contain_exactly(customer2, customer3) + end + + it 'supports in' do + customer1 = model.create(age: 10) + customer2 = model.create(age: 20) + customer3 = model.create(age: 30) + + expect(model.where('age.in': [10, 20]).all).to contain_exactly(customer1, customer2) + end + + it 'supports contains' do + customer1 = model.create(job_title: 'Environmental Air Quality Consultant') + customer2 = model.create(job_title: 'Environmental Project Manager') + customer3 = model.create(job_title: 'Creative Consultant') + + expect(model.where('job_title.contains': 'Consul').all) + .to contain_exactly(customer1, customer3) + end + + it 'supports contains for set' do + klass = new_class do + field :set, :set + end + document1 = klass.create(set: %w[a b]) + document2 = klass.create(set: %w[b c]) + + expect(klass.where('set.contains': 'a').all).to contain_exactly(document1) + expect(klass.where('set.contains': 'b').all).to contain_exactly(document1, document2) + expect(klass.where('set.contains': 'c').all).to contain_exactly(document2) + end + + it 'supports contains for array' do + klass = new_class do + field :array, :array + end + document1 = klass.create(array: %w[a b]) + document2 = klass.create(array: %w[b c]) + + expect(klass.where('array.contains': 'a').all).to contain_exactly(document1) + expect(klass.where('array.contains': 'b').all).to contain_exactly(document1, document2) + expect(klass.where('array.contains': 'c').all).to contain_exactly(document2) + end + + it 'supports not_contains' do + customer1 = model.create(job_title: 'Environmental Air Quality Consultant') + customer2 = model.create(job_title: 'Environmental Project Manager') + customer3 = model.create(job_title: 'Creative Consultant') + + expect(model.where('job_title.not_contains': 'Consul').all) + .to contain_exactly(customer2) + end + + it 'supports null' do + model.create_table + + put_attributes(model.table_name, id: '1', age: 1) + put_attributes(model.table_name, id: '2', age: 
2) + put_attributes(model.table_name, id: '3') + + documents = model.where('age.null': true).to_a + expect(documents.map(&:id)).to contain_exactly('3') + + documents = model.where('age.null': false).to_a + expect(documents.map(&:id)).to contain_exactly('1', '2') + end + + it 'supports not_null' do + model.create_table + + put_attributes(model.table_name, id: '1', age: 1) + put_attributes(model.table_name, id: '2', age: 2) + put_attributes(model.table_name, id: '3') + + documents = model.where('age.not_null': true).to_a + expect(documents.map(&:id)).to contain_exactly('1', '2') + + documents = model.where('age.not_null': false).to_a + expect(documents.map(&:id)).to contain_exactly('3') + end + + it 'supports multiple conditions for the same attribute' do + customer1 = model.create(age: 10) + customer2 = model.create(age: 20) + customer3 = model.create(age: 30) + customer4 = model.create(age: 40) + + expect(model.where('age.gt': 19, 'age.lt': 31).all).to contain_exactly(customer2, customer3) + end + + it 'supports multiple conditions for the same attribute with the same operator' do + customer1 = model.create(age: 10) + customer2 = model.create(age: 20) + customer3 = model.create(age: 30) + customer4 = model.create(age: 40) + + expect(model.where('age.gt': 31).where('age.gt': 19).all.to_a).to eq([customer4]) + end + + it 'allows conditions with attribute names conflicting with DynamoDB reserved words' do + model = new_class do + # SCAN, SET and SIZE are reserved words + field :scan + field :set + field :size + end + + model.create_table + put_attributes(model.table_name, id: '1', scan: 'a', set: 'b', size: 'c') + + documents = model.where(scan: 'a', set: 'b', size: 'c').to_a + expect(documents.map(&:id)).to eql ['1'] + end + + it 'raises error when operator is not supported' do + expect do + model.where('age.foo': 9).to_a + end.to raise_error(Dynamoid::Errors::Error, 'Unsupported operator foo in age.foo') + end + end + + describe 'Lazy loading' do + describe '.all' do + it 'does load result lazily' do + Vehicle.create + + expect(Dynamoid.adapter.client).to receive(:scan).exactly(0).times.and_call_original + Vehicle.record_limit(1).all + end + end + + describe '.find_by_pages' do + it 'does load result lazily' do + Vehicle.create + + expect(Dynamoid.adapter.client).to receive(:scan).exactly(0).times.and_call_original + Vehicle.record_limit(1).find_by_pages + end + end + end + + describe 'local secondary indexes used for `where` clauses' do + let(:model) do + new_class(partition_key: :name) do + range :range, :integer + + field :range2, :integer + field :range3, :integer + + local_secondary_index range_key: :range2, name: :range2index + local_secondary_index range_key: :range3, name: :range3index + end + end + + before do + @customer1 = model.create(name: 'Bob', range: 1, range2: 11, range3: 111) + @customer2 = model.create(name: 'Bob', range: 2, range2: 22, range3: 222) + @customer3 = model.create(name: 'Bob', range: 3, range2: 33, range3: 333) + end + + it 'supports query on local secondary index but always defaults to table range key' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob', 'range.lt': 3, 'range2.gt': 15).to_a.size).to eq(1) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to eq(:range) + expect(chain.key_fields_detector.index_name).to be_nil + end + + it 'supports query on local secondary index' do + chain = described_class.new(model) + 
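# Only the :range2index LSI has :range2 as its range key, so the key fields
# detector is expected to pick that index for this query.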
expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob', 'range2.gt': 15).to_a.size).to eq(2) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to eq(:range2) + expect(chain.key_fields_detector.index_name).to eq(:range2index) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob', 'range3.lt': 200).to_a.size).to eq(1) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to eq(:range3) + expect(chain.key_fields_detector.index_name).to eq(:range3index) + end + + it 'supports query on local secondary index with start' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob', 'range2.gt': 15).to_a.size).to eq(2) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to eq(:range2) + expect(chain.key_fields_detector.index_name).to eq(:range2index) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob', 'range2.gt': 15).start(@customer2).all).to contain_exactly(@customer3) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to eq(:range2) + expect(chain.key_fields_detector.index_name).to eq(:range2index) + end + end + + describe 'global secondary indexes used for `where` clauses' do + it 'does not use global secondary index if does not project all attributes' do + model = new_class(partition_key: :name) do + range :customerid, :integer + + field :city + field :age, :integer + field :gender + + global_secondary_index hash_key: :city, range_key: :age, name: :cityage + end + + customer1 = model.create(name: 'Bob', city: 'San Francisco', age: 10, gender: 'male', customerid: 1) + customer2 = model.create(name: 'Jeff', city: 'San Francisco', age: 15, gender: 'male', customerid: 2) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_scan).and_call_original + expect(chain.where(city: 'San Francisco').to_a.size).to eq(2) + # Does not use GSI since not projecting all attributes + expect(chain.key_fields_detector.hash_key).to be_nil + expect(chain.key_fields_detector.range_key).to be_nil + expect(chain.key_fields_detector.index_name).to be_nil + end + + context 'with full composite key for table' do + let(:model) do + new_class(partition_key: :name) do + range :customerid, :integer + + field :city + field :email + field :age, :integer + field :gender + + global_secondary_index hash_key: :city, range_key: :age, name: :cityage, projected_attributes: :all + global_secondary_index hash_key: :city, range_key: :gender, name: :citygender, projected_attributes: :all + global_secondary_index hash_key: :email, range_key: :age, name: :emailage, projected_attributes: :all + global_secondary_index hash_key: :name, range_key: :age, name: :nameage, projected_attributes: :all + end + end + + before do + @customer1 = model.create(name: 'Bob', city: 'San Francisco', email: 'bob@test.com', age: 10, gender: 'male', + customerid: 1) + @customer2 = model.create(name: 'Jeff', city: 'San Francisco', email: 'jeff@test.com', age: 15, gender: 'male', + customerid: 2) + @customer3 = model.create(name: 'Mark', city: 'San Francisco', email: 'mark@test.com', age: 20, gender: 'male', + customerid: 3) + @customer4 = 
model.create(name: 'Greg', city: 'New York', email: 'greg@test.com', age: 25, gender: 'male', + customerid: 4) + end + + it 'supports query on global secondary index but always defaults to table hash key' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob').to_a.size).to eq(1) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to be_nil + expect(chain.key_fields_detector.index_name).to be_nil + end + + it 'supports query on global secondary index' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(city: 'San Francisco').to_a.size).to eq(3) + expect(chain.key_fields_detector.hash_key).to eq(:city) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to eq(:cityage) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(city: 'San Francisco', 'age.gt': 12).to_a.size).to eq(2) + expect(chain.key_fields_detector.hash_key).to eq(:city) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to eq(:cityage) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(email: 'greg@test.com').to_a.size).to eq(1) + expect(chain.key_fields_detector.hash_key).to eq(:email) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to eq(:emailage) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(email: 'greg@test.com', 'age.gt': 12).to_a.size).to eq(1) + expect(chain.key_fields_detector.hash_key).to eq(:email) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to eq(:emailage) + end + + it 'supports scan when no global secondary index available' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_scan).and_call_original + expect(chain.where(gender: 'male').to_a.size).to eq(4) + expect(chain.key_fields_detector.hash_key).to be_nil + expect(chain.key_fields_detector.range_key).to be_nil + expect(chain.key_fields_detector.index_name).to be_nil + end + + it 'supports query on global secondary index with start' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(city: 'San Francisco').to_a.size).to eq(3) + expect(chain.key_fields_detector.hash_key).to eq(:city) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to eq(:cityage) + + # Now query with start at customer2 and we should only see customer3 + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(city: 'San Francisco').start(@customer2).all).to contain_exactly(@customer3) + end + + it "does not use index if a condition for index hash key is other than 'equal'" do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_scan).and_call_original + expect(chain.where('city.begins_with': 'San').to_a.size).to eq(3) + expect(chain.key_fields_detector.hash_key).to be_nil + expect(chain.key_fields_detector.range_key).to be_nil + expect(chain.key_fields_detector.index_name).to be_nil + end 
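# Illustrative sketch (hypothetical `CityageSketch` model, not code from the diff)
# of the index selection behaviour exercised in this context: a global secondary
# index is only considered when it projects all attributes and its hash key is
# constrained with an equality condition; otherwise the chain falls back to Scan.
class CityageSketch
  include Dynamoid::Document

  table name: :cityage_sketches, key: :name
  field :city
  field :age, :integer

  global_secondary_index hash_key: :city, range_key: :age,
                         name: :cityage, projected_attributes: :all
end
# CityageSketch.where(city: 'San Francisco', 'age.gt': 12) # served by the :cityage index
# CityageSketch.where('city.begins_with': 'San')           # no equality on :city, so Scan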
+ + it 'prefers global secondary index with range key used in conditions to index w/o such range key' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(city: 'San Francisco', 'age.lte': 15).to_a.size).to eq(2) + expect(chain.key_fields_detector.hash_key).to eq(:city) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to eq(:cityage) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(city: 'San Francisco', gender: 'male').to_a.size).to eq(3) + expect(chain.key_fields_detector.hash_key).to eq(:city) + expect(chain.key_fields_detector.range_key).to eq(:gender) + expect(chain.key_fields_detector.index_name).to eq(:citygender) + end + + it 'uses global secondary index when secondary hash key overlaps with primary hash key and range key matches' do + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(name: 'Bob', age: 10).to_a.size).to eq(1) + expect(chain.key_fields_detector.hash_key).to eq(:name) + expect(chain.key_fields_detector.range_key).to eq(:age) + expect(chain.key_fields_detector.index_name).to eq(:nameage) + end + end + + it 'supports query on global secondary index with correct start key without table range key' do + model = new_class(partition_key: :name) do + field :city + field :age, :integer + + global_secondary_index hash_key: :city, range_key: :age, name: :cityage, projected_attributes: :all + end + + customer1 = model.create(name: 'Bob', city: 'San Francisco', age: 10) + customer2 = model.create(name: 'Jeff', city: 'San Francisco', age: 15) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + expect(chain.where(city: 'San Francisco').start(customer1).all).to contain_exactly(customer2) + end + end + + describe 'type casting in `where` clause' do + let(:klass) do + new_class do + field :count, :integer + end + end + + it 'type casts condition values' do + obj1 = klass.create(count: 1) + obj2 = klass.create(count: 2) + + expect(klass.where(count: '1').all.to_a).to eql([obj1]) + end + + it 'type casts condition values with predicates' do + obj1 = klass.create(count: 1) + obj2 = klass.create(count: 2) + obj3 = klass.create(count: 3) + + expect(klass.where('count.gt': '1').all).to contain_exactly(obj2, obj3) + end + + it 'type casts collection of condition values' do + obj1 = klass.create(count: 1) + obj2 = klass.create(count: 2) + obj3 = klass.create(count: 3) + + expect(klass.where('count.in': %w[1 2]).all).to contain_exactly(obj1, obj2) + end + end + + describe 'dumping in `where` clause' do + it 'dumps datetime' do + model = new_class do + field :activated_at, :datetime + end + + customer1 = model.create(activated_at: Time.now) + customer2 = model.create(activated_at: Time.now - 1.hour) + customer3 = model.create(activated_at: Time.now - 2.hour) + + expect( + model.where('activated_at.gt': Time.now - 1.5.hours).all + ).to contain_exactly(customer1, customer2) + end + + it 'dumps date' do + model = new_class do + field :registered_on, :date + end + + customer1 = model.create(registered_on: Date.today) + customer2 = model.create(registered_on: Date.today - 2.day) + customer3 = model.create(registered_on: Date.today - 4.days) + + expect( + model.where('registered_on.gt': Date.today - 3.days).all + ).to contain_exactly(customer1, customer2) + end + + it 
'dumps array elements' do + model = new_class do + field :birthday, :date + end + + customer1 = model.create(birthday: '1978-08-21'.to_date) + customer2 = model.create(birthday: '1984-05-13'.to_date) + customer3 = model.create(birthday: '1991-11-28'.to_date) + + expect( + model.where('birthday.between': ['1980-01-01'.to_date, '1990-01-01'.to_date]).all + ).to contain_exactly(customer2) + end + + context 'Query' do + it 'dumps partition key `equal` condition' do + model = new_class(partition_key: { name: :registered_on, type: :date }) + + customer1 = model.create(registered_on: Date.today) + customer2 = model.create(registered_on: Date.today - 2.day) + + expect( + model.where(registered_on: Date.today).all + ).to contain_exactly(customer1) + end + + it 'dumps sort key `equal` condition' do + model = new_class(partition_key: :first_name) do + range :registered_on, :date + end + + customer1 = model.create(first_name: 'Alice', registered_on: Date.today) + customer2 = model.create(first_name: 'Alice', registered_on: Date.today - 2.day) + + expect( + model.where(first_name: 'Alice', registered_on: Date.today).all + ).to contain_exactly(customer1) + end + + it 'dumps sort key `range` condition' do + model = new_class(partition_key: :first_name) do + range :registered_on, :date + end + + customer1 = model.create(first_name: 'Alice', registered_on: Date.today) + customer2 = model.create(first_name: 'Alice', registered_on: Date.today - 2.day) + customer3 = model.create(first_name: 'Alice', registered_on: Date.today - 4.days) + + expect( + model.where(first_name: 'Alice', 'registered_on.gt': Date.today - 3.days).all + ).to contain_exactly(customer1, customer2) + end + + it 'dumps non-key field `equal` condition' do + model = new_class(partition_key: :first_name) do + range :last_name + field :registered_on, :date # <==== not range key + end + + customer1 = model.create(first_name: 'Alice', last_name: 'Cooper', registered_on: Date.today) + customer2 = model.create(first_name: 'Alice', last_name: 'Morgan', registered_on: Date.today - 2.day) + + expect( + model.where(first_name: 'Alice', registered_on: Date.today).all + ).to contain_exactly(customer1) + end + + it 'dumps non-key field `range` condition' do + model = new_class(partition_key: :first_name) do + range :last_name + field :registered_on, :date # <==== not range key + end + + customer1 = model.create(first_name: 'Alice', last_name: 'Cooper', registered_on: Date.today) + customer2 = model.create(first_name: 'Alice', last_name: 'Morgan', registered_on: Date.today - 2.day) + customer3 = model.create(first_name: 'Alice', last_name: 'Smit', registered_on: Date.today - 4.days) + + expect( + model.where(first_name: 'Alice', 'registered_on.gt': Date.today - 3.days).all + ).to contain_exactly(customer1, customer2) + end + end + + context 'Scan' do + it 'dumps field for `equal` condition' do + model = new_class do + field :birthday, :date + end + + customer1 = model.create(birthday: '1978-08-21'.to_date) + customer2 = model.create(birthday: '1984-05-13'.to_date) + + expect(model.where(birthday: '1978-08-21').all).to contain_exactly(customer1) + end + + it 'dumps field for `range` condition' do + model = new_class do + field :birthday, :date + end + + customer1 = model.create(birthday: '1978-08-21'.to_date) + customer2 = model.create(birthday: '1984-05-13'.to_date) + + expect(model.where('birthday.gt': '1980-01-01').all).to contain_exactly(customer2) + end + end + end + + context 'field is not declared in document' do + context 'Query' do + 
let(:class_with_not_declared_field) do + new_class do + field :name + end + end + + before do + class_with_not_declared_field.create_table + end + + it 'ignores it without exceptions' do + Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '1', name: 'Mike', bod: '1996-12-21') + objects = class_with_not_declared_field.where(id: '1', name: 'Mike').to_a + + expect(objects.map(&:id)).to eql(['1']) + end + end + + context 'Scan' do + let(:class_with_not_declared_field) do + new_class do + range :name + end + end + + before do + class_with_not_declared_field.create_table + end + + it 'ignores it without exceptions' do + Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '1', name: 'Mike', bod: '1996-12-21') + objects = class_with_not_declared_field.where(id: '1', name: 'Mike').to_a + + expect(objects.map(&:id)).to eql(['1']) + end + end + end + + describe '#where' do + context 'passed condition for nonexistent attribute' do + let(:model) do + new_class do + field :city + end + end + + before do + model.create_table + end + + it 'writes warning message' do + expect(Dynamoid.logger).to receive(:warn) + .with('where conditions contain nonexistent field name `town`') + + model.where(town: 'New York') + end + + it 'writes warning message for condition with operator' do + expect(Dynamoid.logger).to receive(:warn) + .with('where conditions contain nonexistent field name `town`') + + model.where('town.contain': 'New York') + end + + it 'writes warning message with a list of attributes' do + expect(Dynamoid.logger).to receive(:warn) + .with('where conditions contain nonexistent field names `town`, `street1`') + + model.where(town: 'New York', street1: 'Allen Street') + end + end + + context 'nil check' do + let(:model) do + new_class do + field :name + end + end + + before do + @mike = model.create(name: 'Mike') + @johndoe = model.create(name: nil) + end + + context 'store_attribute_with_nil_value = true', config: { store_attribute_with_nil_value: true } do + it 'supports "eq nil" check' do + expect(model.where(name: nil).to_a).to eq [@johndoe] + end + + it 'supports "in [nil]" check' do + expect(model.where('name.in': [nil]).to_a).to eq [@johndoe] + end + + it 'supports "ne nil" check' do + expect(model.where('name.ne': nil).to_a).to eq [@mike] + end + end + + context 'store_attribute_with_nil_value = false', config: { store_attribute_with_nil_value: false } do + it 'supports "null" check' do + expect(model.where('name.null': true).to_a).to eq [@johndoe] + expect(model.where('name.null': false).to_a).to eq [@mike] + end + + it 'supports "not_null" check' do + expect(model.where('name.not_null': true).to_a).to eq [@mike] + expect(model.where('name.not_null': false).to_a).to eq [@johndoe] + end + + it 'does not support "eq nil" check' do + expect(model.where(name: nil).to_a).to eq [] + end + + it 'does not supports "in [nil]" check' do + expect(model.where('name.in': [nil]).to_a).to eq [] + end + + it 'does not support "ne nil" check' do + expect(model.where('name.ne': nil).to_a).to contain_exactly(@mike, @johndoe) + end + end + end + + # Regression + # https://github.com/Dynamoid/dynamoid/issues/435 + context 'when inheritance field (:type by default) is a GSI hash key' do + it 'works without exception' do + # rubocop:disable Lint/ConstantDefinitionInBlock + UserWithGSI = new_class class_name: 'UserWithGSI' do + field :type + + global_secondary_index hash_key: :type, + range_key: :created_at, + projected_attributes: :all + end + # rubocop:enable 
Lint/ConstantDefinitionInBlock + + obj = UserWithGSI.create + + actual = UserWithGSI.where(type: 'UserWithGSI').all.to_a + expect(actual).to eq [obj] + end + end + + describe 'callbacks' do + it 'runs after_initialize callback' do + klass_with_callback = new_class do + field :name + after_initialize { print 'run after_initialize' } + end + + object = klass_with_callback.create!(name: 'Alex') + + expect { klass_with_callback.where(name: 'Alex').to_a }.to output('run after_initialize').to_stdout + end + + it 'runs after_find callback' do + klass_with_callback = new_class do + field :name + after_find { print 'run after_find' } + end + + object = klass_with_callback.create!(name: 'Alex') + + expect { klass_with_callback.where(name: 'Alex').to_a }.to output('run after_find').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callback = new_class do + field :name + after_initialize { print 'run after_initialize' } + after_find { print 'run after_find' } + end + + object = klass_with_callback.create!(name: 'Alex') + + expect do + klass_with_callback.where(name: 'Alex').to_a + end.to output('run after_initializerun after_find').to_stdout + end + end + end + + describe '#find_by_pages' do + let(:model) do + new_class do + self.range_key = :range + field :city + field :age, :number + field :range, :number + field :data + end + end + + before do + 120.times do |i| + model.create( + id: '1', + range: i.to_f, + age: i.to_f, + data: 'A' * 1024 * 16 + ) + end + end + + it 'yields one page at a time' do + expect { |b| model.where(id: '1').find_by_pages(&b) }.to yield_successive_args( + [all(be_a(model)), { last_evaluated_key: an_instance_of(Hash) }], + [all(be_a(model)), { last_evaluated_key: nil }], + ) + end + + describe 'callbacks' do + it 'runs after_initialize callback' do + klass_with_callback = new_class do + field :name + after_initialize { print 'run after_initialize' } + end + + object = klass_with_callback.create!(name: 'Alex') + + expect do + klass_with_callback.where(name: 'Alex').find_by_pages { |*| } # rubocop:disable Lint/EmptyBlock + end.to output('run after_initialize').to_stdout + end + + it 'runs after_find callback' do + klass_with_callback = new_class do + field :name + after_find { print 'run after_find' } + end + + object = klass_with_callback.create!(name: 'Alex') + + expect do + klass_with_callback.where(name: 'Alex').find_by_pages { |*| } # rubocop:disable Lint/EmptyBlock + end.to output('run after_find').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callback = new_class do + field :name + after_initialize { print 'run after_initialize' } + after_find { print 'run after_find' } + end + + object = klass_with_callback.create!(name: 'Alex') + + expect do + klass_with_callback.where(name: 'Alex').find_by_pages { |*| } # rubocop:disable Lint/EmptyBlock + end.to output('run after_initializerun after_find').to_stdout + end + end + end + + describe '#start' do + let(:model) do + new_class(partition_key: :name) do + field :city + end + end + + it 'returns result from the specified item' do + customer1 = model.create(name: 'Bob', city: 'San Francisco') + customer2 = model.create(name: 'Jeff', city: 'San Francisco') + customer3 = model.create(name: 'Mark', city: 'San Francisco') + customer4 = model.create(name: 'Greg', city: 'New York') + + chain = described_class.new(model) + + customers = chain.where(city: 'San Francisco').record_limit(2).to_a + expect(customers.size).to eq 2 + + customers_rest = chain.where(city: 'San 
Francisco').start(customers.last).all.to_a + expect(customers_rest.size).to eq 1 + + expect(customers + customers_rest).to contain_exactly(customer1, customer2, customer3) + end + + it 'accepts hash argument' do + customer1 = model.create(name: 'Bob', city: 'San Francisco') + customer2 = model.create(name: 'Jeff', city: 'San Francisco') + customer3 = model.create(name: 'Mark', city: 'San Francisco') + customer4 = model.create(name: 'Greg', city: 'New York') + + chain = described_class.new(model) + + customers = chain.where(city: 'San Francisco').record_limit(2).to_a + expect(customers.size).to eq 2 + + customers_rest = chain.where(city: 'San Francisco').start(name: customers.last.name).all.to_a + expect(customers_rest.size).to eq 1 + + expect(customers + customers_rest).to contain_exactly(customer1, customer2, customer3) + end + + context 'document with range key' do + let(:model) do + Class.new do + include Dynamoid::Document + table name: :customer, key: :version + range :age, :integer + field :name + field :gender + end + end + + before do + @customer1 = model.create(version: 'v1', name: 'Bob', age: 10, gender: 'male') + @customer2 = model.create(version: 'v1', name: 'Jeff', age: 15, gender: 'female') + @customer3 = model.create(version: 'v1', name: 'Mark', age: 20, gender: 'male') + @customer4 = model.create(version: 'v1', name: 'Greg', age: 25, gender: 'female') + end + + it 'returns query results from the specified item' do + chain = described_class.new(model) + + expect(chain).to receive(:raw_pages_via_query).and_call_original + customers = chain.where(version: 'v1', 'age.gt': 10).start(@customer2).all.to_a + + expect(customers).to contain_exactly(@customer3, @customer4) + end + + it 'returns scan results from the specified item' do + chain = described_class.new(model) + + expect(chain).to receive(:raw_pages_via_scan).and_call_original + customers = chain.where(gender: 'male').start(@customer1).all.to_a + + expect(customers).to contain_exactly(@customer3) + end + end + + context 'document without range key' do + let(:model) do + new_class(partition_key: :name) do + field :age, :integer + end + end + + before do + @customer1 = model.create(name: 'Bob', age: 10) + @customer2 = model.create(name: 'Jeff', age: 15) + @customer3 = model.create(name: 'Mark', age: 20) + @customer4 = model.create(name: 'Greg', age: 25) + end + + it 'returns scan results from the specified item' do + chain = described_class.new(model) + + expect(chain).to receive(:raw_pages_via_scan).and_call_original + customers = chain.where('age.gt': 10).start(@customer2).all.to_a + + expect(customers).to contain_exactly(@customer3, @customer4) + end + end + end + + describe '#delete_all' do + it 'deletes in batch' do + klass = new_class + klass.create!
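# delete_all is expected to remove the matching items with BatchWriteItem
# rather than issuing a separate DeleteItem request per record.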
+ + chain = described_class.new(klass) + + expect(Dynamoid.adapter.client).to receive(:batch_write_item).and_call_original + chain.delete_all + end + + context 'when some conditions specified' do + it 'deletes only proper items' do + klass = new_class do + field :title + end + + document1 = klass.create!(title: 'Doc #1') + klass.create!(title: 'Doc #2') + document3 = klass.create!(title: 'Doc #3') + + chain = described_class.new(klass) + chain = chain.where(title: 'Doc #2') + + expect { chain.delete_all }.to change { klass.count }.by(-1) + expect(klass.all).to contain_exactly(document1, document3) + end + + it 'loads items with Query if can' do + klass = new_class do + range :title + end + + document = klass.create!(title: 'Doc #1') + + chain = described_class.new(klass) + chain = chain.where(id: document.id) + + expect(Dynamoid.adapter.client).to receive(:query).and_call_original + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + + it 'loads items with Scan if cannot use Query' do + klass = new_class do + range :title + field :author + end + + klass.create!(title: "The Cuckoo's Calling", author: 'J. K. Rowling') + + chain = described_class.new(klass) + chain = chain.where(author: 'J. K. Rowling') + + expect(Dynamoid.adapter.client).to receive(:scan).and_call_original + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + + context 'Query (partition key specified)' do + it 'works well with composite primary key' do + klass = new_class do + range :title + end + + document = klass.create!(title: 'Doc #1') + klass.create!(title: 'Doc #2') + + chain = described_class.new(klass) + chain = chain.where(id: document.id) + + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + + it 'works well when there is partition key only' do + klass = new_class do + field :title + end + + document = klass.create! + klass.create! + + chain = described_class.new(klass) + chain = chain.where(id: document.id) + + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + end + + context 'Scan (partition key is not specified)' do + it 'works well with composite primary key' do + klass = new_class do + range :title + end + + klass.create!(title: 'Doc #1') + klass.create!(title: 'Doc #2') + + chain = described_class.new(klass) + chain = chain.where(title: 'Doc #1') + + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + + it 'works well when there is partition key only' do + klass = new_class do + field :title + end + + klass.create!(title: 'Doc #1') + klass.create!(title: 'Doc #2') + + chain = described_class.new(klass) + chain = chain.where(title: 'Doc #1') + + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + end + end + + context 'there are no conditions' do + it 'deletes all the items' do + klass = new_class do + field :title + end + + 3.times { klass.create! } + chain = described_class.new(klass) + expect { chain.delete_all }.to change { klass.count }.from(3).to(0) + end + + context 'Scan' do + it 'works well with composite primary key' do + klass = new_class do + range :title + end + + klass.create!(title: 'Doc #1') + chain = described_class.new(klass) + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + + it 'works well when there is partition key only' do + klass = new_class + + klass.create! 
+ chain = described_class.new(klass) + expect { chain.delete_all }.to change { klass.count }.by(-1) + end + end + end + end + + describe '#first' do + let(:model) do + new_class partition_key: :name do + range :age, :integer + field :city, :string + end + end + + it 'applies a scan limit if no conditions are present' do + document = model.create(name: 'Bob', age: 5) + + chain = described_class.new(model) + expect_any_instance_of(described_class).to receive(:scan_limit).with(1).and_call_original + expect(chain.first).to eq(document) + end + + it 'applies the correct scan limit if no conditions are present' do + document1 = model.create(name: 'Bob', age: 5) + document2 = model.create(name: 'Bob', age: 6) + document3 = model.create(name: 'Bob', age: 7) + + chain = described_class.new(model) + expect_any_instance_of(described_class).to receive(:scan_limit).with(2).and_call_original + expect(chain.first(2).to_set).to eq([document1, document2].to_set) + end + + it 'applies a record limit if only key conditions are present' do + document = model.create(name: 'Bob', age: 5) + + chain = described_class.new(model) + expect_any_instance_of(described_class).to receive(:record_limit).with(1).and_call_original + expect(chain.where(name: 'Bob', age: 5).first).to eq(document) + end + + it 'applies the correct record limit if only key conditions are present' do + document1 = model.create(name: 'Bob', age: 5) + document2 = model.create(name: 'Bob', age: 6) + document3 = model.create(name: 'Bob', age: 7) + + chain = described_class.new(model) + expect_any_instance_of(described_class).to receive(:record_limit).with(2).and_call_original + expect(chain.where(name: 'Bob').first(2)).to eq([document1, document2]) + end + + it 'does not apply a record limit if the hash key is missing' do + document = model.create(name: 'Bob', city: 'New York', age: 5) + + chain = described_class.new(model) + expect_any_instance_of(described_class).not_to receive(:record_limit) + expect(chain.where(age: 5).first).to eq(document) + end + + it 'does not apply a record limit if non-key conditions are present' do + document = model.create(name: 'Bob', city: 'New York', age: 5) + + chain = described_class.new(model) + expect_any_instance_of(described_class).not_to receive(:record_limit) + expect(chain.where(city: 'New York').first).to eq(document) + expect(chain.where(name: 'Bob', city: 'New York').first).to eq(document) + expect(chain.where(name: 'Bob', age: 5, city: 'New York').first).to eq(document) + end + + it 'does not apply a record limit if non-equality conditions are present' do + document1 = model.create(name: 'Bob', age: 5) + document2 = model.create(name: 'Alice', age: 6) + + chain = described_class.new(model) + expect_any_instance_of(described_class).not_to receive(:record_limit) + expect(chain.where('name.gt': 'Alice').first).to eq(document1) + end + + it 'returns nil if no matching document is present' do + model.create(name: 'Bob', age: 5) + + expect(model.where(name: 'Alice').first).to be_nil + end + + it 'returns the first document with regards to the sort order' do + document1 = model.create(name: 'Bob', age: 5) + document2 = model.create(name: 'Bob', age: 9) + document3 = model.create(name: 'Bob', age: 12) + + expect(model.first.age).to eq(5) + end + + it 'returns the first document matching the criteria and with regards to the sort order' do + document1 = model.create(name: 'Bob', age: 4) + document3 = model.create(name: 'Alice', age: 6) + document4 = model.create(name: 'Alice', age: 8) + + expect(model.where(name: 
'Alice').first.age).to eq(6) + end + + context 'scope is reused' do + it 'does not affect other query methods when no key conditions' do + klass = new_class do + field :name + end + + klass.create!(name: 'Alice') + klass.create!(name: 'Lucy') + + scope = klass.where({}) + expect(scope.first).to be_present + expect(scope.count).to eq 2 + expect(scope.to_a.size).to eq 2 + end + + it 'does not affect other query methods when passed key conditions' do + klass = new_class do + range :name + end + + klass.create!(id: '1', name: 'Alice') + klass.create!(id: '1', name: 'Anne') + klass.create!(id: '1', name: 'Lucy') + + scope = klass.where(id: '1') + expect(scope.first).to be_present + expect(scope.count).to eq 3 + expect(scope.all.to_a.size).to eq 3 + end + end + + describe 'callbacks' do + it 'runs after_initialize callback' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + end + + object = klass_with_callback.create! + + expect do + klass_with_callback.first + end.to output('run after_initialize').to_stdout + end + + it 'runs after_find callback' do + klass_with_callback = new_class do + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! + + expect do + klass_with_callback.first + end.to output('run after_find').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! + + expect do + klass_with_callback.first + end.to output('run after_initializerun after_find').to_stdout + end + end + end + + describe '#count' do + describe 'Query vs Scan' do + it 'Scans when query is empty' do + chain = described_class.new(Address) + chain = chain.where({}) + expect(chain).to receive(:count_via_scan) + chain.count + end + + it 'Queries when query is only ID' do + chain = described_class.new(Address) + chain = chain.where(id: 'test') + expect(chain).to receive(:count_via_query) + chain.count + end + + it 'Queries when query contains ID' do + chain = described_class.new(Address) + chain = chain.where(id: 'test', city: 'Bucharest') + expect(chain).to receive(:count_via_query) + chain.count + end + + it 'Scans when query includes keys that are neither a hash nor a range' do + chain = described_class.new(Address) + chain = chain.where(city: 'Bucharest') + expect(chain).to receive(:count_via_scan) + chain.count + end + + it 'Scans when query is only a range' do + chain = described_class.new(Tweet) + chain = chain.where(group: 'xx') + expect(chain).to receive(:count_via_scan) + chain.count + end + + it 'Scans when there is only not-equal operator for hash key' do + chain = described_class.new(Address) + chain = chain.where('id.in': ['test']) + expect(chain).to receive(:count_via_scan) + chain.count + end + end + + context 'Query' do + let(:model) do + Class.new do + include Dynamoid::Document + + table name: :customer, key: :name + range :age, :integer + end + end + + it 'returns count of filtered documents' do + customer1 = model.create(name: 'Bob', age: 5) + customer2 = model.create(name: 'Bob', age: 9) + customer3 = model.create(name: 'Bob', age: 12) + + expect(model.where(name: 'Bob', 'age.lt': 10).count).to eql(2) + end + end + + context 'Scan' do + let(:model) do + new_class do + field :age, :integer + end + end + + it 'returns count of filtered documents' do + customer1 = model.create(age: 5) + customer2 = model.create(age: 9) + customer3 = 
model.create(age: 12) + + expect(model.where('age.lt': 10).count).to eql(2) + end + end + end + + describe '#project' do + let(:model) do + new_class do + field :name + field :age, :integer + end + end + + it 'loads only specified attributes' do + model.create(name: 'Alex', age: 21) + obj, = model.project(:age).to_a + + expect(obj.age).to eq 21 + + expect(obj.id).to eq nil + expect(obj.name).to eq nil + end + + it 'works with Scan' do + model.create(name: 'Alex', age: 21) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_scan).and_call_original + + obj, = chain.project(:age).to_a + expect(obj.attributes).to eq(age: 21) + end + + it 'works with Query' do + obj = model.create(name: 'Alex', age: 21) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + + obj_loaded, = chain.where(id: obj.id).project(:age).to_a + expect(obj_loaded.attributes).to eq(age: 21) + end + + context 'when attribute name is a DynamoDB reserved word' do + let(:model) do + new_class do + field :name + field :bucket, :integer # BUCKET is a reserved word + end + end + + it 'works with Scan' do + model.create(name: 'Alex', bucket: 2) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_scan).and_call_original + + obj, = chain.project(:bucket).to_a + expect(obj.attributes).to eq(bucket: 2) + end + + it 'works with Query' do + object = model.create(name: 'Alex', bucket: 2) + + chain = described_class.new(model) + expect(chain).to receive(:raw_pages_via_query).and_call_original + + obj, = chain.where(id: object.id).project(:bucket).to_a + expect(obj.attributes).to eq(bucket: 2) + end + end + end + + describe '#pluck' do + let(:model) do + new_class do + field :name, :string + field :age, :integer + end + end + + it 'returns an array of attribute values' do + model.create(name: 'Alice', age: 21) + model.create(name: 'Bob', age: 34) + + expect(model.pluck(:name)).to contain_exactly('Alice', 'Bob') + end + + it 'returns an array of arrays of attribute values if requested several attributes' do + model.create(name: 'Alice', age: 21) + model.create(name: 'Bob', age: 34) + + expect(model.pluck(:name, :age)).to contain_exactly(['Alice', 21], ['Bob', 34]) + end + + it 'can be chained with where clause' do + model.create(name: 'Alice', age: 21) + model.create(name: 'Bob', age: 34) + + expect(model.where('age.gt': 30).pluck(:name)).to eq(['Bob']) + end + + it 'accepts both string and symbolic attribute names' do + model.create(name: 'Alice', age: 21) + + expect(model.pluck(:name)).to eq(['Alice']) + expect(model.pluck('name')).to eq(['Alice']) + expect(model.pluck('name', :age)).to eq([['Alice', 21]]) + end + + it 'casts values to declared field types' do + model.create(created_at: '03-04-2020 23:40:00'.to_time) + + expect(model.pluck(:created_at)).to eq(['03-04-2020 23:40:00'.to_time]) + end + + context 'scope is reused' do + it 'does not affect other query methods when there is one field to fetch' do + klass = new_class do + field :name + field :age, :integer + end + + klass.create!(name: 'Alice', age: 11) + scope = klass.where({}) + + scope.pluck(:name) + array = scope.all.to_a + + object = array[0] + expect(object.name).to eq 'Alice' + expect(object.age).to eq 11 + end + + it 'does not affect other query methods when there are several fields to fetch' do + klass = new_class do + field :name + field :age, :integer + field :tag_id + end + + klass.create!(name: 'Alice', age: 11, tag_id: '719') + scope = klass.where({}) + + 
scope.pluck(:name, :age) + array = scope.all.to_a + + object = array[0] + expect(object.name).to eq 'Alice' + expect(object.age).to eq 11 + expect(object.tag_id).to eq '719' + end + end + + context 'when attribute name is a DynamoDB reserved word' do + let(:model) do + new_class do + field :name + field :bucket, :integer # BUCKET is a reserved word + end + end + + it 'works with Scan' do + model.create(name: 'Alice', bucket: 1001) + model.create(name: 'Bob', bucket: 1002) + + expect(model.pluck(:bucket)).to contain_exactly(1001, 1002) + end + + it 'works with Query' do + object = model.create(name: 'Alice', bucket: 1001) + + expect(model.where(id: object.id).pluck(:bucket)).to eq([1001]) + end + end + end + + describe 'User' do + let(:chain) { described_class.new(User) } + + it 'defines each' do + chain = self.chain.where(name: 'Josh') + chain.each { |u| u.update_attribute(:name, 'Justin') } + + expect(User.find(user.id).name).to eq 'Justin' + end + + it 'includes Enumerable' do + chain = self.chain.where(name: 'Josh') + + expect(chain.collect(&:name)).to eq ['Josh'] + end + end + + describe 'Tweet' do + let!(:tweet1) { Tweet.create(tweet_id: 'x', group: 'one') } + let!(:tweet2) { Tweet.create(tweet_id: 'x', group: 'two') } + let!(:tweet3) { Tweet.create(tweet_id: 'xx', group: 'two') } + let(:tweets) { [tweet1, tweet2, tweet3] } + let(:chain) { described_class.new(Tweet) } + + it 'limits evaluated records' do + chain = self.chain.where({}) + expect(chain.record_limit(1).count).to eq 1 + expect(chain.record_limit(2).count).to eq 2 + end + + it 'finds tweets with a start' do + chain = self.chain.where(tweet_id: 'x') + chain.start(tweet1) + expect(chain.count).to eq 1 + expect(chain.first).to eq tweet2 + end + + it 'finds one specific tweet' do + chain = self.chain.where(tweet_id: 'xx', group: 'two') + expect(chain.all.to_a).to eq [tweet3] + end + + it 'finds posts with "where" method with "gt" query' do + ts_epsilon = 0.001 # 1 ms + time = DateTime.now + post1 = Post.create(post_id: 'x', posted_at: time) + post2 = Post.create(post_id: 'x', posted_at: (time + 1.hour)) + chain = described_class.new(Post) + chain = chain.where(post_id: 'x', 'posted_at.gt': (time + ts_epsilon)) + expect(chain.count).to eq 1 + stored_record = chain.first + expect(stored_record.attributes[:post_id]).to eq post2.attributes[:post_id] + # Must use an epsilon to compare timestamps after round-trip: https://github.com/Dynamoid/Dynamoid/issues/2 + expect(stored_record.attributes[:created_at]).to be_within(ts_epsilon).of(post2.attributes[:created_at]) + expect(stored_record.attributes[:posted_at]).to be_within(ts_epsilon).of(post2.attributes[:posted_at]) + expect(stored_record.attributes[:updated_at]).to be_within(ts_epsilon).of(post2.attributes[:updated_at]) + end + + it 'finds posts with "where" method with "lt" query' do + ts_epsilon = 0.001 # 1 ms + time = DateTime.now + post1 = Post.create(post_id: 'x', posted_at: time) + post2 = Post.create(post_id: 'x', posted_at: (time + 1.hour)) + chain = described_class.new(Post) + chain = chain.where(post_id: 'x', 'posted_at.lt': (time + 1.hour - ts_epsilon)) + expect(chain.count).to eq 1 + stored_record = chain.first + expect(stored_record.attributes[:post_id]).to eq post2.attributes[:post_id] + # Must use an epsilon to compare timestamps after round-trip: https://github.com/Dynamoid/Dynamoid/issues/2 + expect(stored_record.attributes[:created_at]).to be_within(ts_epsilon).of(post1.attributes[:created_at]) + expect(stored_record.attributes[:posted_at]).to 
be_within(ts_epsilon).of(post1.attributes[:posted_at]) + expect(stored_record.attributes[:updated_at]).to be_within(ts_epsilon).of(post1.attributes[:updated_at]) + end + + it 'finds posts with "where" method with "between" query' do + ts_epsilon = 0.001 # 1 ms + time = DateTime.now + post1 = Post.create(post_id: 'x', posted_at: time) + post2 = Post.create(post_id: 'x', posted_at: (time + 1.hour)) + chain = described_class.new(Post) + chain = chain.where(post_id: 'x', 'posted_at.between': [time - ts_epsilon, time + ts_epsilon]) + expect(chain.count).to eq 1 + stored_record = chain.first + expect(stored_record.attributes[:post_id]).to eq post2.attributes[:post_id] + # Must use an epsilon to compare timestamps after round-trip: https://github.com/Dynamoid/Dynamoid/issues/2 + expect(stored_record.attributes[:created_at]).to be_within(ts_epsilon).of(post1.attributes[:created_at]) + expect(stored_record.attributes[:posted_at]).to be_within(ts_epsilon).of(post1.attributes[:posted_at]) + expect(stored_record.attributes[:updated_at]).to be_within(ts_epsilon).of(post1.attributes[:updated_at]) + end + + describe 'batch queries' do + it 'returns all results' do + expect(chain.batch(2).all.count).to eq tweets.size + end + end + end + + describe '#with_index' do + context 'when Local Secondary Index (LSI) used' do + let(:klass_with_local_secondary_index) do + new_class do + range :owner_id + + field :age, :integer + + local_secondary_index range_key: :age, + name: :age_index, projected_attributes: :all + end + end + + before do + klass_with_local_secondary_index.create(id: 'the same id', owner_id: 'a', age: 3) + klass_with_local_secondary_index.create(id: 'the same id', owner_id: 'c', age: 2) + klass_with_local_secondary_index.create(id: 'the same id', owner_id: 'b', age: 1) + end + + it 'sorts the results in ascending order' do + chain = described_class.new(klass_with_local_secondary_index) + models = chain.where(id: 'the same id').with_index(:age_index).scan_index_forward(true) + expect(models.map(&:owner_id)).to eq %w[b c a] + end + + it 'sorts the results in desc order' do + chain = described_class.new(klass_with_local_secondary_index) + models = chain.where(id: 'the same id').with_index(:age_index).scan_index_forward(false) + expect(models.map(&:owner_id)).to eq %w[a c b] + end + end + + context 'when Global Secondary Index (GSI) used' do + let(:klass_with_global_secondary_index) do + new_class do + range :owner_id + + field :age, :integer + + global_secondary_index hash_key: :owner_id, range_key: :age, + name: :age_index, projected_attributes: :all + end + end + let(:chain) { described_class.new(klass_with_global_secondary_index) } + + before do + klass_with_global_secondary_index.create(id: 'other id', owner_id: 'a', age: 1) + klass_with_global_secondary_index.create(id: 'the same id', owner_id: 'a', age: 3) + klass_with_global_secondary_index.create(id: 'the same id', owner_id: 'c', age: 2) + klass_with_global_secondary_index.create(id: 'no age', owner_id: 'b') + end + + it 'sorts the results in ascending order' do + models = chain.where(owner_id: 'a').with_index(:age_index).scan_index_forward(true) + expect(models.map(&:age)).to eq [1, 3] + end + + it 'sorts the results in desc order' do + models = chain.where(owner_id: 'a').with_index(:age_index).scan_index_forward(false) + expect(models.map(&:age)).to eq [3, 1] + end + + it 'works with string names' do + models = chain.where(owner_id: 'a').with_index('age_index').scan_index_forward(false) + expect(models.map(&:age)).to eq [3, 1] + end + + it 
'raises an error when an unknown index is passed' do + expect do + chain.where(owner_id: 'a').with_index(:missing_index) + end.to raise_error Dynamoid::Errors::InvalidIndex, /Unknown index/ + end + + it 'allows scanning the index' do + models = chain.with_index(:age_index) + expect(models.map(&:id)).not_to include 'no age' + end + end + end + + describe '#scan_index_forward' do + let(:klass_with_range_key) do + new_class do + range :name + field :age, :integer + end + end + + it 'returns collection sorted in ascending order by range key when called with true' do + klass_with_range_key.create(id: 'the same id', name: 'a') + klass_with_range_key.create(id: 'the same id', name: 'c') + klass_with_range_key.create(id: 'the same id', name: 'b') + + chain = described_class.new(klass_with_range_key) + models = chain.where(id: 'the same id').scan_index_forward(true) + expect(models.map(&:name)).to eq %w[a b c] + end + + it 'returns collection sorted in descending order by range key when called with false' do + klass_with_range_key.create(id: 'the same id', name: 'a') + klass_with_range_key.create(id: 'the same id', name: 'c') + klass_with_range_key.create(id: 'the same id', name: 'b') + + chain = described_class.new(klass_with_range_key) + models = chain.where(id: 'the same id').scan_index_forward(false) + expect(models.map(&:name)).to eq %w[c b a] + end + + it 'overrides previous calls' do + klass_with_range_key.create(id: 'the same id', name: 'a') + klass_with_range_key.create(id: 'the same id', name: 'c') + klass_with_range_key.create(id: 'the same id', name: 'b') + + chain = described_class.new(klass_with_range_key) + models = chain.where(id: 'the same id').scan_index_forward(false).scan_index_forward(true) + expect(models.map(&:name)).to eq %w[a b c] + end + + context 'when Scan conditions' do + it 'does not affect a query without conditions on the hash key' do + klass_with_range_key.create(id: 'the same id', name: 'a') + klass_with_range_key.create(id: 'the same id', name: 'c') + klass_with_range_key.create(id: 'the same id', name: 'b') + + chain = described_class.new(klass_with_range_key) + models = chain.where('name.gte': 'a').scan_index_forward(false) + expect(models.map(&:name)).not_to eq %w[c b a] + end + end + + context 'when Local Secondary Index (LSI) used' do + let(:klass_with_local_secondary_index) do + new_class do + range :name + field :age, :integer + + local_secondary_index range_key: :age, name: :age_index, projected_attributes: :all + end + end + + it 'affects a query' do + klass_with_local_secondary_index.create(id: 'the same id', age: 30, name: 'a') + klass_with_local_secondary_index.create(id: 'the same id', age: 10, name: 'c') + klass_with_local_secondary_index.create(id: 'the same id', age: 20, name: 'b') + + chain = described_class.new(klass_with_local_secondary_index) + models = chain.where(id: 'the same id', 'age.gt': 0).scan_index_forward(false) + expect(models.map(&:age)).to eq [30, 20, 10] + expect(chain.key_fields_detector.index_name).to eq(:age_index) + end + end + + context 'when Global Secondary Index (GSI) used' do + let(:klass_with_global_secondary_index) do + new_class do + range :name + field :age, :integer + field :nickname + + global_secondary_index hash_key: :age, range_key: :nickname, name: :age_nickname_index, projected_attributes: :all + end + end + + it 'affects a query' do + klass_with_global_secondary_index.create(age: 30, nickname: 'a', name: 'b') + klass_with_global_secondary_index.create(age: 30, nickname: 'c', name: 'c') +
klass_with_global_secondary_index.create(age: 30, nickname: 'b', name: 'a') + + chain = described_class.new(klass_with_global_secondary_index) + models = chain.where(age: 30).scan_index_forward(false) + expect(models.map(&:nickname)).to eq %w[c b a] + expect(chain.key_fields_detector.index_name).to eq(:age_nickname_index) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/criteria_new_spec.rb b/dynamoid/spec/dynamoid/criteria_new_spec.rb new file mode 100644 index 000000000..7a68dbb67 --- /dev/null +++ b/dynamoid/spec/dynamoid/criteria_new_spec.rb @@ -0,0 +1,182 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Criteria do + it 'supports querying with .where method' do + klass = new_class do + field :name + end + + objects = klass.create([{ name: 'Alex' }, { name: 'Bob' }, { name: 'Alex' }]) + expect(klass.where(name: 'Alex')).to contain_exactly(objects[0], objects[2]) + end + + it 'supports querying with .all method' do + klass = new_class do + field :name + end + + objects = klass.create([{ name: 'Alex' }, { name: 'Bob' }]) + expect(klass.all).to match_array(objects) + end + + it 'supports querying with .first method' do + klass = new_class do + range :name + end + + object = klass.create(name: 'Alex') + expect(klass.first).to eq object + end + + it 'supports querying with .last method' do + klass = new_class do + range :name + end + + object = klass.create(name: 'Alex') + expect(klass.last).to eq object + end + + it 'supports querying with .each method' do + klass = new_class do + range :name + end + + result = [] + objects = klass.create([{ name: 'Alex' }, { name: 'Bob' }]) + klass.each { |obj| result << obj } + + expect(result).to match_array(objects) + end + + it 'supports querying with .record_limit method' do + klass = new_class do + field :name + end + + objects = klass.create([{ name: 'Alex' }, { name: 'Bob' }]) + actual = klass.record_limit(1).all.to_a + + expect(actual.size).to eq 1 + expect(actual[0]).to satisfy { |v| v.name == 'Alex' || v.name == 'Bob' } + end + + it 'supports querying with .scan_limit method' do + klass = new_class do + field :name + end + + objects = klass.create([{ name: 'Alex' }, { name: 'Bob' }]) + actual = klass.scan_limit(1).all.to_a + + expect(actual.size).to eq 1 + expect(actual[0]).to satisfy { |v| v.name == 'Alex' || v.name == 'Bob' } + end + + it 'supports querying with .batch method' do + klass = new_class do + field :name + end + + objects = klass.create([{ name: 'Alex' }, { name: 'Bob' }, { name: 'Alex' }]) + expect(klass.batch(2).all).to match_array(objects) + end + + it 'supports querying with .start method' do + klass = new_class do + table key: :age + range :name + field :age, :integer + end + + objects = klass.create([{ age: 20, name: 'Alex' }, { age: 20, name: 'Bob' }, { age: 20, name: 'Michael' }]) + + actual = klass.start(objects[0]).all.to_a + expect(actual).to eq objects[1..2] + end + + it 'supports querying with .scan_index_forward method' do + klass = new_class do + table key: :age + range :name + field :age, :integer + end + + objects = klass.create([{ age: 20, name: 'Alex' }, { age: 20, name: 'Bob' }, { age: 20, name: 'Michael' }]) + + # force Query with age: 20 partition key condition + actual = klass.scan_index_forward(true).where(age: 20).all.to_a + expect(actual).to eq objects + + # force Query with age: 20 partition key condition + actual = klass.scan_index_forward(false).where(age: 20).all.to_a + expect(actual).to eq objects.reverse + end + + it 'supports querying with .find_by_pages 
method' do + klass = new_class + objects = klass.create([{}, {}, {}]) + + pages = [] + klass.find_by_pages do |models, _options| + pages << models # actually there is only one page + end + + expect(pages.flatten).to match_array(objects) + end + + it 'supports querying with .project method' do + klass = new_class do + field :age, :integer + field :name, :string + end + klass.create(age: 20, name: 'Alex') + + objects_with_name = klass.project(:name).to_a + expect(objects_with_name.size).to eq 1 + + object_with_name = objects_with_name[0] + expect(object_with_name.name).to eq 'Alex' + expect(object_with_name.age).to eq nil + end + + it 'supports querying with .pluck method' do + klass = new_class do + field :age, :integer + field :name, :string + end + + klass.create([{ age: 20, name: 'Alex' }, { age: 20, name: 'Bob' }]) + expect(klass.pluck(:name)).to contain_exactly('Alex', 'Bob') + end + + it 'supports querying with .consistent method' do + klass = new_class do + field :age, :integer + end + + objects = klass.create([{ age: 20 }, { age: 30 }]) + actual = klass.consistent.all.to_a + expect(actual).to match_array(objects) + end + + it 'supports .delete_all method' do + klass = new_class do + field :age, :integer + end + + objects = klass.create([{ age: 20 }, { age: 30 }]) + expect { klass.delete_all }.to change { klass.all.to_a.size }.from(2).to(0) + end + + it 'supports .destroy_all method' do + klass = new_class do + field :age, :integer + end + + objects = klass.create([{ age: 20 }, { age: 30 }]) + expect { klass.destroy_all }.to change { klass.all.to_a.size }.from(2).to(0) + end +end diff --git a/dynamoid/spec/dynamoid/criteria_spec.rb b/dynamoid/spec/dynamoid/criteria_spec.rb new file mode 100644 index 000000000..f2e730af3 --- /dev/null +++ b/dynamoid/spec/dynamoid/criteria_spec.rb @@ -0,0 +1,136 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# This file contains legacy specs. They should be revised and moved to the +# criteria_new_spec.rb file. 
+ +describe Dynamoid::Criteria do + let!(:user1) { User.create(name: 'Josh', email: 'josh@joshsymonds.com', admin: true) } + let!(:user2) { User.create(name: 'Justin', email: 'justin@joshsymonds.com', admin: false) } + + it 'finds first using where' do + expect(User.where(name: 'Josh').first).to eq user1 + end + + it 'finds last using where' do + expect(User.where(admin: false).last).to eq user2 + end + + it 'finds all using where' do + expect(User.where(name: 'Josh').all.to_a).to eq [user1] + end + + it 'returns all records' do + expect(Set.new(User.all)).to eq Set.new([user1, user2]) + expect(User.all.first.new_record).to be_falsey + end + + context 'Magazine table' do + before do + Magazine.create_table + end + + it 'returns empty attributes for where' do + expect(Magazine.where(title: 'Josh').all.to_a).to eq [] + end + + it 'returns empty attributes for all' do + expect(Magazine.all.to_a).to eq [] + end + end + + it 'passes each to all members' do + expect { |b| User.each(&b) }.to yield_successive_args( + be_a(User).and(have_attributes('new_record' => false)), + be_a(User).and(have_attributes('new_record' => false)) + ) + end + + it 'passes find_by_pages to all members' do + expect { |b| User.find_by_pages(&b) }.to yield_successive_args( + [all(be_a(User)), { last_evaluated_key: nil }] + ) + end + + it 'returns a last_evaluated_key which may be used to restart iteration' do + # Creates exactly 2 full pages + 58.times { User.create(name: SecureRandom.uuid * 1024) } + + first_page, first_page_meta = User.find_by_pages.first + second_page, = User.start(first_page_meta[:last_evaluated_key]).find_by_pages.first + + expect(first_page & second_page).to be_empty + end + + it 'returns N records' do + 5.times { |i| User.create(name: 'Josh', email: "josh_#{i}@joshsymonds.com") } + expect(User.record_limit(2).all.count).to eq(2) + end + + # TODO: This test is broken using the AWS SDK adapter. 
+ # it 'start with a record' do + # 5.times { |i| User.create(:name => 'Josh', :email => 'josh_#{i}@joshsymonds.com') } + # all = User.all + # User.start(all[3]).all.should eq(all[4..-1]) + # + # all = User.where(:name => 'Josh').all + # User.where(:name => 'Josh').start(all[3]).all.should eq(all[4..-1]) + # end + + it 'send consistent option to adapter' do + pending 'This test is broken as we are overriding the consistent_read option to true inside the adapter' + expect(Dynamoid::Adapter).to receive(:get_item) { |_table_name, _key, options| options[:consistent_read] == true } + User.where(name: 'x').consistent.first + + expect(Dynamoid::Adapter).to receive(:query) { |_table_name, options| options[:consistent_read] == true }.returns([]) + Tweet.where(tweet_id: 'xx', group: 'two').consistent.all + + expect(Dynamoid::Adapter).to receive(:query) { |_table_name, options| options[:consistent_read] == false }.returns([]) + Tweet.where(tweet_id: 'xx', group: 'two').all + end + + it 'does not raise exception when consistent_read is used with scan' do + expect do + User.where(password: 'password').consistent.first + end.not_to raise_error(Dynamoid::Errors::InvalidQuery) + end + + context 'when scans using non-indexed fields and warn_on_scan config option is true' do + before do + @warn_on_scan = Dynamoid::Config.warn_on_scan + Dynamoid::Config.warn_on_scan = true + end + + after do + Dynamoid::Config.warn_on_scan = @warn_on_scan + end + + it 'logs warnings' do + expect(Dynamoid.logger).to receive(:warn).with('Queries without an index are forced to use scan and are generally much slower than indexed queries!') + expect(Dynamoid.logger).to receive(:warn).with('You can index this query by adding index declaration to user.rb:') + expect(Dynamoid.logger).to receive(:warn).with("* global_secondary_index hash_key: 'some-name', range_key: 'some-another-name'") + expect(Dynamoid.logger).to receive(:warn).with("* local_secondary_index range_key: 'some-name'") + expect(Dynamoid.logger).to receive(:warn).with('Not indexed attributes: :name, :password') + + User.where(name: 'x', password: 'password').all + end + end + + context 'when doing intentional, full-table scan (query is empty) and warn_on_scan config option is true' do + before do + @warn_on_scan = Dynamoid::Config.warn_on_scan + Dynamoid::Config.warn_on_scan = true + end + + after do + Dynamoid::Config.warn_on_scan = @warn_on_scan + end + + it 'does not log any warnings' do + expect(Dynamoid.logger).not_to receive(:warn) + + User.all + end + end +end diff --git a/dynamoid/spec/dynamoid/dirty_spec.rb b/dynamoid/spec/dynamoid/dirty_spec.rb new file mode 100644 index 000000000..1c6b5cbef --- /dev/null +++ b/dynamoid/spec/dynamoid/dirty_spec.rb @@ -0,0 +1,500 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Dirty do + let(:model) do + new_class do + field :name + end + end + + describe '#changed?' 
do + it 'returns true if any of the attributes have unsaved changes' do + obj = model.new(name: 'Bob') + expect(obj.changed?).to eq true + + obj = model.create(name: 'Bob') + obj.name = 'Alex' + expect(obj.changed?).to eq true + end + + it 'returns false otherwise' do + obj = model.create(name: 'Bob') + expect(obj.changed?).to eq false + + obj = model.new + expect(obj.changed?).to eq false + + obj = model.create(name: 'Bob') + obj.name = 'Bob' + expect(obj.changed?).to eq false + end + end + + describe '#changed' do + it 'returns an array with the name of the attributes with unsaved changes' do + obj = model.create(name: 'Alex') + obj.name = 'Bob' + expect(obj.changed).to eq ['name'] + + obj = model.new(name: 'Alex') + expect(obj.changed).to eq ['name'] + end + + it 'returns [] when there are no unsaved changes' do + obj = model.create(name: 'Alex') + expect(obj.changed).to eq [] + + obj = model.new + expect(obj.changed).to eq [] + + obj = model.create(name: 'Alex') + obj.name = 'Alex' + expect(obj.changed).to eq [] + end + end + + describe '#changed_attributes' do + it 'returns a hash of the attributes with unsaved changes indicating their original values' do + obj = model.create(name: 'Alex') + obj.name = 'Bob' + expect(obj.changed_attributes).to eq('name' => 'Alex') + + obj = model.new(name: 'Alex') + expect(obj.changed_attributes).to eq('name' => nil) + end + + it 'returns {} when there are no unsaved changes' do + obj = model.create(name: 'Alex') + expect(obj.changed_attributes).to eq({}) + + obj = model.new + expect(obj.changed_attributes).to eq({}) + + obj = model.create(name: 'Alex') + obj.name = 'Alex' + expect(obj.changed_attributes).to eq({}) + end + end + + describe '#clear_changes_information' do + it 'clears current changes information' do + obj = model.new(name: 'Alex') + + expect do + obj.clear_changes_information + end.to change { obj.changes }.from(a_hash_including(name: [nil, 'Alex'])).to({}) + end + + it 'clears previous changes information' do + obj = model.create!(name: 'Alex') # previous change + obj.name = 'Michael' # current change + + expect do + obj.clear_changes_information + end.to change { obj.previous_changes }.from(a_hash_including(name: [nil, 'Alex'])).to({}) + end + end + + describe '#changes_applied' do + it 'clears current changes information' do + obj = model.new(name: 'Alex') + + expect do + obj.changes_applied + end.to change { obj.changes }.from(name: [nil, 'Alex']).to({}) + end + + it 'moves changes to previous changes' do + obj = model.new(name: 'Alex') + + expect do + obj.changes_applied + end.to change { obj.previous_changes }.from({}).to(name: [nil, 'Alex']) + end + end + + describe '#clear_attribute_changes' do + it 'removes changes information for specified attributes' do + klass_with_several_fields = new_class do + field :name + field :age, :integer + field :city + end + + obj = klass_with_several_fields.create!(name: 'Alex', age: 21, city: 'Ottawa') + obj.name = 'Michael' + obj.age = 36 + obj.city = 'Mexico' + + expect(obj.changes).to eql('name' => %w[Alex Michael], 'age' => [21, 36], 'city' => %w[Ottawa Mexico]) + expect(obj.changed_attributes).to eql('name' => 'Alex', 'age' => 21, 'city' => 'Ottawa') + expect(obj.name_changed?).to eql true + expect(obj.age_changed?).to eql true + expect(obj.city_changed?).to eql true + + obj.clear_attribute_changes(%w[name age]) + + expect(obj.changes).to eql('city' => %w[Ottawa Mexico]) + expect(obj.changed_attributes).to eql('city' => 'Ottawa') + expect(obj.name_changed?).to eql false + 
expect(obj.age_changed?).to eql false + expect(obj.city_changed?).to eql true + end + end + + describe '#changes' do + it 'returns a hash of changed attributes indicating their original and new values' do + obj = model.create(name: 'Alex') + obj.name = 'Bob' + expect(obj.changes).to eq('name' => %w[Alex Bob]) + + obj = model.new(name: 'Alex') + expect(obj.changes).to eq('name' => [nil, 'Alex']) + end + + it 'returns {} when there are no unsaved changes' do + obj = model.create(name: 'Alex') + expect(obj.changes).to eq({}) + + obj = model.new + expect(obj.changes).to eq({}) + + obj = model.create(name: 'Alex') + obj.name = 'Alex' + expect(obj.changes).to eq({}) + end + end + + describe '#previous_changes' do + it 'returns a hash of attributes that were changed before the model was saved' do + obj = model.create(name: 'Alex', updated_at: '2019-07-20 00:53:32'.to_datetime) + obj.name = 'Bob' + obj.updated_at = '2019-07-20 20:11:01'.to_datetime + obj.save + + expect(obj.previous_changes).to eq( + 'name' => %w[Alex Bob], + 'updated_at' => ['2019-07-20 00:53:32'.to_datetime, '2019-07-20 20:11:01'.to_datetime] + ) + + obj = model.create(name: 'Alex') + # there are also changes for `created_at` and `updated_at` - just don't check them + expect(obj.previous_changes).to include('id' => [nil, obj.id], 'name' => [nil, 'Alex']) + end + + it 'returns {} when there were no changes made before saving' do + obj = model.create(name: 'Alex') + obj = model.find(obj.id) + expect(obj.previous_changes).to eq({}) + + obj = model.new(name: 'Alex') + expect(obj.previous_changes).to eq({}) + end + end + + describe '#_changed?' do + it 'returns true if attribute has unsaved value' do + obj = model.new(name: 'Bob') + expect(obj.name_changed?).to eq true + + obj = model.create(name: 'Bob') + obj.name = 'Alex' + expect(obj.name_changed?).to eq true + end + + it 'returns false/nil otherwise' do + obj = model.create(name: 'Bob') + expect(obj.name_changed?).to eq false + + obj = model.new + expect(obj.name_changed?).to eq false # in Rails => nil + + obj = model.create(name: 'Bob') + obj.name = 'Bob' + expect(obj.name_changed?).to eq false + end + end + + describe '#_change' do + it 'returns an array with previous and current values' do + obj = model.create(name: 'Alex') + obj.name = 'Bob' + expect(obj.name_change).to eq(%w[Alex Bob]) + + obj = model.new(name: 'Alex') + expect(obj.name_change).to eq([nil, 'Alex']) + end + + it 'returns nil when attribute does not have unsaved value' do + obj = model.create(name: 'Alex') + expect(obj.name_change).to eq(nil) + + obj = model.new + expect(obj.name_change).to eq(nil) + + obj = model.create(name: 'Alex') + obj.name = 'Alex' + expect(obj.name_change).to eq(nil) + end + end + + describe '#_previously_changed?' 
do + it 'returns true if attribute was changed before model was saved' do + obj = model.create(name: 'Alex') + obj.name = 'Bob' + obj.save + expect(obj.name_previously_changed?).to eq(true) + + obj = model.create(name: 'Alex') + expect(obj.name_previously_changed?).to eq(true) + end + + it 'returns false otherwise' do + obj = model.create(name: 'Alex') + obj = model.find(obj.id) + expect(obj.name_previously_changed?).to eq(false) + + obj = model.new(name: 'Alex') + expect(obj.name_previously_changed?).to eq(false) + end + end + + describe '#_previous_change' do + it 'returns an array of old and changed attribute value before the model was saved' do + obj = model.create(name: 'Alex') + obj.name = 'Bob' + obj.save + expect(obj.name_previous_change).to eq(%w[Alex Bob]) + + obj = model.create(name: 'Alex') + expect(obj.name_previous_change).to eq([nil, 'Alex']) + end + + it 'returns nil when there were no changes made before saving' do + obj = model.create(name: 'Alex') + obj = model.find(obj.id) + expect(obj.name_previous_change).to eq(nil) + + obj = model.new(name: 'Alex') + expect(obj.name_previous_change).to eq(nil) + end + end + + describe '#_will_change!' do + it 'marks that the attribute is changing' do + obj = model.create(name: 'Alex') + obj.name_will_change! + obj.name.reverse! + expect(obj.name_change).to eq(%w[Alex xelA]) + + obj = model.create(name: 'Alex') + obj.name.reverse! + expect(obj.name_change).to eq(nil) + end + end + + describe '#_was' do + it 'returns saved attribute value before changing' do + obj = model.create(name: 'Alex') + obj.name = 'Bob' + expect(obj.name_was).to eq('Alex') + + obj = model.new(name: 'Alex') + obj.name = 'Bob' + expect(obj.name_was).to eq(nil) + end + + it 'returns current saved value if attribute was not changed' do + obj = model.create(name: 'Alex') + expect(obj.name_was).to eq('Alex') + end + end + + describe '#restore_!' do + it 'restores original value if attribute is changed' do + a = model.create(name: 'Alex') + a.name = 'Bob' + a.restore_name! + expect(a.name).to eq 'Alex' + end + + it 'removes changes information' do + a = model.create(name: 'Alex') + a.name = 'Bob' + + expect { a.restore_name! }.to change { a.changed? }.from(true).to(false) + end + + it 'returns saved value otherwise' do + a = model.new(name: 'Alex') + a.restore_name! + expect(a.name).to eq nil + + a = model.create(name: 'Alex') + a.restore_name! 
+ expect(a.name).to eq 'Alex' + end + end + + describe 'Document methods and dirty changes' do + describe '.find' do + it 'loads model that does not have unsaved changes' do + a = model.create(name: 'Alex') + a_loaded = model.find(a.id) + + expect(a_loaded.changed?).to eq false + expect(a_loaded.changes).to eq({}) + end + + it 'loads several models that do not have unsaved changes' do + a = model.create(name: 'Alex') + b = model.create(name: 'Bob') + (a_loaded, b_loaded) = model.find(a.id, b.id) + + expect(a_loaded.changed?).to eq false + expect(a_loaded.changes).to eq({}) + + expect(b_loaded.changed?).to eq false + expect(b_loaded.changes).to eq({}) + end + end + + describe '.new' do + it 'returns model that does not have unsaved changes if called without arguments' do + a = model.new + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + + it 'returns model that does have unsaved changes if called with arguments' do + a = model.new(name: 'Alex') + + expect(a.changed?).to eq true + expect(a.changes).to eq('name' => [nil, 'Alex']) + end + end + + describe '.create' do + it 'returns model without unsaved changes' do + a = model.create(name: 'Alex') + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + end + + describe '.update' do + it 'returns model without unsaved changes' do + a = model.create(name: 'Alex') + a_updated = model.update(a.id, name: 'Bob') + + expect(a_updated.changed?).to eq false + expect(a_updated.changes).to eq({}) + end + end + + describe '.update_fields' do + it 'returns model without unsaved changes' do + a = model.create(name: 'Alex') + a_updated = model.update_fields(a.id, name: 'Bob') + + expect(a_updated.changed?).to eq false + expect(a_updated.changes).to eq({}) + end + end + + describe '.upsert' do + it 'returns model without unsaved changes' do + a = model.create(name: 'Alex') + a_updated = model.upsert(a.id, name: 'Bob') + + expect(a_updated.changed?).to eq false + expect(a_updated.changes).to eq({}) + end + end + + describe '#reload' do + it 'cleans model unsaved changes' do + a = model.create(name: 'Alex') + a.name = 'Bob' + a.reload + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + end + + describe '.where' do + it 'returns model without unsaved changes (Query)' do + a = model.create(name: 'Alex') + (a_loaded,) = model.where(id: a.id).to_a + + expect(a_loaded.changed?).to eq false + expect(a_loaded.changes).to eq({}) + end + + it 'returns model without unsaved changes (Scan)' do + a = model.create(name: 'Alex') + (a_loaded,) = model.where(name: a.name).to_a + + expect(a_loaded.changed?).to eq false + expect(a_loaded.changes).to eq({}) + end + end + + describe '#save' do + it 'cleans model unsaved changes' do + a = model.new(name: 'Alex') + a.save + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + end + + describe '#update_attributes' do + it 'cleans model unsaved changes' do + a = model.create(name: 'Alex') + a.update_attributes(name: 'Bob') + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + end + + describe '#update_attribute' do + it 'cleans model unsaved changes' do + a = model.create(name: 'Alex') + a.update_attribute(:name, 'Bob') + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + end + + describe '#update' do + it 'cleans model unsaved changes' do + a = model.create(name: 'Alex') + a.update do |t| + t.set(name: 'Bob') + end + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + end + + describe '#touch' do + 
it 'cleans model unsaved changes' do + a = model.create(name: 'Alex') + a.touch + + expect(a.changed?).to eq false + expect(a.changes).to eq({}) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/document_spec.rb b/dynamoid/spec/dynamoid/document_spec.rb new file mode 100644 index 000000000..9cf992df4 --- /dev/null +++ b/dynamoid/spec/dynamoid/document_spec.rb @@ -0,0 +1,426 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Document do + it 'runs load hooks' do + result = nil + ActiveSupport.on_load(:dynamoid) { |loaded| result = loaded } + + expect(result).to eq(described_class) + end + + it 'initializes a new document' do + address = Address.new + + expect(address.new_record).to be_truthy + expect(address.attributes).to eq({}) + end + + it 'responds to will_change! methods for all fields' do + address = Address.new + expect(address).to respond_to(:id_will_change!) + expect(address).to respond_to(:options_will_change!) + expect(address).to respond_to(:created_at_will_change!) + expect(address).to respond_to(:updated_at_will_change!) + end + + it 'initializes a new document with attributes' do + address = Address.new(city: 'Chicago') + + expect(address.new_record).to be_truthy + + expect(address.attributes).to eq(city: 'Chicago') + end + + it 'initializes a new document with a virtual attribute' do + address = Address.new(zip_code: '12345') + + expect(address.new_record).to be_truthy + + expect(address.attributes).to eq(city: 'Chicago') + end + + it 'allows interception of write_attribute on load' do + klass = new_class do + field :city + + def city=(value) + self[:city] = value.downcase + end + end + expect(klass.new(city: 'Chicago').city).to eq 'chicago' + end + + it 'ignores unknown fields (does not raise error)' do + klass = new_class do + field :city + end + + model = klass.new(unknown_field: 'test', city: 'Chicago') + expect(model.city).to eql 'Chicago' + end + + describe '#initialize' do + let(:klass) do + new_class do + field :foo + end + end + + context 'when block specified' do + it 'calls a block and passes a model as argument' do + object = klass.new(foo: 'bar') do |obj| + obj.foo = 'baz' + end + + expect(object.foo).to eq('baz') + end + end + + describe 'type casting' do + let(:klass) do + new_class do + field :count, :integer + end + end + + it 'type casts attributes' do + obj = klass.new(count: '101') + expect(obj.attributes[:count]).to eql(101) + end + end + + describe 'callbacks' do + it 'runs after_initialize callback' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + end + + expect { klass_with_callback.new }.to output('run after_initialize').to_stdout + end + end + end + + describe '.exist?' 
do + it 'checks if there is a document with specified primary key' do + address = Address.create(city: 'Chicago') + + expect(Address).to exist(address.id) + expect(Address).not_to exist('does-not-exist') + end + + it 'supports an array of primary keys' do + address_1 = Address.create(city: 'Chicago') + address_2 = Address.create(city: 'New York') + address_3 = Address.create(city: 'Los Angeles') + + expect(Address).to exist([address_1.id, address_2.id]) + expect(Address).not_to exist([address_1.id, 'does-not-exist']) + end + + it 'supports hash with conditions' do + address = Address.create(city: 'Chicago') + + expect(Address).to exist(city: address.city) + expect(Address).not_to exist(city: 'does-not-exist') + end + + it 'checks if there is any document in the table at all if called without argument' do + Address.create_table(sync: true) + expect(Address.count).to eq 0 + + expect { Address.create }.to change(Address, :exists?).from(false).to(true) + end + end + + it 'gets errors courtesy of ActiveModel' do + address = Address.create(city: 'Chicago') + + expect(address.errors).to be_empty + expect(address.errors.full_messages).to be_empty + end + + it 'has default table options' do + address = Address.create + + expect(address.id).not_to be_nil + expect(Address.table_name).to eq 'dynamoid_tests_addresses' + expect(Address.hash_key).to eq :id + expect(Address.read_capacity).to eq 100 + expect(Address.write_capacity).to eq 20 + expect(Address.inheritance_field).to eq :type + end + + it 'follows any table options provided to it' do + tweet = Tweet.create(group: 12_345) + + expect { tweet.id }.to raise_error(NoMethodError) + expect(tweet.tweet_id).not_to be_nil + expect(Tweet.table_name).to eq 'dynamoid_tests_twitters' + expect(Tweet.hash_key).to eq :tweet_id + expect(Tweet.read_capacity).to eq 200 + expect(Tweet.write_capacity).to eq 200 + end + + describe '#hash_key' do + context 'when there is already an attribute with name `hash_key`' do + let(:klass) do + new_class do + field :hash_key + end + end + + it 'returns id value if hash_key attribute is not set' do + obj = klass.new(id: 'id') + + expect(obj.id).to eq 'id' + expect(obj.hash_key).to eq 'id' + end + + it 'returns hash_key value if hash_key attribute is set' do + obj = klass.new(id: 'id', hash_key: 'hash key') + + expect(obj.id).to eq 'hash key' + expect(obj.hash_key).to eq 'hash key' + end + end + + context 'when hash key attribute name is `hash_key`' do + let(:klass) do + new_class do + table key: :hash_key + end + end + + it 'returns id value' do + obj = klass.new(hash_key: 'hash key') + expect(obj.hash_key).to eq 'hash key' + end + end + end + + describe '#range_value' do + context 'when there is already an attribute with name `range_value`' do + let(:klass) do + new_class do + range :name + field :range_value + end + end + + it 'returns range key value if range_value attribute is not set' do + obj = klass.new(name: 'name') + + expect(obj.name).to eq 'name' + expect(obj.range_value).to eq 'name' + end + + it 'returns range_value value if range_value attribute is set' do + obj = klass.new(name: 'name', range_value: 'range key') + + expect(obj.name).to eq 'range key' + expect(obj.range_value).to eq 'range key' + end + end + + context 'when range key attribute name is `range_value`' do + let(:klass) do + new_class do + range :range_value + end + end + + it 'returns range key value' do + obj = klass.new(range_value: 'range key') + expect(obj.range_value).to eq 'range key' + end + end + end + + shared_examples 'it has equality testing and hashing'
do + it 'is equal to itself' do + expect(document).to eq document # rubocop:disable RSpec/IdenticalEqualityAssertion + end + + it 'is equal to another document with the same key(s)' do + expect(document).to eq same + end + + it 'is not equal to another document with different key(s)' do + expect(document).not_to eq different + end + + it 'is not equal to an object that is not a document' do + expect(document).not_to eq 'test' + end + + it 'is not equal to nil' do + expect(document).not_to eq nil + end + + it 'hashes documents with the keys to the same value' do + expect(document => 1).to have_key(same) + end + end + + context 'without a range key' do + it_behaves_like 'it has equality testing and hashing' do + let(:document) { Address.create(id: 123, city: 'Seattle') } + let(:different) { Address.create(id: 456, city: 'Seattle') } + let(:same) { Address.new(id: 123, city: 'Boston') } + end + end + + context 'with a range key' do + it_behaves_like 'it has equality testing and hashing' do + let(:document) { Tweet.create(tweet_id: 'x', group: 'abc', msg: 'foo') } + let(:different) { Tweet.create(tweet_id: 'y', group: 'abc', msg: 'foo') } + let(:same) { Tweet.new(tweet_id: 'x', group: 'abc', msg: 'bar') } + end + + it 'is not equal to another document with the same hash key but a different range value' do + document = Tweet.create(tweet_id: 'x', group: 'abc') + different = Tweet.create(tweet_id: 'x', group: 'xyz') + + expect(document).not_to eq different + end + end + + describe '#count' do + it 'returns the number of documents in the table' do + document = Tweet.create(tweet_id: 'x', group: 'abc') + different = Tweet.create(tweet_id: 'x', group: 'xyz') + + expect(Tweet.count).to eq 2 + end + end + + describe '.deep_subclasses' do + it 'returns direct children' do + expect(Car.deep_subclasses).to eq [Cadillac] + end + + it 'returns grandchildren too' do + expect(Vehicle.deep_subclasses).to include(Cadillac) + end + end + + describe 'TTL (Time to Live)' do + let(:model) do + new_class do + table expires: { field: :expired_at, after: 30 * 60 } + + field :expired_at, :integer + end + end + + let(:model_with_wrong_field_name) do + new_class do + table expires: { field: :foo, after: 30 * 60 } + + field :expired_at, :integer + end + end + + it 'sets default value at the creation' do + travel 1.hour do + obj = model.create + expect(obj.expired_at).to eq(Time.now.to_i + (30 * 60)) + end + end + + it 'sets default value at the updating' do + obj = model.create + + travel 1.hour do + obj.update_attributes(expired_at: nil) + expect(obj.expired_at).to eq(Time.now.to_i + (30 * 60)) + end + end + + it 'does not override already existing value' do + obj = model.create(expired_at: 1024) + expect(obj.expired_at).to eq 1024 + + obj.update_attributes(expired_at: 512) + expect(obj.expired_at).to eq 512 + end + + it 'raises an error if specified wrong field name' do + expect do + model_with_wrong_field_name.create + end.to raise_error(NoMethodError, /undefined method `foo='/) + end + end + + describe 'timestamps fields `created_at` and `updated_at`' do + let(:class_with_timestamps_true) do + new_class do + table timestamps: true + end + end + + let(:class_with_timestamps_false) do + new_class do + table timestamps: false + end + end + + it 'declares timestamps when Dynamoid::Config.timestamps = true', config: { timestamps: true } do + expect(new_class.attributes).to have_key(:created_at) + expect(new_class.attributes).to have_key(:updated_at) + + expect(new_class.new).to respond_to(:created_at) + 
expect(new_class.new).to respond_to(:updated_at) + end + + it 'does not declare timestamps when Dynamoid::Config.timestamps = false', config: { timestamps: false } do + expect(new_class.attributes).not_to have_key(:created_at) + expect(new_class.attributes).not_to have_key(:updated_at) + + expect(new_class.new).not_to respond_to(:created_at) + expect(new_class.new).not_to respond_to(:updated_at) + end + + it 'does not declare timestamps when Dynamoid::Config.timestamps = true but table timestamps = false', config: { timestamps: true } do + expect(class_with_timestamps_false.attributes).not_to have_key(:created_at) + expect(class_with_timestamps_false.attributes).not_to have_key(:updated_at) + + expect(class_with_timestamps_false.new).not_to respond_to(:created_at) + expect(class_with_timestamps_false.new).not_to respond_to(:updated_at) + end + + it 'declares timestamps when Dynamoid::Config.timestamps = false but table timestamps = true', config: { timestamps: false } do + expect(class_with_timestamps_true.attributes).to have_key(:created_at) + expect(class_with_timestamps_true.attributes).to have_key(:updated_at) + + expect(class_with_timestamps_true.new).to respond_to(:created_at) + expect(class_with_timestamps_true.new).to respond_to(:updated_at) + end + end + + describe '#inspect' do + it 'returns a String containing a model class name and a list of attributes and values' do + klass = new_class(class_name: 'Person') do + field :name + field :age, :integer + end + + object = klass.new(name: 'Alex', age: 21) + puts object.attributes + expect(object.inspect).to eql '#' + end + + it 'puts partition and sort keys on the first place' do + klass = new_class(class_name: 'Person') do + field :name + field :age, :integer + range :city + end + + object = klass.new(city: 'Kyiv') + expect(object.inspect).to eql '#' + end + end +end diff --git a/dynamoid/spec/dynamoid/dumping_spec.rb b/dynamoid/spec/dynamoid/dumping_spec.rb new file mode 100644 index 000000000..569677e4f --- /dev/null +++ b/dynamoid/spec/dynamoid/dumping_spec.rb @@ -0,0 +1,1515 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Dumping' do + describe 'Boolean field' do + context 'string format' do + let(:klass) do + new_class do + field :active, :boolean, store_as_native_boolean: false + end + end + + it "saves false as 'f'" do + obj = klass.create(active: false) + + expect(reload(obj).active).to eql(false) + expect(raw_attributes(obj)[:active]).to eql('f') + end + + it "saves true as 't'" do + obj = klass.create(active: true) + + expect(reload(obj).active).to eql(true) + expect(raw_attributes(obj)[:active]).to eql('t') + end + + it 'stores nil value' do + obj = klass.create(active: nil) + + expect(reload(obj).active).to eql(nil) + expect(raw_attributes(obj)[:active]).to eql(nil) + end + end + + context 'boolean format' do + let(:klass) do + new_class do + field :active, :boolean, store_as_native_boolean: true + end + end + + it 'saves false as false' do + obj = klass.create(active: false) + + expect(reload(obj).active).to eql(false) + expect(raw_attributes(obj)[:active]).to eql(false) + end + + it 'saves true as true' do + obj = klass.create(active: true) + + expect(reload(obj).active).to eql(true) + expect(raw_attributes(obj)[:active]).to eql(true) + end + + it 'saves and loads boolean field correctly' do + obj = klass.create(active: true) + expect(reload(obj).active).to eql true + + obj = klass.create(active: false) + expect(reload(obj).active).to eql false + end + + it 'stores nil value' do + obj = 
klass.create(active: nil) + + expect(reload(obj).active).to eql(nil) + expect(raw_attributes(obj)[:active]).to eql(nil) + end + end + + describe '"store_boolean_as_native" global config option' do + it 'is stored as boolean by default' do + klass = new_class do + field :active, :boolean + end + + obj = klass.create(active: true) + + expect(raw_attributes(obj)[:active]).to eql(true) + expect(reload(obj).active).to eql(true) + end + + context 'store_boolean_as_native=true' do + it 'is stored as boolean if field option store_as_native_boolean is not set', + config: { store_boolean_as_native: true } do + klass = new_class do + field :active, :boolean + end + + obj = klass.create(active: true) + + expect(raw_attributes(obj)[:active]).to eql(true) + expect(reload(obj).active).to eql(true) + end + + it 'is stored as boolean if field option store_as_native_boolean=true', + config: { store_boolean_as_native: true } do + klass = new_class do + field :active, :boolean, store_as_native_boolean: true + end + + obj = klass.create(active: true) + + expect(raw_attributes(obj)[:active]).to eql(true) + expect(reload(obj).active).to eql(true) + end + + it 'is stored as string if field option store_as_native_boolean=false', + config: { store_boolean_as_native: true } do + klass = new_class do + field :active, :boolean, store_as_native_boolean: false + end + + obj = klass.create(active: true) + + expect(raw_attributes(obj)[:active]).to eql('t') + expect(reload(obj).active).to eql(true) + end + end + + context 'store_boolean_as_native=false' do + it 'is stored as string if field option store_as_native_boolean is not set', + config: { store_boolean_as_native: false } do + klass = new_class do + field :active, :boolean + end + + obj = klass.create(active: true) + + expect(raw_attributes(obj)[:active]).to eql('t') + expect(reload(obj).active).to eql(true) + end + + it 'is stored as boolean if field option store_as_native_boolean=true', + config: { store_boolean_as_native: false } do + klass = new_class do + field :active, :boolean, store_as_native_boolean: true + end + + obj = klass.create(active: true) + + expect(raw_attributes(obj)[:active]).to eql(true) + expect(reload(obj).active).to eql(true) + end + + it 'is stored as string if field option store_as_native_boolean=false', + config: { store_boolean_as_native: false } do + klass = new_class do + field :active, :boolean, store_as_native_boolean: false + end + + obj = klass.create(active: true) + + expect(raw_attributes(obj)[:active]).to eql('t') + expect(reload(obj).active).to eql(true) + end + end + end + end + + describe 'DateTime field' do + context 'Stored in :number format' do + let(:klass) do + new_class do + field :sent_at, :datetime + end + end + + it 'saves time as :number' do + time = Time.utc(2018, 7, 24, 22, 4, 30, 1).to_datetime + obj = klass.create(sent_at: time) + + expect(reload(obj).sent_at).to eql(time) + expect(raw_attributes(obj)[:sent_at]).to eql(BigDecimal('1532469870.000001')) + end + + it 'saves date as :number' do + obj = klass.create(sent_at: Date.new(2018, 7, 21)) + + expect(reload(obj).sent_at).to eq(DateTime.new(2018, 7, 21, 0, 0, 0)) + expect(raw_attributes(obj)[:sent_at]).to eql(BigDecimal('1532131200.0')) + end + + it 'stores nil value' do + obj = klass.create(sent_at: nil) + + expect(reload(obj).sent_at).to eql(nil) + expect(raw_attributes(obj)[:sent_at]).to eql(nil) + end + end + + context 'Stored in :string ISO-8601 format', + config: { application_timezone: :utc, dynamodb_timezone: :utc } do + let(:klass) do + new_class do + 
field :sent_at, :datetime, store_as_string: true + end + end + + it 'saves time as a :string and looses milliseconds' do + time = '2018-07-24 22:04:30.001 +03:00'.to_datetime + obj = klass.create(sent_at: time) + + expect(reload(obj).sent_at).to eql('2018-07-24 19:04:30 +00:00'.to_datetime) + expect(raw_attributes(obj)[:sent_at]).to eql('2018-07-24T19:04:30+00:00') + end + + it 'saves date as :string' do + date = Date.new(2018, 7, 21) + obj = klass.create(sent_at: date) + + expect(reload(obj).sent_at).to eq(DateTime.new(2018, 7, 21, 0, 0, 0, '+00:00')) + expect(raw_attributes(obj)[:sent_at]).to eql('2018-07-21T00:00:00+00:00') + end + + it 'saves as :string if global option :store_date_time_as_string is true' do + klass2 = new_class do + field :sent_at, :datetime + end + + store_datetime_as_string = Dynamoid.config.store_datetime_as_string + Dynamoid.config.store_datetime_as_string = true + + time = '2018-07-24 22:04:30.001 +03:00'.to_datetime + obj = klass2.create(sent_at: time) + + expect(reload(obj).sent_at).to eql('2018-07-24 19:04:30 +00:00'.to_datetime) + expect(raw_attributes(obj)[:sent_at]).to eql('2018-07-24T19:04:30+00:00') + + Dynamoid.config.store_datetime_as_string = store_datetime_as_string + end + + it 'prioritize field option over global one' do + store_datetime_as_string = Dynamoid.config.store_datetime_as_string + Dynamoid.config.store_datetime_as_string = false + + time = '2018-07-24 22:04:30.001 +03:00'.to_datetime + obj = klass.create(sent_at: time) + + expect(reload(obj).sent_at).to eql('2018-07-24 19:04:30 +00:00'.to_datetime) + expect(raw_attributes(obj)[:sent_at]).to eql('2018-07-24T19:04:30+00:00') + + Dynamoid.config.store_datetime_as_string = store_datetime_as_string + end + + it 'stores nil value' do + obj = klass.create(sent_at: nil) + + expect(reload(obj).sent_at).to eq(nil) + expect(raw_attributes(obj)[:sent_at]).to eql(nil) + end + end + + describe '"application_timezone" global config option' do + let(:klass) do + new_class do + field :last_logged_in_at, :datetime + end + end + + it 'loads time in local time zone if config.application_timezone = :local', + config: { application_timezone: :local } do + time = DateTime.now + obj = klass.create(last_logged_in_at: time) + obj = klass.find(obj.id) + + # we can't compare objects directly because lose precision of milliseconds in conversions + expect(obj.last_logged_in_at).to be_a(DateTime) + expect(obj.last_logged_in_at.to_s).to eql time.to_datetime.to_s + end + + it 'loads time in specified time zone if config.application_timezone = time zone name', + config: { application_timezone: 'Hawaii' } do + # Hawaii UTC-10 + time = '2017-06-20 08:00:00 +0300'.to_datetime + obj = klass.create(last_logged_in_at: time) + + expect(reload(obj).last_logged_in_at).to eql '2017-06-19 19:00:00 -1000'.to_datetime + end + + it 'loads time in UTC if config.application_timezone = :utc', + config: { application_timezone: :utc } do + time = '2017-06-20 08:00:00 +0300'.to_datetime + obj = klass.create(last_logged_in_at: time) + + expect(reload(obj).last_logged_in_at).to eql '2017-06-20 05:00:00 +0000'.to_datetime + end + end + + describe '"dynamodb_timezone" global config option' do + let(:klass) do + new_class do + field :last_logged_in_at, :datetime + end + end + + it 'stores time in local time zone', + config: { dynamodb_timezone: :local, store_datetime_as_string: true } do + time = DateTime.now + obj = klass.create(last_logged_in_at: time) + + expect(raw_attributes(obj)[:last_logged_in_at]).to eql time.to_s + end + + it 'stores 
time in specified time zone', + config: { dynamodb_timezone: 'Hawaii', store_datetime_as_string: true } do + time = '2017-06-20 08:00:00 +0300'.to_datetime + obj = klass.create(last_logged_in_at: time) + + expect(raw_attributes(obj)[:last_logged_in_at]).to eql('2017-06-19T19:00:00-10:00') + end + + it 'stores time in UTC', config: { dynamodb_timezone: :utc, store_datetime_as_string: true } do + time = '2017-06-20 08:00:00 +0300'.to_datetime + obj = klass.create(last_logged_in_at: time) + + expect(raw_attributes(obj)[:last_logged_in_at]).to eql('2017-06-20T05:00:00+00:00') + end + + it 'uses UTC by default', config: { store_datetime_as_string: true } do + time = '2017-06-20 08:00:00 +0300'.to_datetime + obj = klass.create(last_logged_in_at: time) + + expect(raw_attributes(obj)[:last_logged_in_at]).to eql('2017-06-20T05:00:00+00:00') + end + + it 'converts time between application time zone and dynamodb time zone correctly', + config: { application_timezone: 'Hong Kong', dynamodb_timezone: 'Hawaii', + store_datetime_as_string: true } do + # Hong Kong +8 + # Hawaii -10 + time = '2017-06-20 08:00:00 +0300'.to_datetime + obj = klass.create(last_logged_in_at: time) + + expect(raw_attributes(obj)[:last_logged_in_at]).to eql('2017-06-19T19:00:00-10:00') + expect(reload(obj).last_logged_in_at.to_s).to eql('2017-06-20T13:00:00+08:00') + end + end + end + + describe 'Date field' do + context 'stored in :string format' do + it 'stores in ISO 8601 format' do + klass = new_class do + field :signed_up_on, :date, store_as_string: true + end + + obj = klass.create(signed_up_on: '2017-09-25'.to_date) + + expect(reload(obj).signed_up_on).to eql('2017-09-25'.to_date) + expect(raw_attributes(obj)[:signed_up_on]).to eql('2017-09-25') + end + + it 'stores in string format when global option :store_date_as_string is true' do + klass = new_class do + field :signed_up_on, :date + end + + store_date_as_string = Dynamoid.config.store_date_as_string + Dynamoid.config.store_date_as_string = true + + obj = klass.create(signed_up_on: '2017-09-25'.to_date) + expect(raw_attributes(obj)[:signed_up_on]).to eql('2017-09-25') + + Dynamoid.config.store_date_as_string = store_date_as_string + end + + it 'prioritize field option over global one' do + klass = new_class do + field :signed_up_on, :date, store_as_string: true + end + + store_date_as_string = Dynamoid.config.store_date_as_string + Dynamoid.config.store_date_as_string = false + + obj = klass.create(signed_up_on: '2017-09-25'.to_date) + expect(raw_attributes(obj)[:signed_up_on]).to eql('2017-09-25') + + Dynamoid.config.store_date_as_string = store_date_as_string + end + + it 'stores nil value' do + klass = new_class do + field :signed_up_on, :date, store_as_string: true + end + + obj = klass.create(signed_up_on: nil) + + expect(reload(obj).signed_up_on).to eql(nil) + expect(raw_attributes(obj)[:signed_up_on]).to eql(nil) + end + end + + context 'stored in :number format' do + it 'stores as number of days between dates' do + klass = new_class do + field :signed_up_on, :date, store_as_string: false + end + + obj = klass.create(signed_up_on: '2017-09-25'.to_date) + + expect(reload(obj).signed_up_on).to eql('2017-09-25'.to_date) + expect(raw_attributes(obj)[:signed_up_on]).to eql(17_434) + end + + it 'stores in number format when global option :store_date_as_string is false' do + klass = new_class do + field :signed_up_on, :date + end + + store_date_as_string = Dynamoid.config.store_date_as_string + Dynamoid.config.store_date_as_string = false + + obj = 
klass.create(signed_up_on: '2017-09-25'.to_date) + expect(raw_attributes(obj)[:signed_up_on]).to eql(17_434) + + Dynamoid.config.store_date_as_string = store_date_as_string + end + + it 'prioritize field option over global one' do + klass = new_class do + field :signed_up_on, :date, store_as_string: false + end + + store_date_as_string = Dynamoid.config.store_date_as_string + Dynamoid.config.store_date_as_string = true + + obj = klass.create(signed_up_on: '2017-09-25'.to_date) + expect(raw_attributes(obj)[:signed_up_on]).to eql(17_434) + + Dynamoid.config.store_date_as_string = store_date_as_string + end + + it 'stores nil value' do + klass = new_class do + field :signed_up_on, :date, store_as_string: false + end + + obj = klass.create(signed_up_on: nil) + + expect(reload(obj).signed_up_on).to eql(nil) + expect(raw_attributes(obj)[:signed_up_on]).to eql(nil) + end + end + end + + describe 'Set field' do + let(:klass) do + new_class do + field :set_value, :set + end + end + + it 'stores a set of strings' do + set = Set.new(%w[a b]) + obj = klass.create(set_value: set) + + expect(reload(obj).set_value).to eql(set) + expect(raw_attributes(obj)[:set_value]).to eql(set) + end + + it 'stores a set of integers' do + set = Set.new([1, 2]) + obj = klass.create(set_value: Set.new([1, 2])) + + expect(reload(obj).set_value).to eql(Set.new(['1'.to_d, '2'.to_d])) + expect(raw_attributes(obj)[:set_value]).to eql(Set.new(['1'.to_d, '2'.to_d])) + end + + it 'stores a set of numbers' do + obj = klass.create(set_value: Set.new([1.5, '2'.to_d])) + + expect(reload(obj).set_value).to eql(Set.new(['1.5'.to_d, '2'.to_d])) + expect(raw_attributes(obj)[:set_value]).to eql(Set.new(['1.5'.to_d, '2'.to_d])) + end + + it 'stores empty set as nil' do + obj = klass.create(set_value: Set.new) + + expect(reload(obj).set_value).to eql(nil) + expect(raw_attributes(obj)[:set_value]).to eql(nil) + end + + it 'stores nil value' do + obj = klass.create(set_value: nil) + + expect(reload(obj).set_value).to eql(nil) + expect(raw_attributes(obj)[:set_value]).to eql(nil) + end + + describe 'typed set' do + context 'set of string' do + let(:class_with_typed_set) do + new_class do + field :values, :set, of: :string + end + end + + it 'stores elements as strings' do + obj = class_with_typed_set.create(values: Set.new(%w[a b c])) + + expect(reload(obj).values).to eql(Set.new(%w[a b c])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(%w[a b c])) + end + + it 'removes empty strings' do + obj = class_with_typed_set.create(values: Set.new(['a', '', 'c'])) + + expect(reload(obj).values).to eql(Set.new(%w[a c])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(%w[a c])) + end + end + + context 'set of number' do + let(:class_with_typed_set) do + new_class do + field :values, :set, of: :number + end + end + + it 'stores elements as BigDecimal' do + obj = class_with_typed_set.create(values: Set.new([1, 2.5, '3'.to_d])) + + expect(reload(obj).values).to eql(Set.new(['1'.to_d, '2.5'.to_d, '3'.to_d])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['1'.to_d, '2.5'.to_d, '3'.to_d])) + end + end + + context 'set of integer' do + let(:class_with_typed_set) do + new_class do + field :values, :set, of: :integer + end + end + + it 'stores elements as Integer' do + obj = class_with_typed_set.create(values: Set.new([1, 2])) + + expect(reload(obj).values).to eql(Set.new([1, 2])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['1'.to_d, '2'.to_d])) + end + end + + context 'set of date' do + let(:class_with_typed_set) do + new_class 
do + field :values, :set, of: :date + end + end + + it 'stores elements as Date' do + date = '2018-10-13'.to_date + obj = class_with_typed_set.create(values: Set.new([date])) + + expect(reload(obj).values).to eql(Set.new([date])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['17817'.to_d])) + end + + it 'uses numeric format if store_as_string is false' do + class_with_numeric_format = new_class do + field :values, :set, of: { date: { store_as_string: false } } + end + + date = '2018-10-13'.to_date + obj = class_with_numeric_format.create(values: Set.new([date])) + + expect(reload(obj).values).to eql(Set.new([date])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['17817'.to_d])) + end + + it 'uses string format if store_as_string is true' do + class_with_string_format = new_class do + field :values, :set, of: { date: { store_as_string: true } } + end + + date = '2018-10-13'.to_date + obj = class_with_string_format.create(values: Set.new([date])) + + expect(reload(obj).values).to eql(Set.new([date])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['2018-10-13'])) + end + end + + context 'set of datetime' do + let(:class_with_typed_set) do + new_class do + field :values, :set, of: :datetime + end + end + + it 'stores elements as DateTime' do + datetime = '2018-07-24 19:04:30 +00:00'.to_datetime + obj = class_with_typed_set.create(values: Set.new([datetime])) + + expect(reload(obj).values).to eql(Set.new([datetime])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['1532459070'.to_d])) + end + + it 'uses numeric format if store_as_string is false' do + class_with_numeric_format = new_class do + field :values, :set, of: { datetime: { store_as_string: false } } + end + + datetime = '2018-07-24 19:04:30 +00:00'.to_datetime + obj = class_with_numeric_format.create(values: Set.new([datetime])) + + expect(reload(obj).values).to eql(Set.new([datetime])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['1532459070'.to_d])) + end + + it 'uses string format if store_as_string is true' do + class_with_string_format = new_class do + field :values, :set, of: { datetime: { store_as_string: true } } + end + + datetime = '2018-07-24 19:04:30 +00:00'.to_datetime + obj = class_with_string_format.create(values: Set.new([datetime])) + + expect(reload(obj).values).to eql(Set.new([datetime])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['2018-07-24T19:04:30+00:00'])) + end + end + + context 'set of serialized' do + it 'serializes elements' do + class_with_typed_set = new_class do + field :values, :set, of: :serialized + end + + hash = { 'foo' => 'bar' } + obj = class_with_typed_set.create(values: Set.new([hash])) + + expect(reload(obj).values).to eql(Set.new([hash])) + expect(raw_attributes(obj)[:values]).to eql(Set.new([hash.to_yaml])) + end + + it 'uses provided serializer' do + class_with_typed_set = new_class do + field :values, :set, of: { serialized: { serializer: JSON } } + end + + hash = { 'foo' => 'bar' } + obj = class_with_typed_set.create(values: Set.new([hash])) + + expect(reload(obj).values).to eql(Set.new([hash])) + expect(raw_attributes(obj)[:values]).to eql(Set.new([hash.to_json])) + end + end + + context 'set of custom type' do + let(:user_class) do + Class.new do + attr_accessor :name + + def initialize(name) + self.name = name + end + + def dynamoid_dump + name + end + + def eql?(other) + name == other.name + end + + def hash + name.hash + end + + def self.dynamoid_load(string) + new(string.to_s) + end + end + end + + let(:class_with_typed_set) do + 
new_class(user_class: user_class) do |options| + field :values, :set, of: options[:user_class] + end + end + + it 'uses custom dumping mechanizm' do + user = user_class.new('John') + obj = class_with_typed_set.create(values: Set.new([user])) + + expect(reload(obj).values).to eql(Set.new([user])) + expect(raw_attributes(obj)[:values]).to eql(Set.new(['John'])) + end + end + + context 'specified type is not supported' do + let(:class_with_typed_set) do + new_class do + field :values, :set, of: :boolean + end + end + + it 'raises ArgumentError' do + expect { + class_with_typed_set.create(values: Set.new([true])) + }.to raise_error(ArgumentError, "Set element type boolean isn't supported") + end + end + end + end + + describe 'Array field' do + let(:klass) do + new_class do + field :tags, :array + end + end + + it 'stores array as list' do + array = %w[new archived] + obj = klass.create(tags: array) + + expect(reload(obj).tags).to eql(array) + expect(raw_attributes(obj)[:tags]).to eql(array) + end + + it 'can store empty array' do + obj = klass.create(tags: []) + + expect(reload(obj).tags).to eql([]) + expect(raw_attributes(obj)[:tags]).to eql([]) + end + + it 'can store elements of different types' do + array = ['a', 5, 12.5] + obj = klass.create(tags: array) + + expect(reload(obj).tags).to eql(array) + expect(raw_attributes(obj)[:tags]).to eql(array) + end + + it 'stores document as an array element' do + obj = klass.create(tags: [{ foo: 'bar' }]) + + expect(reload(obj).tags).to eql([{ 'foo' => 'bar' }]) + expect(raw_attributes(obj)[:tags]).to eql([{ 'foo' => 'bar' }]) + + array = %w[foo bar] + obj = klass.create(tags: [array]) + + expect(reload(obj).tags).to eql([array]) + expect(raw_attributes(obj)[:tags]).to eql([array]) + end + + it 'stores set as an array element' do + set = Set.new(%w[foo bar]) + obj = klass.create(tags: [set]) + + expect(reload(obj).tags).to eql([set]) + expect(raw_attributes(obj)[:tags]).to eql([set]) + end + + it 'stores nil as an array element' do + array = ['1', nil] + obj = klass.create(tags: array) + + expect(reload(obj).tags).to eql(array) + expect(raw_attributes(obj)[:tags]).to eql(array) + end + + it 'stores nil value' do + obj = klass.create(tags: nil) + + expect(reload(obj).tags).to eql(nil) + expect(raw_attributes(obj)[:tags]).to eql(nil) + end + + describe 'typed array' do + context 'array of string' do + let(:class_with_typed_array) do + new_class do + field :values, :array, of: :string + end + end + + it 'stores elements as strings' do + obj = class_with_typed_array.create(values: %w[a b c]) + + expect(reload(obj).values).to eql(%w[a b c]) + expect(raw_attributes(obj)[:values]).to eql(%w[a b c]) + end + + it 'removes empty strings' do + obj = class_with_typed_array.create(values: ['a', '', 'c']) + + expect(reload(obj).values).to eql(%w[a c]) + expect(raw_attributes(obj)[:values]).to eql(%w[a c]) + end + end + + context 'array of number' do + let(:class_with_typed_array) do + new_class do + field :values, :array, of: :number + end + end + + it 'stores elements as BigDecimal' do + obj = class_with_typed_array.create(values: [1, 2.5, '3'.to_d]) + + expect(reload(obj).values).to eql(['1'.to_d, '2.5'.to_d, '3'.to_d]) + expect(raw_attributes(obj)[:values]).to eql(['1'.to_d, '2.5'.to_d, '3'.to_d]) + end + end + + context 'array of integer' do + let(:class_with_typed_array) do + new_class do + field :values, :array, of: :integer + end + end + + it 'stores elements as Integer' do + obj = class_with_typed_array.create(values: [1, 2]) + + 
expect(reload(obj).values).to eql([1, 2]) + expect(raw_attributes(obj)[:values]).to eql(['1'.to_d, '2'.to_d]) + end + end + + context 'array of date' do + let(:class_with_typed_array) do + new_class do + field :values, :array, of: :date + end + end + + it 'stores elements as Date' do + date = '2018-10-13'.to_date + obj = class_with_typed_array.create(values: [date]) + + expect(reload(obj).values).to eql([date]) + expect(raw_attributes(obj)[:values]).to eql(['17817'.to_d]) + end + + it 'uses numeric format if store_as_string is false' do + class_with_numeric_format = new_class do + field :values, :array, of: { date: { store_as_string: false } } + end + + date = '2018-10-13'.to_date + obj = class_with_numeric_format.create(values: [date]) + + expect(reload(obj).values).to eql([date]) + expect(raw_attributes(obj)[:values]).to eql(['17817'.to_d]) + end + + it 'uses string format if store_as_string is true' do + class_with_string_format = new_class do + field :values, :array, of: { date: { store_as_string: true } } + end + + date = '2018-10-13'.to_date + obj = class_with_string_format.create(values: [date]) + + expect(reload(obj).values).to eql([date]) + expect(raw_attributes(obj)[:values]).to eql(['2018-10-13']) + end + end + + context 'array of datetime' do + let(:class_with_typed_array) do + new_class do + field :values, :array, of: :datetime + end + end + + it 'stores elements as DateTime' do + datetime = '2018-07-24 19:04:30 +00:00'.to_datetime + obj = class_with_typed_array.create(values: [datetime]) + + expect(reload(obj).values).to eql([datetime]) + expect(raw_attributes(obj)[:values]).to eql(['1532459070'.to_d]) + end + + it 'uses numeric format if store_as_string is false' do + class_with_numeric_format = new_class do + field :values, :array, of: { datetime: { store_as_string: false } } + end + + datetime = '2018-07-24 19:04:30 +00:00'.to_datetime + obj = class_with_numeric_format.create(values: [datetime]) + + expect(reload(obj).values).to eql([datetime]) + expect(raw_attributes(obj)[:values]).to eql(['1532459070'.to_d]) + end + + it 'uses string format if store_as_string is true' do + class_with_string_format = new_class do + field :values, :array, of: { datetime: { store_as_string: true } } + end + + datetime = '2018-07-24 19:04:30 +00:00'.to_datetime + obj = class_with_string_format.create(values: [datetime]) + + expect(reload(obj).values).to eql([datetime]) + expect(raw_attributes(obj)[:values]).to eql(['2018-07-24T19:04:30+00:00']) + end + end + + context 'array of serialized' do + it 'serializes elements' do + class_with_typed_array = new_class do + field :values, :array, of: :serialized + end + + hash = { 'foo' => 'bar' } + obj = class_with_typed_array.create(values: [hash]) + + expect(reload(obj).values).to eql([hash]) + expect(raw_attributes(obj)[:values]).to eql([hash.to_yaml]) + end + + it 'uses provided serializer' do + class_with_typed_array = new_class do + field :values, :array, of: { serialized: { serializer: JSON } } + end + + hash = { 'foo' => 'bar' } + obj = class_with_typed_array.create(values: [hash]) + + expect(reload(obj).values).to eql([hash]) + expect(raw_attributes(obj)[:values]).to eql([hash.to_json]) + end + end + + context 'array of custom type' do + let(:user_class) do + Class.new do + attr_accessor :name + + def initialize(name) + self.name = name + end + + def dynamoid_dump + name + end + + def eql?(other) + name == other.name + end + + def hash + name.hash + end + + def self.dynamoid_load(string) + new(string.to_s) + end + end + end + + 
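+ # NOTE: user_class above satisfies Dynamoid's custom type contract: #dynamoid_dump returns the value to store and .dynamoid_load rebuilds an instance from it, so elements of this array field round-trip as plain strings.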
let(:class_with_typed_array) do + new_class(user_class: user_class) do |options| + field :values, :array, of: options[:user_class] + end + end + + it 'uses custom dumping mechanizm' do + user = user_class.new('John') + obj = class_with_typed_array.create(values: [user]) + + expect(reload(obj).values).to eql([user]) + expect(raw_attributes(obj)[:values]).to eql(['John']) + end + end + + context 'specified type is not supported' do + let(:class_with_typed_array) do + new_class do + field :values, :array, of: :boolean + end + end + + it 'raises ArgumentError' do + expect { + class_with_typed_array.create(values: [true]) + }.to raise_error(ArgumentError, "Array element type boolean isn't supported") + end + end + end + end + + describe 'Map field' do + let(:klass) do + new_class do + field :settings, :map + end + end + + it 'stores as a Document' do + settings = { + Day: 'Monday', + UnreadEmails: 42, + ItemsOnMyDesk: [ + 'Coffee Cup', + 'Telephone', + { + Pens: { Quantity: 3 }, + Pencils: { Quantity: 2 }, + Erasers: { Quantity: 1 } + } + ] + } + obj = klass.create(settings: settings) + + expect(reload(obj).settings).to eql( + Day: 'Monday', + UnreadEmails: 42, + ItemsOnMyDesk: [ + 'Coffee Cup', + 'Telephone', + { + Pens: { Quantity: 3 }, + Pencils: { Quantity: 2 }, + Erasers: { Quantity: 1 } + } + ] + ) + expect(raw_attributes(obj)[:settings]).to eql( + 'Day' => 'Monday', + 'UnreadEmails' => 42, + 'ItemsOnMyDesk' => [ + 'Coffee Cup', + 'Telephone', + { + 'Pens' => { 'Quantity' => 3 }, + 'Pencils' => { 'Quantity' => 2 }, + 'Erasers' => { 'Quantity' => 1 } + } + ] + ) + end + + it 'deeply symbolizes keys' do + settings = { 'foo' => { 'bar' => 1 }, 'baz' => [{ 'foobar' => 2 }] } + obj = klass.create(settings: settings) + + expect(reload(obj).settings).to eql(foo: { bar: 1 }, baz: [{ foobar: 2 }]) + end + + describe 'sanitizing' do + it 'replaces empty set with nil in Hash' do + settings = { 'foo' => [].to_set } + obj = klass.create(settings: settings) + + expect(reload(obj).settings).to eql(foo: nil) + end + + it 'replaces empty string with nil in Hash' do + settings = { 'foo' => '' } + obj = klass.create(settings: settings) + + expect(reload(obj).settings).to eql(foo: nil) + end + + it 'replaces empty set with nil in nested Array' do + settings = { 'foo' => [1, 2, [].to_set] } + obj = klass.create(settings: settings) + + expect(reload(obj).settings).to eql(foo: [1, 2, nil]) + end + + it 'replaces empty string with nil in nested Array' do + settings = { 'foo' => [1, 2, ''] } + obj = klass.create(settings: settings) + + expect(reload(obj).settings).to eql(foo: [1, 2, nil]) + end + + it 'processes nested Hash and Array' do + settings = { a: 1, b: '', c: [1, 2, '', { d: 3, e: '' }] } + obj = klass.create(settings: settings) + + expect(reload(obj).settings).to eql(a: 1, b: nil, c: [1, 2, nil, { d: 3, e: nil }]) + end + end + end + + describe 'String field' do + it 'stores as strings' do + klass = new_class do + field :name, :string + end + + obj = klass.create(name: 'Matthew') + + expect(reload(obj).name).to eql('Matthew') + expect(raw_attributes(obj)[:name]).to eql('Matthew') + end + + it 'saves empty string as nil' do + klass = new_class do + field :name, :string + end + + obj = klass.create(name: '') + + expect(reload(obj).name).to eql(nil) + expect(raw_attributes(obj)[:name]).to eql(nil) + end + + it 'is used as default field type' do + klass = new_class do + field :name + end + + obj = klass.create(name: 'Matthew') + + expect(reload(obj).name).to eql('Matthew') + 
expect(raw_attributes(obj)[:name]).to eql('Matthew') + end + + it 'stores nil value' do + klass = new_class do + field :name, :string + end + + obj = klass.create(name: nil) + + expect(reload(obj).name).to eql(nil) + expect(raw_attributes(obj)[:name]).to eql(nil) + end + end + + describe 'Raw field' do + let(:klass) do + new_class do + field :config, :raw + end + end + + it 'stores Hash attribute as a Document' do + config = { acres: 5, 'trees' => { cyprus: 30 }, horses: %w[Lucky Dummy] } + obj = klass.create(config: config) + + expect(reload(obj).config).to eql( + acres: 5, trees: { cyprus: 30 }, horses: %w[Lucky Dummy] + ) + expect(raw_attributes(obj)[:config]).to eql( + 'acres' => 5, 'trees' => { 'cyprus' => 30 }, 'horses' => %w[Lucky Dummy] + ) + end + + it 'stores Array attribute as a List' do + config = %w[windows roof doors] + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(config) + expect(raw_attributes(obj)[:config]).to eql(config) + end + + it 'stores Set attribute as a List' do + config = Set.new(%w[windows roof doors]) + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(config) + expect(raw_attributes(obj)[:config]).to eql(config) + end + + it 'stores String attribute as a String' do + config = 'Config' + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(config) + expect(raw_attributes(obj)[:config]).to eql(config) + end + + it 'stores Number attribute as a Number' do + config = 100 + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(config) + expect(raw_attributes(obj)[:config]).to eql(config) + end + + it 'stores nil value' do + obj = klass.create(config: nil) + + expect(reload(obj).config).to eql(nil) + expect(raw_attributes(obj)[:config]).to eql(nil) + end + + describe 'Hash' do + it 'symbolizes deeply Hash keys' do + config = { 'foo' => { 'bar' => 1 }, 'baz' => [{ 'foobar' => 2 }] } + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(foo: { bar: 1 }, baz: [{ foobar: 2 }]) + end + end + + describe 'sanitizing' do + it 'replaces empty set with nil in Hash' do + config = { 'foo' => [].to_set } + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(foo: nil) + end + + it 'replaces empty string with nil in Hash' do + config = { 'foo' => '' } + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(foo: nil) + end + + it 'replaces empty set with nil in Array' do + config = [1, 2, [].to_set] + obj = klass.create(config: config) + + expect(reload(obj).config).to eql([1, 2, nil]) + end + + it 'replaces empty string with nil in Array' do + config = [1, 2, ''] + obj = klass.create(config: config) + + expect(reload(obj).config).to eql([1, 2, nil]) + end + + it 'processes nested Hash and Array' do + config = { a: 1, b: '', c: [1, 2, '', { d: 3, e: '' }] } + obj = klass.create(config: config) + + expect(reload(obj).config).to eql(a: 1, b: nil, c: [1, 2, nil, { d: 3, e: nil }]) + end + end + end + + describe 'Integer field' do + let(:klass) do + new_class do + field :count, :integer + end + end + + it 'stores integer value as Integer' do + obj = klass.create(count: 10) + + expect(reload(obj).count).to eql(10) + expect(raw_attributes(obj)[:count]).to eql(10) + end + + it 'stores nil value' do + obj = klass.create(count: nil) + + expect(reload(obj).count).to eql(nil) + expect(raw_attributes(obj)[:count]).to eql(nil) + end + end + + describe 'Number field' do + let(:klass) do + new_class do + field :count, :number + end + end + + it 
'stores integer value as Number' do + obj = klass.create(count: 10) + + expect(reload(obj).count).to eql(BigDecimal('10')) + expect(raw_attributes(obj)[:count]).to eql(BigDecimal('10')) + end + + it 'stores float value Number' do + # NOTE: Set as string to avoid error on JRuby 9.4.0.0: + # Aws::DynamoDB::Errors::ValidationException: + # DynamoDB only supports precision up to 38 digits + obj = klass.create(count: '10.001') + + expect(reload(obj).count).to eql(BigDecimal('10.001', 5)) + expect(raw_attributes(obj)[:count]).to eql(BigDecimal('10.001', 5)) + end + + it 'stores BigDecimal value as Number' do + obj = klass.create(count: BigDecimal('10.001', 5)) + + expect(reload(obj).count).to eql(BigDecimal('10.001', 5)) + expect(raw_attributes(obj)[:count]).to eql(BigDecimal('10.001', 5)) + end + + it 'stores nil value' do + obj = klass.create(count: nil) + + expect(reload(obj).count).to eql(nil) + expect(raw_attributes(obj)[:count]).to eql(nil) + end + end + + describe 'Serialized field' do + it 'uses YAML format by default' do + klass = new_class do + field :options, :serialized + end + + options = { foo: 'bar' } + obj = klass.create(options: options) + + expect(reload(obj).options).to eql(options) + expect(raw_attributes(obj)[:options]).to eql(options.to_yaml) + end + + it 'uses specified serializer object' do + serializer = Class.new do + def self.dump(value) + JSON.dump(value) + end + + def self.load(str) + JSON.parse(str) + end + end + + klass = new_class do + field :options, :serialized, serializer: serializer + end + + obj = klass.create(options: { foo: 'bar' }) + + expect(reload(obj).options).to eql('foo' => 'bar') + expect(raw_attributes(obj)[:options]).to eql('{"foo":"bar"}') + end + + it 'can store empty collections' do + klass = new_class do + field :options, :serialized + end + + obj = klass.create(options: Set.new) + + expect(reload(obj).options).to eql(Set.new) + expect(raw_attributes(obj)[:options]).to eql("--- !ruby/object:Set\nhash: {}\n") + end + + it 'does not store nil value' do + klass = new_class do + field :options, :serialized + end + + obj = klass.create(options: nil) + + expect(reload(obj).options).to eql(nil) + expect(raw_attributes(obj)[:options]).to eql(nil) + end + end + + describe 'Custom type field' do + context 'Custom type provided' do + let(:user_class) do + Class.new do + attr_accessor :name + + def initialize(name) + self.name = name + end + + def dynamoid_dump + name + end + + def eql?(other) + name == other.name + end + + def self.dynamoid_load(string) + new(string.to_s) + end + end + end + + let(:klass) do + new_class(user_class: user_class) do |options| + field :user, options[:user_class] + end + end + + it 'dumps and loads self' do + user = user_class.new('John') + obj = klass.create(user: user) + + expect(obj.user).to eql(user) + expect(reload(obj).user).to eql(user) + expect(raw_attributes(obj)[:user]).to eql('John') + end + end + + context 'Adapter provided' do + let(:user_class) do + Class.new do + attr_accessor :name + + def initialize(name) + self.name = name + end + + def eql?(other) + name == other.name + end + end + end + + let(:adapter) do + Class.new.tap do |c| + c.class_exec(user_class) do |user_class| + @user_class = user_class + + def self.dynamoid_dump(user) + user.name + end + + def self.dynamoid_load(string) + @user_class.new(string.to_s) + end + end + end + end + + let(:klass) do + new_class(adapter: adapter) do |options| + field :user, options[:adapter] + end + end + + it 'dumps and loads custom type' do + user = 
user_class.new('John') + obj = klass.create(user: user) + + expect(obj.user).to eql(user) + expect(reload(obj).user).to eql(user) + expect(raw_attributes(obj)[:user]).to eql('John') + end + end + + context 'DynamoDB type specified' do + let(:user_class) do + Class.new do + attr_accessor :name + + def initialize(name) + self.name = name + end + + def eql?(other) + name == other.name + end + end + end + + let(:adapter) do + Class.new.tap do |c| + c.class_exec(user_class) do |user_class| + @user_class = user_class + + def self.dynamoid_dump(user) + user.name.split + end + + def self.dynamoid_load(array) + array = array.name.split if array.is_a?(@user_class) + @user_class.new(array.join(' ')) + end + + def self.dynamoid_field_type + :array + end + end + end + end + + let(:klass) do + new_class(adapter: adapter) do |options| + field :user, options[:adapter] + end + end + + it 'stores converted value in a specified type' do + user = user_class.new('John Doe') + obj = klass.create(user: user) + + expect(obj.user).to eql(user) + expect(reload(obj).user).to eql(user) + expect(raw_attributes(obj)[:user]).to eql(%w[John Doe]) + end + end + end + + describe 'Binary field' do + let(:klass) do + new_class do + field :image, :binary + end + end + + let(:unfrozen_string) { +"\x00\x88\xFF" } + let(:binary_value) { unfrozen_string.force_encoding('ASCII-8BIT') } + + it 'encodes a string in base64-encoded format' do + obj = klass.create(image: binary_value) + + expect(reload(obj).image).to eql(binary_value) + expect(raw_attributes(obj)[:image]).to eql(Base64.strict_encode64(binary_value)) + end + end +end diff --git a/dynamoid/spec/dynamoid/fields_spec.rb b/dynamoid/spec/dynamoid/fields_spec.rb new file mode 100644 index 000000000..7ad302b8b --- /dev/null +++ b/dynamoid/spec/dynamoid/fields_spec.rb @@ -0,0 +1,474 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Fields do + let(:address) { Address.new } + + describe '.field' do + context 'when :alias option specified' do + let(:klass) do + new_class do + field :Name, :string, alias: :name + end + end + + it 'generates getter and setter for alias' do + object = klass.new + + object.Name = 'Alex' + expect(object.name).to eq('Alex') + + object.name = 'Michael' + expect(object.name).to eq('Michael') + end + + it 'generates ? method' do + object = klass.new + + expect(object.name?).to eq false + object.name = 'Alex' + expect(object.name?).to eq true + end + + it 'generates _before_type_cast method' do + object = klass.new(name: :Alex) + + expect(object.name).to eq 'Alex' + expect(object.name_before_type_cast).to eq :Alex + end + end + + context 'when new generated method overrides existing one' do + let(:module_with_methods) do + Module.new do + def foo; end + + def bar=; end + + def baz?; end + + def foobar_before_type_cast?; end + end + end + + it 'warns about getter' do + message = 'Method foo generated for the field foo overrides already existing method' + expect(Dynamoid.logger).to receive(:warn).with(message) + + new_class(module: module_with_methods, class_name: 'Foobar') do + include @helper_options[:module] + field :foo + end + end + + it 'warns about setter' do + message = 'Method bar= generated for the field bar overrides already existing method' + expect(Dynamoid.logger).to receive(:warn).with(message) + + new_class(module: module_with_methods) do + include @helper_options[:module] + field :bar + end + end + + it 'warns about ?' do + message = 'Method baz? 
generated for the field baz overrides already existing method' + expect(Dynamoid.logger).to receive(:warn).with(message) + + new_class(module: module_with_methods) do + include @helper_options[:module] + field :baz + end + end + + it 'warns about _before_type_cast' do + message = 'Method foobar_before_type_cast? generated for the field foobar overrides already existing method' + expect(Dynamoid.logger).to receive(:warn).with(message) + + new_class(module: module_with_methods) do + include @helper_options[:module] + field :foobar + end + end + + it 'warns about hash_key field' do + messages = [ + 'Method hash_key= generated for the field hash_key overrides already existing method', + 'Method hash_key generated for the field hash_key overrides already existing method' + ] + expect(Dynamoid.logger).to receive(:warn).with(messages[0]) + expect(Dynamoid.logger).to receive(:warn).with(messages[1]) + + new_class do + table key: :hash_key + end + end + + it 'warns about range_value field' do + messages = [ + 'Method range_value= generated for the field range_value overrides already existing method', + 'Method range_value generated for the field range_value overrides already existing method' + ] + expect(Dynamoid.logger).to receive(:warn).with(messages[0]) + expect(Dynamoid.logger).to receive(:warn).with(messages[1]) + + new_class do + range :range_value + end + end + end + end + + it 'declares read attributes' do + expect(address.city).to be_nil + end + + it 'declares write attributes' do + address.city = 'Chicago' + expect(address.city).to eq 'Chicago' + end + + it 'declares a query attribute' do # rubocop:disable Lint/EmptyBlock, RSpec/NoExpectationExample + end + + it 'automatically declares id' do + expect { address.id }.not_to raise_error + end + + it 'allows range key serializers' do + serializer = Class.new do + def self.dump(val) + val&.strftime('%m/%d/%Y') + end + + def self.load(val) + val && DateTime.strptime(val, '%m/%d/%Y').to_date + end + end + + klass = new_class do + range :special_date, :serialized, serializer: serializer + end + + date = '2019-02-24'.to_date + model = klass.create!(special_date: date) + model_loaded = klass.find(model.id, range_key: model.special_date) + expect(model_loaded.special_date).to eq date + end + + context 'query attributes' do + it 'are declared' do + expect(address).not_to be_city + + address.city = 'Chicago' + + expect(address).to be_city + end + + it 'return false when boolean attributes are nil or false' do + address.deliverable = nil + expect(address).not_to be_deliverable + + address.deliverable = false + expect(address).not_to be_deliverable + end + + it 'return true when boolean attributes are true' do + address.deliverable = true + expect(address).to be_deliverable + end + end + + context 'with a saved address' do + let(:address) { Address.create(deliverable: true) } + let(:original_id) { address.id } + + it 'writes an attribute correctly' do + address.write_attribute(:city, 'Chicago') + expect(address.read_attribute(:city)).to eq 'Chicago' + end + + it 'writes an attribute with an alias' do + address[:city] = 'Chicago' + expect(address.read_attribute(:city)).to eq 'Chicago' + end + + it 'reads a written attribute' do + address.city = 'Chicago' + expect(address.read_attribute(:city)).to eq 'Chicago' + end + + it 'reads a written attribute with the alias' do + address.write_attribute(:city, 'Chicago') + expect(address[:city]).to eq 'Chicago' + end + + it 'updates one attribute' do + expect(address).to receive(:save).once.and_return(true) + 
address.update_attribute(:city, 'Chicago') + expect(address[:city]).to eq 'Chicago' + expect(address.id).to eq original_id + end + + it 'adds in dirty methods for attributes' do + address.city = 'Chicago' + address.save + + address.city = 'San Francisco' + + expect(address.city_was).to eq 'Chicago' + end + + it 'returns all attributes' do + expect(Address.attributes).to eq(id: { type: :string }, + created_at: { type: :datetime }, + updated_at: { type: :datetime }, + city: { type: :string }, + options: { type: :serialized }, + deliverable: { type: :boolean }, + latitude: { type: :number }, + config: { type: :raw }, + registered_on: { type: :date }, + lock_version: { type: :integer }) + end + end + + it 'raises an exception when items size exceeds 400kb' do + expect do + Address.create(city: 'Ten chars ' * 500_000) + end.to raise_error(Aws::DynamoDB::Errors::ValidationException, 'Item size has exceeded the maximum allowed size') + end + + describe '.remove_field' do + subject { address } + + before do + Address.field :foobar + Address.remove_field :foobar + end + + it 'is not in the attributes hash' do + expect(Address.attributes).not_to have_key(:foobar) + end + + it 'removes the accessor' do + expect(subject).not_to respond_to(:foobar) + end + + it 'removes the writer' do + expect(subject).not_to respond_to(:foobar=) + end + + it 'removes the interrogative' do + expect(subject).not_to respond_to(:foobar?) + end + end + + context 'default values for fields' do + let(:doc_class) do + new_class do + field :name, :string, default: 'x' + field :uid, :integer, default: -> { 42 } + field :config, :serialized, default: {} + field :version, :integer, default: 1 + field :hidden, :boolean, default: false + end + end + + it 'returns default value specified as object' do + expect(doc_class.new.name).to eq('x') + end + + it 'returns default value specified as lamda/block (callable object)' do + expect(doc_class.new.uid).to eq(42) + end + + it 'returns default value as is for serializable field' do + expect(doc_class.new.config).to eq({}) + end + + it 'supports `false` as default value' do + expect(doc_class.new.hidden).to eq(false) + end + + it 'can modify default value independently for every instance' do + doc = doc_class.new + doc.name << 'y' + expect(doc_class.new.name).to eq('x') + end + + it 'returns default value specified as object even if value cannot be duplicated' do + expect(doc_class.new.version).to eq(1) + end + + it 'saves default values' do + doc = doc_class.create! 
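+ # Reload the record from DynamoDB so the expectations below check the persisted defaults rather than the in-memory ones.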
+ doc = doc_class.find(doc.id) + expect(doc.name).to eq('x') + expect(doc.uid).to eq(42) + expect(doc.config).to eq({}) + expect(doc.version).to eq(1) + expect(doc.hidden).to be false + end + + it 'does not use default value if nil value assigns explicitly' do + doc = doc_class.new(name: nil) + expect(doc.name).to eq nil + end + + it 'supports default value for custom type' do + user_class = Class.new do + attr_accessor :name + + def initialize(name) + self.name = name + end + + def dynamoid_dump + name + end + + def eql?(other) + name == other.name + end + + def hash + name.hash + end + + def self.dynamoid_load(string) + new(string.to_s) + end + end + + model_class = new_class(user: user_class.new('Mary')) do |options| + field :user, user_class, default: options[:user] + end + + model = model_class.create + model = model_class.find(model.id) + + expect(model.user).to eql user_class.new('Mary') + end + end + + describe 'deprecated :float field type' do + let(:doc) do + new_class do + field :distance_m, :float + end.new + end + + it 'acts as a :number field' do + # NOTE: Set as string to avoid error on JRuby 9.4.0.0: + # Aws::DynamoDB::Errors::ValidationException: + # DynamoDB only supports precision up to 38 digits + doc.distance_m = '5.33' + doc.save! + doc.reload + expect(doc.distance_m).to eq 5.33 + end + + it 'warns' do + expect(Dynamoid.logger).to receive(:warn).with(/deprecated/) + doc + end + end + + context 'extention overrides field accessors' do + let(:klass) do + extention = Module.new do + def name + super.upcase + end + + def name=(str) + super(str.try(:downcase)) + end + end + + new_class do + include extention + + field :name + end + end + + it 'can access new setter' do + address = klass.new + address.name = 'AB cd' + expect(address[:name]).to eq('ab cd') + end + + it 'can access new getter' do + address = klass.new + address.name = 'ABcd' + expect(address.name).to eq('ABCD') + end + end + + describe '#write_attribute' do + it 'writes attribute on the model' do + klass = new_class do + field :count, :integer + end + + obj = klass.new + obj.write_attribute(:count, 10) + expect(obj.attributes[:count]).to eql(10) + end + + it 'returns self' do + klass = new_class do + field :count, :integer + end + + obj = klass.new + result = obj.write_attribute(:count, 10) + expect(result).to eql(obj) + end + + describe 'type casting' do + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.new + obj.write_attribute(:count, '101') + expect(obj.attributes[:count]).to eql(101) + end + end + + it 'raises an UnknownAttribute error if the attribute is not on the model' do + obj = new_class.new + + expect { + obj.write_attribute(:name, 'Alex') + }.to raise_error Dynamoid::Errors::UnknownAttribute + end + + it 'marks an attribute as changed' do + klass = new_class do + field :name + end + + obj = klass.new + obj.write_attribute(:name, 'Alex') + expect(obj.name_changed?).to eq true + end + + it 'does not mark an attribute as changed if new value equals the old one' do + klass = new_class do + field :name + end + + obj = klass.create(name: 'Alex') + obj = klass.find(obj.id) + + obj.write_attribute(:name, 'Alex') + expect(obj.name_changed?).to eq false + end + end +end diff --git a/dynamoid/spec/dynamoid/finders_spec.rb b/dynamoid/spec/dynamoid/finders_spec.rb new file mode 100644 index 000000000..316490472 --- /dev/null +++ b/dynamoid/spec/dynamoid/finders_spec.rb @@ -0,0 +1,676 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 
Dynamoid::Finders do + let!(:address) { Address.create(city: 'Chicago') } + + describe '.find' do + let(:klass) do + new_class(class_name: 'Document') + end + + let(:klass_with_composite_key) do + new_class(class_name: 'Cat') do + range :age, :integer + end + end + + context 'one primary key provided' do + context 'simple primary key' do + it 'finds' do + obj = klass.create + expect(klass.find(obj.id)).to eql(obj) + end + + it 'raises RecordNotFound error when found nothing' do + klass.create_table + expect { + klass.find('wrong-id') + }.to raise_error(Dynamoid::Errors::RecordNotFound, "Couldn't find Document with primary key wrong-id") + end + end + + context 'composite primary key' do + it 'finds' do + obj = klass_with_composite_key.create(age: 12) + expect(klass_with_composite_key.find(obj.id, range_key: 12)).to eql(obj) + end + + it 'raises RecordNotFound error when found nothing' do + klass_with_composite_key.create_table + expect { + klass_with_composite_key.find('wrong-id', range_key: 100_500) + }.to raise_error(Dynamoid::Errors::RecordNotFound, "Couldn't find Cat with primary key (wrong-id,100500)") + end + + it 'type casts a sort key value' do + obj = klass_with_composite_key.create(age: 12) + expect(klass_with_composite_key.find(obj.id, range_key: '12.333')).to eql(obj) + end + + it 'dumps a sort key value' do + klass_with_date = new_class do + range :published_on, :date + end + + date = '2018/07/26'.to_date + obj = klass_with_date.create(published_on: date) + expect(klass_with_date.find(obj.id, range_key: date)).to eql(obj) + end + + it 'raises MissingRangeKey when range key is not specified' do + obj = klass_with_composite_key.create(age: 12) + + expect { + klass_with_composite_key.find(obj.id) + }.to raise_error(Dynamoid::Errors::MissingRangeKey) + end + end + + it 'returns persisted? 
object' do + obj = klass.create + expect(klass.find(obj.id)).to be_persisted + end + + context 'field is not declared in document' do + let(:class_with_not_declared_field) do + new_class do + field :name + end + end + + before do + class_with_not_declared_field.create_table + end + + it 'ignores it without exceptions' do + Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '1', bod: '1996-12-21') + obj = class_with_not_declared_field.find('1') + + expect(obj.id).to eql('1') + end + end + + describe 'raise_error option' do + before do + klass.create_table + end + + context 'when true' do + it 'leads to raising RecordNotFound exception if model not found' do + expect do + klass.find('blah-blah', raise_error: true) + end.to raise_error(Dynamoid::Errors::RecordNotFound) + end + end + + context 'when false' do + it 'leads to not raising exception if model not found' do + expect(klass.find('blah-blah', raise_error: false)).to eq nil + end + end + end + end + + context 'multiple primary keys provided' do + context 'simple primary key' do + it 'finds with an array of keys' do + objects = (1..2).map { klass.create } + obj1, obj2 = objects + expect(klass.find([obj1.id, obj2.id])).to match_array(objects) + end + + it 'finds with one key' do + obj = klass_with_composite_key.create(age: 12) + expect(klass_with_composite_key.find([[obj.id, obj.age]])).to eq([obj]) + end + + it 'returns an empty array if an empty array passed' do + klass.create_table + expect(klass.find([])).to eql([]) + end + + it 'raises RecordNotFound error when some objects are not found' do + objects = (1..2).map { klass.create } + obj1, obj2 = objects + + expect { + klass.find([obj1.id, obj2.id, 'wrong-id']) + }.to raise_error( + Dynamoid::Errors::RecordNotFound, + "Couldn't find all Documents with primary keys [#{obj1.id}, #{obj2.id}, wrong-id] " \ + '(found 2 results, but was looking for 3)' + ) + end + + it 'raises RecordNotFound if only one primary key provided and no result found' do + klass.create_table + expect { + klass.find(['wrong-id']) + }.to raise_error( + Dynamoid::Errors::RecordNotFound, + "Couldn't find all Documents with primary keys [wrong-id] (found 0 results, but was looking for 1)" + ) + end + + it 'finds with a list of keys' do + objects = (1..2).map { klass.create } + obj1, obj2 = objects + expect(klass.find(obj1.id, obj2.id)).to match_array(objects) + end + end + + context 'composite primary key' do + it 'finds with an array of keys' do + objects = (1..2).map { |i| klass_with_composite_key.create(age: i) } + obj1, obj2 = objects + expect(klass_with_composite_key.find([[obj1.id, obj1.age], [obj2.id, obj2.age]])).to match_array(objects) + end + + it 'finds with one key' do + obj = klass_with_composite_key.create(age: 12) + expect(klass_with_composite_key.find([[obj.id, obj.age]])).to eq([obj]) + end + + it 'returns an empty array if an empty array passed' do + klass_with_composite_key.create_table + expect(klass_with_composite_key.find([])).to eql([]) + end + + it 'raises RecordNotFound error when some objects are not found' do + obj = klass_with_composite_key.create(age: 12) + expect { + klass_with_composite_key.find([[obj.id, obj.age], ['wrong-id', 100_500]]) + }.to raise_error( + Dynamoid::Errors::RecordNotFound, + "Couldn't find all Cats with primary keys [(#{obj.id},12), (wrong-id,100500)] (found 1 results, but was looking for 2)" + ) + end + + it 'raises RecordNotFound if only one primary key provided and no result found' do + klass_with_composite_key.create_table + expect { + 
klass_with_composite_key.find([['wrong-id', 100_500]]) + }.to raise_error( + Dynamoid::Errors::RecordNotFound, + "Couldn't find all Cats with primary keys [(wrong-id,100500)] (found 0 results, but was looking for 1)" + ) + end + + it 'finds with a list of keys' do + pending 'still is not implemented' + + objects = (1..2).map { |i| klass_with_composite_key.create(age: i) } + obj1, obj2 = objects + expect(klass_with_composite_key.find([obj1.id, obj1.age], [obj2.id, obj2.age])).to match_array(objects) + end + + it 'type casts a sort key value' do + objects = (1..2).map { |i| klass_with_composite_key.create(age: i) } + obj1, obj2 = objects + expect(klass_with_composite_key.find([[obj1.id, '1'], [obj2.id, '2']])).to match_array(objects) + end + + it 'dumps a sort key value' do + klass_with_date = new_class do + range :published_on, :date + end + + obj1 = klass_with_date.create(published_on: '2018/07/26'.to_date) + obj2 = klass_with_date.create(published_on: '2018/07/27'.to_date) + + expect( + klass_with_date.find([[obj1.id, obj1.published_on], [obj2.id, obj2.published_on]]) + ).to contain_exactly(obj1, obj2) + end + + it 'raises MissingRangeKey when range key is not specified' do + obj1, obj2 = klass_with_composite_key.create([{ age: 1 }, { age: 2 }]) + + expect { + klass_with_composite_key.find([obj1.id, obj2.id]) + }.to raise_error(Dynamoid::Errors::MissingRangeKey) + end + end + + it 'returns persisted? objects' do + objects = (1..2).map { |i| klass_with_composite_key.create(age: i) } + obj1, obj2 = objects + + objects = klass_with_composite_key.find([[obj1.id, obj1.age], [obj2.id, obj2.age]]) + obj1, obj2 = objects + + expect(obj1).to be_persisted + expect(obj2).to be_persisted + end + + describe 'raise_error option' do + before do + klass.create_table + end + + context 'when true' do + it 'leads to raising exception if model not found' do + obj = klass.create + + expect do + klass.find([obj.id, 'blah-blah'], raise_error: true) + end.to raise_error(Dynamoid::Errors::RecordNotFound) + end + end + + context 'when false' do + it 'leads to not raising exception if model not found' do + obj = klass.create + + # expect(klass.find([obj.id, 'blah-blah'], raise_error: false)).to eq [obj] + expect(klass.find_all([obj.id, 'blah-blah'])).to eq [obj] + end + end + end + + context 'field is not declared in document' do + let(:class_with_not_declared_field) do + new_class do + field :name + end + end + + before do + class_with_not_declared_field.create_table + end + + it 'ignores it without exceptions' do + Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '1', dob: '1996-12-21') + Dynamoid.adapter.put_item(class_with_not_declared_field.table_name, id: '2', dob: '2001-03-14') + + objects = class_with_not_declared_field.find(%w[1 2]) + + expect(objects.size).to eql 2 + expect(objects.map(&:id)).to contain_exactly('1', '2') + end + end + + context 'backoff is specified' do + before do + @old_backoff = Dynamoid.config.backoff + @old_backoff_strategies = Dynamoid.config.backoff_strategies.dup + + @counter = 0 + Dynamoid.config.backoff_strategies[:simple] = ->(_) { -> { @counter += 1 } } + Dynamoid.config.backoff = { simple: nil } + end + + after do + Dynamoid.config.backoff = @old_backoff + Dynamoid.config.backoff_strategies = @old_backoff_strategies + end + + it 'returns items' do + users = (1..10).map { User.create } + + results = User.find(users.map(&:id)) + expect(results).to match_array(users) + end + + it 'raises RecordNotFound error when there are no results' do + User.create_table 
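+ # The table has to exist here so the failure comes from the missing item (RecordNotFound) rather than from a missing table.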
+ + expect { + User.find(['some-fake-id']) + }.to raise_error(Dynamoid::Errors::RecordNotFound) + end + + it 'uses specified backoff when some items are not processed' do + # batch_get_item has following limitations: + # * up to 100 items at once + # * up to 16 MB at once + # + # So we write data as large as possible and read it back + # 100 * 400 KB (limit for item) = ~40 MB + # 40 MB / 16 MB = 3 times + + ids = (1..100).map(&:to_s) + users = ids.map do |id| + name = ' ' * (400.kilobytes - 120) # 400KB - length(attribute names) + User.create(id: id, name: name) + end + + results = User.find(users.map(&:id)) + expect(results).to match_array(users) + + expect(@counter).to eq 2 + end + + it 'uses new backoff after successful call without unprocessed items' do + skip 'it is difficult to test' + end + end + end + + describe 'callbacks' do + it 'runs after_initialize callback' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + end + + object = klass_with_callback.create! + + expect { klass_with_callback.find(object.id) }.to output('run after_initialize').to_stdout + end + + it 'runs after_find callback' do + klass_with_callback = new_class do + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! + + expect { klass_with_callback.find(object.id) }.to output('run after_find').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! + + expect do + klass_with_callback.find(object.id) + end.to output('run after_initializerun after_find').to_stdout + end + end + end + + it 'sends consistent option to the adapter' do + expect(Dynamoid.adapter).to receive(:get_item) + .with(anything, anything, hash_including(consistent_read: true)) + .and_call_original + Address.find(address.id, consistent_read: true) + end + + context 'with users' do + it 'finds using method_missing for attributes' do + array = Address.find_by_city('Chicago') + + expect(array).to eq address + end + + it 'finds using method_missing for multiple attributes' do + user = User.create(name: 'Josh', email: 'josh@joshsymonds.com') + + array = User.find_all_by_name_and_email('Josh', 'josh@joshsymonds.com').to_a + + expect(array).to eq [user] + end + + it 'finds using method_missing for single attributes and multiple results' do + user1 = User.create(name: 'Josh', email: 'josh@joshsymonds.com') + user2 = User.create(name: 'Josh', email: 'josh@joshsymonds.com') + + array = User.find_all_by_name('Josh').to_a + + expect(array.size).to eq 2 + expect(array).to include user1 + expect(array).to include user2 + end + + it 'finds using method_missing for multiple attributes and multiple results' do + user1 = User.create(name: 'Josh', email: 'josh@joshsymonds.com') + user2 = User.create(name: 'Josh', email: 'josh@joshsymonds.com') + + array = User.find_all_by_name_and_email('Josh', 'josh@joshsymonds.com').to_a + + expect(array.size).to eq 2 + expect(array).to include user1 + expect(array).to include user2 + end + + it 'finds using method_missing for multiple attributes and no results' do + user1 = User.create(name: 'Josh', email: 'josh@joshsymonds.com') + user2 = User.create(name: 'Justin', email: 'justin@joshsymonds.com') + + array = User.find_all_by_name_and_email('Gaga', 'josh@joshsymonds.com').to_a + + expect(array).to be_empty + end + + it 'finds using method_missing for a single attribute and no 
results' do + user1 = User.create(name: 'Josh', email: 'josh@joshsymonds.com') + user2 = User.create(name: 'Justin', email: 'justin@joshsymonds.com') + + array = User.find_all_by_name('Gaga').to_a + + expect(array).to be_empty + end + + it 'finds on a query that is not indexed' do + user = User.create(password: 'Test') + + array = User.find_all_by_password('Test').to_a + + expect(array).to eq [user] + end + + it 'finds on a query on multiple attributes that are not indexed' do + user = User.create(password: 'Test', name: 'Josh') + + array = User.find_all_by_password_and_name('Test', 'Josh').to_a + + expect(array).to eq [user] + end + + it 'returns an empty array when fields exist but nothing is found' do + User.create_table + array = User.find_all_by_password('Test').to_a + + expect(array).to be_empty + end + end + + context 'find_all' do + it 'passes options to the adapter' do + pending 'This test is broken as we are overriding the consistent_read option to true inside the adapter' + user_ids = [%w[1 red], %w[1 green]] + Dynamoid.adapter.expects(:read).with(anything, user_ids, consistent_read: true) + User.find_all(user_ids, consistent_read: true) + end + + describe 'callbacks' do + it 'runs after_initialize callback' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + end + + object = klass_with_callback.create! + + expect { klass_with_callback.find_all([object.id]) }.to output('run after_initialize').to_stdout + end + + it 'runs after_find callback' do + klass_with_callback = new_class do + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! + + expect { klass_with_callback.find_all([object.id]) }.to output('run after_find').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! 
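+ # find_all reads the items back and re-instantiates each model, so
+ # after_initialize fires before after_find on every loaded record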
+ + expect do + klass_with_callback.find_all([object.id]) + end.to output('run after_initializerun after_find').to_stdout + end + end + end + + describe '.find_all_by_secondary_index' do + def time_to_decimal(time) + BigDecimal(format('%d.%09d', time.to_i, time.nsec)) + end + + it 'returns exception if index could not be found' do + Post.create(post_id: 1, posted_at: Time.now) + expect do + Post.find_all_by_secondary_index(posted_at: Time.now.to_i) + end.to raise_exception(Dynamoid::Errors::MissingIndex) + end + + context 'local secondary index' do + it 'queries the local secondary index' do + time = DateTime.now + p1 = Post.create(name: 'p1', post_id: 1, posted_at: time) + p2 = Post.create(name: 'p2', post_id: 1, posted_at: time + 1.day) + p3 = Post.create(name: 'p3', post_id: 2, posted_at: time) + + posts = Post.find_all_by_secondary_index( + { post_id: p1.post_id }, + range: { name: 'p1' } + ) + post = posts.first + + expect(posts.count).to eql 1 + expect(post.name).to eql 'p1' + expect(post.post_id).to eql '1' + end + end + + context 'global secondary index' do + it 'can sort' do + time = DateTime.now + first_visit = Bar.create(name: 'Drank', visited_at: (time - 1.day).to_i) + Bar.create(name: 'Drank', visited_at: time.to_i) + last_visit = Bar.create(name: 'Drank', visited_at: (time + 1.day).to_i) + + bars = Bar.find_all_by_secondary_index( + { name: 'Drank' }, range: { 'visited_at.lte': (time + 10.days).to_i } + ) + first_bar = bars.first + last_bar = bars.last + expect(bars.count).to eql 3 + expect(first_bar.name).to eql first_visit.name + expect(first_bar.bar_id).to eql first_visit.bar_id + expect(last_bar.name).to eql last_visit.name + expect(last_bar.bar_id).to eql last_visit.bar_id + end + + it 'honors :scan_index_forward => false' do + time = DateTime.now + first_visit = Bar.create(name: 'Drank', visited_at: time - 1.day) + Bar.create(name: 'Drank', visited_at: time) + last_visit = Bar.create(name: 'Drank', visited_at: time + 1.day) + different_bar = Bar.create(name: 'Junk', visited_at: time + 7.days) + bars = Bar.find_all_by_secondary_index( + { name: 'Drank' }, range: { 'visited_at.lte': (time + 10.days).to_i }, + scan_index_forward: false + ) + first_bar = bars.first + last_bar = bars.last + expect(bars.count).to eql 3 + expect(first_bar.name).to eql last_visit.name + expect(first_bar.bar_id).to eql last_visit.bar_id + expect(last_bar.name).to eql first_visit.name + expect(last_bar.bar_id).to eql first_visit.bar_id + end + + it 'queries gsi with hash key' do + time = DateTime.now + p1 = Post.create(post_id: 1, posted_at: time, length: '10') + p2 = Post.create(post_id: 2, posted_at: time, length: '30') + p3 = Post.create(post_id: 3, posted_at: time, length: '10') + + posts = Post.find_all_by_secondary_index(length: '10') + expect(posts.map(&:post_id).sort).to eql %w[1 3] + end + + it 'queries gsi with hash and range key' do + time = Time.now + p1 = Post.create(post_id: 1, posted_at: time, name: 'post1') + p2 = Post.create(post_id: 2, posted_at: time + 1.day, name: 'post1') + p3 = Post.create(post_id: 3, posted_at: time, name: 'post3') + + posts = Post.find_all_by_secondary_index( + { name: 'post1' }, + range: { posted_at: time_to_decimal(time) } + ) + expect(posts.map(&:post_id).sort).to eql ['1'] + end + end + + describe 'custom range queries' do + describe 'string comparisons' do + it 'filters based on begins_with operator' do + time = DateTime.now + Post.create(post_id: 1, posted_at: time, name: 'fb_post') + Post.create(post_id: 1, posted_at: time + 1.day, name: 
'blog_post') + + posts = Post.find_all_by_secondary_index( + { post_id: '1' }, range: { 'name.begins_with': 'blog_' } + ) + expect(posts.map(&:name)).to eql ['blog_post'] + end + end + + describe 'numeric comparisons' do + before do + @time = DateTime.now + p1 = Post.create(post_id: 1, posted_at: @time, name: 'post') + p2 = Post.create(post_id: 2, posted_at: @time + 1.day, name: 'post') + p3 = Post.create(post_id: 3, posted_at: @time + 2.days, name: 'post') + end + + it 'filters based on gt (greater than)' do + posts = Post.find_all_by_secondary_index( + { name: 'post' }, + range: { 'posted_at.gt': time_to_decimal(@time + 1.day) } + ) + expect(posts.map(&:post_id).sort).to eql ['3'] + end + + it 'filters based on lt (less than)' do + posts = Post.find_all_by_secondary_index( + { name: 'post' }, + range: { 'posted_at.lt': time_to_decimal(@time + 1.day) } + ) + expect(posts.map(&:post_id).sort).to eql ['1'] + end + + it 'filters based on gte (greater than or equal to)' do + posts = Post.find_all_by_secondary_index( + { name: 'post' }, + range: { 'posted_at.gte': time_to_decimal(@time + 1.day) } + ) + expect(posts.map(&:post_id).sort).to eql %w[2 3] + end + + it 'filters based on lte (less than or equal to)' do + posts = Post.find_all_by_secondary_index( + { name: 'post' }, + range: { 'posted_at.lte': time_to_decimal(@time + 1.day) } + ) + expect(posts.map(&:post_id).sort).to eql %w[1 2] + end + + it 'filters based on between operator' do + between = [time_to_decimal(@time - 1.day), time_to_decimal(@time + 1.5.day)] + posts = Post.find_all_by_secondary_index( + { name: 'post' }, + range: { 'posted_at.between': between } + ) + expect(posts.map(&:post_id).sort).to eql %w[1 2] + end + end + end + end +end diff --git a/dynamoid/spec/dynamoid/identity_map_spec.rb b/dynamoid/spec/dynamoid/identity_map_spec.rb new file mode 100644 index 000000000..1bda7d917 --- /dev/null +++ b/dynamoid/spec/dynamoid/identity_map_spec.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::IdentityMap do + before do + Dynamoid::Config.identity_map = true + end + + after do + Dynamoid::Config.identity_map = false + end + + context 'object identity' do + it 'maintains a single object' do + tweet = Tweet.create(tweet_id: 'x', group: 'one') + tweet1 = Tweet.where(tweet_id: 'x', group: 'one').first + expect(tweet).to equal(tweet1) + end + end + + context 'cache' do + it 'uses cache' do + tweet = Tweet.create(tweet_id: 'x', group: 'one') + expect(Dynamoid::Adapter).not_to receive(:read) + tweet1 = Tweet.find_by_id('x', range_key: 'one') + expect(tweet).to equal(tweet1) + end + + it 'clears cache on delete' do + tweet = Tweet.create(tweet_id: 'x', group: 'one') + tweet.delete + expect(Tweet.find_by_id('x', range_key: 'one')).to be_nil + end + end + + context 'clear' do + it 'clears the identiy map' do + Tweet.create(tweet_id: 'x', group: 'one') + Tweet.create(tweet_id: 'x', group: 'two') + + expect(Tweet.identity_map.size).to eq(2) + described_class.clear + expect(Tweet.identity_map.size).to eq(0) + end + end +end diff --git a/dynamoid/spec/dynamoid/indexes_spec.rb b/dynamoid/spec/dynamoid/indexes_spec.rb new file mode 100644 index 000000000..25849cfde --- /dev/null +++ b/dynamoid/spec/dynamoid/indexes_spec.rb @@ -0,0 +1,450 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Indexes do + let(:doc_class) do + new_class + end + + describe 'base behaviour' do + it 'has a local secondary indexes hash' do + expect(doc_class).to 
respond_to(:local_secondary_indexes) + end + + it 'has a global secondary indexes hash' do + expect(doc_class).to respond_to(:global_secondary_indexes) + end + end + + describe '.global_secondary_index' do + context 'with a correct definition' do + before do + @dummy_index = double('Dynamoid::Indexes::Index') + allow(Dynamoid::Indexes::Index).to receive(:new).and_return(@dummy_index) + end + + it 'adds the index to the global_secondary_indexes hash' do + index_key = doc_class.index_key(:some_hash_field) + doc_class.global_secondary_index(hash_key: :some_hash_field) + + expected_index = doc_class.global_secondary_indexes[index_key] + expect(expected_index).to eq(@dummy_index) + end + + it 'with a range key, also adds the index to the global_secondary_indexes hash' do + index_key = doc_class.index_key(:some_hash_field, :some_range_field) + doc_class.global_secondary_index( + hash_key: :some_hash_field, + range_key: :some_range_field + ) + + expected_index = doc_class.global_secondary_indexes[index_key] + expect(expected_index).to eq(@dummy_index) + end + + context 'with optional parameters' do + context 'with a hash-only index' do + let(:doc_class_with_gsi) do + doc_class.global_secondary_index(hash_key: :secondary_hash_field) + end + + it 'creates the index with the correct options' do + test_class = doc_class_with_gsi + index_opts = { + dynamoid_class: test_class, + type: :global_secondary, + read_capacity: Dynamoid::Config.read_capacity, + write_capacity: Dynamoid::Config.write_capacity, + hash_key: :secondary_hash_field + } + expect(Dynamoid::Indexes::Index).to have_received(:new).with(index_opts) + end + + it 'adds the index to the global_secondary_indexes hash' do + test_class = doc_class_with_gsi + index_key = 'secondary_hash_field' + expect(test_class.global_secondary_indexes.keys).to eql [index_key] + expect(test_class.global_secondary_indexes[index_key]).to eq(@dummy_index) + end + end + + context 'with a hash and range index' do + let(:doc_class_with_gsi) do + doc_class.global_secondary_index( + hash_key: :secondary_hash_field, + range_key: :secondary_range_field + ) + end + + it 'creates the index with the correct options' do + test_class = doc_class_with_gsi + index_opts = { + dynamoid_class: test_class, + type: :global_secondary, + read_capacity: Dynamoid::Config.read_capacity, + write_capacity: Dynamoid::Config.write_capacity, + hash_key: :secondary_hash_field, + range_key: :secondary_range_field + } + expect(Dynamoid::Indexes::Index).to have_received(:new).with(index_opts) + end + + it 'adds the index to the global_secondary_indexes hash' do + test_class = doc_class_with_gsi + index_key = 'secondary_hash_field_secondary_range_field' + expect(test_class.global_secondary_indexes[index_key]).to eq(@dummy_index) + end + end + end + end + + context 'with an improper definition' do + it 'with a blank definition, throws an error' do + expect do + doc_class.global_secondary_index + end.to raise_error(Dynamoid::Errors::InvalidIndex, /empty index/) + end + + it 'with no :hash_key, throws an error' do + expect do + doc_class.global_secondary_index(range_key: :something) + end.to raise_error( + Dynamoid::Errors::InvalidIndex, /hash_key.*specified/ + ) + end + end + end + + describe '.local_secondary_index' do + context 'with correct parameters' do + before do + @dummy_index = double('Dynamoid::Indexes::Index') + allow(Dynamoid::Indexes::Index).to receive(:new).and_return(@dummy_index) + end + + let(:doc_class_with_lsi) do + Class.new do + include Dynamoid::Document + table name: 
:mytable, key: :some_hash_field + range :some_range_field # @WHAT + + local_secondary_index(range_key: :secondary_range_field) + end + end + + it 'creates the index with the correct options' do + test_class = doc_class_with_lsi + index_opts = { + dynamoid_class: test_class, + type: :local_secondary, + hash_key: :some_hash_field, + range_key: :secondary_range_field + } + expect(Dynamoid::Indexes::Index).to have_received(:new).with(index_opts) + end + + it 'adds the index to the local_secondary_indexes hash' do + test_class = doc_class_with_lsi + index_key = 'some_hash_field_secondary_range_field' + expect(test_class.local_secondary_indexes.keys).to eql [index_key] + expect(test_class.local_secondary_indexes[index_key]).to eq(@dummy_index) + end + end + + context 'with an improper definition' do + let(:doc_class_with_table) do + Class.new do + include Dynamoid::Document + table name: :mytable, key: :some_hash_field + range :some_range_field + end + end + + it 'with a blank definition, throws an error' do + expect do + doc_class.local_secondary_index + end.to raise_error(Dynamoid::Errors::InvalidIndex, /empty/) + end + + it 'throws an error if the range_key isn`t specified' do + test_class = doc_class_with_table + expect do + test_class.local_secondary_index(projected_attributes: :all) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /range_key.*specified/) + end + + it 'throws an error if the range_key is the same as the primary range key' do + test_class = doc_class_with_table + expect do + test_class.local_secondary_index(range_key: :some_range_field) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /different.*:range_key/) + end + end + end + + describe '.index_key' do + context 'when hash specified' do + it 'generates an index key of the form if only hash is specified' do + index_key = doc_class.index_key(:some_hash_field) + expect(index_key).to eq('some_hash_field') + end + end + + context 'when hash and range specified' do + it 'generates an index key of the form _' do + index_key = doc_class.index_key(:some_hash_field, :some_range_field) + expect(index_key).to eq('some_hash_field_some_range_field') + end + + it 'generates an index key of the form when range is nil' do + index_key = doc_class.index_key(:some_hash_field, nil) + expect(index_key).to eq('some_hash_field') + end + end + end + + describe '.index_name' do + let(:doc_class) do + Class.new do + include Dynamoid::Document + table name: :mytable + end + end + + it 'generates an index name of the form _index_' do + expect(doc_class).to receive(:index_key).and_return('whoa_an_index_key') + index_name = doc_class.index_name(:some_hash_field, :some_range_field) + expect(index_name).to eq("#{doc_class.table_name}_index_whoa_an_index_key") + end + end + + # Index nested class. 
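+ # Editor's aside — an illustrative sketch (not from the original changeset) of
+ # the model-level declaration API these Index examples rely on. The table and
+ # field names below are hypothetical, and the helper is left unreferenced.
+ let(:sketched_indexed_class) do
+   Class.new do
+     include Dynamoid::Document
+     table name: :sketch_posts, key: :post_id
+     range :created_at, :datetime
+
+     field :author
+
+     # a GSI declares its own hash key (and optional range key); rows come back
+     # via find_all_by_secondary_index({ author: ... }, range: { ... })
+     global_secondary_index hash_key: :author, range_key: :created_at
+
+     # an LSI shares the table's hash key and adds an alternative range key
+     local_secondary_index range_key: :author
+   end
+ end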
+ describe 'Index' do + describe '#initialize' do + let(:doc_class) do + Class.new do + include Dynamoid::Document + table name: :mytable, key: :some_hash_field + + field :primary_hash_field + field :primary_range_field + field :secondary_hash_field + field :secondary_range_field + field :array_field, :array + field :serialized_field, :serialized + end + end + + context 'validation' do + it 'throws an error when :dynamoid_class is not specified' do + expect do + Dynamoid::Indexes::Index.new + end.to raise_error(Dynamoid::Errors::InvalidIndex, /dynamoid_class.*required/) + end + + it 'throws an error if :type is invalid' do + expect do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :primary_hash_field, + type: :garbage + ) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /Invalid.*:type/) + end + + it 'throws an error when :hash_key is not a table attribute' do + expect do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :garbage, + type: :global_secondary + ) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /No such field/) + end + + it 'throws an error when :hash_key is of invalid type' do + expect do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :array_field, + type: :global_secondary + ) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /hash_key.*/) + end + + it 'throws an error when :range_key is of invalid type' do + expect do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :primary_hash_field, + type: :global_secondary, + range_key: :array_field + ) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /range_key.*/) + end + + it 'throws an error when :range_key is not a table attribute' do + expect do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :primary_hash_field, + type: :global_secondary, + range_key: :garbage + ) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /No such field/) + end + + it 'throws an error if :projected_attributes are invalid' do + expect do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :primary_hash_field, + type: :global_secondary, + projected_attributes: :garbage + ) + end.to raise_error(Dynamoid::Errors::InvalidIndex, /Invalid projected attributes/) + end + end + + context 'correct parameters' do + context 'with only required params' do + let(:defaults_index) do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :primary_hash_field, + range_key: :secondary_range_field, + type: :local_secondary + ) + end + + it 'sets name to the default index name' do + expected_name = doc_class.index_name( + :primary_hash_field, + :secondary_range_field + ) + expect(defaults_index.name).to eq(expected_name) + end + + it 'sets the hash_key_schema' do + expected = { primary_hash_field: :string } + expect(defaults_index.hash_key_schema).to eql expected + end + + it 'sets the range_key_schema' do + expected = { secondary_range_field: :string } + expect(defaults_index.range_key_schema).to eql expected + end + + it 'sets projected attributes to the default :keys_only' do + expect(defaults_index.projected_attributes).to eq(:keys_only) + end + + it 'sets all provided attributes' do + expect(defaults_index.dynamoid_class).to eq(doc_class) + expect(defaults_index.type).to eq(:local_secondary) + expect(defaults_index.hash_key).to eq(:primary_hash_field) + expect(defaults_index.range_key).to eq(:secondary_range_field) + end + end + + context 'with other params specified' do + let(:other_index) 
do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + name: :mont_blanc, + hash_key: :secondary_hash_field, + type: :global_secondary, + projected_attributes: %i[secondary_hash_field array_field], + read_capacity: 100, + write_capacity: 200 + ) + end + + it 'sets the provided attributes' do + expect(other_index.dynamoid_class).to eq(doc_class) + expect(other_index.name).to eq(:mont_blanc) + expect(other_index.type).to eq(:global_secondary) + expect(other_index.hash_key).to eq(:secondary_hash_field) + expect(other_index.range_key.present?).to eq(false) + expect(other_index.read_capacity).to eq(100) + expect(other_index.write_capacity).to eq(200) + expect(other_index.projected_attributes).to eq( + %i[secondary_hash_field array_field] + ) + end + end + + context 'with custom type key params' do + let(:doc_class) do + new_class do + # rubocop:disable Lint/ConstantDefinitionInBlock + class CustomType + def dynamoid_dump + name + end + + def self.dynamoid_load(string) + new(string.to_s) + end + end + # rubocop:enable Lint/ConstantDefinitionInBlock + + field :custom_type_field, CustomType + field :custom_type_range_field, CustomType + end + end + + let(:index) do + Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :custom_type_field, + range_key: :custom_type_range_field, + type: :global_secondary + ) + end + + it 'sets the correct key_schema' do + expect(index.hash_key_schema).to eql({ custom_type_field: :string }) + expect(index.range_key_schema).to eql({ custom_type_range_field: :string }) + end + end + end + end + + describe '#projection_type' do + let(:doc_class) do + Class.new do + include Dynamoid::Document + + table name: :mytable, key: :primary_hash_field + + field :primary_hash_field + field :secondary_hash_field + field :array_field, :array + end + end + + it 'projection type is :include' do + projection_include = Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :secondary_hash_field, + type: :global_secondary, + projected_attributes: %i[secondary_hash_field array_field] + ).projection_type + expect(projection_include).to eq(:include) + end + + it 'projection type is :all' do + projection_all = Dynamoid::Indexes::Index.new( + dynamoid_class: doc_class, + hash_key: :secondary_hash_field, + type: :global_secondary, + projected_attributes: :all + ).projection_type + + expect(projection_all).to eq(:all) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/loadable_spec.rb b/dynamoid/spec/dynamoid/loadable_spec.rb new file mode 100644 index 000000000..48443c8eb --- /dev/null +++ b/dynamoid/spec/dynamoid/loadable_spec.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Loadable do + describe '.reload' do + let(:address) { Address.create } + let(:message) { Message.create(text: 'Nice, supporting datetime range!', time: Time.now.to_datetime) } + let(:tweet) { tweet = Tweet.create(tweet_id: 'x', group: 'abc') } + + it 'reflects persisted changes' do + klass = new_class do + field :city + end + + address = klass.create(city: 'Miami') + copy = klass.find(address.id) + + address.update_attributes(city: 'Chicago') + expect(copy.reload.city).to eq 'Chicago' + end + + it 'reads with strong consistency' do + klass = new_class do + field :message + end + + tweet = klass.create + + expect(klass).to receive(:find).with(tweet.id, consistent_read: true).and_return(tweet) + tweet.reload + end + + it 'works with range key' do + klass = new_class do + field :message + range :group + end + + tweet = 
klass.create(group: 'tech') + expect(tweet.reload.group).to eq 'tech' + end + + it 'uses dumped value of sort key to load document' do + klass = new_class do + range :activated_on, :date + field :name + end + + obj = klass.create!(activated_on: Date.today, name: 'Old value') + obj2 = klass.where(id: obj.id, activated_on: obj.activated_on).first + obj2.update_attributes(name: 'New value') + + expect { obj.reload }.to change { obj.name }.from('Old value').to('New value') + end + + # https://github.com/Dynamoid/dynamoid/issues/564 + it 'marks model as persisted if not saved model is already persisted and successfuly reloaded' do + klass = new_class do + field :message + end + + object = klass.create(message: 'a') + copy = klass.new(id: object.id) + + expect { copy.reload }.to change { copy.new_record? }.from(true).to(false) + expect(copy.message).to eq 'a' + end + + describe 'callbacks' do + it 'runs after_initialize callback' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + end + + object = klass_with_callback.create! + + expect { object.reload }.to output('run after_initialize').to_stdout + end + + it 'runs after_find callback' do + klass_with_callback = new_class do + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! + + expect { object.reload }.to output('run after_find').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callback = new_class do + after_initialize { print 'run after_initialize' } + after_find { print 'run after_find' } + end + + object = klass_with_callback.create! + + expect do + object.reload + end.to output('run after_initializerun after_find').to_stdout + end + end + end +end diff --git a/dynamoid/spec/dynamoid/log/formatter/debug_spec.rb b/dynamoid/spec/dynamoid/log/formatter/debug_spec.rb new file mode 100644 index 000000000..759713685 --- /dev/null +++ b/dynamoid/spec/dynamoid/log/formatter/debug_spec.rb @@ -0,0 +1,95 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'dynamoid/log/formatter' + +describe Dynamoid::Log::Formatter::Debug do + describe '#format' do + subject { described_class.new } + + let(:logger) { Logger.new(buffer) } + let(:buffer) { StringIO.new } + + let(:request) do + <<~JSON + { + "TableName": "dynamoid_tests_items", + "KeySchema": [ + { + "AttributeName": "id", + "KeyType": "HASH" + } + ], + "AttributeDefinitions": [ + { + "AttributeName": "id", + "AttributeType": "S" + } + ], + "BillingMode": "PROVISIONED", + "ProvisionedThroughput": { + "ReadCapacityUnits": 100, + "WriteCapacityUnits": 20 + } + } + JSON + end + + let(:response_pattern) do + Regexp.compile <<~JSON + \\{ + "TableDescription": \\{ + "AttributeDefinitions": \\[ + \\{ + "AttributeName": "id", + "AttributeType": "S" + \\} + \\], + "TableName": "dynamoid_tests_items", + "KeySchema": \\[ + \\{ + "AttributeName": "id", + "KeyType": "HASH" + \\} + \\], + "TableStatus": "ACTIVE", + "CreationDateTime": .+?, + "ProvisionedThroughput": \\{ + "LastIncreaseDateTime": 0.0, + "LastDecreaseDateTime": 0.0, + "NumberOfDecreasesToday": 0, + "ReadCapacityUnits": 100, + "WriteCapacityUnits": 20 + \\}, + "TableSizeBytes": 0, + "ItemCount": 0, + "TableArn": ".+?", + "DeletionProtectionEnabled": false + \\} + \\} + JSON + end + + before do + @log_formatter = Dynamoid.config.log_formatter + @logger = Dynamoid.config.logger + + Dynamoid.config.log_formatter = subject + Dynamoid.config.logger = logger + Dynamoid.adapter.connect! 
# clear cached client + end + + after do + Dynamoid.config.log_formatter = @log_formatter + Dynamoid.config.logger = @logger + Dynamoid.adapter.connect! # clear cached client + end + + it 'logs request and response JSON body' do + new_class(table_name: 'items').create_table + + expect(buffer.string).to include(request) + expect(buffer.string).to match(response_pattern) + end + end +end diff --git a/dynamoid/spec/dynamoid/persistence_spec.rb b/dynamoid/spec/dynamoid/persistence_spec.rb new file mode 100644 index 000000000..48a9265c4 --- /dev/null +++ b/dynamoid/spec/dynamoid/persistence_spec.rb @@ -0,0 +1,5226 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Persistence do + let(:address) { Address.new } + + context 'without AWS keys' do + unless ENV['ACCESS_KEY'] && ENV['SECRET_KEY'] + before do + Dynamoid.adapter.delete_table(Address.table_name) if Dynamoid.adapter.list_tables.include?(Address.table_name) + end + + it 'creates a table' do + Address.create_table(table_name: Address.table_name) + + expect(Dynamoid.adapter.list_tables).to include 'dynamoid_tests_addresses' + end + + it 'checks if a table already exists' do + Address.create_table(table_name: Address.table_name) + + expect(Address).to be_table_exists(Address.table_name) + expect(Address).not_to be_table_exists('crazytable') + end + end + end + + describe '.create_table' do + let(:user_class) do + Class.new do + attr_accessor :name + + def initialize(name) + self.name = name + end + + def dynamoid_dump + name + end + + def eql?(other) + name == other.name + end + + def self.dynamoid_load(string) + new(string.to_s) + end + end + end + + let(:user_class_with_type) do + Class.new do + attr_accessor :age + + def initialize(age) + self.age = age + end + + def dynamoid_dump + age + end + + def eql?(other) + age == other.age + end + + def self.dynamoid_load(string) + new(string.to_i) + end + + def self.dynamoid_field_type + :number + end + end + end + + it 'creates a table' do + klass = new_class + + tables = Dynamoid.adapter.list_tables + expect(tables.include?(klass.table_name)).to eq false + + klass.create_table + + tables = Dynamoid.adapter.list_tables + expect(tables.include?(klass.table_name)).to eq true + end + + it 'returns self' do + klass = new_class + expect(klass.create_table).to eq(klass) + end + + describe 'partition key attribute type' do + it 'maps :string to String' do + klass = new_class(partition_key: { name: :id, type: :string }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('S') + end + + it 'maps :integer to Number' do + klass = new_class(partition_key: { name: :id, type: :integer }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + + it 'maps :number to Number' do + klass = new_class(partition_key: { name: :id, type: :number }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + + describe ':datetime' do + it 'maps :datetime to Number' do + klass = new_class(partition_key: { name: :id, type: :datetime }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + + it 'maps :datetime to String if field option :store_as_string is true' do + klass = new_class(partition_key: { name: :id, type: :datetime, options: { store_as_string: true } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('S') + end + + it 'maps :datetime to Number if field option :store_as_string is false' do + klass = 
new_class(partition_key: { name: :id, type: :datetime, options: { store_as_string: false } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + + context 'field option :store_as_string is nil' do + it 'maps :datetime to String if :store_datetime_as_string is true', config: { store_datetime_as_string: true } do + klass = new_class(partition_key: { name: :id, type: :datetime, options: { store_as_string: nil } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('S') + end + + it 'maps :datetime to Number if :store_datetime_as_string is false', config: { store_datetime_as_string: false } do + klass = new_class(partition_key: { name: :id, type: :datetime, options: { store_as_string: nil } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + end + end + + describe ':date' do + it 'maps :date to Number' do + klass = new_class(partition_key: { name: :id, type: :date }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + + it 'maps :date to String if field option :store_as_string is true' do + klass = new_class(partition_key: { name: :id, type: :date, options: { store_as_string: true } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('S') + end + + it 'maps :date to Number if field option :store_as_string is false' do + klass = new_class(partition_key: { name: :id, type: :date, options: { store_as_string: false } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + + context 'field option :store_as_string is nil' do + it 'maps :date to String if :store_date_as_string is true', config: { store_date_as_string: true } do + klass = new_class(partition_key: { name: :id, type: :date, options: { store_as_string: nil } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('S') + end + + it 'maps :date to Number if :store_date_as_string is false', config: { store_date_as_string: false } do + klass = new_class(partition_key: { name: :id, type: :date, options: { store_as_string: nil } }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + end + end + + it 'maps :serialized to String' do + klass = new_class(partition_key: { name: :id, type: :serialized }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('S') + end + + describe 'custom type' do + it 'maps custom type to String by default' do + klass = new_class(partition_key: { name: :id, type: user_class }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('S') + end + + it 'uses specified type if .dynamoid_field_type method declared' do + klass = new_class(partition_key: { name: :id, type: user_class_with_type }) + klass.create_table + expect(raw_attribute_types(klass.table_name)['id']).to eql('N') + end + end + + it 'does not support :array' do + klass = new_class(partition_key: { name: :id, type: :array }) + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'array cannot be used as a type of table key attribute' + ) + end + + it 'does not support :set' do + klass = new_class(partition_key: { name: :id, type: :set }) + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'set cannot be used as a type of table key attribute' + ) + end + + it 'does not support :raw' do + klass = new_class(partition_key: { name: 
:id, type: :raw }) + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'raw cannot be used as a type of table key attribute' + ) + end + + it 'does not support :boolean' do + klass = new_class(partition_key: { name: :id, type: :boolean }) + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'boolean cannot be used as a type of table key attribute' + ) + end + end + + describe 'sort key attribute type' do + it 'maps :string to String' do + klass = new_class do + range :prop, :string + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('S') + end + + it 'maps :integer to Number' do + klass = new_class do + range :prop, :integer + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + + it 'maps :number to Number' do + klass = new_class do + range :prop, :number + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + + describe ':datetime' do + it 'maps :datetime to Number' do + klass = new_class do + range :prop, :datetime + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + + it 'maps :datetime to String if field option :store_as_string is true' do + klass = new_class do + range :prop, :datetime, store_as_string: true + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('S') + end + + it 'maps :datetime to Number if field option :store_as_string is false' do + klass = new_class do + range :prop, :datetime, store_as_string: false + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + + context 'field option :store_as_string is nil' do + it 'maps :datetime to String if :store_datetime_as_string is true', config: { store_datetime_as_string: true } do + klass = new_class do + range :prop, :datetime, store_as_string: nil + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('S') + end + + it 'maps :datetime to Number if :store_datetime_as_string is false', config: { store_datetime_as_string: false } do + klass = new_class do + range :prop, :datetime, store_as_string: nil + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + end + end + + describe ':date' do + it 'maps :date to Number' do + klass = new_class do + range :prop, :date + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + + it 'maps :date to String if field option :store_as_string is true' do + klass = new_class do + range :prop, :date, store_as_string: true + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('S') + end + + it 'maps :date to Number if field option :store_as_string is false' do + klass = new_class do + range :prop, :date, store_as_string: false + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + + context 'field option :store_as_string is nil' do + it 'maps :date to String if :store_date_as_string is true', config: { store_date_as_string: true } do + klass = new_class do + range :prop, :date, store_as_string: nil + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('S') + end + + it 'maps :date to Number if :store_date_as_string is false', config: { store_date_as_string: false } do + klass = new_class do + 
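+ # store_as_string: nil means the field defers to the global store_date_as_string config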
range :prop, :date, store_as_string: nil + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + end + end + + it 'maps :serialized to String' do + klass = new_class do + range :prop, :serialized + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('S') + end + + describe 'custom type' do + it 'maps custom type to String by default' do + klass = new_class(sort_key_type: user_class) do |options| + range :prop, options[:sort_key_type] + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('S') + end + + it 'uses specified type if .dynamoid_field_type method declared' do + klass = new_class(sort_key_type: user_class_with_type) do |options| + range :prop, options[:sort_key_type] + end + klass.create_table + + expect(raw_attribute_types(klass.table_name)['prop']).to eql('N') + end + end + + it 'does not support :array' do + klass = new_class do + range :prop, :array + end + + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'array cannot be used as a type of table key attribute' + ) + end + + it 'does not support :set' do + klass = new_class do + range :prop, :set + end + + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'set cannot be used as a type of table key attribute' + ) + end + + it 'does not support :raw' do + klass = new_class do + range :prop, :raw + end + + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'raw cannot be used as a type of table key attribute' + ) + end + + it 'does not support :boolean' do + klass = new_class do + range :prop, :boolean + end + + expect { klass.create_table }.to raise_error( + Dynamoid::Errors::UnsupportedKeyType, 'boolean cannot be used as a type of table key attribute' + ) + end + end + + describe 'expiring (Time To Live)' do + let(:class_with_expiration) do + new_class do + table expires: { field: :ttl, after: 60 } + field :ttl, :integer + end + end + + it 'sets up TTL for table' do + expect(Dynamoid.adapter).to receive(:update_time_to_live) + .with(class_with_expiration.table_name, :ttl) + .and_call_original + + class_with_expiration.create_table + end + + it 'sets up TTL for table with specified table_name' do + table_name = "#{class_with_expiration.table_name}_alias" + + expect(Dynamoid.adapter).to receive(:update_time_to_live) + .with(table_name, :ttl) + .and_call_original + + class_with_expiration.create_table(table_name: table_name) + end + end + + describe 'capacity mode' do + # when capacity mode is PROVISIONED DynamoDB returns billing_mode_summary=nil + let(:table_description) { Dynamoid.adapter.adapter.send(:describe_table, model.table_name) } + let(:billing_mode) { table_description.schema.billing_mode_summary&.billing_mode } + + before do + model.create_table + end + + context 'when global config option capacity_mode=on_demand', config: { capacity_mode: :on_demand } do + context 'when capacity_mode=provisioned in table' do + let(:model) do + new_class do + table capacity_mode: :provisioned + end + end + + it 'creates table with provisioned capacity mode' do + expect(billing_mode).to eq nil # it means 'PROVISIONED' + end + end + + context 'when capacity_mode not set in table' do + let(:model) do + new_class do + table capacity_mode: nil + end + end + + it 'creates table with on-demand capacity mode' do + expect(billing_mode).to eq 'PAY_PER_REQUEST' + end + end + end + + context 'when global config option 
capacity_mode=provisioned', config: { capacity_mode: :provisioned } do + context 'when capacity_mode=on_demand in table' do + let(:model) do + new_class do + table capacity_mode: :on_demand + end + end + + it 'creates table with on-demand capacity mode' do + expect(billing_mode).to eq 'PAY_PER_REQUEST' + end + end + + context 'when capacity_mode not set in table' do + let(:model) do + new_class do + table capacity_mode: nil + end + end + + it 'creates table with provisioned capacity mode' do + expect(billing_mode).to eq nil # it means 'PROVISIONED' + end + end + end + + context 'when global config option capacity_mode is not set', config: { capacity_mode: nil } do + let(:model) do + new_class do + table capacity_mode: nil + end + end + + it 'creates table with provisioned capacity mode' do + expect(billing_mode).to eq nil # it means 'PROVISIONED' + end + end + end + end + + describe 'delete_table' do + it 'deletes the table' do + klass = new_class + klass.create_table + + tables = Dynamoid.adapter.list_tables + expect(tables.include?(klass.table_name)).to eq true + + klass.delete_table + + tables = Dynamoid.adapter.list_tables + expect(tables.include?(klass.table_name)).to eq false + end + + it 'returns self' do + klass = new_class + klass.create_table + + result = klass.delete_table + + expect(result).to eq klass + end + end + + describe 'record deletion' do + let(:klass) do + new_class do + field :city + + before_destroy do |_i| + # Halting the callback chain in active record changed with Rails >= 5.0.0.beta1 + # We now have to throw :abort to halt the callback chain + # See: https://github.com/rails/rails/commit/bb78af73ab7e86fd9662e8810e346b082a1ae193 + if ActiveModel::VERSION::MAJOR < 5 + false + else + throw :abort + end + end + end + end + + describe 'destroy' do + it 'deletes an item completely' do + @user = User.create(name: 'Josh') + @user.destroy + + expect(Dynamoid.adapter.read('dynamoid_tests_users', @user.id)).to be_nil + end + + it 'returns false when destroy fails (due to callback)' do + a = klass.create! + expect(a.destroy).to eql false + expect(klass.first.id).to eql a.id + end + end + + describe 'destroy!' do + it 'deletes the item' do + address.save! + address.destroy! + expect(Address.count).to eql 0 + end + + it 'raises exception when destroy fails (due to callback)' do + a = klass.create! + expect { a.destroy! 
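+ # the shared klass's before_destroy halts the chain (throw :abort / false),
+ # so destroy! raises rather than silently returning false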
}.to raise_error(Dynamoid::Errors::RecordNotDestroyed) + end + end + end + + it 'has a table name' do + expect(Address.table_name).to eq 'dynamoid_tests_addresses' + end + + context 'with namespace is empty' do + def reload_address + Object.send(:remove_const, 'Address') # rubocop:disable RSpec/RemoveConst + load 'app/models/address.rb' + end + + namespace = Dynamoid::Config.namespace + + before do + reload_address + Dynamoid.configure do |config| + config.namespace = '' + end + end + + after do + reload_address + Dynamoid.configure do |config| + config.namespace = namespace + end + end + + it 'does not add a namespace prefix to table names' do + table_name = Address.table_name + expect(Dynamoid::Config.namespace).to be_empty + expect(table_name).to eq 'addresses' + end + end + + it 'deletes an item completely' do + @user = User.create(name: 'Josh') + @user.destroy + + expect(Dynamoid.adapter.read('dynamoid_tests_users', @user.id)).to be_nil + end + + describe '.create' do + let(:klass) do + new_class do + field :city + end + end + + it 'creates a new document' do + address = klass.create(city: 'Chicago') + + expect(address.new_record).to eql false + expect(address.id).to be_present + + address_saved = klass.find(address.id) + expect(address_saved.city).to eq('Chicago') + end + + it 'creates multiple documents' do + addresses = klass.create([{ city: 'Chicago' }, { city: 'New York' }]) + + expect(addresses.size).to eq 2 + expect(addresses).to be_all(&:persisted?) + expect(addresses[0].city).to eq 'Chicago' + expect(addresses[1].city).to eq 'New York' + end + + context 'when block specified' do + it 'calls a block and passes a model as argument' do + object = klass.create(city: 'a') do |obj| + obj.city = 'b' + end + + expect(object.city).to eq('b') + end + + it 'calls a block and passes each model as argument if there are multiple models' do + objects = klass.create([{ city: 'a' }, { city: 'b' }]) do |obj| + obj.city = obj.city * 2 + end + + expect(objects[0].city).to eq('aa') + expect(objects[1].city).to eq('bb') + end + end + + describe 'validation' do + let(:klass_with_validation) do + new_class do + field :name + validates :name, length: { minimum: 4 } + end + end + + it 'does not save invalid model' do + obj = klass_with_validation.create(name: 'Theodor') + expect(obj).to be_persisted + + obj = klass_with_validation.create(name: 'Mo') + expect(obj).not_to be_persisted + end + + it 'saves valid models even if there are invalid' do + obj1, obj2 = klass_with_validation.create([{ name: 'Theodor' }, { name: 'Mo' }]) + + expect(obj1).to be_persisted + expect(obj2).not_to be_persisted + end + end + + it 'works with a HashWithIndifferentAccess argument' do + attrs = ActiveSupport::HashWithIndifferentAccess.new(city: 'Atlanta') + obj = klass.create(attrs) + + expect(obj).to be_persisted + expect(obj.city).to eq 'Atlanta' + end + + it 'creates table if it does not exist' do + expect { + klass.create(city: 'Chicago') + }.to change { + tables_created.include?(klass.table_name) + }.from(false).to(true) + end + + it 'saves empty set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create(tags: []) + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + obj = klass.create(city: '') + obj_loaded = klass.find(obj.id) + + expect(obj_loaded.city).to eql nil + end + + describe 'callbacks' do + it 'runs before_create callback' do + klass_with_callback = new_class do + field :name + 
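+ # printing from the callback lets the example assert on captured $stdout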
before_create { print 'run before_create' } + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('run before_create').to_stdout + end + + it 'runs after_create callback' do + klass_with_callback = new_class do + field :name + after_create { print 'run after_create' } + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('run after_create').to_stdout + end + + it 'runs around_create callback' do + klass_with_callback = new_class do + field :name + around_create :around_create_callback + + def around_create_callback + print 'start around_create' + yield + print 'finish around_create' + end + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('start around_createfinish around_create').to_stdout + end + + it 'runs before_save callback' do + klass_with_callback = new_class do + field :name + before_save { print 'run before_save' } + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('run before_save').to_stdout + end + + it 'runs after_save callbacks' do + klass_with_callback = new_class do + field :name + after_save { print 'run after_save' } + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('run after_save').to_stdout + end + + it 'runs around_save callback' do + klass_with_callback = new_class do + field :name + around_save :around_save_callback + + def around_save_callback + print 'start around_save' + yield + print 'finish around_save' + end + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('start around_savefinish around_save').to_stdout + end + + it 'runs before_validation callback' do + klass_with_callback = new_class do + field :name + before_validation { print 'run before_validation' } + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('run before_validation').to_stdout + end + + it 'runs after_validation callback' do + klass_with_callback = new_class do + field :name + after_validation { print 'run after_validation' } + end + + expect do + klass_with_callback.create(name: 'Alex') + end.to output('run after_validation').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + before_validation { puts 'run before_validation' } + after_validation { puts 'run after_validation' } + + before_create { puts 'run before_create' } + after_create { puts 'run after_create' } + around_create :around_create_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + def around_create_callback + puts 'start around_create' + yield + puts 'finish around_create' + end + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + end + + # print each message on new line to force RSpec to show meaningful diff + expected_output = [ # rubocop:disable Style/StringConcatenation + 'run before_validation', + 'run after_validation', + 'run before_save', + 'start around_save', + 'run before_create', + 'start around_create', + 'finish around_create', + 'run after_create', + 'finish around_save', + 'run after_save' + ].join("\n") + "\n" + + expect { klass_with_callbacks.create }.to output(expected_output).to_stdout + end + end + + context 'not unique primary key' do + context 'composite key' do + let(:klass_with_composite_key) do + new_class do + range :name + end + end + + it 'raises RecordNotUnique error' do + klass_with_composite_key.create(id: '10', name: 'aaa') + + 
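+ # same partition key + sort key pair as the record above, so Dynamoid
+ # refuses to overwrite the existing item and raises RecordNotUnique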
expect { + klass_with_composite_key.create(id: '10', name: 'aaa') + }.to raise_error(Dynamoid::Errors::RecordNotUnique) + end + end + + context 'simple key' do + let(:klass_with_simple_key) do + new_class + end + + it 'raises RecordNotUnique error' do + klass_with_simple_key.create(id: '10') + + expect { + klass_with_simple_key.create(id: '10') + }.to raise_error(Dynamoid::Errors::RecordNotUnique) + end + end + end + + describe 'timestamps' do + let(:klass) do + new_class + end + + it 'sets created_at and updated_at if Config.timestamps=true', config: { timestamps: true } do + travel 1.hour do + time_now = Time.now + obj = klass.create + + expect(obj.created_at.to_i).to eql(time_now.to_i) + expect(obj.updated_at.to_i).to eql(time_now.to_i) + end + end + + it 'uses provided values of created_at and updated_at if Config.timestamps=true', config: { timestamps: true } do + travel 1.hour do + created_at = updated_at = Time.now + obj = klass.create(created_at: created_at, updated_at: updated_at) + + expect(obj.created_at.to_i).to eql(created_at.to_i) + expect(obj.updated_at.to_i).to eql(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + expect { klass.create }.not_to raise_error + end + end + end + + describe '.create!' do + let(:klass) do + new_class do + field :city + end + end + + context 'when block specified' do + it 'calls a block and passes a model as argument' do + object = klass.create!(city: 'a') do |obj| + obj.city = 'b' + end + + expect(object.city).to eq('b') + end + + it 'calls a block and passes each model as argument if there are multiple models' do + objects = klass.create!([{ city: 'a' }, { city: 'b' }]) do |obj| + obj.city = obj.city * 2 + end + + expect(objects[0].city).to eq('aa') + expect(objects[1].city).to eq('bb') + end + end + + context 'validation' do + let(:klass_with_validation) do + new_class do + field :city + validates :city, presence: true + end + end + + it 'raises DocumentNotValid error when saves invalid model' do + expect do + klass_with_validation.create!(city: nil) + end.to raise_error(Dynamoid::Errors::DocumentNotValid) + end + + it 'raises DocumentNotValid error when saves multiple models and some of them are invalid' do + expect do + klass_with_validation.create!([{ city: 'Chicago' }, { city: nil }]) + end.to raise_error(Dynamoid::Errors::DocumentNotValid) + end + + it 'saves some valid models before raising error because of invalid model' do + klass_with_validation.create_table + + expect do + begin + klass_with_validation.create!([{ city: 'Chicago' }, { city: nil }, { city: 'London' }]) + rescue StandardError + nil + end + end.to change(klass_with_validation, :count).by(1) + + obj = klass_with_validation.last + expect(obj.city).to eq 'Chicago' + end + end + end + + describe '.update!' 
do + let(:document_class) do + new_class do + field :name + + validates :name, presence: true, length: { minimum: 5 } + end + end + + it 'loads and saves document' do + d = document_class.create(name: 'Document#1') + + expect do + document_class.update!(d.id, name: '[Updated]') + end.to change { d.reload.name }.from('Document#1').to('[Updated]') + end + + it 'returns updated document' do + d = document_class.create(name: 'Document#1') + d2 = document_class.update!(d.id, name: '[Updated]') + + expect(d2).to be_a(document_class) + expect(d2.name).to eq '[Updated]' + end + + it 'does not save invalid document' do + d = document_class.create(name: 'Document#1') + d2 = nil + + expect do + d2 = document_class.update!(d.id, name: '[Up') + end.to raise_error(Dynamoid::Errors::DocumentNotValid) + expect(d2).to be_nil + end + + it 'accepts range key value if document class declares it' do + klass = new_class do + field :name + range :status + end + + d = klass.create(status: 'old', name: 'Document#1') + expect do + klass.update!(d.id, 'old', name: '[Updated]') + end.to change { d.reload.name }.to('[Updated]') + end + + it 'dumps range key value to proper format' do + klass = new_class do + field :name + range :activated_on, :date + field :another_date, :datetime + end + + d = klass.create(activated_on: '2018-01-14'.to_date, name: 'Document#1') + expect do + klass.update!(d.id, '2018-01-14'.to_date, name: '[Updated]') + end.to change { d.reload.name }.to('[Updated]') + end + + it 'raises an UnknownAttribute error when adding an attribute that is not on the model' do + klass = new_class do + field :name + end + + obj = klass.create(name: 'Alex') + expect { + klass.update!(obj.id, age: 26) + }.to raise_error Dynamoid::Errors::UnknownAttribute + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + klass_with_set.update!(obj.id, tags: []) + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'alex') + klass_with_string.update!(obj.id, name: '') + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + describe 'timestamps' do + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + d = document_class.create(name: 'Document#1') + + travel 1.hour do + time_now = Time.now + + expect { + document_class.update!(d.id, name: '[Updated]') + }.to change { d.reload.updated_at.to_i }.to(time_now.to_i) + end + end + + it 'uses provided value of updated_at if Config.timestamps=true', config: { timestamps: true } do + d = document_class.create(name: 'Document#1') + + travel 1.hour do + updated_at = Time.now + 1.hour + + expect { + document_class.update!(d.id, name: '[Updated]', updated_at: updated_at) + }.to change { d.reload.updated_at.to_i }.to(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + doc = document_class.create(name: 'Document#1') + + expect do + document_class.update!(doc.id, name: '[Updated]') + end.not_to raise_error + end + + it 'does not change updated_at if attributes were assigned the same values' do + doc = document_class.create(name: 'Document#1', updated_at: Time.now - 1) + + expect do + document_class.update!(doc.id, name: doc.name) + end.not_to change { doc.reload.updated_at } + end + end + + 
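+ # Editor's note (summary, not from the original changeset): .update! loads the
+ # record, assigns the given attributes, validates and saves, raising
+ # Dynamoid::Errors::DocumentNotValid when validation fails; .update (specified
+ # further below) returns the invalid, unsaved model instead of raising.
+ #
+ #   document_class.update!(id, name: 'Updated') # saves and returns the model
+ #   document_class.update!(id, name: 'Up')      # raises DocumentNotValid
+ #   document_class.update(id,  name: 'Up')      # returns the invalid, unsaved model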
describe 'type casting' do + it 'uses type casted value of sort key to call UpdateItem' do + document_class_with_range = new_class do + range :count, :integer + field :title + end + + obj = document_class_with_range.create(title: 'Old', count: '100') + document_class_with_range.update!(obj.id, '100', title: 'New') + expect(obj.reload.title).to eq 'New' + end + + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.create(count: 100) + obj2 = klass.update!(obj.id, count: '101') + expect(obj2.attributes[:count]).to eql(101) + expect(raw_attributes(obj2)[:count]).to eql(101) + end + end + + describe 'callbacks' do + it 'runs before_update callback' do + klass_with_callback = new_class do + field :name + + before_update { print 'run before_update' } + end + + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('run before_update').to_stdout + end + + it 'runs after_update callback' do + klass_with_callback = new_class do + field :name + + after_update { print 'run after_update' } + end + + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('run after_update').to_stdout + end + + it 'runs around_update callback' do + klass_with_callback = new_class do + field :name + + around_update :around_update_callback + + def around_update_callback + print 'start around_update' + yield + print 'finish around_update' + end + end + + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('start around_updatefinish around_update').to_stdout + end + + it 'runs before_save callback' do + klass_with_callback = new_class do + field :name + + before_save { print 'run before_save' } + end + + expect { # to suppress printing at model creation + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('run before_save').to_stdout + }.to output.to_stdout + end + + it 'runs after_save callback' do + klass_with_callback = new_class do + field :name + + after_save { print 'run after_save' } + end + + expect { # to suppress printing at model creation + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('run after_save').to_stdout + }.to output.to_stdout + end + + it 'runs around_save callback' do + klass_with_callback = new_class do + field :name + + around_save :around_save_callback + + def around_save_callback + print 'start around_save' + yield + print 'finish around_save' + end + end + + expect { # to suppress printing at model creation + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('start around_savefinish around_save').to_stdout + }.to output.to_stdout + end + + it 'runs before_validation callback' do + klass_with_callback = new_class do + field :name + + before_validation { print 'run before_validation' } + end + + expect { # to suppress printing at model creation + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('run before_validation').to_stdout + }.to output.to_stdout + end + + it 'runs after_validation callback' do + 
klass_with_callback = new_class do + field :name + + after_validation { print 'run after_validation' } + end + + expect { # to suppress printing at model creation + model = klass_with_callback.create(name: 'Document#1') + + expect do + klass_with_callback.update!(model.id, name: '[Updated]') + end.to output('run after_validation').to_stdout + }.to output.to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + field :name + + before_validation { puts 'run before_validation' } + after_validation { puts 'run after_validation' } + + before_update { puts 'run before_update' } + after_update { puts 'run after_update' } + around_update :around_update_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + + def around_update_callback + puts 'start around_update' + yield + puts 'finish around_update' + end + end + + # print each message on new line to force RSpec to show meaningful diff + expected_output = [ # rubocop:disable Style/StringConcatenation + 'run before_validation', + 'run after_validation', + 'run before_save', + 'start around_save', + 'run before_update', + 'start around_update', + 'finish around_update', + 'run after_update', + 'finish around_save', + 'run after_save' + ].join("\n") + "\n" + + expect { # to suppress printing at model creation + model = klass_with_callbacks.create(name: 'John') + + expect { + klass_with_callbacks.update!(model.id, name: '[Updated]') + }.to output(expected_output).to_stdout + }.to output.to_stdout + end + end + end + + describe '.update' do + let(:document_class) do + new_class do + field :name + + validates :name, presence: true, length: { minimum: 5 } + end + end + + it 'loads and saves document' do + d = document_class.create(name: 'Document#1') + + expect do + document_class.update(d.id, name: '[Updated]') + end.to change { d.reload.name }.from('Document#1').to('[Updated]') + end + + it 'returns updated document' do + d = document_class.create(name: 'Document#1') + d2 = document_class.update(d.id, name: '[Updated]') + + expect(d2).to be_a(document_class) + expect(d2.name).to eq '[Updated]' + end + + it 'does not save invalid document' do + d = document_class.create(name: 'Document#1') + d2 = nil + + expect do + d2 = document_class.update(d.id, name: '[Up') + end.not_to change { d.reload.name } + expect(d2).not_to be_valid + end + + it 'accepts range key value if document class declares it' do + klass = new_class do + field :name + range :status + end + + d = klass.create(status: 'old', name: 'Document#1') + expect do + klass.update(d.id, 'old', name: '[Updated]') + end.to change { d.reload.name }.to('[Updated]') + end + + it 'dumps range key value to proper format' do + klass = new_class do + field :name + range :activated_on, :date + field :another_date, :datetime + end + + d = klass.create(activated_on: '2018-01-14'.to_date, name: 'Document#1') + expect do + klass.update(d.id, '2018-01-14'.to_date, name: '[Updated]') + end.to change { d.reload.name }.to('[Updated]') + end + + it 'raises an UnknownAttribute error when adding an attribute that is not on the model' do + klass = new_class do + field :name + end + + obj = klass.create(name: 'Alex') + + expect do + klass.update(obj.id, name: 'New name', age: 26) + end.to raise_error Dynamoid::Errors::UnknownAttribute + end + + it 'saves empty Set as nil' do + klass_with_set = new_class 
do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + klass_with_set.update(obj.id, tags: []) + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'alex') + klass_with_string.update(obj.id, name: '') + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + describe 'timestamps' do + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + d = document_class.create(name: 'Document#1') + + travel 1.hour do + time_now = Time.now + + expect { + document_class.update(d.id, name: '[Updated]') + }.to change { d.reload.updated_at.to_i }.to(time_now.to_i) + end + end + + it 'uses provided value of updated_at if Config.timestamps=true', config: { timestamps: true } do + d = document_class.create(name: 'Document#1') + + travel 1.hour do + updated_at = Time.now + 1.hour + + expect { + document_class.update(d.id, name: '[Updated]', updated_at: updated_at) + }.to change { d.reload.updated_at.to_i }.to(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + doc = document_class.create(name: 'Document#1') + + expect do + document_class.update(doc.id, name: '[Updated]') + end.not_to raise_error + end + + it 'does not change updated_at if attributes were assigned the same values' do + doc = document_class.create(name: 'Document#1', updated_at: Time.now - 1) + + expect do + document_class.update(doc.id, name: doc.name) + end.not_to change { doc.reload.updated_at } + end + end + + describe 'type casting' do + it 'uses type casted value of sort key to call UpdateItem' do + document_class_with_range = new_class do + range :count, :integer + field :title + end + + obj = document_class_with_range.create(title: 'Old', count: '100') + document_class_with_range.update(obj.id, '100', title: 'New') + expect(obj.reload.title).to eq 'New' + end + + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.create(count: 100) + obj2 = klass.update(obj.id, count: '101') + expect(obj2.attributes[:count]).to eql(101) + expect(raw_attributes(obj2)[:count]).to eql(101) + end + end + end + + describe '.update_fields' do + let(:document_class) do + new_class do + field :title + field :version, :integer + field :published_on, :date + end + end + + it 'changes field value' do + obj = document_class.create(title: 'Old title') + expect do + document_class.update_fields(obj.id, title: 'New title') + end.to change { document_class.find(obj.id).title }.from('Old title').to('New title') + end + + it 'changes field value to nil' do + obj = document_class.create(title: 'New Document') + expect do + document_class.update_fields(obj.id, title: nil) + end.to change { document_class.find(obj.id).title }.from('New Document').to(nil) + end + + it 'returns updated document' do + obj = document_class.create(title: 'Old title') + result = document_class.update_fields(obj.id, title: 'New title') + + expect(result.id).to eq obj.id + expect(result.title).to eq 'New title' + end + + context 'condition specified' do + describe 'if condition' do + it 'updates when model matches conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + expect { + document_class.update_fields(obj.id, { title: 'New title' }, if: { version: 1 }) + }.to change { 
document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model does not match conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + expect { + result = document_class.update_fields(obj.id, { title: 'New title' }, if: { version: 6 }) + }.not_to change { document_class.find(obj.id).title } + end + + it 'returns nil when model does not match conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + result = document_class.update_fields(obj.id, { title: 'New title' }, if: { version: 6 }) + expect(result).to eq nil + end + end + + describe 'unless_exists condition' do + it 'updates when item does not have specified attribute' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title') + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :created_at, :updated_at) + + expect { + document_class.update_fields(obj.id, { title: 'New title' }, { unless_exists: [:version] }) + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model has specified attribute' do + obj = document_class.create(title: 'Old title', version: 1) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :created_at, :updated_at) + + expect { + result = document_class.update_fields(obj.id, { title: 'New title' }, { unless_exists: [:version] }) + }.not_to change { document_class.find(obj.id).title } + end + + context 'when multiple attribute names' do + it 'updates when item does not have all the specified attributes' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title') + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :created_at, :updated_at) + + expect { + document_class.update_fields(obj.id, { title: 'New title' }, { unless_exists: %i[version published_on] }) + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model has all the specified attributes' do + obj = document_class.create(title: 'Old title', version: 1, published_on: '2018-02-23'.to_date) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :published_on, :created_at, :updated_at) + + expect { + result = document_class.update_fields(obj.id, { title: 'New title' }, { unless_exists: %i[version published_on] }) + }.not_to change { document_class.find(obj.id).title } + end + + it 'does not update when model has at least one specified attribute' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title', version: 1) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :created_at, :updated_at) + + expect { + result = document_class.update_fields(obj.id, { title: 'New title' }, { unless_exists: %i[version published_on] }) + }.not_to change { document_class.find(obj.id).title } + end + end + end + end + + it 'does not create new document if it does not exist yet' do + document_class.create_table + + expect do + document_class.update_fields('some-fake-id', title: 'Title') + end.not_to change(document_class, :count) + end + + it 'accepts range key if it is declared' do + document_class_with_range = new_class do + field :title + range :category + end + + obj = 
document_class_with_range.create(category: 'New') + + expect do + document_class_with_range.update_fields(obj.id, 'New', title: '[Updated]') + end.to change { + document_class_with_range.find(obj.id, range_key: 'New').title + }.to('[Updated]') + end + + it 'uses dumped value of sort key to call UpdateItem' do + document_class_with_range = new_class do + field :title + range :published_on, :date + end + + obj = document_class_with_range.create(title: 'Old', published_on: '2018-02-23'.to_date) + document_class_with_range.update_fields(obj.id, '2018-02-23'.to_date, title: 'New') + expect(obj.reload.title).to eq 'New' + end + + it 'dumps attributes values' do + obj = document_class.create + document_class.update_fields(obj.id, published_on: '2018-02-23'.to_date) + attributes = Dynamoid.adapter.get_item(document_class.table_name, obj.id) + expect(attributes[:published_on]).to eq 17_585 + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + klass_with_set.update_fields(obj.id, tags: []) + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'Alex') + klass_with_string.update_fields(obj.id, name: '') + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + describe 'timestamps' do + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = document_class.create(title: 'Old title') + + travel 1.hour do + time_now = Time.now + + expect { + document_class.update_fields(obj.id, title: 'New title') + }.to change { obj.reload.updated_at.to_i }.to(time_now.to_i) + end + end + + it 'uses provided value of updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = document_class.create(title: 'Old title') + + travel 1.hour do + updated_at = Time.now + 1.hour + + expect { + document_class.update_fields(obj.id, title: 'New title', updated_at: updated_at) + }.to change { obj.reload.updated_at.to_i }.to(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + obj = document_class.create(title: 'Old title') + + expect do + document_class.update_fields(obj.id, title: 'New title') + end.not_to raise_error + end + + it 'does not set updated_at if Config.timestamps=true and table timestamps=false', config: { timestamps: true } do + document_class.table timestamps: false + + obj = document_class.create(title: 'Old title') + document_class.update_fields(obj.id, title: 'New title') + + expect(obj.reload.attributes).not_to have_key(:updated_at) + end + end + + describe 'type casting' do + it 'uses casted value of sort key to call UpdateItem' do + document_class_with_range = new_class do + range :count, :integer + field :title + end + + obj = document_class_with_range.create(title: 'Old', count: '100') + document_class_with_range.update_fields(obj.id, '100', title: 'New') + expect(obj.reload.title).to eq 'New' + end + + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.create(count: 100) + obj2 = klass.update_fields(obj.id, count: '101') + expect(obj2.attributes[:count]).to eql(101) + expect(raw_attributes(obj2)[:count]).to eql(101) + end + end + + context ':raw field' do + let(:klass) do + new_class do + field :hash, :raw + end + end + + it 'works well with 
hash keys of any type' do + a = klass.create + + expect { + klass.update_fields(a.id, hash: { 1 => :b }) + }.not_to raise_error + + expect(klass.find(a.id)[:hash]).to eql('1': 'b') + end + end + + it 'raises an UnknownAttribute error when adding an attribute that is not on the model' do + obj = document_class.create(title: 'New Document') + + expect { + document_class.update_fields(obj.id, { title: 'New title', publisher: 'New publisher' }) + }.to raise_error Dynamoid::Errors::UnknownAttribute + end + end + + describe '.upsert' do + let(:document_class) do + new_class do + field :title + field :version, :integer + field :published_on, :date + end + end + + it 'changes field value' do + obj = document_class.create(title: 'Old title') + expect do + document_class.upsert(obj.id, title: 'New title') + end.to change { document_class.find(obj.id).title }.from('Old title').to('New title') + end + + it 'changes field value to nil' do + obj = document_class.create(title: 'New Document') + expect do + document_class.upsert(obj.id, title: nil) + end.to change { document_class.find(obj.id).title }.from('New Document').to(nil) + end + + it 'returns updated document' do + obj = document_class.create(title: 'Old title') + result = document_class.upsert(obj.id, title: 'New title') + + expect(result.id).to eq obj.id + expect(result.title).to eq 'New title' + end + + context 'conditions specified' do + describe 'if condition' do + it 'updates when model matches conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + expect { + document_class.upsert(obj.id, { title: 'New title' }, if: { version: 1 }) + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model does not match conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + expect { + result = document_class.upsert(obj.id, { title: 'New title' }, if: { version: 6 }) + }.not_to change { document_class.find(obj.id).title } + end + + it 'returns nil when model does not match conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + result = document_class.upsert(obj.id, { title: 'New title' }, if: { version: 6 }) + expect(result).to eq nil + end + end + + describe 'unless_exists condition' do + it 'updates when item does not have specified attribute' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title') + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :created_at, :updated_at) + + expect { + document_class.upsert(obj.id, { title: 'New title' }, { unless_exists: [:version] }) + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model has specified attribute' do + obj = document_class.create(title: 'Old title', version: 1) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :created_at, :updated_at) + + expect { + result = document_class.upsert(obj.id, { title: 'New title' }, { unless_exists: [:version] }) + }.not_to change { document_class.find(obj.id).title } + end + + context 'when multiple attribute names' do + it 'updates when item does not have all the specified attributes' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title') + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :created_at, :updated_at) + 
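# With several attribute names, `unless_exists` lets the write through only when
# the stored item has none of them; a single pre-existing attribute turns the
# call into a no-op, as the following examples in this context show. A rough
# sketch using the document_class defined above:
#
#   document_class.upsert(obj.id, { title: 'New title' }, { unless_exists: %i[version published_on] })
#
# Unlike `.update_fields`, `.upsert` also creates the item when the key does not
# exist yet (see the example further down in this describe block).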
+ expect { + document_class.upsert(obj.id, { title: 'New title' }, { unless_exists: %i[version published_on] }) + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model has all the specified attributes' do + obj = document_class.create(title: 'Old title', version: 1, published_on: '2018-02-23'.to_date) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :published_on, :created_at, :updated_at) + + expect { + result = document_class.upsert(obj.id, { title: 'New title' }, { unless_exists: %i[version published_on] }) + }.not_to change { document_class.find(obj.id).title } + end + + it 'does not update when model has at least one specified attribute' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title', version: 1) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :created_at, :updated_at) + + expect { + result = document_class.upsert(obj.id, { title: 'New title' }, { unless_exists: %i[version published_on] }) + }.not_to change { document_class.find(obj.id).title } + end + end + end + end + + it 'creates new document if it does not exist yet' do + document_class.create_table + + expect do + document_class.upsert('not-existed-id', title: 'Title') + end.to change(document_class, :count) + + obj = document_class.find('not-existed-id') + expect(obj.title).to eq 'Title' + end + + it 'accepts range key if it is declared' do + document_class_with_range = new_class do + field :title + range :category + end + + obj = document_class_with_range.create(category: 'New') + + expect do + document_class_with_range.upsert(obj.id, 'New', title: '[Updated]') + end.to change { + document_class_with_range.find(obj.id, range_key: 'New').title + }.to('[Updated]') + end + + it 'uses dumped value of sort key to call UpdateItem' do + document_class_with_range = new_class do + field :title + range :published_on, :date + end + + obj = document_class_with_range.create(title: 'Old', published_on: '2018-02-23'.to_date) + document_class_with_range.upsert(obj.id, '2018-02-23'.to_date, title: 'New') + expect(obj.reload.title).to eq 'New' + end + + it 'dumps attributes values' do + obj = document_class.create + document_class.upsert(obj.id, published_on: '2018-02-23'.to_date) + attributes = Dynamoid.adapter.get_item(document_class.table_name, obj.id) + expect(attributes[:published_on]).to eq 17_585 + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + klass_with_set.upsert(obj.id, tags: []) + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'Alex') + klass_with_string.upsert(obj.id, name: '') + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + describe 'timestamps' do + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = document_class.create(title: 'Old title') + + travel 1.hour do + time_now = Time.now + + expect { + document_class.upsert(obj.id, title: 'New title') + }.to change { obj.reload.updated_at.to_i }.to(time_now.to_i) + end + end + + it 'uses provided value of updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = 
document_class.create(title: 'Old title') + + travel 1.hour do + updated_at = Time.now + 1.hour + + expect { + document_class.upsert(obj.id, title: 'New title', updated_at: updated_at) + }.to change { obj.reload.updated_at.to_i }.to(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + obj = document_class.create(title: 'Old title') + + expect do + document_class.upsert(obj.id, title: 'New title') + end.not_to raise_error + end + + it 'does not set updated_at if Config.timestamps=true and table timestamps=false', config: { timestamps: true } do + document_class.table timestamps: false + + obj = document_class.create(title: 'Old title') + document_class.upsert(obj.id, title: 'New title') + + expect(obj.reload.attributes).not_to have_key(:updated_at) + end + end + + describe 'type casting' do + it 'uses casted value of sort key to call UpdateItem' do + document_class_with_range = new_class do + range :count, :integer + field :title + end + + obj = document_class_with_range.create(title: 'Old', count: '100') + document_class_with_range.upsert(obj.id, '100', title: 'New') + expect(obj.reload.title).to eq 'New' + end + + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.create(count: 100) + obj2 = klass.upsert(obj.id, count: '101') + expect(obj2.attributes[:count]).to eql(101) + expect(raw_attributes(obj2)[:count]).to eql(101) + end + end + + context ':raw field' do + let(:klass) do + new_class do + field :hash, :raw + end + end + + it 'works well with hash keys of any type' do + a = klass.create + + expect { + klass.upsert(a.id, hash: { 1 => :b }) + }.not_to raise_error + + expect(klass.find(a.id)[:hash]).to eql('1': 'b') + end + end + + it 'raises an UnknownAttribute error when adding an attribute that is not on the model' do + obj = document_class.create(title: 'New Document') + + expect { + document_class.upsert(obj.id, { title: 'New title', publisher: 'New publisher' }) + }.to raise_error Dynamoid::Errors::UnknownAttribute + end + end + + describe '.inc' do + let(:document_class) do + new_class do + field :links_count, :integer + field :mentions_count, :integer + end + end + + it 'adds specified value' do + obj = document_class.create!(links_count: 2) + + expect { + document_class.inc(obj.id, links_count: 5) + }.to change { document_class.find(obj.id).links_count }.from(2).to(7) + end + + it 'accepts negative value' do + obj = document_class.create!(links_count: 10) + + expect { + document_class.inc(obj.id, links_count: -2) + }.to change { document_class.find(obj.id).links_count }.from(10).to(8) + end + + it 'traits nil value as zero' do + obj = document_class.create!(links_count: nil) + + expect { + document_class.inc(obj.id, links_count: 5) + }.to change { document_class.find(obj.id).links_count }.from(nil).to(5) + end + + it 'supports passing several attributes at once' do + obj = document_class.create!(links_count: 2, mentions_count: 31) + document_class.inc(obj.id, links_count: 5, mentions_count: 9) + + expect(document_class.find(obj.id).links_count).to eql(7) + expect(document_class.find(obj.id).mentions_count).to eql(40) + end + + it 'accepts sort key if it is declared' do + class_with_sort_key = new_class do + range :author_name + field :links_count, :integer + end + + obj = class_with_sort_key.create!(author_name: 'Mike', links_count: 2) + class_with_sort_key.inc(obj.id, 'Mike', links_count: 5) + + expect(obj.reload.links_count).to eql(7) + end + + it 'uses dumped 
value of sort key to call UpdateItem' do + class_with_sort_key = new_class do + range :published_on, :date + field :links_count, :integer + end + + obj = class_with_sort_key.create!(published_on: '2018-10-07'.to_date, links_count: 2) + class_with_sort_key.inc(obj.id, '2018-10-07'.to_date, links_count: 5) + + expect(obj.reload.links_count).to eql(7) + end + + it 'returns self' do + obj = document_class.create!(links_count: 2) + + expect(document_class.inc(obj.id, links_count: 5)).to eq(document_class) + end + + it 'updates `updated_at` attribute when touch: true option passed' do + obj = document_class.create!(links_count: 2, updated_at: Time.now - 1.day) + + expect { document_class.inc(obj.id, links_count: 5) }.not_to change { document_class.find(obj.id).updated_at } + expect { document_class.inc(obj.id, links_count: 5, touch: true) }.to change { document_class.find(obj.id).updated_at } + end + + it 'updates `updated_at` and the specified attributes when touch: name option passed' do + klass = new_class do + field :links_count, :integer + field :viewed_at, :datetime + end + + obj = klass.create!(age: 21, viewed_at: Time.now - 1.day, updated_at: Time.now - 2.days) + + expect do + expect do + klass.inc(obj.id, links_count: 5, touch: :viewed_at) + end.to change { klass.find(obj.id).updated_at } + end.to change { klass.find(obj.id).viewed_at } + end + + it 'updates `updated_at` and the specified attributes when touch: [*] option passed' do + klass = new_class do + field :links_count, :integer + field :viewed_at, :datetime + field :tagged_at, :datetime + end + + obj = klass.create!( + age: 21, + viewed_at: Time.now - 1.day, + tagged_at: Time.now - 3.days, + updated_at: Time.now - 2.days + ) + + expect do + expect do + expect do + klass.inc(obj.id, links_count: 5, touch: %i[viewed_at tagged_at]) + end.to change { klass.find(obj.id).updated_at } + end.to change { klass.find(obj.id).viewed_at } + end.to change { klass.find(obj.id).tagged_at } + end + + describe 'timestamps' do + it 'does not change updated_at', config: { timestamps: true } do + obj = document_class.create! 
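# `.inc` issues an UpdateItem that adds the given amount to each counter: nil is
# treated as zero, negative deltas are allowed, several counters can be passed
# at once, and updated_at is left alone unless a `touch:` option is given —
# which is what this example asserts. A rough sketch against document_class:
#
#   document_class.inc(obj.id, links_count: 5)                                   # updated_at untouched
#   document_class.inc(obj.id, links_count: 5, mentions_count: -1, touch: true)  # bumps updated_at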
+ expect(obj.updated_at).to be_present + + expect { + document_class.inc(obj.id, links_count: 5) + }.not_to change { document_class.find(obj.id).updated_at } + end + end + + describe 'type casting' do + it 'uses casted value of sort key to call UpdateItem' do + class_with_sort_key = new_class do + range :published_on, :date + field :links_count, :integer + end + + obj = class_with_sort_key.create!(published_on: '2018-10-07'.to_date, links_count: 2) + class_with_sort_key.inc(obj.id, '2018-10-07', links_count: 5) + + expect(obj.reload.links_count).to eql(7) + end + + it 'type casts attributes' do + obj = document_class.create!(links_count: 2) + + expect { + document_class.inc(obj.id, links_count: '5.12345') + }.to change { document_class.find(obj.id).links_count }.from(2).to(7) + end + end + end + + describe '#save' do + let(:klass) do + new_class do + field :name + end + end + + let(:klass_with_range_key) do + new_class do + field :name + range :age, :integer + end + end + + let(:klass_with_range_key_and_custom_type) do + new_class do + field :name + range :tags, :serialized + end + end + + it 'persists new model' do + obj = klass.new(name: 'Alex') + obj.save + + expect(klass.exists?(obj.id)).to eq true + expect(klass.find(obj.id).name).to eq 'Alex' + end + + it 'saves changes of already persisted model' do + obj = klass.create!(name: 'Alex') + + obj.name = 'Michael' + obj.save + + obj_loaded = klass.find(obj.id) + expect(obj_loaded.name).to eql 'Michael' + end + + it 'saves changes of already persisted model if range key is declared' do + obj = klass_with_range_key.create!(name: 'Alex', age: 21) + + obj.name = 'Michael' + obj.save + + obj_loaded = klass_with_range_key.find(obj.id, range_key: obj.age) + expect(obj_loaded.name).to eql 'Michael' + end + + it 'saves changes of already persisted model if range key is declared and its type is not supported by DynamoDB natively' do + obj = klass_with_range_key_and_custom_type.create!(name: 'Alex', tags: %w[a b]) + + obj.name = 'Michael' + obj.save + + obj_loaded = klass_with_range_key_and_custom_type.find(obj.id, range_key: obj.tags) + expect(obj_loaded.name).to eql 'Michael' + end + + it 'marks persisted new model as persisted' do + obj = klass.new(name: 'Alex') + expect { obj.save }.to change { obj.persisted? 
}.from(false).to(true) + end + + it 'creates table if it does not exist' do + model = klass.new + + expect(klass).to receive(:create_table).with(sync: true).and_call_original + + expect { model.save } + .to change { tables_created.include?(klass.table_name) } + .from(false).to(true) + end + + it 'dumps attribute values' do + klass = new_class do + field :active, :boolean, store_as_native_boolean: false + end + + obj = klass.new(active: false) + obj.save + expect(raw_attributes(obj)[:active]).to eql('f') + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + obj.tags = [] + obj.save + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'Alex') + obj.name = '' + obj.save + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + it 'does not make a request to persist a model if there is no any changed attribute' do + obj = klass.create(name: 'Alex') + + expect(Dynamoid.adapter).to receive(:update_item).and_call_original + obj.name = 'Michael' + obj.save! + + expect(Dynamoid.adapter).not_to receive(:update_item).and_call_original + obj.save! + + expect(Dynamoid.adapter).not_to receive(:update_item) + obj_loaded = klass.find(obj.id) + obj_loaded.save! + end + + it 'returns true if there is no any changed attribute' do + obj = klass.create(name: 'Alex') + obj_loaded = klass.find(obj.id) + + expect(obj.save).to eql(true) + expect(obj_loaded.save).to eql(true) + end + + it 'calls PutItem for a new record' do + expect(Dynamoid.adapter).to receive(:write).and_call_original + klass.create(name: 'Alex') + end + + it 'calls UpdateItem for already persisted record' do + klass = new_class do + field :name + field :age, :integer + end + + obj = klass.create!(name: 'Alex', age: 21) + obj.age = 31 + + expect(Dynamoid.adapter).to receive(:update_item).and_call_original + obj.save + end + + it 'does not persist changes if a model was deleted' do + obj = klass.create!(name: 'Alex') + Dynamoid.adapter.delete_item(klass.table_name, obj.id) + + obj.name = 'Michael' + + expect do + expect { obj.save }.to raise_error(Dynamoid::Errors::StaleObjectError) + end.not_to change(klass, :count) + end + + it 'does not persist changes if a model was deleted and range key is declared' do + obj = klass_with_range_key.create!(name: 'Alex', age: 21) + Dynamoid.adapter.delete_item(klass_with_range_key.table_name, obj.id, range_key: obj.age) + + obj.name = 'Michael' + + expect do + expect { obj.save }.to raise_error(Dynamoid::Errors::StaleObjectError) + end.not_to change(klass_with_range_key, :count) + end + + it 'does not persist changes if a model was deleted, range key is declared and its type is not supported by DynamoDB natively' do + obj = klass_with_range_key_and_custom_type.create!(name: 'Alex', tags: %w[a b]) + Dynamoid.adapter.delete_item( + obj.class.table_name, + obj.id, + range_key: Dynamoid::Dumping.dump_field(obj.tags, klass_with_range_key_and_custom_type.attributes[:tags]) + ) + + obj.name = 'Michael' + + expect do + expect { obj.save }.to raise_error(Dynamoid::Errors::StaleObjectError) + end.not_to change { obj.class.count } + end + + context 'when disable_create_table_on_save is false' do + before do + Dynamoid.configure do |config| + @original_create_table_on_save = config.create_table_on_save + 
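# `create_table_on_save` governs the implicit `create_table(sync: true)` call
# seen in the 'creates table if it does not exist' example above; with the
# option switched off, saving against a missing table surfaces
# Aws::DynamoDB::Errors::ResourceNotFoundException instead, as this context
# demonstrates.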
config.create_table_on_save = false + end + end + + after do + Dynamoid.configure do |config| + config.create_table_on_save = @original_create_table_on_save + end + end + + it 'raises Aws::DynamoDB::Errors::ResourceNotFoundException error' do + model = klass.new + + expect(klass).not_to receive(:create_table) + + expect { model.save! }.to raise_error(Aws::DynamoDB::Errors::ResourceNotFoundException) + end + end + + context 'when disable_create_table_on_save is false and the table exists' do + before do + Dynamoid.configure do |config| + @original_create_table_on_save = config.create_table_on_save + config.create_table_on_save = false + end + klass.create_table + end + + after do + Dynamoid.configure do |config| + config.create_table_on_save = @original_create_table_on_save + end + end + + it 'persists the model' do + obj = klass.new(name: 'John') + obj.save + + expect(klass.exists?(obj.id)).to eq true + expect(klass.find(obj.id).name).to eq 'John' + end + end + + describe 'partition key value' do + it 'generates "id" for new model' do + obj = klass.new + obj.save + + expect(obj.id).to be_present + expect(raw_attributes(obj)[:id]).to eql obj.id + end + + it 'does not override specified "id" for new model' do + obj = klass.new(id: '1024') + + expect { obj.save }.not_to change { obj.id } + end + + it 'does not override "id" for persisted model' do + obj = klass.create + obj.name = 'Alex' + + expect { obj.save }.not_to change { obj.id } + end + end + + describe 'pessimistic locking' do + let(:klass) do + new_class do + field :name + field :lock_version, :integer + end + end + + it 'generates "lock_version" if field declared' do + obj = klass.new + obj.save + + expect(obj.lock_version).to eq 1 + expect(raw_attributes(obj)[:lock_version]).to eq 1 + end + + it 'increments "lock_version" if it is declared' do + obj = klass.create + obj.name = 'Alex' + + expect { obj.save }.to change { obj.lock_version }.from(1).to(2) + end + + it 'prevents concurrent writes to tables with a lock_version' do + # version #1 + obj = klass.create # lock_version nil -> 1 + obj2 = klass.find(obj.id) # lock_version = 1 + + # version #2 + obj.name = 'Alex' + obj.save # lock_version 1 -> 2 + obj2.name = 'Bob' + + # tries to create version #2 again + expect { + obj2.save # lock_version 1 -> 2 + }.to raise_error(Dynamoid::Errors::StaleObjectError) + end + end + + describe 'callbacks' do + context 'new model' do + it 'runs before_create callback' do + klass_with_callback = new_class do + field :name + before_create { print 'run before_create' } + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('run before_create').to_stdout + end + + it 'runs after_create callback' do + klass_with_callback = new_class do + field :name + after_create { print 'run after_create' } + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('run after_create').to_stdout + end + + it 'runs around_create callback' do + klass_with_callback = new_class do + field :name + around_create :around_create_callback + + def around_create_callback + print 'start around_create' + yield + print 'finish around_create' + end + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('start around_createfinish around_create').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + before_validation { puts 'run before_validation' } + after_validation { puts 'run after_validation' } + + before_create { puts 'run before_create' } + 
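# For a brand-new record, #save nests the create callbacks inside the save
# callbacks: before_save/around_save wrap before_create/around_create, and the
# after_* hooks unwind in reverse — exactly the ordering asserted by
# expected_output below.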
after_create { puts 'run after_create' } + around_create :around_create_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + def around_create_callback + puts 'start around_create' + yield + puts 'finish around_create' + end + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + end + obj = klass_with_callbacks.new(name: 'Alex') + + # print each message on new line to force RSpec to show meaningful diff + expected_output = [ # rubocop:disable Style/StringConcatenation + 'run before_validation', + 'run after_validation', + 'run before_save', + 'start around_save', + 'run before_create', + 'start around_create', + 'finish around_create', + 'run after_create', + 'finish around_save', + 'run after_save' + ].join("\n") + "\n" + + expect { obj.save }.to output(expected_output).to_stdout + end + end + + context 'persisted model' do + it 'runs before_update callback' do + klass_with_callback = new_class do + field :name + before_update { print 'run before_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + obj.name = 'Bob' + + expect { obj.save }.to output('run before_update').to_stdout + end + + it 'runs after_update callback' do + klass_with_callback = new_class do + field :name + after_update { print 'run after_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + obj.name = 'Bob' + + expect { obj.save }.to output('run after_update').to_stdout + end + + it 'runs around_update callback' do + klass_with_callback = new_class do + field :name + around_update :around_update_callback + + def around_update_callback + print 'start around_update' + yield + print 'finish around_update' + end + end + + obj = klass_with_callback.create(name: 'Alex') + expect { obj.save }.to output('start around_updatefinish around_update').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + field :name + + before_validation { puts 'run before_validation' } + after_validation { puts 'run after_validation' } + + before_update { puts 'run before_update' } + after_update { puts 'run after_update' } + around_update :around_update_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + def around_update_callback + puts 'start around_update' + yield + puts 'finish around_update' + end + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + end + + # print each message on new line to force RSpec to show meaningful diff + expected_output = [ # rubocop:disable Style/StringConcatenation + 'run before_validation', + 'run after_validation', + 'run before_save', + 'start around_save', + 'run before_update', + 'start around_update', + 'finish around_update', + 'run after_update', + 'finish around_save', + 'run after_save' + ].join("\n") + "\n" + + expect { # to suppress printing at model creation + obj = klass_with_callbacks.create(name: 'John') + obj.name = 'Bob' + + expect { obj.save }.to output(expected_output).to_stdout + }.to output.to_stdout + end + end + + it 'runs before_save callback' do + klass_with_callback = new_class do + field :name + before_save { print 'run before_save' } + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('run before_save').to_stdout + end + + it 'runs after_save callbacks' do + klass_with_callback = new_class do + field :name + after_save { print 'run 
after_save' } + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('run after_save').to_stdout + end + + it 'runs around_save callbacks' do + klass_with_callback = new_class do + field :name + around_save :around_save_callback + + def around_save_callback + print 'start around_save' + yield + print 'finish around_save' + end + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('start around_savefinish around_save').to_stdout + end + + it 'runs before_validation callback' do + klass_with_callback = new_class do + field :name + before_validation { print 'run before_validation' } + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('run before_validation').to_stdout + end + + it 'runs after_validation callback' do + klass_with_callback = new_class do + field :name + after_validation { print 'run after_validation' } + end + + obj = klass_with_callback.new(name: 'Alex') + expect { obj.save }.to output('run after_validation').to_stdout + end + end + + context 'not unique primary key' do + context 'composite key' do + let(:klass_with_composite_key) do + new_class do + range :name + end + end + + it 'raises RecordNotUnique error' do + klass_with_composite_key.create(id: '10', name: 'aaa') + obj = klass_with_composite_key.new(id: '10', name: 'aaa') + + expect { obj.save }.to raise_error(Dynamoid::Errors::RecordNotUnique) + end + end + + context 'simple key' do + let(:klass_with_simple_key) do + new_class + end + + it 'raises RecordNotUnique error' do + klass_with_simple_key.create(id: '10') + obj = klass_with_simple_key.new(id: '10') + + expect { obj.save }.to raise_error(Dynamoid::Errors::RecordNotUnique) + end + end + end + + context ':raw field' do + let(:klass) do + new_class do + field :hash, :raw + end + end + + it 'works well with hash keys of any type' do + a = nil + expect { + a = klass.new(hash: { 1 => :b }) + a.save! 
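# A :raw field is stored as a native map attribute, whose keys are strings, so
# non-string hash keys come back stringified: the { 1 => :b } written above is
# read back as { '1': 'b' } in the expectation that follows.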
+ }.not_to raise_error + + expect(klass.find(a.id)[:hash]).to eql('1': 'b') + end + end + + describe 'timestamps' do + let(:klass) do + new_class do + field :title + end + end + + context 'new record' do + it 'sets created_at and updated_at if Config.timestamps=true', config: { timestamps: true } do + travel 1.hour do + time_now = Time.now + obj = klass.new + obj.save + + expect(obj.created_at.to_i).to eql(time_now.to_i) + expect(obj.updated_at.to_i).to eql(time_now.to_i) + end + end + + it 'uses provided values of created_at and of updated_at if Config.timestamps=true', config: { timestamps: true } do + travel 1.hour do + created_at = updated_at = Time.now + obj = klass.new(created_at: created_at, updated_at: updated_at) + obj.save + + expect(obj.created_at.to_i).to eql(created_at.to_i) + expect(obj.updated_at.to_i).to eql(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + created_at = updated_at = Time.now + obj = klass.new + + expect { obj.save }.not_to raise_error + end + end + + context 'persisted record' do + it 'does not change created_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + expect do + obj.title = 'New title' + obj.save + end.not_to change { obj.created_at.to_s } + end + end + + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + time_now = Time.now + obj.title = 'New title' + obj.save + + expect(obj.updated_at.to_i).to eql(time_now.to_i) + end + end + + it 'uses provided value updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + updated_at = Time.now + obj.title = 'New title' + obj.updated_at = updated_at + obj.save + + expect(obj.updated_at.to_i).to eql(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + obj = klass.create(title: 'Old title') + obj.title = 'New title' + + expect { obj.save }.not_to raise_error + end + + it 'does not change updated_at if there are no changes' do + obj = klass.create(title: 'Old title', updated_at: Time.now - 1) + + expect { obj.save }.not_to change { obj.updated_at } + end + + it 'does not change updated_at if attributes were assigned the same values' do + obj = klass.create(title: 'Old title', updated_at: Time.now - 1) + obj.title = obj.title # rubocop:disable Lint/SelfAssignment + + expect { obj.save }.not_to change { obj.updated_at } + end + end + end + + describe '`store_attribute_with_nil_value` config option' do + let(:klass) do + new_class do + field :age, :integer + end + end + + context 'true', config: { store_attribute_with_nil_value: true } do + it 'keeps document attribute with nil when model is not persisted' do + obj = klass.new(age: nil) + obj.save + + expect(raw_attributes(obj)).to include(age: nil) + end + + it 'keeps document attribute with nil when model is persisted' do + obj = klass.create(age: 42) + obj.age = nil + obj.save + + expect(raw_attributes(obj)).to include(age: nil) + end + end + + context 'false', config: { store_attribute_with_nil_value: false } do + it 'does not keep document attribute with nil when model is not persisted' do + obj = klass.new(age: nil) + obj.save + + # doesn't contain :age key + expect(raw_attributes(obj).keys).to contain_exactly(:id, :created_at, :updated_at) + end + + it 'does not keep document 
attribute with nil when model is persisted' do + obj = klass.create!(age: 42) + obj.age = nil + obj.save + + # doesn't contain :age key + expect(raw_attributes(obj).keys).to contain_exactly(:id, :created_at, :updated_at) + end + end + + context 'by default', config: { store_attribute_with_nil_value: nil } do + it 'does not keep document attribute with nil when model is not persisted' do + obj = klass.new(age: nil) + obj.save + + # doesn't contain :age key + expect(raw_attributes(obj).keys).to contain_exactly(:id, :created_at, :updated_at) + end + + it 'does not keep document attribute with nil when model is persisted' do + obj = klass.create!(age: 42) + obj.age = nil + obj.save + + # doesn't contain :age key + expect(raw_attributes(obj).keys).to contain_exactly(:id, :created_at, :updated_at) + end + end + end + + context 'when `touch: false` option passed' do + it 'does not update updated_at attribute' do + obj = klass.create! + updated_at = obj.updated_at + + travel 1.minute do + obj.name = 'foo' + obj.save(touch: false) + end + + expect(obj.updated_at).to eq updated_at + end + + it 'sets updated_at attribute for a new record' do + obj = klass.new(name: 'foo') + obj.save(touch: false) + + expect(klass.find(obj.id).updated_at).to be_present + end + end + end + + describe '#update_attribute' do + it 'changes the attribute value' do + klass = new_class do + field :age, :integer + end + + obj = klass.create(age: 18) + + expect { obj.update_attribute(:age, 20) }.to change { obj.age }.from(18).to(20) + end + + it 'persists the model' do + klass = new_class do + field :age, :integer + end + + obj = klass.create(age: 18) + obj.update_attribute(:age, 20) + + expect(klass.find(obj.id).age).to eq(20) + end + + it 'skips validation and saves not valid models' do + klass = new_class do + field :age, :integer + validates :age, numericality: { greater_than: 0 } + end + + obj = klass.create(age: 18) + obj.update_attribute(:age, -1) + + expect(klass.find(obj.id).age).to eq(-1) + end + + it 'returns self' do + klass = new_class do + field :age, :integer + end + + obj = klass.create(age: 18) + result = obj.update_attribute(:age, 20) + + expect(result).to eq(obj) + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + obj.update_attribute(:tags, []) + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'Alex') + obj.update_attribute(:name, '') + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + describe 'type casting' do + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.create + obj.update_attribute(:count, '101') + expect(obj.attributes[:count]).to eql(101) + expect(raw_attributes(obj)[:count]).to eql(101) + end + end + + describe 'timestamps' do + let(:klass) do + new_class do + field :title + end + end + + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + time_now = Time.now + obj.update_attribute(:title, 'New title') + + expect(obj.updated_at.to_i).to eql(time_now.to_i) + end + end + + it 'uses provided value updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + updated_at = 
Time.now + obj.update_attribute(:updated_at, updated_at) + + expect(obj.updated_at.to_i).to eql(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + obj = klass.create(title: 'Old title') + + expect do + obj.update_attribute(:title, 'New title') + end.not_to raise_error + end + + it 'does not change updated_at if attributes were assigned the same values' do + obj = klass.create(title: 'Old title', updated_at: Time.now - 1) + obj.title = obj.title # rubocop:disable Lint/SelfAssignment + + expect do + obj.update_attribute(:title, 'Old title') + end.not_to change { obj.updated_at } + end + end + + it 'raises an UnknownAttribute error when adding an attribute that is not on the model' do + klass = new_class do + field :age, :integer + field :name, :string + end + + obj = klass.create!(name: 'Alex', age: 26) + + expect { + obj.update_attribute(:city, 'Dublin') + }.to raise_error(Dynamoid::Errors::UnknownAttribute) + end + + describe 'callbacks' do + it 'runs before_update callback' do + klass_with_callback = new_class do + field :name + before_update { print 'run before_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attribute(:name, 'Alexey') + end.to output('run before_update').to_stdout + end + + it 'runs after_update callback' do + klass_with_callback = new_class do + field :name + after_update { print 'run after_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attribute(:name, 'Alexey') + end.to output('run after_update').to_stdout + end + + it 'runs around_update callback' do + klass_with_callback = new_class do + field :name + + around_update :around_update_callback + + def around_update_callback + print 'start around_update' + yield + print 'finish around_update' + end + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attribute(:name, 'Alexey') + end.to output('start around_updatefinish around_update').to_stdout + end + + it 'runs before_save callback' do + klass_with_callback = new_class do + field :name + + before_save { print 'run before_save' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attribute(:name, 'Alexey') + end.to output('run before_save').to_stdout + }.to output.to_stdout + end + + it 'runs after_save callback' do + klass_with_callback = new_class do + field :name + + after_save { print 'run after_save' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attribute(:name, 'Alexey') + end.to output('run after_save').to_stdout + }.to output.to_stdout + end + + it 'runs around_save callback' do + klass_with_callback = new_class do + field :name + + around_save :around_save_callback + + def around_save_callback + print 'start around_save' + yield + print 'finish around_save' + end + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attribute(:name, 'Alexey') + end.to output('start around_savefinish around_save').to_stdout + }.to output.to_stdout + end + + it 'does not run before_validation callback' do + klass_with_callback = new_class do + field :name + + before_validation { print 'run before_validation' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + 
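# #update_attribute writes a single attribute and skips validation altogether
# (see 'skips validation and saves not valid models' above), so neither
# before_validation nor after_validation is expected to produce any output in
# this example or the next one.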
obj.update_attribute(:name, 'Alexey') + end.not_to output.to_stdout + }.to output.to_stdout + end + + it 'does not run after_validation callback' do + klass_with_callback = new_class do + field :name + + after_validation { print 'run after_validation' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attribute(:name, 'Alexey') + end.not_to output.to_stdout + }.to output.to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + field :name + + before_update { puts 'run before_update' } + after_update { puts 'run after_update' } + around_update :around_update_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + around_save :around_save_callback + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + + def around_update_callback + puts 'start around_update' + yield + puts 'finish around_update' + end + end + + # print each message on new line to force RSpec to show meaningful diff + expected_output = [ # rubocop:disable Style/StringConcatenation + 'run before_save', + 'start around_save', + 'run before_update', + 'start around_update', + 'finish around_update', + 'run after_update', + 'finish around_save', + 'run after_save' + ].join("\n") + "\n" + + expect { # to suppress printing at model creation + obj = klass_with_callbacks.create(name: 'Alex') + + expect { + obj.update_attribute(:name, 'Alexey') + }.to output(expected_output).to_stdout + }.to output.to_stdout + end + end + end + + describe '#update_attributes' do + let(:klass) do + new_class do + field :name + field :age, :integer + end + end + + it 'saves changed attributes' do + obj = klass.create!(name: 'Mike', age: 26) + obj.update_attributes(age: 27) + + expect(obj.age).to eql 27 + expect(klass.find(obj.id).age).to eql 27 + end + + it 'saves document if it is not persisted yet' do + obj = klass.new(name: 'Mike', age: 26) + obj.update_attributes(age: 27) + + expect(obj).to be_persisted + expect(obj.age).to eql 27 + expect(klass.find(obj.id).age).to eql 27 + end + + it 'does not save document if validaton fails' do + klass = new_class do + field :age, :integer + validates :age, numericality: { greater_than: 16 } + end + + obj = klass.create!(name: 'Mike', age: 26) + obj.update_attributes(age: 11) + + expect(obj.age).to eql 11 + expect(klass.find(obj.id).age).to eql 26 + end + + describe 'type casting' do + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.create + obj.update_attributes(count: '101') + + expect(obj.attributes[:count]).to eql(101) + expect(raw_attributes(obj)[:count]).to eql(101) + end + end + + describe 'timestamps' do + let(:klass) do + new_class do + field :title + end + end + + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + time_now = Time.now + obj.update_attributes(title: 'New title') + + expect(obj.updated_at.to_i).to eql(time_now.to_i) + end + end + + it 'uses provided value updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + updated_at = Time.now + obj.update_attributes(updated_at: updated_at, title: 'New title') + + expect(obj.updated_at.to_i).to eql(updated_at.to_i) + end + end + + it 'does not raise error if 
Config.timestamps=false', config: { timestamps: false } do + obj = klass.create(title: 'Old title') + + expect do + obj.update_attributes(title: 'New title') + end.not_to raise_error + end + + it 'does not change updated_at if attributes were assigned the same values' do + obj = klass.create(title: 'Old title', updated_at: Time.now - 1) + obj.title = obj.title # rubocop:disable Lint/SelfAssignment + + expect do + obj.update_attributes(title: 'Old title') + end.not_to change { obj.updated_at } + end + end + + it 'raises an UnknownAttribute error when adding an attribute that is not on the model' do + obj = klass.create!(name: 'Alex', age: 26) + + expect { + obj.update_attributes(city: 'Dublin', age: 27) + }.to raise_error(Dynamoid::Errors::UnknownAttribute) + end + + describe 'callbacks' do + it 'runs before_update callback' do + klass_with_callback = new_class do + field :name + before_update { print 'run before_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes(name: 'Alexey') + end.to output('run before_update').to_stdout + end + + it 'runs after_update callback' do + klass_with_callback = new_class do + field :name + after_update { print 'run after_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes(name: 'Alexey') + end.to output('run after_update').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + field :name + + before_update { print 'run before_update' } + after_update { print 'run after_update' } + + before_save { print 'run before_save' } + after_save { print 'run after_save' } + end + model = klass_with_callbacks.create(name: 'John') + + expected_output = + 'run before_save' \ + 'run before_update' \ + 'run after_update' \ + 'run after_save' + + expect { model.update_attributes(name: 'Mike') }.to output(expected_output).to_stdout + end + end + end + + describe '#update_attributes!' 
do + let(:klass) do + new_class do + field :name + field :age, :integer + end + end + + it 'saves changed attributes' do + obj = klass.create!(name: 'Mike', age: 26) + obj.update_attributes!(age: 27) + + expect(obj.age).to eql 27 + expect(klass.find(obj.id).age).to eql 27 + end + + it 'saves document if it is not persisted yet' do + obj = klass.new(name: 'Mike', age: 26) + obj.update_attributes!(age: 27) + + expect(obj).to be_persisted + expect(obj.age).to eql 27 + expect(klass.find(obj.id).age).to eql 27 + end + + it 'raises DocumentNotValid error if validation fails' do + klass = new_class do + field :age, :integer + validates :age, numericality: { greater_than: 16 } + end + obj = klass.create!(name: 'Mike', age: 26) + + expect { + obj.update_attributes!(age: 11) + }.to raise_error(Dynamoid::Errors::DocumentNotValid) + + expect(obj.age).to eql 11 + expect(klass.find(obj.id).age).to eql 26 + end + + it 'raises an UnknownAttribute error when adding an attribute that is not on the model' do + obj = klass.create!(name: 'Alex', age: 26) + + expect { + obj.update_attributes!(city: 'Dublin', age: 27) + }.to raise_error(Dynamoid::Errors::UnknownAttribute) + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + obj.update_attributes!(tags: []) + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'Alex') + obj.update_attributes!(name: '') + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + describe 'type casting' do + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + + obj = klass.create + obj.update_attributes!(count: '101') + + expect(obj.attributes[:count]).to eql(101) + expect(raw_attributes(obj)[:count]).to eql(101) + end + end + + describe 'timestamps' do + let(:klass) do + new_class do + field :title + end + end + + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + time_now = Time.now + obj.update_attributes!(title: 'New title') + + expect(obj.updated_at.to_i).to eql(time_now.to_i) + end + end + + it 'uses provided value of updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + updated_at = Time.now + obj.update_attributes!(updated_at: updated_at, title: 'New title') + + expect(obj.updated_at.to_i).to eql(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + obj = klass.create(title: 'Old title') + + expect do + obj.update_attributes!(title: 'New title') + end.not_to raise_error + end + + it 'does not change updated_at if attributes were assigned the same values' do + obj = klass.create(title: 'Old title', updated_at: Time.now - 1) + obj.title = obj.title # rubocop:disable Lint/SelfAssignment + + expect do + obj.update_attributes!(title: 'Old title') + end.not_to change { obj.updated_at } + end + end + + describe 'callbacks' do + it 'runs before_update callback' do + klass_with_callback = new_class do + field :name + before_update { print 'run before_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('run before_update').to_stdout + 
end + + it 'runs after_update callback' do + klass_with_callback = new_class do + field :name + after_update { print 'run after_update' } + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('run after_update').to_stdout + end + + it 'runs around_update callback' do + klass_with_callback = new_class do + field :name + + around_update :around_update_callback + + def around_update_callback + print 'start around_update' + yield + print 'finish around_update' + end + end + + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('start around_updatefinish around_update').to_stdout + end + + it 'runs before_save callback' do + klass_with_callback = new_class do + field :name + + before_save { print 'run before_save' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('run before_save').to_stdout + }.to output.to_stdout + end + + it 'runs after_save callback' do + klass_with_callback = new_class do + field :name + + after_save { print 'run after_save' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('run after_save').to_stdout + }.to output.to_stdout + end + + it 'runs around_save callback' do + klass_with_callback = new_class do + field :name + + around_save :around_save_callback + + def around_save_callback + print 'start around_save' + yield + print 'finish around_save' + end + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('start around_savefinish around_save').to_stdout + }.to output.to_stdout + end + + it 'runs before_validation callback' do + klass_with_callback = new_class do + field :name + + before_validation { print 'run before_validation' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('run before_validation').to_stdout + }.to output.to_stdout + end + + it 'runs after_validation callback' do + klass_with_callback = new_class do + field :name + + after_validation { print 'run after_validation' } + end + + expect { # to suppress printing at model creation + obj = klass_with_callback.create(name: 'Alex') + + expect do + obj.update_attributes!(name: 'Alexey') + end.to output('run after_validation').to_stdout + }.to output.to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + field :name + + before_validation { puts 'run before_validation' } + after_validation { puts 'run after_validation' } + + before_update { puts 'run before_update' } + after_update { puts 'run after_update' } + around_update :around_update_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + around_save :around_save_callback + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + + def around_update_callback + puts 'start around_update' + yield + puts 'finish around_update' + end + end + + # print each message on new line to force RSpec to show meaningful diff + expected_output = [ # 
rubocop:disable Style/StringConcatenation + 'run before_validation', + 'run after_validation', + 'run before_save', + 'start around_save', + 'run before_update', + 'start around_update', + 'finish around_update', + 'run after_update', + 'finish around_save', + 'run after_save' + ].join("\n") + "\n" + + expect { # to suppress printing at model creation + obj = klass_with_callbacks.create(name: 'Alex') + + expect { + obj.update_attributes!(name: 'Alexey') + }.to output(expected_output).to_stdout + }.to output.to_stdout + end + end + end + + describe '#increment' do + let(:document_class) do + new_class do + field :age, :integer + end + end + + it 'increments specified attribute' do + obj = document_class.new(age: 21) + + expect { obj.increment(:age) }.to change { obj.age }.from(21).to(22) + end + + it 'initializes the attribute with zero if nil' do + obj = document_class.new(age: nil) + + expect { obj.increment(:age) }.to change { obj.age }.from(nil).to(1) + end + + it 'adds specified optional value' do + obj = document_class.new(age: 21) + + expect { obj.increment(:age, 10) }.to change { obj.age }.from(21).to(31) + end + + it 'returns self' do + obj = document_class.new(age: 21) + + expect(obj.increment(:age)).to eql(obj) + end + + it 'does not save changes' do + obj = document_class.new(age: 21) + obj.increment(:age) + + expect(obj).to be_new_record + end + end + + describe '#increment!' do + let(:document_class) do + new_class do + field :age, :integer + end + end + + it 'increments specified attribute' do + obj = document_class.create(age: 21) + + expect { obj.increment!(:age) }.to change { obj.age }.from(21).to(22) + end + + it 'initializes the attribute with zero if it == nil' do + obj = document_class.create(age: nil) + + expect { obj.increment!(:age) }.to change { obj.age }.from(nil).to(1) + end + + it 'adds specified optional value' do + obj = document_class.create(age: 21) + + expect { obj.increment!(:age, 10) }.to change { obj.age }.from(21).to(31) + end + + it 'persists the attribute new value' do + obj = document_class.create(age: 21) + obj.increment!(:age, 10) + obj_loaded = document_class.find(obj.id) + + expect(obj_loaded.age).to eq 31 + end + + it 'does not persist other changed attributes' do + klass = new_class do + field :age, :integer + field :title + end + + obj = klass.create!(age: 21, title: 'title') + obj.title = 'new title' + obj.increment!(:age) + + obj_loaded = klass.find(obj.id) + expect(obj_loaded.title).to eq 'title' + end + + it 'does not restore other changed attributes persisted values' do + klass = new_class do + field :age, :integer + field :title + end + + obj = klass.create!(age: 21, title: 'title') + obj.title = 'new title' + obj.increment!(:age) + + expect(obj.title).to eq 'new title' + expect(obj.title_changed?).to eq true + end + + it 'returns self' do + obj = document_class.create(age: 21) + expect(obj.increment!(:age, 10)).to eq obj + end + + it 'marks the attribute as not changed' do + obj = document_class.create(age: 21) + obj.increment!(:age, 10) + + expect(obj.age_changed?).to eq false + end + + it 'skips validation' do + class_with_validation = new_class do + field :age, :integer + validates :age, numericality: { less_than: 16 } + end + + obj = class_with_validation.create(age: 10) + obj.increment!(:age, 7) + expect(obj.valid?).to eq false + + obj_loaded = class_with_validation.find(obj.id) + expect(obj_loaded.age).to eq 17 + end + + it 'skips callbacks' do + klass = new_class do + field :age, :integer + field :title + + before_save 
:before_save_callback + + def before_save_callback; end + end + + obj = klass.new(age: 21) + + expect(obj).to receive(:before_save_callback) + obj.save! + + expect(obj).not_to receive(:before_save_callback) + obj.increment!(:age, 10) + end + + it 'works well if there is a sort key' do + klass_with_sort_key = new_class do + range :name + field :age, :integer + end + + obj = klass_with_sort_key.create(name: 'Alex', age: 21) + obj.increment!(:age, 10) + obj_loaded = klass_with_sort_key.find(obj.id, range_key: obj.name) + + expect(obj_loaded.age).to eq 31 + end + + it 'updates `updated_at` attribute when touch: true option passed' do + obj = document_class.create(age: 21, updated_at: Time.now - 1.day) + + expect { obj.increment!(:age) }.not_to change { document_class.find(obj.id).updated_at } + expect { obj.increment!(:age, touch: true) }.to change { document_class.find(obj.id).updated_at } + end + + context 'when :touch option passed' do + it 'updates `updated_at` and the specified attributes when touch: [*] option passed' do + klass = new_class do + field :age, :integer + field :viewed_at, :datetime + end + + obj = klass.create(age: 21, viewed_at: Time.now - 1.day, updated_at: Time.now - 2.days) + + expect do + expect do + obj.increment!(:age, touch: [:viewed_at]) + end.to change { klass.find(obj.id).updated_at } + end.to change { klass.find(obj.id).viewed_at } + end + + it 'runs after_touch callback' do + klass_with_callback = new_class do + field :age, :integer + after_touch { print 'run after_touch' } + end + + obj = klass_with_callback.create + + expect { obj.increment!(:age, touch: true) }.to output('run after_touch').to_stdout + end + end + end + + describe '#decrement' do + let(:document_class) do + new_class do + field :age, :integer + end + end + + it 'decrements specified attribute' do + obj = document_class.new(age: 21) + + expect { obj.decrement(:age) }.to change { obj.age }.from(21).to(20) + end + + it 'initializes the attribute with zero if nil' do + obj = document_class.new(age: nil) + + expect { obj.decrement(:age) }.to change { obj.age }.from(nil).to(-1) + end + + it 'adds specified optional value' do + obj = document_class.new(age: 21) + + expect { obj.decrement(:age, 10) }.to change { obj.age }.from(21).to(11) + end + + it 'returns self' do + obj = document_class.new(age: 21) + + expect(obj.decrement(:age)).to eql(obj) + end + + it 'does not save changes' do + obj = document_class.new(age: 21) + obj.decrement(:age) + + expect(obj).to be_new_record + end + end + + describe '#decrement!' 
do + let(:document_class) do + new_class do + field :age, :integer + end + end + + it 'decrements specified attribute' do + obj = document_class.create(age: 21) + + expect { obj.decrement!(:age) }.to change { obj.age }.from(21).to(20) + end + + it 'initializes the attribute with zero if nil' do + obj = document_class.create(age: nil) + + expect { obj.decrement!(:age) }.to change { obj.age }.from(nil).to(-1) + end + + it 'adds specified optional value' do + obj = document_class.create(age: 21) + + expect { obj.decrement!(:age, 10) }.to change { obj.age }.from(21).to(11) + end + + it 'persists the attribute new value' do + obj = document_class.create(age: 21) + obj.decrement!(:age, 10) + obj_loaded = document_class.find(obj.id) + + expect(obj_loaded.age).to eq 11 + end + + it 'does not persist other changed attributes' do + klass = new_class do + field :age, :integer + field :title + end + + obj = klass.create!(age: 21, title: 'title') + obj.title = 'new title' + obj.decrement!(:age) + + obj_loaded = klass.find(obj.id) + expect(obj_loaded.title).to eq 'title' + end + + it 'does not restore other changed attributes persisted values' do + klass = new_class do + field :age, :integer + field :title + end + + obj = klass.create!(age: 21, title: 'title') + obj.title = 'new title' + obj.decrement!(:age) + + expect(obj.title).to eq 'new title' + expect(obj.title_changed?).to eq true + end + + it 'returns self' do + obj = document_class.create(age: 21) + expect(obj.decrement!(:age, 10)).to eq obj + end + + it 'marks the attribute as not changed' do + obj = document_class.create(age: 21) + obj.decrement!(:age, 10) + + expect(obj.age_changed?).to eq false + end + + it 'skips validation' do + class_with_validation = new_class do + field :age, :integer + validates :age, numericality: { greater_than: 16 } + end + + obj = class_with_validation.create!(age: 20) + obj.decrement!(:age, 7) + expect(obj.valid?).to eq false + + obj_loaded = class_with_validation.find(obj.id) + expect(obj_loaded.age).to eq 13 + end + + it 'skips callbacks' do + klass = new_class do + field :age, :integer + field :title + + before_save :before_save_callback + + def before_save_callback; end + end + + obj = klass.new(age: 21) + + expect(obj).to receive(:before_save_callback) + obj.save! 
+ + expect(obj).not_to receive(:before_save_callback) + obj.decrement!(:age, 10) + end + + it 'works well if there is a sort key' do + klass_with_sort_key = new_class do + range :name + field :age, :integer + end + + obj = klass_with_sort_key.create(name: 'Alex', age: 21) + obj.decrement!(:age, 10) + obj_loaded = klass_with_sort_key.find(obj.id, range_key: obj.name) + + expect(obj_loaded.age).to eq 11 + end + + it 'updates `updated_at` attribute when touch: true option passed' do + obj = document_class.create(age: 21, updated_at: Time.now - 1.day) + + expect { obj.decrement!(:age) }.not_to change { document_class.find(obj.id).updated_at } + expect { obj.decrement!(:age, touch: true) }.to change { document_class.find(obj.id).updated_at } + end + + context 'when :touch option passed' do + it 'updates `updated_at` and the specified attributes' do + klass = new_class do + field :age, :integer + field :viewed_at, :datetime + end + + obj = klass.create(age: 21, viewed_at: Time.now - 1.day, updated_at: Time.now - 2.days) + + expect do + expect do + obj.decrement!(:age, touch: [:viewed_at]) + end.to change { klass.find(obj.id).updated_at } + end.to change { klass.find(obj.id).viewed_at } + end + + it 'runs after_touch callback' do + klass_with_callback = new_class do + field :age, :integer + after_touch { print 'run after_touch' } + end + + obj = klass_with_callback.create + + expect { obj.decrement!(:age, touch: true) }.to output('run after_touch').to_stdout + end + end + end + + describe '#update!' do + # TODO: add some specs + + it 'returns self' do + klass = new_class do + field :age, :integer + end + + obj = klass.create + result = obj.update! { |t| t.set(age: 21) } + expect(result).to eq obj + end + + it 'checks the conditions on update' do + @tweet = Tweet.create!(tweet_id: 1, group: 'abc', count: 5, tags: Set.new(%w[db sql]), user_name: 'John') + + @tweet.update!(if: { count: 5 }) do |t| + t.add(count: 3) + end + expect(@tweet.count).to eql 8 + expect(Tweet.find(@tweet.tweet_id, range_key: @tweet.group).count).to eql 8 + + expect do + @tweet.update!(if: { count: 5 }) do |t| + t.add(count: 3) + end + end.to raise_error(Dynamoid::Errors::StaleObjectError) + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + obj.update! { |t| t.set(tags: Set.new) } + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'Alex') + obj.update! 
{ |t| t.set(name: '') } + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + end + + describe '#update' do + it 'supports add/delete/set operations on a field' do + @tweet = Tweet.create(tweet_id: 1, group: 'abc', count: 5, tags: Set.new(%w[db sql]), user_name: 'John') + + @tweet.update do |t| + t.add(count: 3) + t.delete(tags: Set.new(['db'])) + t.set(user_name: 'Alex') + end + + expect(@tweet.count).to eq(8) + expect(@tweet.tags.to_a).to eq(['sql']) + expect(@tweet.user_name).to eq 'Alex' + end + + context 'condition specified' do + let(:document_class) do + new_class do + field :title + field :version, :integer + field :published_on, :date + end + end + + describe 'if condition' do + it 'updates when model matches conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + expect { + obj.update(if: { version: 1 }) { |t| t.set(title: 'New title') } + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'returns true when model matches conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + result = obj.update(if: { version: 1 }) { |t| t.set(title: 'New title') } + expect(result).to eq true + end + + it 'does not update when model does not match conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + expect { + obj.update(if: { version: 6 }) { |t| t.set(title: 'New title') } + }.not_to change { document_class.find(obj.id).title } + end + + it 'returns false when model does not match conditions' do + obj = document_class.create(title: 'Old title', version: 1) + + result = obj.update(if: { version: 6 }) { |t| t.set(title: 'New title') } + expect(result).to eq false + end + end + + describe 'unless_exists condition' do + it 'updates when item does not have specified attribute' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title') + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :created_at, :updated_at) + + expect { + obj.update(unless_exists: [:version]) { |t| t.set(title: 'New title') } + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model has specified attribute' do + obj = document_class.create(title: 'Old title', version: 1) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :created_at, :updated_at) + + expect { + obj.update(unless_exists: [:version]) { |t| t.set(title: 'New title') } + }.not_to change { document_class.find(obj.id).title } + end + + context 'when multiple attribute names' do + it 'updates when item does not have all the specified attributes' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title') + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :created_at, :updated_at) + + expect { + obj.update(unless_exists: %i[version published_on]) { |t| t.set(title: 'New title') } + }.to change { document_class.find(obj.id).title }.to('New title') + end + + it 'does not update when model has all the specified attributes' do + obj = document_class.create(title: 'Old title', version: 1, published_on: '2018-02-23'.to_date) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :published_on, :created_at, :updated_at) + + expect { + obj.update(unless_exists: %i[version published_on]) { |t| 
t.set(title: 'New title') } + }.not_to change { document_class.find(obj.id).title } + end + + it 'does not update when model has at least one specified attribute' do + # not specifying field value means (by default) the attribute will be + # skipped and not persisted in DynamoDB + obj = document_class.create(title: 'Old title', version: 1) + expect(raw_attributes(obj).keys).to contain_exactly(:id, :title, :version, :created_at, :updated_at) + + expect { + obj.update(unless_exists: %i[version published_on]) { |t| t.set(title: 'New title') } + }.not_to change { document_class.find(obj.id).title } + end + end + end + end + + it 'prevents concurrent saves to tables with a lock_version' do + address.save! + a2 = Address.find(address.id) + a2.update { |a| a.set(city: 'Chicago') } + + expect do + address.city = 'Seattle' + address.save! + end.to raise_error(Dynamoid::Errors::StaleObjectError) + end + + it 'uses dumped value of sort key to call UpdateItem' do + klass = new_class do + range :activated_on, :date + field :name + end + klass.create_table + + obj = klass.create!(activated_on: Date.today, name: 'Old value') + obj.update { |d| d.set(name: 'New value') } + + expect(obj.reload.name).to eql('New value') + end + + it 'saves empty Set as nil' do + klass_with_set = new_class do + field :tags, :set + end + + obj = klass_with_set.create!(tags: [:fishing]) + obj.update { |t| t.set(tags: Set.new) } + obj_loaded = klass_with_set.find(obj.id) + + expect(obj_loaded.tags).to eql nil + end + + it 'saves empty string as nil' do + klass_with_string = new_class do + field :name + end + + obj = klass_with_string.create!(name: 'Alex') + obj.update { |t| t.set(name: '') } + obj_loaded = klass_with_string.find(obj.id) + + expect(obj_loaded.name).to eql nil + end + + describe 'timestamps' do + let(:klass) do + new_class do + field :title + end + end + + it 'sets updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + time_now = Time.now + + expect { + obj.update { |d| d.set(title: 'New title') } + }.to change { obj.reload.updated_at.to_i }.to(time_now.to_i) + end + end + + it 'uses provided value of updated_at if Config.timestamps=true', config: { timestamps: true } do + obj = klass.create(title: 'Old title') + + travel 1.hour do + updated_at = Time.now + 1.hour + + expect { + obj.update do |d| + d.set(title: 'New title') + d.set(updated_at: updated_at.to_i) + end + }.to change { obj.reload.updated_at.to_i }.to(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + obj = klass.create(title: 'Old title') + + expect do + obj.update { |d| d.set(title: 'New title') } + end.not_to raise_error + end + + it 'does not set updated_at if Config.timestamps=true and table timestamps=false', config: { timestamps: true } do + klass.table timestamps: false + + obj = klass.create(title: 'Old title') + obj.update { |d| d.set(title: 'New title') } + + expect(obj.reload.attributes).not_to have_key(:updated_at) + end + end + + context ':raw field' do + let(:klass) do + new_class do + field :hash, :raw + end + end + + it 'works well with hash keys of any type' do + a = klass.create + + expect { + a.update { |d| d.set(hash: { 1 => :b }) } + }.not_to raise_error + + expect(klass.find(a.id)[:hash]).to eql('1': 'b') + end + end + + describe 'callbacks' do + it 'runs before_update callback' do + klass_with_callback = new_class do + field :count, :integer + before_update { print 'run 
before_update' } + end + model = klass_with_callback.create + + expect do + model.update do |t| + t.add(count: 3) + end + end.to output('run before_update').to_stdout + end + + it 'runs after_update callback' do + klass_with_callback = new_class do + field :count, :integer + after_update { print 'run after_update' } + end + model = klass_with_callback.create + + expect do + model.update do |t| + t.add(count: 3) + end + end.to output('run after_update').to_stdout + end + + it 'runs around_update callback' do + klass_with_callback = new_class do + field :count, :integer + around_update :around_update_callback + + def around_update_callback + print 'start around_update' + yield + print 'finish around_update' + end + end + + model = klass_with_callback.create + + expect do + model.update do |t| + t.add(count: 3) + end + end.to output('start around_updatefinish around_update').to_stdout + end + + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + field :count, :integer + + before_validation { puts 'run before_validation' } + after_validation { puts 'run after_validation' } + + before_update { puts 'run before_update' } + after_update { puts 'run after_update' } + around_update :around_update_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + around_save :around_save_callback + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + + def around_update_callback + puts 'start around_update' + yield + puts 'finish around_update' + end + end + + # print each message on new line to force RSpec to show meaningful diff + expected_output = [ # rubocop:disable Style/StringConcatenation + 'run before_update', + 'start around_update', + 'finish around_update', + 'run after_update', + ].join("\n") + "\n" + + expect { # to suppress printing at model creation + model = klass_with_callbacks.create + + expect { + model.update do |t| + t.add(count: 3) + end + }.to output(expected_output).to_stdout + }.to output.to_stdout + end + end + end + + context 'destroy' do + # TODO: adopt test cases for the `delete` method + + describe 'callbacks' do + it 'runs before_destroy callback' do + klass_with_callback = new_class do + before_destroy { print 'run before_destroy' } + end + + obj = klass_with_callback.create + + expect { obj.destroy }.to output('run before_destroy').to_stdout + end + + it 'runs after_destroy callback' do + klass_with_callback = new_class do + after_destroy { print 'run after_destroy' } + end + + obj = klass_with_callback.create + expect { obj.destroy }.to output('run after_destroy').to_stdout + end + + it 'runs around_destroy callback' do + klass_with_callback = new_class do + around_destroy :around_destroy_callback + + def around_destroy_callback + print 'start around_destroy' + yield + print 'finish around_destroy' + end + end + + obj = klass_with_callback.create + + expect { obj.destroy }.to output('start around_destroyfinish around_destroy').to_stdout + end + end + end + + context 'delete' do + it 'deletes an item' do + klass = new_class + obj = klass.create + + expect { obj.delete }.to change { klass.exists? 
obj.id }.from(true).to(false) + end + + it 'returns self' do + klass = new_class + obj = klass.create + + expect(obj.delete).to eq obj + end + + it 'uses dumped value of sort key to call DeleteItem' do + klass = new_class do + range :activated_on, :date + end + + obj = klass.create!(activated_on: Date.today) + + expect { obj.delete }.to change { + klass.where(id: obj.id, activated_on: obj.activated_on).first + }.to(nil) + end + + context 'with lock version' do + it 'deletes a record if lock version matches' do + address.save! + expect { address.destroy }.not_to raise_error + end + + it 'does not delete a record if lock version does not match' do + address.save! + a1 = address + a2 = Address.find(address.id) + + a1.city = 'Seattle' + a1.save! + + expect { a2.destroy }.to raise_exception(Dynamoid::Errors::StaleObjectError) + end + + it 'uses the correct lock_version even if it is modified' do + address.save! + a1 = address + a1.lock_version = 100 + + expect { a1.destroy }.not_to raise_error + end + end + + context 'when model has associations' do + context 'when belongs_to association' do + context 'when has_many on the other side' do + let!(:source_model) { User.create } + let!(:target_model) { source_model.camel_case.create } + + it 'disassociates self' do + expect do + source_model.delete + end.to change { CamelCase.find(target_model.id).users.target }.from([source_model]).to([]) + end + + it 'updates cached ids list in associated model' do + source_model.delete + expect(CamelCase.find(target_model.id).users_ids).to eq nil + end + + it 'behaves correctly when associated model is linked with several models' do + source_model2 = User.create + target_model.users << source_model2 + + expect(CamelCase.find(target_model.id).users.target).to contain_exactly(source_model, source_model2) + source_model.delete + expect(CamelCase.find(target_model.id).users.target).to contain_exactly(source_model2) + expect(CamelCase.find(target_model.id).users_ids).to eq [source_model2.id].to_set + end + + it 'does not raise exception when foreign key is broken' do + source_model.update_attributes!(camel_case_ids: ['fake_id']) + + expect { source_model.delete }.not_to raise_error + expect(CamelCase.find(target_model.id).users.target).to eq [] + end + end + + context 'when has_one on the other side' do + let!(:source_model) { Sponsor.create } + let!(:target_model) { source_model.camel_case.create } + + it 'disassociates self' do + expect do + source_model.delete + end.to change { CamelCase.find(target_model.id).sponsor.target }.from(source_model).to(nil) + end + + it 'updates cached ids list in associated model' do + source_model.delete + expect(CamelCase.find(target_model.id).sponsor_ids).to eq nil + end + + it 'does not raise exception when foreign key is broken' do + source_model.update_attributes!(camel_case_ids: ['fake_id']) + + expect { source_model.delete }.not_to raise_error + expect(CamelCase.find(target_model.id).sponsor.target).to eq nil + end + end + end + + context 'when has_many association' do + let!(:source_model) { User.create } + let!(:target_model) { source_model.books.create } + + it 'disassociates self' do + expect do + source_model.delete + end.to change { Magazine.find(target_model.title).owner.target }.from(source_model).to(nil) + end + + it 'updates cached ids list in associated model' do + source_model.delete + expect(Magazine.find(target_model.title).owner_ids).to eq nil + end + + it 'does not raise exception when cached foreign key is broken' do + books_ids_new = source_model.books_ids + 
['fake_id'] + source_model.update_attributes!(books_ids: books_ids_new) + + expect { source_model.delete }.not_to raise_error + expect(Magazine.find(target_model.title).owner).to eq nil + end + end + + context 'when has_one association' do + let!(:source_model) { User.create } + let!(:target_model) { source_model.monthly.create } + + it 'disassociates self' do + expect do + source_model.delete + end.to change { Subscription.find(target_model.id).customer.target }.from(source_model).to(nil) + end + + it 'updates cached ids list in associated model' do + source_model.delete + expect(Subscription.find(target_model.id).customer_ids).to eq nil + end + + it 'does not raise exception when cached foreign key is broken' do + source_model.update_attributes!(monthly_ids: ['fake_id']) + + expect { source_model.delete }.not_to raise_error + end + end + + context 'when has_and_belongs_to_many association' do + let!(:source_model) { User.create } + let!(:target_model) { source_model.subscriptions.create } + + it 'disassociates self' do + expect do + source_model.delete + end.to change { Subscription.find(target_model.id).users.target }.from([source_model]).to([]) + end + + it 'updates cached ids list in associated model' do + source_model.delete + expect(Subscription.find(target_model.id).users_ids).to eq nil + end + + it 'behaves correctly when associated model is linked with several models' do + source_model2 = User.create + target_model.users << source_model2 + + expect(Subscription.find(target_model.id).users.target).to contain_exactly(source_model, source_model2) + source_model.delete + expect(Subscription.find(target_model.id).users.target).to contain_exactly(source_model2) + expect(Subscription.find(target_model.id).users_ids).to eq [source_model2.id].to_set + end + + it 'does not raise exception when foreign key is broken' do + subscriptions_ids_new = source_model.subscriptions_ids + ['fake_id'] + source_model.update_attributes!(subscriptions_ids: subscriptions_ids_new) + + expect { source_model.delete }.not_to raise_error + expect(Subscription.find(target_model.id).users_ids).to eq nil + end + end + end + end + + describe '.import' do + before do + Address.create_table + User.create_table + Tweet.create_table + end + + it 'creates multiple documents' do + expect do + Address.import([{ city: 'Chicago' }, { city: 'New York' }]) + end.to change(Address, :count).by(2) + end + + it 'returns created documents' do + addresses = Address.import([{ city: 'Chicago' }, { city: 'New York' }]) + expect(addresses[0].city).to eq('Chicago') + expect(addresses[1].city).to eq('New York') + end + + it 'does not validate documents' do + klass = new_class do + field :city + validates :city, presence: true + end + klass.create_table + + addresses = klass.import([{ city: nil }, { city: 'Chicago' }]) + expect(addresses[0].persisted?).to be true + expect(addresses[1].persisted?).to be true + end + + it 'does not run callbacks' do + klass = new_class do + field :city + validates :city, presence: true + + before_save { raise 'before save callback called' } + end + klass.create_table + + expect { klass.import([{ city: 'Chicago' }]) }.not_to raise_error + end + + it 'makes batch operation' do + expect(Dynamoid.adapter).to receive(:batch_write_item).and_call_original + Address.import([{ city: 'Chicago' }, { city: 'New York' }]) + end + + it 'supports empty containers in `serialized` fields' do + users = User.import([name: 'Philip', favorite_colors: Set.new]) + + user = User.find(users[0].id) + expect(user.favorite_colors).to 
eq Set.new + end + + it 'supports array being empty' do + users = User.import([{ todo_list: [] }]) + + user = User.find(users[0].id) + expect(user.todo_list).to eq [] + end + + it 'saves empty Set as nil' do + tweets = Tweet.import([{ group: 'one', tags: [] }]) + + tweet = Tweet.find_by_tweet_id(tweets[0].tweet_id) + expect(tweet.tags).to eq nil + end + + it 'saves empty string as nil' do + users = User.import([{ name: '' }]) + + user = User.find(users[0].id) + expect(user.name).to eq nil + end + + it 'saves attributes with nil value' do + users = User.import([{ name: nil }]) + + user = User.find(users[0].id) + expect(user.name).to eq nil + end + + it 'supports container types being nil' do + users = User.import([{ name: 'Philip', todo_list: nil }]) + + user = User.find(users[0].id) + expect(user.todo_list).to eq nil + end + + describe 'timestamps' do + let(:klass) do + new_class + end + + before do + klass.create_table + end + + it 'sets created_at and updated_at if Config.timestamps=true', config: { timestamps: true } do + travel 1.hour do + time_now = Time.now + obj, = klass.import([{}]) + + expect(obj.created_at.to_i).to eql(time_now.to_i) + expect(obj.updated_at.to_i).to eql(time_now.to_i) + end + end + + it 'uses provided values of created_at and updated_at if Config.timestamps=true', config: { timestamps: true } do + travel 1.hour do + created_at = updated_at = Time.now + obj, = klass.import([{ created_at: created_at, updated_at: updated_at }]) + + expect(obj.created_at.to_i).to eql(created_at.to_i) + expect(obj.updated_at.to_i).to eql(updated_at.to_i) + end + end + + it 'does not raise error if Config.timestamps=false', config: { timestamps: false } do + expect { klass.import([{}]) }.not_to raise_error + end + end + + it 'dumps attribute values' do + klass = new_class do + field :active, :boolean, store_as_native_boolean: false + end + klass.create_table + + objects = klass.import([{ active: false }]) + obj = objects[0] + obj.save! 
+ expect(raw_attributes(obj)[:active]).to eql('f') + end + + it 'type casts attributes' do + klass = new_class do + field :count, :integer + end + klass.create_table + + objects = klass.import([{ count: '101' }]) + obj = objects[0] + expect(obj.attributes[:count]).to eql(101) + expect(raw_attributes(obj)[:count]).to eql(101) + end + + it 'marks all the attributes as not changed/dirty' do + klass = new_class do + field :count, :integer + end + klass.create_table + + objects = klass.import([{ count: '101' }]) + obj = objects[0] + expect(obj.changed?).to eql false + end + + context 'backoff is specified' do + let(:backoff_strategy) do + ->(_) { -> { @counter += 1 } } + end + + before do + @old_backoff = Dynamoid.config.backoff + @old_backoff_strategies = Dynamoid.config.backoff_strategies.dup + + @counter = 0 + Dynamoid.config.backoff_strategies[:simple] = backoff_strategy + Dynamoid.config.backoff = { simple: nil } + end + + after do + Dynamoid.config.backoff = @old_backoff + Dynamoid.config.backoff_strategies = @old_backoff_strategies + end + + it 'creates multiple documents' do + expect do + Address.import([{ city: 'Chicago' }, { city: 'New York' }]) + end.to change(Address, :count).by(2) + end + + it 'uses specified backoff when some items are not processed' do + # dynamodb-local ignores provisioned throughput settings + # so we cannot emulate unprocessed items - let's stub + + klass = new_class + table_name = klass.table_name + items = (1..3).map(&:to_s).map { |id| { id: id } } + + responses = [ + double('response 1', unprocessed_items: { table_name => [ + double(put_request: double(item: { id: '3' })) + ] }), + double('response 2', unprocessed_items: { table_name => [ + double(put_request: double(item: { id: '3' })) + ] }), + double('response 3', unprocessed_items: nil) + ] + allow(Dynamoid.adapter.client).to receive(:batch_write_item).and_return(*responses) + + klass.import(items) + expect(@counter).to eq 2 + end + + it 'uses new backoff after successful call without unprocessed items' do + # dynamodb-local ignores provisioned throughput settings + # so we cannot emulate unprocessed items - let's stub + + klass = new_class + table_name = klass.table_name + # batch_write_item processes up to 15 items at once + # so we emulate 4 calls with items + items = (1..50).map(&:to_s).map { |id| { id: id } } + + responses = [ + double('response 1', unprocessed_items: { table_name => [ + double(put_request: double(item: { id: '25' })) + ] }), + double('response 3', unprocessed_items: nil), + double('response 2', unprocessed_items: { table_name => [ + double(put_request: double(item: { id: '25' })) + ] }), + double('response 3', unprocessed_items: nil) + ] + allow(Dynamoid.adapter.client).to receive(:batch_write_item).and_return(*responses) + + expect(backoff_strategy).to receive(:call).twice.and_call_original + klass.import(items) + expect(@counter).to eq 2 + end + end + + context ':raw field' do + let(:klass) do + new_class do + field :hash, :raw + end + end + + before do + klass.create_table + end + + it 'works well with hash keys of any type' do + a = nil + expect { + a, = klass.import([hash: { 1 => :b }]) + }.not_to raise_error + + expect(klass.find(a.id)[:hash]).to eql('1': 'b') + end + end + end + + describe '#touch' do + it 'assigns updated_at attribute to current time' do + klass = new_class + obj = klass.create + + travel 1.hour do + obj.touch + expect(obj.updated_at.to_i).to eq(Time.now.to_i) + end + end + + it 'saves updated_at attribute value' do + klass = new_class + obj = klass.create 
+ + travel 1.hour do + obj.touch + + obj_persisted = klass.find(obj.id) + expect(obj_persisted.updated_at.to_i).to eq(Time.now.to_i) + end + end + + it 'returns self' do + klass = new_class + obj = klass.create + expect(obj.touch).to eq obj + end + + it 'assigns and saves specified time' do + klass = new_class + obj = klass.create + + time = Time.now + 1.day + obj.touch(time: time) + + obj_persisted = klass.find(obj.id) + expect(obj.updated_at.to_i).to eq(time.to_i) + expect(obj_persisted.updated_at.to_i).to eq(time.to_i) + end + + it 'assigns and saves the specified timestamp attributes as well' do + klass = new_class do + field :tagged_at, :datetime + field :logged_in_at, :datetime + end + obj = klass.create + + travel 1.hour do + obj.touch(:tagged_at, :logged_in_at) + + obj_persisted = klass.find(obj.id) + + expect(obj.updated_at.to_i).to eq(Time.now.to_i) + expect(obj_persisted.updated_at.to_i).to eq(Time.now.to_i) + + expect(obj.tagged_at.to_i).to eq(Time.now.to_i) + expect(obj_persisted.tagged_at.to_i).to eq(Time.now.to_i) + + expect(obj.logged_in_at.to_i).to eq(Time.now.to_i) + expect(obj_persisted.logged_in_at.to_i).to eq(Time.now.to_i) + end + end + + it 'does not save other changed attributes' do + klass = new_class do + field :name + end + + obj = klass.create(name: 'Alex') + obj.name = 'Michael' + + travel 1.hour do + obj.touch + + obj_persisted = klass.find(obj.id) + expect(obj_persisted.name).to eq 'Alex' + end + end + + it 'does not validate' do + klass_with_validation = new_class do + field :name + validates :name, length: { minimum: 4 } + end + + obj = klass_with_validation.create(name: 'Theodor') + obj.name = 'Mo' + + travel 1.hour do + obj.touch + + obj_persisted = klass_with_validation.find(obj.id) + expect(obj_persisted.updated_at.to_i).to eq(Time.now.to_i) + end + end + + it 'raises Dynamoid::Errors::Error when the model is not persisted' do + klass = new_class + obj = klass.new + + expect { + obj.touch + }.to raise_error(Dynamoid::Errors::Error, 'cannot touch on a new or destroyed record object') + end + + describe 'callbacks' do + it 'runs callbacks in the proper order' do + klass_with_callbacks = new_class do + before_validation { puts 'run before_validation' } + after_validation { puts 'run after_validation' } + + before_update { puts 'run before_update' } + after_update { puts 'run after_update' } + around_update :around_update_callback + + before_save { puts 'run before_save' } + after_save { puts 'run after_save' } + around_save :around_save_callback + + after_touch { puts 'run after_touch' } + + def around_save_callback + puts 'start around_save' + yield + puts 'finish around_save' + end + + def around_update_callback + puts 'start around_update' + yield + puts 'finish around_update' + end + end + + expect { # to suppress printing at model creation + obj = klass_with_callbacks.create + expect { obj.touch }.to output("run after_touch\n").to_stdout + }.to output.to_stdout + end + end + end + + describe '#persisted?' do + before do + klass.create_table + end + + let(:klass) do + new_class + end + + it 'returns true for saved model' do + model = klass.create! + expect(model.persisted?).to eq true + end + + it 'returns false for new model' do + model = klass.new + expect(model.persisted?).to eq false + end + + it 'returns false for deleted model' do + model = klass.create! + + model.delete + expect(model.persisted?).to eq false + end + + it 'returns false for destroyed model' do + model = klass.create! 
+ + model.destroy + expect(model.persisted?).to eq false + end + end +end diff --git a/dynamoid/spec/dynamoid/sti_spec.rb b/dynamoid/spec/dynamoid/sti_spec.rb new file mode 100644 index 000000000..4bd043211 --- /dev/null +++ b/dynamoid/spec/dynamoid/sti_spec.rb @@ -0,0 +1,207 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'STI' do + describe 'fields' do + let!(:class_a) do + new_class do + field :type + field :a + end + end + + let!(:class_b) do + Class.new(class_a) do + field :b + end + end + + let!(:class_c) do + Class.new(class_a) do + field :c + end + end + + it 'enables only own attributes in a base class' do + expect(class_a.attributes.keys).to match_array(%i[id type a created_at updated_at]) + end + + it 'enables only own and inherited attributes in a child class' do + expect(class_b.attributes.keys).to include(:a) + expect(class_b.attributes.keys).to include(:b) + expect(class_b.attributes.keys).not_to include(:c) + end + end + + describe 'document' do + it 'fills `type` field with class name' do + expect(Vehicle.new.type).to eq 'Vehicle' + end + + it 'reports the same table name for both base and derived classes' do + expect(Vehicle.table_name).to eq Car.table_name + expect(Vehicle.table_name).to eq NuclearSubmarine.table_name + end + end + + describe 'persistence' do + before do + class_a = new_class class_name: 'A' do + field :type + end + stub_const 'A', class_a + + class_b = Class.new(A) do + def self.name + 'B' + end + end + stub_const 'B', class_b + + class_c = Class.new(A) do + def self.name + 'C' + end + end + stub_const 'C', class_c + + class_d = Class.new(B) do + def self.name + 'D' + end + end + stub_const 'D', class_d + end + + it 'saves subclass objects in the parent table' do + b = B.create + expect(A.find(b.id)).to eql b + end + + it 'loads subclass item when querying the parent table' do + b = B.create! + c = C.create! + d = D.create! + + expect(A.all.to_a).to contain_exactly(b, c, d) + end + + it 'does not load parent item when querying the child table' do + a = A.create! + b = B.create! + + expect(B.all.to_a).to eql([b]) + end + + it 'does not load items of sibling class' do + b = B.create! + c = C.create! + + expect(B.all.to_a).to eql([b]) + expect(C.all.to_a).to eql([c]) + end + end + + describe 'querying' do + describe 'where' do + it 'honors STI' do + Vehicle.create(description: 'Description') + car = Car.create(description: 'Description') + + expect(Car.where(description: 'Description').all.to_a).to eq [car] + end + end + + describe 'all' do + it 'honors STI' do + Vehicle.create(description: 'Description') + car = Car.create + + expect(Car.all.to_a).to eq [car] + end + end + end + + describe '`inheritance_field` document option' do + before do + class_a = new_class class_name: 'A' do + table inheritance_field: :type_new + + field :type + field :type_new + end + stub_const 'A', class_a + + class_b = Class.new(A) do + def self.name + 'B' + end + end + stub_const 'B', class_b + end + + it 'allows switching from the `type` field to another one to store the class name' do + b = B.create! + + expect(A.find(b.id)).to eql b + expect(b.type_new).to eql('B') + end + + it 'ignores the `type` field entirely' do + b = B.create! 
+ expect(b.type).to eql(nil) + + b = B.create!(type: 'Integer') + expect(A.find(b.id)).to eql b + expect(b.type).to eql('Integer') + end + end + + describe '`sti_name` support' do + before do + class_a = new_class class_name: 'A' do + field :type + + def self.sti_class_for(type_name) + case type_name + when 'beta' + B + end + end + end + stub_const 'A', class_a + + class_b = Class.new(A) do + def self.sti_name + 'beta' + end + end + stub_const 'B', class_b + end + + it 'saves subclass objects in the parent table' do + b = B.create + expect(A.find(b.id)).to eql b + expect(b.type).to eql('beta') + end + end + + describe 'sti_class_for' do + before do + class_a = new_class class_name: 'A' do + field :type + end + + stub_const 'A', class_a + end + + it 'returns the class when it exists' do + expect(A.sti_class_for('A')).to eql A + end + + it 'raises an error for a non-existent class' do + expect { A.sti_class_for('NonExistClass') }.to raise_error(Dynamoid::Errors::SubclassNotFound) + end + end +end diff --git a/dynamoid/spec/dynamoid/tasks/database_spec.rb b/dynamoid/spec/dynamoid/tasks/database_spec.rb new file mode 100644 index 000000000..11e74bc78 --- /dev/null +++ b/dynamoid/spec/dynamoid/tasks/database_spec.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Tasks::Database do + describe '#ping' do + context 'when the database is reachable' do + it 'is able to ping (connect to) DynamoDB' do + expect { described_class.ping }.not_to raise_exception + end + end + end + + describe '#create_tables' do + before do + @klass = new_class + end + + context "when the tables don't exist yet" do + it 'creates tables' do + expect { + described_class.create_tables + }.to change { + Dynamoid.adapter.list_tables.include?(@klass.table_name) + }.from(false).to(true) + end + + it 'returns created table names' do + results = described_class.create_tables + expect(results[:existing]).not_to include(@klass.table_name) + expect(results[:created]).to include(@klass.table_name) + end + end + + context 'when the tables already exist' do + it 'does not attempt to re-create the table' do + @klass.create_table + + results = described_class.create_tables + expect(results[:existing]).to include(@klass.table_name) + expect(results[:created]).not_to include(@klass.table_name) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/action_spec.rb b/dynamoid/spec/dynamoid/transaction_write/action_spec.rb new file mode 100644 index 000000000..da1a3cef6 --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/action_spec.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.action' do + include_context 'transaction_write' + + context 'incrementally builds' do + it 'executes' do + klass.create_table + klass_with_composite_key.create_table + transaction = described_class.new + obj1 = transaction.create!(klass, { name: 'one' }) + obj2_id = SecureRandom.uuid + transaction.upsert(klass_with_composite_key, { id: obj2_id, age: 2, name: 'two' }) + expect(klass).not_to exist(obj1.id) + expect(klass).not_to exist(obj2_id) + transaction.commit + + obj1_found = klass.find(obj1.id) + obj2_found = klass_with_composite_key.find(obj2_id, range_key: 2) + expect(obj1_found).to eql(obj1) + expect(obj2_found.id).to eql(obj2_id) + expect(obj1_found.name).to eql('one') + expect(obj2_found.name).to eql('two') + end + end +end diff --git 
a/dynamoid/spec/dynamoid/transaction_write/context.rb b/dynamoid/spec/dynamoid/transaction_write/context.rb new file mode 100644 index 000000000..c3293d368 --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/context.rb @@ -0,0 +1,83 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# Dynamoid.config.logger.level = :debug + +RSpec.shared_context 'transaction_write' do + let(:klass) do + new_class(class_name: 'Document') do + field :name + field :record_count, :integer + field :favorite_numbers, :set, of: :integer + field :favorite_names, :set, of: :string + end + end + + let(:klass_with_composite_key) do + new_class(class_name: 'Cat') do + range :age, :integer + field :name + end + end + + let(:klass_with_callbacks) do + new_class(class_name: 'Dog') do + field :name + + before_save { print 'saving ' } + after_save { print 'saved ' } + before_create { print 'creating ' } + after_create { print 'created ' } + before_update { print 'updating ' } + after_update { print 'updated ' } + before_destroy { print 'destroying ' } + after_destroy { print 'destroyed ' } + before_validation { print 'validating ' } + after_validation { print 'validated ' } + end + end + + let(:klass_with_around_callbacks) do + new_class(class_name: 'Mouse') do + field :name + + around_save :around_save_callback + around_create :around_create_callback + around_update :around_update_callback + around_destroy :around_destroy_callback + # no around_validation callback exists + + def around_save_callback + print 'saving ' + yield + print 'saved ' + end + + def around_create_callback + print 'creating ' + yield + print 'created ' + end + + def around_update_callback + print 'updating ' + yield + print 'updated ' + end + + def around_destroy_callback + print 'destroying ' + yield + print 'destroyed ' + end + end + end + + let(:klass_with_validation) do + new_class do + field :name + validates :name, length: { minimum: 4 } + end + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/create_spec.rb b/dynamoid/spec/dynamoid/transaction_write/create_spec.rb new file mode 100644 index 000000000..b13fc8ba7 --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/create_spec.rb @@ -0,0 +1,200 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.create' do + include_context 'transaction_write' + + context 'creates' do + context 'simple primary key' do + before do + klass.create_table + end + + it 'with attribute in constructor' do + obj1 = klass.new(name: 'one') + expect(obj1.persisted?).to eql(false) + described_class.execute do |txn| + txn.create! obj1 + end + expect(obj1.persisted?).to eql(true) + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + end + + it 'with attribute in transaction' do + obj2 = klass.new + described_class.execute do |txn| + txn.create! obj2, name: 'two' + end + obj2_found = klass.find(obj2.id) + expect(obj2_found).to eql(obj2) + expect(obj2_found.name).to eql('two') + end + + it 'with class constructed in transaction' do + obj3 = nil + described_class.execute do |txn| + obj3 = txn.create! 
klass, name: 'three' + end + obj3_found = klass.find(obj3.id) + expect(obj3_found).to eql(obj3) + expect(obj3_found.name).to eql('three') + end + end + + context 'composite key' do + before do + klass_with_composite_key.create_table + end + + it 'with attribute in constructor' do + obj1 = klass_with_composite_key.new(name: 'one', age: 1) + described_class.execute do |txn| + txn.create! obj1 + end + obj1_found = klass_with_composite_key.find(obj1.id, range_key: 1) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + end + + it 'with attribute in transaction' do + obj2 = klass_with_composite_key.new + described_class.execute do |txn| + txn.create! obj2, name: 'two', age: 2 + end + obj2_found = klass_with_composite_key.find(obj2.id, range_key: 2) + expect(obj2_found).to eql(obj2) + expect(obj2_found.name).to eql('two') + end + + it 'with class constructed in transaction' do + obj3 = nil + described_class.execute do |txn| + obj3 = txn.create! klass_with_composite_key, name: 'three', age: 3 + end + obj3_found = klass_with_composite_key.find(obj3.id, range_key: 3) + expect(obj3_found).to eql(obj3) + expect(obj3_found.name).to eql('three') + end + end + + it 'creates timestamps' do + klass.create_table + obj1 = klass.new(name: 'one') + expect(obj1.created_at).to be_nil + expect(obj1.updated_at).to be_nil + described_class.execute do |txn| + txn.create! obj1 + end + obj1_found = klass.find(obj1.id) + expect(obj1_found.created_at.to_f).to be_within(1.seconds).of Time.now.to_f + expect(obj1_found.updated_at.to_f).to be_within(1.seconds).of Time.now.to_f + end + + context 'validates' do + before do + klass_with_validation.create_table + end + + it 'does not create when invalid' do + obj1 = klass_with_validation.new(name: 'one') + described_class.execute do |txn| + expect(txn.create(obj1)).to eql(false) + end + expect(obj1.id).to be_nil + end + + it 'rolls back when invalid' do + obj1 = klass_with_validation.new(name: 'one') + obj2 = klass_with_validation.new(name: 'twotwo') + described_class.execute do |txn| + expect(txn.create(obj1)).to eql(false) + expect(txn.create(obj2)).to be_present + end + expect(obj1.id).to be_nil + expect(klass_with_validation).to exist(obj2.id) + end + + it 'succeeds when valid' do + obj1 = klass_with_validation.new(name: 'oneone') + described_class.execute do |txn| + expect(txn.create(obj1)).to be_present + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'raises DocumentNotValid when not valid' do + obj1 = klass_with_validation.new(name: 'one') + expect { + described_class.execute do |txn| + txn.create! obj1 + end + }.to raise_error(Dynamoid::Errors::DocumentNotValid) + expect(obj1.id).to be_nil # hash key should NOT be auto-generated if validation fails + end + + it 'rolls back and raises DocumentNotValid when not valid' do + obj1 = klass_with_validation.new(name: 'one') + obj2 = klass_with_validation.new(name: 'twotwo') + expect { + described_class.execute do |txn| + txn.create! obj2 + txn.create! 
obj1 + end + }.to raise_error(Dynamoid::Errors::DocumentNotValid) + expect(obj1.id).to be_nil + expect(obj2.id).to be_present + expect(klass_with_validation).not_to exist(obj2.id) + end + + it 'does not raise exception when valid' do + obj1 = klass_with_validation.new(name: 'oneone') + described_class.execute do |txn| + txn.create!(obj1) + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'does not raise exception when skipping validation' do + obj1 = klass_with_validation.new(name: 'one') + described_class.execute do |txn| + # this use is infrequent, normal entry is from save!(obj, options) + txn.create!(obj1, {}, skip_validation: true) + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + end + end + + it 'uses callbacks' do + klass_with_callbacks.create_table + expect { + described_class.execute do |txn| + txn.create! klass_with_callbacks.new(name: 'two') + end + }.to output('validating validated saving creating created saved ').to_stdout + end + + it 'uses around callbacks' do + klass_with_around_callbacks.create_table + expect { + described_class.execute do |txn| + txn.create! klass_with_around_callbacks.new(name: 'two') + end + }.to output('saving creating created saved ').to_stdout + end + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/delete_spec.rb b/dynamoid/spec/dynamoid/transaction_write/delete_spec.rb new file mode 100644 index 000000000..0a3cf832d --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/delete_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.delete' do + include_context 'transaction_write' + + context 'deletes' do + context 'simple primary key' do + before do + klass.create_table + end + + it 'with instance' do + obj1 = klass.create!(name: 'one') + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + described_class.execute do |txn| + txn.delete obj1 + end + expect(klass).not_to exist(obj1.id) + end + + it 'with id' do + obj2 = klass.create!(name: 'two') + obj2_found = klass.find(obj2.id) + expect(obj2_found).to eql(obj2) + described_class.execute do |txn| + txn.delete klass, obj2.id + end + expect(klass).not_to exist(obj2.id) + end + end + + context 'composite key' do + before do + klass_with_composite_key.create_table + end + + it 'with instance' do + obj1 = klass_with_composite_key.create!(name: 'one', age: 1) + obj1_found = klass_with_composite_key.find(obj1.id, range_key: 1) + expect(obj1_found).to eql(obj1) + described_class.execute do |txn| + txn.delete obj1 + end + expect(klass_with_composite_key).not_to exist({ id: obj1.id, age: 1 }) + end + + it 'with id' do + obj2 = klass_with_composite_key.create!(name: 'two', age: 2) + obj2_found = klass_with_composite_key.find(obj2.id, range_key: 2) + expect(obj2_found).to eql(obj2) + described_class.execute do |txn| + txn.delete klass_with_composite_key, id: obj2.id, age: 2 + end + expect(klass_with_composite_key).not_to exist({ id: obj2.id, age: 2 }) + end + + it 'requires hash key' do + expect { + described_class.execute do |txn| + txn.delete klass_with_composite_key, age: 5 + end + }.to raise_exception(Dynamoid::Errors::MissingHashKey) + end + + it 'requires range key' do + expect { + described_class.execute do |txn| + txn.delete klass_with_composite_key, 
id: 'bananas' + end + }.to raise_exception(Dynamoid::Errors::MissingRangeKey) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/destroy_spec.rb b/dynamoid/spec/dynamoid/transaction_write/destroy_spec.rb new file mode 100644 index 000000000..b6170bd3e --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/destroy_spec.rb @@ -0,0 +1,107 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.destroy' do + include_context 'transaction_write' + + context 'destroys' do + context 'simple primary key' do + before do + klass.create_table + end + + it 'with instance' do + obj1 = klass.create!(name: 'one') + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1.persisted?).to eql(true) + expect(obj1).not_to be_destroyed + described_class.execute do |txn| + txn.destroy! obj1 + end + expect(obj1.destroyed?).to eql(true) + expect(obj1.persisted?).to eql(false) + expect(klass).not_to exist(obj1.id) + end + + it 'with id' do + obj2 = klass.create!(name: 'two') + obj2_found = klass.find(obj2.id) + expect(obj2_found).to eql(obj2) + described_class.execute do |txn| + txn.destroy! klass, obj2.id + end + expect(klass).not_to exist(obj2.id) + end + end + + context 'composite key' do + before do + klass_with_composite_key.create_table + end + + it 'with instance' do + obj1 = klass_with_composite_key.create!(name: 'one', age: 1) + obj2 = klass_with_composite_key.create!(name: 'two', age: 2) + obj1_found = klass_with_composite_key.find(obj1.id, range_key: 1) + expect(obj1_found).to eql(obj1) + described_class.execute do |txn| + txn.destroy! obj1 + end + expect(klass_with_composite_key).not_to exist({ id: obj1.id, age: 1 }) + end + + it 'with id' do + obj2 = klass_with_composite_key.create!(name: 'two', age: 2) + obj2_found = klass_with_composite_key.find(obj2.id, range_key: 2) + expect(obj2_found).to eql(obj2) + described_class.execute do |txn| + txn.destroy! klass_with_composite_key, { id: obj2.id, age: 2 } + end + expect(klass_with_composite_key).not_to exist({ id: obj2.id, age: 2 }) + end + + it 'requires hash key' do + expect { + described_class.execute do |txn| + txn.destroy! klass_with_composite_key, age: 5 + end + }.to raise_exception(Dynamoid::Errors::MissingHashKey) # not ArgumentError + end + + it 'requires range key' do + expect { + described_class.execute do |txn| + txn.destroy! klass_with_composite_key, id: 'bananas' + end + }.to raise_exception(Dynamoid::Errors::MissingRangeKey) # not ArgumentError + end + end + + it 'uses callbacks' do + klass_with_callbacks.create_table + obj1 = klass_with_callbacks.create!(name: 'one') + expect { + described_class.execute do |txn| + txn.destroy! obj1 + end + }.to output('destroying destroyed ').to_stdout + end + + it 'uses around callbacks' do + klass_with_around_callbacks.create_table + obj1 = klass_with_around_callbacks.create!(name: 'one') + expect { + described_class.execute do |txn| + txn.destroy! obj1 + end + }.to output('destroying destroyed ').to_stdout + end + + # TODO: test destroy! vs. destroy i.e.
when an :abort is raised in a callback + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/put_spec.rb b/dynamoid/spec/dynamoid/transaction_write/put_spec.rb new file mode 100644 index 000000000..ed84379ad --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/put_spec.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.put' do # 'put' is actually an update + include_context 'transaction_write' + + # a 'put' is a create that overwrites existing records if present + context 'puts' do + context 'simple primary key' do + before do + klass.create_table + end + + it 'fails without skip_existence_check' do + obj1 = klass.create!(name: 'one') + expect { + described_class.execute do |txn| + txn.create! klass, id: obj1.id, name: 'oneo' + end + }.to raise_exception(Aws::DynamoDB::Errors::TransactionCanceledException) + end + + it 'can overwrite with attributes' do + obj1 = klass.create!(name: 'one') + expect(klass.find(obj1.id)).to eql(obj1) # it was created + described_class.execute do |txn| + txn.create! klass, { id: obj1.id, name: 'oneone' }, { skip_existence_check: true } + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') # overwritten + end + + it 'can overwrite with instance' do + obj1 = klass.create!(name: 'one') + expect(klass.find(obj1.id)).to eql(obj1) # it was created + described_class.execute do |txn| + # this use is infrequent, normal entry is from save!(obj, options) + txn.create! klass.new(id: obj1.id, name: 'oneone'), {}, { skip_existence_check: true } + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') # overwritten + end + end + + context 'composite key' do + before do + klass_with_composite_key.create_table + end + + it 'fails without skip_existence_check' do + obj1 = klass_with_composite_key.create!(name: 'one', age: 1) + expect { + described_class.execute do |txn| + txn.create! klass_with_composite_key, id: obj1.id, age: 1, name: 'oneo' + end + }.to raise_exception(Aws::DynamoDB::Errors::TransactionCanceledException) + end + + it 'can overwrite with attributes' do + obj1 = klass_with_composite_key.create!(name: 'one', age: 1) + obj2 = nil + described_class.execute do |txn| + txn.create! klass_with_composite_key, { id: obj1.id, age: 1, name: 'oneone' }, + { skip_existence_check: true } + obj2 = txn.create! klass_with_composite_key, id: obj1.id, age: 2, name: 'two' + end + obj1_found = klass_with_composite_key.find(obj1.id, range_key: 1) + obj2_found = klass_with_composite_key.find(obj1.id, range_key: 2) + expect(obj1_found).to eql(obj1) + expect(obj2_found).to eql(obj2) + expect(obj1_found.name).to eql('oneone') + expect(obj2_found.name).to eql('two') + end + + it 'can overwrite with instance' do + obj1 = klass_with_composite_key.create!(name: 'one', age: 1) + obj2 = nil + described_class.execute do |txn| + # this use is infrequent, normal entry is from save!(obj, options) + txn.create! klass_with_composite_key.new(id: obj1.id, age: 1, name: 'oneone'), {}, + { skip_existence_check: true } + obj2 = txn.create! 
klass_with_composite_key.new(id: obj1.id, age: 2, name: 'two') + end + obj1_found = klass_with_composite_key.find(obj1.id, range_key: 1) + obj2_found = klass_with_composite_key.find(obj1.id, range_key: 2) + expect(obj1_found).to eql(obj1) + expect(obj2_found).to eql(obj2) + expect(obj1_found.name).to eql('oneone') + expect(obj2_found.name).to eql('two') + end + end + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/save_spec.rb b/dynamoid/spec/dynamoid/transaction_write/save_spec.rb new file mode 100644 index 000000000..e0ee48f76 --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/save_spec.rb @@ -0,0 +1,190 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.save' do # 'save' is an update or create + include_context 'transaction_write' + + # a 'save' does an update or create depending on if the record is new or not + context 'saves' do + context 'simple primary key' do + before do + klass.create_table + end + + it 'with an update' do + obj1 = klass.create!(name: 'one') + described_class.execute do |txn| + obj1.name = 'oneone' + txn.save! obj1 + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'with a create' do + obj2 = klass.new(name: 'two') + described_class.execute do |txn| + txn.save! obj2 + end + obj2_found = klass.find(obj2.id) + expect(obj2_found).to eql(obj2) + expect(obj2_found.name).to eql('two') + end + end + + context 'composite key' do + before do + klass_with_composite_key.create_table + end + + it 'with an update' do + obj1 = klass_with_composite_key.create!(name: 'one', age: 1) + described_class.execute do |txn| + obj1.name = 'oneone' + txn.save! obj1 + end + obj1_found = klass_with_composite_key.find(obj1.id, range_key: 1) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'with a create' do + obj2 = klass_with_composite_key.new(name: 'two', age: 2) + described_class.execute do |txn| + txn.save! obj2 + end + obj2_found = klass_with_composite_key.find(obj2.id, range_key: 2) + expect(obj2_found).to eql(obj2) + expect(obj2_found.name).to eql('two') + end + end + + context 'validates' do + before do + klass_with_validation.create_table + end + + it 'does not save when invalid' do + obj1 = klass_with_validation.new(name: 'one') + described_class.execute do |txn| + expect(txn.save(obj1)).to eql(false) + end + expect(obj1.id).to be_nil + end + + it 'allows partial save when a record in the transaction is invalid' do + obj1 = klass_with_validation.new(name: 'one') + obj2 = klass_with_validation.create!(name: 'twolong') + obj2.name = 'twotwo' + described_class.execute do |txn| + expect(txn.save(obj2)).to be_truthy + expect(txn.save(obj1)).to eql(false) + end + expect(obj1.id).to be_nil + obj2_found = klass_with_validation.find(obj2.id) + expect(obj2_found.name).to eql('twotwo') + end + + it 'saves when valid' do + obj1 = klass_with_validation.new(name: 'oneone') + described_class.execute do |txn| + expect(txn.save(obj1)).to be_present + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'raises DocumentNotValid when not valid' do + obj1 = klass_with_validation.new(name: 'one') + expect { + described_class.execute do |txn| + txn.save! 
obj1 + end + }.to raise_error(Dynamoid::Errors::DocumentNotValid) + expect(obj1.id).to be_nil + end + + it 'rolls back and raises DocumentNotValid when not valid' do + obj1 = klass_with_validation.new(name: 'one') + obj2 = klass_with_validation.create!(name: 'twolong') + obj2.name = 'twotwo' + expect { + described_class.execute do |txn| + txn.save! obj2 + txn.save! obj1 + end + }.to raise_error(Dynamoid::Errors::DocumentNotValid) + expect(obj1.id).to be_nil + obj2_found = klass_with_validation.find(obj2.id) + expect(obj2_found.name).to eql('twolong') + end + + it 'does not raise exception when valid' do + obj1 = klass_with_validation.new(name: 'oneone') + described_class.execute do |txn| + txn.save!(obj1) + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'does not raise exception when skipping validation' do + obj1 = klass_with_validation.new(name: 'one') + described_class.execute do |txn| + txn.save!(obj1, skip_validation: true) + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + end + end + + context 'callbacks' do + it 'uses callbacks' do + klass_with_callbacks.create_table + expect { + described_class.execute do |txn| + txn.save! klass_with_callbacks.new(name: 'two') + end + }.to output('validating validated saving creating created saved ').to_stdout + end + + it 'uses around callbacks' do + klass_with_around_callbacks.create_table + expect { + described_class.execute do |txn| + txn.save! klass_with_around_callbacks.new(name: 'two') + end + }.to output('saving creating created saved ').to_stdout + end + + it 'can skip callbacks' do + klass_with_callbacks.create_table + expect { + described_class.execute do |txn| + txn.save! klass_with_callbacks.new(name: 'two'), skip_callbacks: true + end + }.to output('validating validated ').to_stdout # ActiveModel runs validation callbacks when we validate + end + + it 'can skip callbacks and validation' do + klass_with_callbacks.create_table + expect { + described_class.execute do |txn| + txn.save! klass_with_callbacks.new(name: 'two'), skip_callbacks: true, skip_validation: true + end + }.to output('').to_stdout + end + end + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/update_spec.rb b/dynamoid/spec/dynamoid/transaction_write/update_spec.rb new file mode 100644 index 000000000..f6495650e --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/update_spec.rb @@ -0,0 +1,402 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.update' do + include_context 'transaction_write' + + context 'updates' do + context 'simple primary key' do + before do + klass.create_table + end + + it 'with attribute outside transaction' do + obj1 = klass.create!(name: 'one') + obj1.name = 'oneone' + described_class.execute do |txn| + txn.update! obj1 + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'with attribute in transaction' do + obj2 = klass.create!(name: 'two') + described_class.execute do |txn| + txn.update!
obj2, { name: 'twotwo' } + end + obj2_found = klass.find(obj2.id) + expect(obj2_found).to eql(obj2) + expect(obj2_found.name).to eql('twotwo') + end + + it 'with class updates in transaction' do + obj3 = klass.create!(name: 'three') + described_class.execute do |txn| + txn.update! klass, { id: obj3.id, name: 'threethree' } + end + obj3_found = klass.find(obj3.id) + expect(obj3_found).to eql(obj3) + expect(obj3_found.name).to eql('threethree') + end + end + + context 'composite key' do + before do + klass_with_composite_key.create_table + end + + it 'with attribute outside transaction' do + obj1 = klass_with_composite_key.create!(name: 'one', age: 1) + obj1.name = 'oneone' + described_class.execute do |txn| + txn.update! obj1 + end + obj1_found = klass_with_composite_key.find(obj1.id, range_key: 1) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'with attribute in transaction' do + obj2 = klass_with_composite_key.create!(name: 'two', age: 2) + + described_class.execute do |txn| + txn.update! obj2, { name: 'twotwo' } + end + obj2_found = klass_with_composite_key.find(obj2.id, range_key: 2) + expect(obj2_found).to eql(obj2) + expect(obj2_found.name).to eql('twotwo') + end + + it 'with class updates in transaction' do + obj3 = klass_with_composite_key.create!(name: 'three', age: 3) + described_class.execute do |txn| + txn.update! klass_with_composite_key, { id: obj3.id, age: 3, name: 'threethree' } + end + obj3_found = klass_with_composite_key.find(obj3.id, range_key: 3) + expect(obj3_found).to eql(obj3) + expect(obj3_found.name).to eql('threethree') + end + end + + it 'updates timestamps of instance' do + klass.create_table + obj1 = klass.create!(name: 'one', created_at: Time.now - 48.hours, updated_at: Time.now - 24.hours) + obj1.name = 'oneone' + described_class.execute do |txn| + txn.update! obj1 + end + obj1_found = klass.find(obj1.id) + expect(obj1_found.created_at.to_f).to be < (Time.now - 47.hours).to_f + expect(obj1_found.updated_at.to_f).to be_within(1.seconds).of Time.now.to_f + end + + it 'updates timestamps by class' do + klass.create_table + obj3 = klass.create!(name: 'three', created_at: Time.now - 48.hours, updated_at: Time.now - 24.hours) + described_class.execute do |txn| + txn.update! 
klass, { id: obj3.id, name: 'threethree' } + end + obj3_found = klass.find(obj3.id) + expect(obj3_found.created_at.to_f).to be < (Time.now - 47.hours).to_f + expect(obj3_found.updated_at.to_f).to be_within(1.seconds).of Time.now.to_f + end + + context 'validates' do + before do + klass_with_validation.create_table + end + + it 'does not update when invalid' do + obj1 = klass_with_validation.create!(name: 'onelong') + described_class.execute do |txn| + obj1.name = 'one' + expect(txn.update(obj1)).to eql(false) + end + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found.name).to eql('onelong') + end + + it 'allows partial update when a record in the transaction is invalid' do + obj1 = klass_with_validation.create!(name: 'onelong') + obj2 = klass_with_validation.create!(name: 'twolong') + described_class.execute do |txn| + obj1.name = 'one' + expect(txn.update(obj1)).to eql(false) + obj2.name = 'twotwo' + expect(txn.update(obj2)).to be_truthy + end + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found.name).to eql('onelong') + obj2_found = klass_with_validation.find(obj2.id) + expect(obj2_found.name).to eql('twotwo') + end + + it 'succeeds when valid' do + obj1 = klass_with_validation.create!(name: 'onelong') + described_class.execute do |txn| + obj1.name = 'oneone' + expect(txn.update(obj1)).to be_present + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'raises DocumentNotValid when not valid' do + obj1 = klass_with_validation.create!(name: 'onelong') + expect { + described_class.execute do |txn| + obj1.name = 'one' + txn.update! obj1 + end + }.to raise_error(Dynamoid::Errors::DocumentNotValid) + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found.name).to eql('onelong') + end + + it 'rolls back and raises DocumentNotValid when not valid' do + obj1 = klass_with_validation.create!(name: 'onelong') + obj2 = klass_with_validation.create!(name: 'twolong') + expect { + described_class.execute do |txn| + obj2.name = 'twotwo' + txn.update! obj2 + obj1.name = 'one' + txn.update! obj1 + end + }.to raise_error(Dynamoid::Errors::DocumentNotValid) + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found.name).to eql('onelong') + obj2_found = klass_with_validation.find(obj2.id) + expect(obj2_found.name).to eql('twolong') + end + + it 'does not raise exception when valid' do + obj1 = klass_with_validation.create!(name: 'onelong') + described_class.execute do |txn| + obj1.name = 'oneone' + txn.update! obj1 + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + end + + it 'does not raise exception when skipping validation' do + obj1 = klass_with_validation.create!(name: 'onelong') + described_class.execute do |txn| + obj1.name = 'one' + # this use is infrequent, normal entry is from save!(obj, options) + txn.update! obj1, {}, { skip_validation: true } + end + + obj1_found = klass_with_validation.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + end + + it 'uses callbacks' do + klass_with_callbacks.create_table + obj1 = klass_with_callbacks.create!(name: 'one') + expect { + described_class.execute do |txn| + obj1.name = 'oneone' + txn.update! 
obj1 + end + }.to output('validating validated saving updating updated saved ').to_stdout + end + + it 'uses around callbacks' do + klass_with_around_callbacks.create_table + obj1 = klass_with_around_callbacks.create!(name: 'one') + expect { + described_class.execute do |txn| + obj1.name = 'oneone' + txn.update! obj1 + end + }.to output('saving updating updated saved ').to_stdout + end + + context 'sets in a block' do + it 'a string' do + obj1 = klass.create!(name: 'one', record_count: 10) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.set(name: 'oneone') + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + expect(obj1_found.record_count).to eql(10) + end + end + + context 'sets and adds in a block' do + it 'to an existing value' do + obj1 = klass.create!(name: 'one', record_count: 10) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.set(name: 'oneone') + u.add(record_count: 5) + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + expect(obj1_found.record_count).to eql(15) + end + + it 'an array' do + obj1 = klass.create!(name: 'one', favorite_numbers: [1, 2, 3]) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.set(name: 'oneone') + u.add(favorite_numbers: [4]) # must be enumerable + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('oneone') + expect(obj1_found.favorite_numbers).to eql(Set[1, 2, 3, 4]) + end + end + + context 'adds' do + context 'a value' do + it 'to nil which defaults to zero' do + obj1 = klass.create!(name: 'one') + described_class.execute do |txn| + txn.update! obj1 do |u| + u.add(record_count: 5) + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.record_count).to eql(5) + end + + it 'to an existing value' do + obj1 = klass.create!(name: 'one', record_count: 10) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.add(record_count: 5) + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + expect(obj1_found.record_count).to eql(15) + end + end + + context 'to a set' do + it 'an array' do + obj1 = klass.create!(name: 'one', favorite_numbers: [1, 2, 3]) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.add(favorite_numbers: [4]) # must be enumerable + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + expect(obj1_found.favorite_numbers).to eql(Set[1, 2, 3, 4]) + end + + it 'a set of numbers' do + obj1 = klass.create!(name: 'one', favorite_numbers: [1, 2, 3]) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.add(favorite_numbers: Set[3, 4]) # must be enumerable + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + expect(obj1_found.favorite_numbers).to eql(Set[1, 2, 3, 4]) + end + + it 'a set of strings' do + obj1 = klass.create!(name: 'one', favorite_names: %w[adam ben charlie]) + described_class.execute do |txn| + txn.update! 
obj1 do |u| + u.add(favorite_names: Set['charlie', 'dan']) # must be enumerable + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to eql('one') + expect(obj1_found.favorite_names).to eql(Set.new(%w[adam ben charlie dan])) + end + end + end + end + + context 'deletes' do + it 'a scalar' do + obj1 = klass.create!(name: 'one', favorite_numbers: [1, 2, 3]) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.delete(:name) + u.delete(favorite_numbers: 2) + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to be_nil + expect(obj1_found.favorite_numbers).to eql(Set[1, 3]) + end + + it 'an array' do + obj1 = klass.create!(name: 'one', favorite_numbers: [1, 2, 3]) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.delete(:name) + u.delete(favorite_numbers: [2, 3]) + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to be_nil + expect(obj1_found.favorite_numbers).to eql(Set[1]) + end + + it 'a set' do + obj1 = klass.create!(name: 'one', favorite_numbers: [1, 2, 3]) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.delete(:name) + u.delete(favorite_numbers: Set[2, 3, 4]) + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to be_nil + expect(obj1_found.favorite_numbers).to eql(Set[1]) + end + + it 'a set of strings' do + obj1 = klass.create!(name: 'one', favorite_names: %w[adam ben charlie]) + described_class.execute do |txn| + txn.update! obj1 do |u| + u.delete(:name) + u.delete(favorite_names: Set['ben', 'charlie', 'dan']) + end + end + obj1_found = klass.find(obj1.id) + expect(obj1_found).to eql(obj1) + expect(obj1_found.name).to be_nil + expect(obj1_found.favorite_names).to eql(Set['adam']) + end + end + end +end diff --git a/dynamoid/spec/dynamoid/transaction_write/upsert_spec.rb b/dynamoid/spec/dynamoid/transaction_write/upsert_spec.rb new file mode 100644 index 000000000..0c04014a2 --- /dev/null +++ b/dynamoid/spec/dynamoid/transaction_write/upsert_spec.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +require 'spec_helper' +require_relative 'context' + +# Dynamoid.config.logger.level = :debug + +describe Dynamoid::TransactionWrite, '.upsert' do + include_context 'transaction_write' + + context 'upserts' do + context 'simple primary key' do + before do + klass.create_table + end + + it 'with class constructed in transaction' do + obj3_id = SecureRandom.uuid + described_class.execute do |txn| + txn.upsert klass, { id: obj3_id, name: 'threethree' } + end + obj3_found = klass.find(obj3_id) + expect(obj3_found.id).to eql(obj3_id) + expect(obj3_found.name).to eql('threethree') + end + + it 'requires hash key in class' do + expect { + described_class.execute do |txn| + txn.upsert klass, { name: 'threethree' } + end + }.to raise_exception(Dynamoid::Errors::MissingHashKey) + end + end + + context 'composite key' do + before do + klass_with_composite_key.create_table + end + + it 'with class constructed in transaction' do + obj3_id = SecureRandom.uuid + described_class.execute do |txn| + txn.upsert klass_with_composite_key, { id: obj3_id, age: 3, name: 'threethree' } + end + obj3_found = klass_with_composite_key.find(obj3_id, range_key: 3) + expect(obj3_found.id).to eql(obj3_id) + expect(obj3_found.name).to eql('threethree') + end + + it 'with class constructed in transaction with return value' do + obj4_written = false + 
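# NOTE: when called with a class and an attribute hash, upsert returns a plain true rather than a model instance +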
described_class.execute do |txn| + obj4_written = txn.upsert klass_with_composite_key, { id: SecureRandom.uuid, age: 4, name: 'fourfour' } + end + expect(obj4_written).to be true + end + + it 'requires hash key in class' do + expect { + described_class.execute do |txn| + txn.upsert klass_with_composite_key, { name: 'threethree' } + end + }.to raise_exception(Dynamoid::Errors::MissingHashKey) + end + + it 'requires range key in class' do + expect { + described_class.execute do |txn| + txn.upsert klass_with_composite_key, { id: 'bananas', name: 'threethree' } + end + }.to raise_exception(Dynamoid::Errors::MissingRangeKey) + end + end + + it 'updates timestamps by class when existing' do + klass.create_table + obj3 = klass.create!(name: 'three', created_at: Time.now - 48.hours, updated_at: Time.now - 24.hours) + described_class.execute do |txn| + txn.upsert klass, { id: obj3.id, name: 'threethree' } + end + obj3_found = klass.find(obj3.id) + expect(obj3_found.created_at.to_f).to be < (Time.now - 47.hours).to_f + expect(obj3_found.updated_at.to_f).to be_within(1.seconds).of Time.now.to_f + end + + it 'updates timestamps by class when not existing' do + klass.create_table + obj3_id = SecureRandom.uuid + described_class.execute do |txn| + txn.upsert klass, { id: obj3_id, name: 'threethree' } + end + obj3_found = klass.find(obj3_id) + expect(obj3_found.created_at).to be_nil + expect(obj3_found.updated_at.to_f).to be_within(1.seconds).of Time.now.to_f + end + end +end diff --git a/dynamoid/spec/dynamoid/type_casting_spec.rb b/dynamoid/spec/dynamoid/type_casting_spec.rb new file mode 100644 index 000000000..38b91a827 --- /dev/null +++ b/dynamoid/spec/dynamoid/type_casting_spec.rb @@ -0,0 +1,715 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe 'Type casting' do + describe 'Boolean field' do + let(:klass) do + new_class do + field :active, :boolean + end + end + + it 'converts nil to nil' do + obj = klass.new(active: nil) + expect(obj.active).to eql(nil) + end + + it 'converts "" to nil' do + obj = klass.new(active: '') + expect(obj.active).to eql(nil) + end + + it 'converts true to true' do + obj = klass.new(active: true) + expect(obj.active).to eql(true) + end + + it 'converts any not empty string to true' do + obj = klass.new(active: 'something') + expect(obj.active).to eql(true) + end + + it 'converts any random object to true' do + obj = klass.new(active: []) + expect(obj.active).to eql(true) + + obj = klass.new(active: {}) + expect(obj.active).to eql(true) + + obj = klass.new(active: :something) + expect(obj.active).to eql(true) + + obj = klass.new(active: Object.new) + expect(obj.active).to eql(true) + + obj = klass.new(active: 42) + expect(obj.active).to eql(true) + end + + it 'converts false to false' do + obj = klass.new(active: false) + expect(obj.active).to eql(false) + end + + it 'converts 0 to false' do + obj = klass.new(active: 0) + expect(obj.active).to eql(false) + end + + it 'converts "0" to false' do + obj = klass.new(active: '0') + expect(obj.active).to eql(false) + end + + it 'converts "f" to false' do + obj = klass.new(active: 'f') + expect(obj.active).to eql(false) + end + + it 'converts "F" to false' do + obj = klass.new(active: 'F') + expect(obj.active).to eql(false) + end + + it 'converts "false" to false' do + obj = klass.new(active: 'false') + expect(obj.active).to eql(false) + end + + it 'converts "FALSE" to false' do + obj = klass.new(active: 'FALSE') + expect(obj.active).to eql(false) + end + + it 'converts "off" to false' do + obj = 
klass.new(active: 'off') + expect(obj.active).to eql(false) + end + + it 'converts "OFF" to false' do + obj = klass.new(active: 'OFF') + expect(obj.active).to eql(false) + end + end + + describe 'DateTime field' do + let(:klass) do + new_class do + field :created_at, :datetime + end + end + + it 'converts Date, DateTime and Time to DateTime' do + obj = klass.new(created_at: Date.new(2018, 7, 21)) + expect(obj.created_at).to eql(DateTime.new(2018, 7, 21, 0, 0, 0, '+0')) + + datetime = DateTime.new(2018, 7, 21, 8, 40, 15, '+7') + obj = klass.new(created_at: datetime) + expect(obj.created_at).to eql(datetime) + + obj = klass.new(created_at: Time.new(2007, 11, 1, 15, 25, 0, '+09:00')) + expect(obj.created_at).to eql(DateTime.new(2007, 11, 1, 15, 25, 0, '+09:00')) + end + + it 'converts string with well formatted date or datetime to DateTime', config: { application_timezone: :utc } do + obj = klass.new(created_at: '2018-08-21') + expect(obj.created_at).to eql(DateTime.new(2018, 8, 21, 0, 0, 0, '+00:00')) + + obj = klass.new(created_at: '2018-08-21T21:55:30+01:00') + expect(obj.created_at).to eql(DateTime.new(2018, 8, 21, 21, 55, 30, '+1')) + end + + it 'preserves time zone specified in a string', config: { application_timezone: 'Hawaii' } do + obj = klass.new(created_at: '2018-08-21T21:55:30+01:00') + expect(obj.created_at).to eql(DateTime.new(2018, 8, 21, 21, 55, 30, '+1')) + end + + it 'uses config.application_timezone if time zone is not specified in a string', config: { application_timezone: 'Hawaii' } do + obj = klass.new(created_at: '2018-08-21T21:55:30') + expect(obj.created_at).to eql(DateTime.new(2018, 8, 21, 21, 55, 30, '-10:00')) + end + + it 'converts string with not well formatted date or datetime to nil' do + obj = klass.new(created_at: '') + expect(obj.created_at).to eql(nil) + + obj = klass.new(created_at: ' ') + expect(obj.created_at).to eql(nil) + + obj = klass.new(created_at: 'abc') + expect(obj.created_at).to eql(nil) + + obj = klass.new(created_at: '2018-08') + expect(obj.created_at).to eql(nil) + end + + it 'converts any random object to nil' do + obj = klass.new(created_at: nil) + expect(obj.created_at).to eql(nil) + + obj = klass.new(created_at: :abc) + expect(obj.created_at).to eql(nil) + + obj = klass.new(created_at: []) + expect(obj.created_at).to eql(nil) + + obj = klass.new(created_at: {}) + expect(obj.created_at).to eql(nil) + + obj = klass.new(created_at: true) + expect(obj.created_at).to eql(nil) + end + end + + describe 'Date field' do + let(:klass) do + new_class do + field :published_on, :date + end + end + + it 'converts Date, DateTime and Time to Date' do + date = Date.new(2018, 7, 21) + obj = klass.new(published_on: date) + expect(obj.published_on).to eql(date) + + obj = klass.new(published_on: DateTime.new(2018, 7, 21, 8, 40, 15, '+7')) + expect(obj.published_on).to eql(DateTime.new(2018, 7, 21)) + + obj = klass.new(published_on: Time.new(2007, 11, 1, 15, 25, 0, '+09:00')) + expect(obj.published_on).to eql(Date.new(2007, 11, 1)) + end + + it 'converts string with well formatted date or datetime to Date' do + obj = klass.new(published_on: '2018-08-21') + expect(obj.published_on).to eql(Date.new(2018, 8, 21)) + + obj = klass.new(published_on: '2018-08-21T21:55:30+01:00') + expect(obj.published_on).to eql(Date.new(2018, 8, 21)) + end + + it 'converts string with not well formatted date or datetime to nil' do + obj = klass.new(published_on: '') + expect(obj.published_on).to eql(nil) + + obj = klass.new(published_on: ' ') + expect(obj.published_on).to eql(nil) 
+ + obj = klass.new(published_on: 'abc') + expect(obj.published_on).to eql(nil) + + obj = klass.new(published_on: '2018-08') + expect(obj.published_on).to eql(nil) + end + + it 'converts any random object to nil' do + obj = klass.new(published_on: nil) + expect(obj.published_on).to eql(nil) + + obj = klass.new(published_on: :abc) + expect(obj.published_on).to eql(nil) + + obj = klass.new(published_on: []) + expect(obj.published_on).to eql(nil) + + obj = klass.new(published_on: {}) + expect(obj.published_on).to eql(nil) + + obj = klass.new(published_on: true) + expect(obj.published_on).to eql(nil) + end + end + + describe 'Set field' do + let(:klass) do + new_class do + field :items, :set + end + end + + it 'converts to Set with #to_set method' do + obj = klass.new(items: ['milk']) + expect(obj.items).to eql(Set.new(['milk'])) + + struct = Struct.new(:name, :address, :postal_code) + obj = klass.new(items: struct.new('Joe Smith', '123 Maple, Anytown NC', 12_345)) + expect(obj.items).to eql(Set.new(['Joe Smith', '123 Maple, Anytown NC', 12_345])) + end + + it 'converts any random object to nil' do + obj = klass.new(items: 'a') + expect(obj.items).to eql(nil) + + obj = klass.new(items: 13) + expect(obj.items).to eql(nil) + + obj = klass.new(items: Time.now) + expect(obj.items).to eql(nil) + end + + it 'dups Set' do + set = Set.new(['milk']) + obj = klass.new(items: set) + + expect(obj.items).to eql(set) + expect(obj.items).not_to equal(set) + end + + describe 'typed set' do + it 'type casts strings' do + klass = new_class do + field :values, :set, of: :string + end + + obj = klass.new(values: Set.new([{ name: 'John' }])) + + expect(obj.values).to eql(Set.new(['{:name=>"John"}'])) + end + + it 'type casts integers' do + klass = new_class do + field :values, :set, of: :integer + end + + obj = klass.new(values: Set.new([1, 1.5, '2'.to_d])) + + expect(obj.values).to eql(Set.new([1, 1, 2])) + end + + it 'type casts numbers' do + klass = new_class do + field :values, :set, of: :number + end + + obj = klass.new(values: Set.new([1, 1.5, '2'.to_d])) + + expect(obj.values).to eql(Set.new(['1'.to_d, '1.5'.to_d, '2'.to_d])) + end + + it 'type casts dates' do + klass = new_class do + field :values, :set, of: :date + end + + obj = klass.new(values: Set.new(['2018-08-21'])) + + expect(obj.values).to eql(Set.new(['2018-08-21'.to_date])) + end + + it 'type casts datetimes' do + klass = new_class do + field :values, :set, of: :datetime + end + + obj = klass.new(values: Set.new(['2018-08-21T21:55:30+01:00'])) + + expect(obj.values).to eql(Set.new(['2018-08-21T21:55:30+01:00'.to_datetime])) + end + + it 'does not change serialized' + it 'does not change custom types' + end + end + + describe 'Array field' do + let(:klass) do + new_class do + field :items, :array + end + end + + it 'converts to Array with #to_a method' do + obj = klass.new(items: Set.new(['milk'])) + expect(obj.items).to eql(['milk']) + + obj = klass.new(items: { 'milk' => 13.60 }) + expect(obj.items).to eql([['milk', 13.6]]) + + struct = Struct.new(:name, :address, :postal_code) + obj = klass.new(items: struct.new('Joe Smith', '123 Maple, Anytown NC', 12_345)) + expect(obj.items).to eql(['Joe Smith', '123 Maple, Anytown NC', 12_345]) + end + + it 'converts any random object to nil' do + obj = klass.new(items: 'a') + expect(obj.items).to eql(nil) + + obj = klass.new(items: 13) + expect(obj.items).to eql(nil) + + obj = klass.new(items: Mutex.new) + expect(obj.items).to eql(nil) + end + + it 'dups Array' do + array = ['milk'] + obj = 
klass.new(items: array) + + expect(obj.items).to eql(array) + expect(obj.items).not_to equal(array) + end + + describe 'typed array' do + it 'type casts strings' do + klass = new_class do + field :values, :array, of: :string + end + + obj = klass.new(values: [{ name: 'John' }]) + + expect(obj.values).to eql(['{:name=>"John"}']) + end + + it 'type casts integers' do + klass = new_class do + field :values, :array, of: :integer + end + + obj = klass.new(values: [1, 1.5, '2'.to_d]) + + expect(obj.values).to eql([1, 1, 2]) + end + + it 'type casts numbers' do + klass = new_class do + field :values, :array, of: :number + end + + obj = klass.new(values: [1, 1.5, '2'.to_d]) + + expect(obj.values).to eql(['1'.to_d, '1.5'.to_d, '2'.to_d]) + end + + it 'type casts dates' do + klass = new_class do + field :values, :array, of: :date + end + + obj = klass.new(values: ['2018-08-21']) + + expect(obj.values).to eql(['2018-08-21'.to_date]) + end + + it 'type casts datetimes' do + klass = new_class do + field :values, :array, of: :datetime + end + + obj = klass.new(values: ['2018-08-21T21:55:30+01:00']) + + expect(obj.values).to eql(['2018-08-21T21:55:30+01:00'.to_datetime]) + end + + it 'does not change serialized' + it 'does not change custom types' + end + end + + describe 'String field' do + let(:klass) do + new_class do + field :name, :string + end + end + + it 'converts to string with #to_s method' do + name = double('object') + allow(name).to receive(:to_s).and_return('string representation') + obj = klass.new(name: name) + expect(obj.name).to eql('string representation') + + obj = klass.new(name: 123) + expect(obj.name).to eql('123') + + obj = klass.new(name: '2018-08-21'.to_date) + expect(obj.name).to eql('2018-08-21') + end + + it 'converts true to "t"' do + obj = klass.new(name: true) + expect(obj.name).to eql('t') + end + + it 'converts false to "f"' do + obj = klass.new(name: false) + expect(obj.name).to eql('f') + end + + it 'dups a string' do + string = 'foo' + obj = klass.new(name: string) + + expect(obj.name).to eql(string) + expect(obj.name).not_to equal(string) + end + end + + describe 'Raw field' do # rubocop:disable Lint/EmptyBlock + end + + describe 'Map field' do + let(:klass) do + new_class do + field :settings, :map + end + end + + it 'accepts Hash object' do + obj = klass.new(settings: { foo: 21 }) + expect(obj.settings).to eq(foo: 21) + end + + it 'tries to convert to Hash with #to_h' do + settings = Object.new + def settings.to_h + { foo: 'bar' } + end + + obj = klass.new(settings: settings) + expect(obj.settings).to eq(foo: 'bar') + + obj = klass.new(settings: [[:foo, 'bar']]) + expect(obj.settings).to eq(foo: 'bar') + end + + it 'tries to convert to Hash with #to_hash' do + settings = Object.new + def settings.to_hash + { foo: 'bar' } + end + + obj = klass.new(settings: settings) + expect(obj.settings).to eq(foo: 'bar') + end + + it 'sets nil if fails to convert to Hash' do + obj = klass.new(settings: Object.new) + expect(obj.settings).to eq(nil) + + obj = klass.new(settings: 'foo') + expect(obj.settings).to eq(nil) + + obj = klass.new(settings: 42) + expect(obj.settings).to eq(nil) + end + end + + describe 'Integer field' do + let(:klass) do + new_class do + field :age, :integer + end + end + + it 'converts to integer with #to_i method' do + obj = klass.new(age: 23) + expect(obj.age).to eql(23) + + obj = klass.new(age: 23.999) + expect(obj.age).to eql(23) + + obj = klass.new(age: '1abc') + expect(obj.age).to eql(1) + + obj = klass.new(age: '0x1a') + expect(obj.age).to eql(0) + 
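+ # Time responds to #to_i with the epoch seconds, so a Time value casts to its Unix timestamp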
+ obj = klass.new(age: Time.at(204_973_019)) + expect(obj.age).to eql(204_973_019) + end + + it 'converts true to 1' do + obj = klass.new(age: true) + expect(obj.age).to eql(1) + end + + it 'converts false to 0' do + obj = klass.new(age: false) + expect(obj.age).to eql(0) + end + + it 'converts nil to nil' do + obj = klass.new(age: nil) + expect(obj.age).to eql(nil) + end + + it 'converts "" to nil' do + obj = klass.new(age: '') + expect(obj.age).to eql(nil) + end + + it 'converts string with whitespaces to nil' do + obj = klass.new(age: ' ') + expect(obj.age).to eql(nil) + end + + it 'converts random object to nil' do + obj = klass.new(age: {}) + expect(obj.age).to eql(nil) + + obj = klass.new(age: []) + expect(obj.age).to eql(nil) + + obj = klass.new(age: Date.today) + expect(obj.age).to eql(nil) + + obj = klass.new(age: :'26') + expect(obj.age).to eql(nil) + end + + it 'converts NaN and INFINITY to nil' do + obj = klass.new(age: Float::NAN) + expect(obj.age).to eql(nil) + + obj = klass.new(age: Float::INFINITY) + expect(obj.age).to eql(nil) + end + end + + describe 'Number field' do + let(:klass) do + new_class do + field :age, :number + end + end + + it 'converts to BigDecimal with #to_d method' do + obj = klass.new(age: 23) + expect(obj.age).to eql(BigDecimal('23')) + + # NOTE: 23.9 as a float becomes in JRuby 9.4.0.0: + # 0.2389999999999999857891452847979962825775146484375e2 + # So we use a string here. + obj = klass.new(age: '23.9') + expect(obj.age).to eql(BigDecimal('23.9')) + + obj = klass.new(age: '23') + expect(obj.age).to eql(BigDecimal('23')) + + obj = klass.new(age: '1abc') + expect(obj.age).to eql(BigDecimal('1')) + + obj = klass.new(age: '0x1a') + expect(obj.age).to eql(BigDecimal('0')) + + obj = klass.new(age: '23abc') + expect(obj.age).to eql(BigDecimal('23')) + end + + it 'converts symbols' do + obj = klass.new(age: :'23') + expect(obj.age).to eql(BigDecimal('23')) + + obj = klass.new(age: :'23abc') + expect(obj.age).to eql(BigDecimal('23')) + + obj = klass.new(age: :abc) + expect(obj.age).to eql(BigDecimal('0.0')) + + obj = klass.new(age: :'') + expect(obj.age).to eql(BigDecimal('0.0')) + end + + it 'converts true to 1' do + obj = klass.new(age: true) + expect(obj.age).to eql(1) + end + + it 'converts false to 0' do + obj = klass.new(age: false) + expect(obj.age).to eql(0) + end + + it 'converts nil to nil' do + obj = klass.new(age: nil) + expect(obj.age).to eql(nil) + end + + it 'converts "" to nil' do + obj = klass.new(age: '') + expect(obj.age).to eql(nil) + end + + it 'converts string with whitespaces to nil' do + obj = klass.new(age: ' ') + expect(obj.age).to eql(nil) + end + + it 'converts random object to nil' do + obj = klass.new(age: {}) + expect(obj.age).to eql(nil) + + obj = klass.new(age: []) + expect(obj.age).to eql(nil) + + obj = klass.new(age: Date.today) + expect(obj.age).to eql(nil) + end + + it 'converts NaN and INFINITY to nil' do + obj = klass.new(age: Float::NAN) + expect(obj.age).to eql(nil) + + obj = klass.new(age: Float::INFINITY) + expect(obj.age).to eql(nil) + end + end + + describe 'Binary field' do + let(:klass) do + new_class do + field :image, :binary + end + end + + it 'converts to string with #to_s method' do + value = double('object') + allow(value).to receive(:to_s).and_return('string representation') + + obj = klass.new(image: value) + expect(obj.image).to eql('string representation') + end + + it 'dups a string' do + value = 'foo' + obj = klass.new(image: value) + + expect(obj.image).to eql(value) + expect(obj.image).not_to
equal(value) + end + end + + describe 'Serialized field' do # rubocop:disable Lint/EmptyBlock + end + + describe 'Custom type field' do # rubocop:disable Lint/EmptyBlock + end + + context 'there is no such field' do + let(:klass) do + new_class do + attr_accessor :active + end + end + + it 'does not process it' do + obj = klass.new(active: true) + expect(obj.active).to eql(true) + end + end + + context 'unknown type' do + let(:klass) do + new_class do + field :active, :some_incorrect_type + end + end + + it 'raises an exception' do + expect do + klass.new(active: 'f') + end.to raise_error(ArgumentError, 'Unknown type some_incorrect_type') + end + end +end diff --git a/dynamoid/spec/dynamoid/validations_spec.rb b/dynamoid/spec/dynamoid/validations_spec.rb new file mode 100644 index 000000000..6a1b0c399 --- /dev/null +++ b/dynamoid/spec/dynamoid/validations_spec.rb @@ -0,0 +1,104 @@ +# frozen_string_literal: true + +require 'spec_helper' + +describe Dynamoid::Validations do + let(:doc_class) do + new_class + end + + it 'validates presence of' do + doc_class.field :name + doc_class.validates_presence_of :name + doc = doc_class.new + expect(doc.save).to be_falsey + expect(doc.new_record).to be_truthy + doc.name = 'secret' + expect(doc.save).not_to be_falsey + expect(doc.errors).to be_empty + end + + it 'validates presence of boolean field' do + doc_class.field :flag, :boolean + doc_class.validates_presence_of :flag + doc = doc_class.new + expect(doc.save).to be_falsey + doc.flag = false + expect(doc.save).not_to be_falsey + expect(doc.errors).to be_empty + end + + it 'raises document not found' do + doc_class.field :name + doc_class.validates_presence_of :name + doc = doc_class.new + expect { doc.save! }.to raise_error(Dynamoid::Errors::DocumentNotValid) do |error| + expect(error.document).to eq doc + end + + expect { doc_class.create! }.to raise_error(Dynamoid::Errors::DocumentNotValid) + + doc = doc_class.create!(name: 'test') + expect(doc.errors).to be_empty + end + + it 'does not validate when saves if `validate` option is false' do + klass = new_class do + field :name + validates :name, presence: true + end + + model = klass.new + model.save(validate: false) + expect(model).to be_persisted + end + + it 'returns true if model is valid' do + klass = new_class do + field :name + validates :name, presence: true + end + + expect(klass.new(name: 'some-name').save).to eq(true) + end + + it 'returns false if model is invalid' do + klass = new_class do + field :name + validates :name, presence: true + end + + expect(klass.new(name: nil).save).to eq(false) + end + + describe 'save!' do + it 'returns self' do + klass = new_class + + model = klass.new + expect(model.save!).to eq(model) + end + end + + describe '#valid?' do + describe 'callbacks' do + it 'runs before_validation callback' do + klass_with_callback = new_class do + before_validation { print 'run before_validation' } + end + + obj = klass_with_callback.new + expect { obj.valid? }.to output('run before_validation').to_stdout + end + + it 'runs after_validation callback' do + klass_with_callback = new_class do + after_validation { print 'run after_validation' } + end + + obj = klass_with_callback.new + expect { obj.valid? 
}.to output('run after_validation').to_stdout + end + end + end +end diff --git a/dynamoid/spec/spec_helper.rb b/dynamoid/spec/spec_helper.rb new file mode 100644 index 000000000..c09293123 --- /dev/null +++ b/dynamoid/spec/spec_helper.rb @@ -0,0 +1,99 @@ +# frozen_string_literal: true + +# Standard Libs +# N/A + +# Third Party Libs +# https://guides.rubyonrails.org/active_support_core_extensions.html#stand-alone-active-support +require 'active_support' +require 'active_support/testing/time_helpers' +require 'rspec' +require 'pry' + +# Debugging +DEBUG = ENV['DEBUG'] == 'true' + +ruby_version = Gem::Version.new(RUBY_VERSION) +minimum_version = ->(version, engine = 'ruby') { ruby_version >= Gem::Version.new(version) && engine == RUBY_ENGINE } +actual_version = lambda do |major, minor| + actual = Gem::Version.new(ruby_version) + major == actual.segments[0] && minor == actual.segments[1] && RUBY_ENGINE == 'ruby' +end +debugging = minimum_version.call('2.7') && DEBUG +RUN_COVERAGE = minimum_version.call('2.6') && (ENV['COVER_ALL'] || ENV['CI_CODECOV'] || ENV['CI'].nil?) +ALL_FORMATTERS = actual_version.call(2, 7) && (ENV['COVER_ALL'] || ENV['CI_CODECOV'] || ENV['CI']) # rubocop:disable Style/FetchEnvVar + +if DEBUG + if debugging + require 'byebug' + elsif minimum_version.call('2.7', 'jruby') + require 'pry-debugger-jruby' + end +end + +# Load Code Coverage as the last thing before this gem +if RUN_COVERAGE + require 'simplecov' # Config file `.simplecov` is run immediately when simplecov loads + require 'codecov' + require 'simplecov-json' + require 'simplecov-lcov' + require 'simplecov-cobertura' + if ALL_FORMATTERS + # This would override the formatter set in .simplecov, if set + SimpleCov::Formatter::LcovFormatter.config do |c| + c.report_with_single_file = true + c.single_report_path = 'coverage/lcov.info' + end + + SimpleCov.formatters = [ + SimpleCov::Formatter::HTMLFormatter, + SimpleCov::Formatter::CoberturaFormatter, # XML for Jenkins + SimpleCov::Formatter::LcovFormatter, + SimpleCov::Formatter::JSONFormatter, # For CodeClimate + SimpleCov::Formatter::Codecov, # For CodeCov + ] + end +end + +# This Gem +require 'dynamoid' +require 'dynamoid/log/formatter' + +ENV['ACCESS_KEY'] ||= 'abcd' +ENV['SECRET_KEY'] ||= '1234' + +Aws.config.update( + region: 'us-west-2', + credentials: Aws::Credentials.new(ENV.fetch('ACCESS_KEY'), ENV.fetch('SECRET_KEY')) +) + +Dynamoid.configure do |config| + config.endpoint = 'http://localhost:8000' + config.namespace = 'dynamoid_tests' + config.warn_on_scan = false + config.sync_retry_wait_seconds = 0 + config.sync_retry_max_times = 3 + config.log_formatter = Dynamoid::Log::Formatter::Debug.new +end + +Dynamoid.logger.level = Logger::FATAL + +MODELS = File.join(File.dirname(__FILE__), 'app/models') + +# Requires supporting files with custom matchers and macros, etc, +# in ./support/ and its subdirectories. +Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].sort.each { |f| require f } + +Dir[File.join(MODELS, '*.rb')].sort.each { |file| require file } + +RSpec.configure do |config| + config.order = :random + config.raise_errors_for_deprecations! 
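+ # adds `configured_with` as an alias for `it_should_behave_like` in specs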
+ config.alias_it_should_behave_like_to :configured_with, 'configured with' + + config.include NewClassHelper + config.include DumpingHelper + config.include PersistenceHelper + config.include ChainHelper + config.include ActiveSupport::Testing::TimeHelpers +end diff --git a/dynamoid/spec/support/chain_helper.rb b/dynamoid/spec/support/chain_helper.rb new file mode 100644 index 000000000..985619cf4 --- /dev/null +++ b/dynamoid/spec/support/chain_helper.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +module ChainHelper + def put_attributes(table_name, attributes) + Dynamoid.adapter.put_item(table_name, attributes) + end +end diff --git a/dynamoid/spec/support/clear_adapter_table_cache.rb b/dynamoid/spec/support/clear_adapter_table_cache.rb new file mode 100644 index 000000000..3705b5e90 --- /dev/null +++ b/dynamoid/spec/support/clear_adapter_table_cache.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +RSpec.configure do |config| + config.before do + Dynamoid.adapter.clear_cache! + end +end diff --git a/dynamoid/spec/support/config.rb b/dynamoid/spec/support/config.rb new file mode 100644 index 000000000..0b0d2b927 --- /dev/null +++ b/dynamoid/spec/support/config.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +RSpec.configure do |config| + config.around :each, :config do |example| + config = example.metadata[:config] + config_old = {} + + config.each do |key, value| + config_old[key] = Dynamoid::Config.send(key) + Dynamoid::Config.send(:"#{key}=", value) + end + + example.run + + config.each_key do |key| + Dynamoid::Config.send(:"#{key}=", config_old[key]) + end + end +end diff --git a/dynamoid/spec/support/delete_all_tables_in_namespace.rb b/dynamoid/spec/support/delete_all_tables_in_namespace.rb new file mode 100644 index 000000000..bc24cccae --- /dev/null +++ b/dynamoid/spec/support/delete_all_tables_in_namespace.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +RSpec.configure do |config| + config.before do + unless Dynamoid.adapter.tables.empty? + Dynamoid.adapter.list_tables.each do |table| + Dynamoid.adapter.delete_table(table) if table =~ /^#{Dynamoid::Config.namespace}/ + end + Dynamoid.adapter.tables.clear + end + end +end diff --git a/dynamoid/spec/support/helpers/dumping_helper.rb b/dynamoid/spec/support/helpers/dumping_helper.rb new file mode 100644 index 000000000..f32d292b4 --- /dev/null +++ b/dynamoid/spec/support/helpers/dumping_helper.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +module DumpingHelper + def raw_attributes(document) + Dynamoid.adapter.get_item(document.class.table_name, document.id) + end + + def reload(document) + document.class.find(document.id) + end +end diff --git a/dynamoid/spec/support/helpers/new_class_helper.rb b/dynamoid/spec/support/helpers/new_class_helper.rb new file mode 100644 index 000000000..a099f9b7c --- /dev/null +++ b/dynamoid/spec/support/helpers/new_class_helper.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +# Declaration DSL of partition key and sort key is weird. +# So let's use helpers to simplify class declaration in specs. +module NewClassHelper + def new_class(options = {}, &block) + table_name = options[:table_name] || :"documents_#{Time.now.to_i}_#{rand(1000)}" + class_name = (options[:class_name] || table_name).to_s.classify + partition_key = options[:partition_key] + + klass = Class.new do + include Dynamoid::Document + + if partition_key + if partition_key.is_a? 
Hash + table name: table_name, key: partition_key[:name] + if partition_key[:options] + field partition_key[:name], partition_key[:type] || :string, partition_key[:options] + else + field partition_key[:name], partition_key[:type] || :string + end + else + table name: table_name, key: partition_key + field partition_key + end + else + table name: table_name + end + + @class_name = class_name + @helper_options = options + + def self.name + @class_name + end + end + klass.class_exec(options, &block) if block + klass + end +end diff --git a/dynamoid/spec/support/helpers/persistence_helper.rb b/dynamoid/spec/support/helpers/persistence_helper.rb new file mode 100644 index 000000000..69b8b69db --- /dev/null +++ b/dynamoid/spec/support/helpers/persistence_helper.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +module PersistenceHelper + def raw_attribute_types(table_name) + Dynamoid.adapter.adapter.send(:describe_table, table_name).schema.attribute_definitions.map do |ad| + [ad.attribute_name, ad.attribute_type] + end.to_h + end + + def tables_created + Dynamoid.adapter.list_tables + end +end diff --git a/dynamoid/spec/support/log_level.rb b/dynamoid/spec/support/log_level.rb new file mode 100644 index 000000000..b8c99fd26 --- /dev/null +++ b/dynamoid/spec/support/log_level.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +RSpec.configure do |config| + config.around :each, :log_level do |example| + level_old = Dynamoid::Config.logger.level + + Dynamoid::Config.logger.level = example.metadata[:log_level] + example.run + Dynamoid::Config.logger.level = level_old + end +end diff --git a/dynamoid/spec/support/unregister_declared_classes.rb b/dynamoid/spec/support/unregister_declared_classes.rb new file mode 100644 index 000000000..d322b6b39 --- /dev/null +++ b/dynamoid/spec/support/unregister_declared_classes.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +RSpec.configure do |config| + config.around do |example| + included_models_before = Dynamoid.included_models.dup + example.run + Dynamoid.included_models.replace(included_models_before) + end +end