diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml index fee3184a7889..86213361416d 100644 --- a/.github/workflows/audit.yml +++ b/.github/workflows/audit.yml @@ -10,7 +10,7 @@ jobs: security_audit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions-rs/audit-check@v1 + - uses: actions/checkout@v4 + - uses: actions-rust-lang/audit@v1 with: token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/benches.yml b/.github/workflows/benches.yml deleted file mode 100644 index 313d17a985c9..000000000000 --- a/.github/workflows/benches.yml +++ /dev/null @@ -1,88 +0,0 @@ -on: - pull_request: - types: - - labeled - -name: Benchmarks - -jobs: - benchmarks: - if: github.event.label.name == 'run-benchmarks' - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - backend: ["postgres", "sqlite", "mysql"] - steps: - - name: Checkout sources - uses: actions/checkout@v3 - - - name: Install postgres (Linux) - if: matrix.backend == 'postgres' - run: | - sudo apt-get update - sudo apt-get install -y libpq-dev postgresql - echo "host all all 127.0.0.1/32 md5" > sudo tee -a /etc/postgresql/10/main/pg_hba.conf - sudo service postgresql restart && sleep 3 - sudo -u postgres psql -c "ALTER USER postgres PASSWORD 'postgres';" - sudo service postgresql restart && sleep 3 - echo 'DATABASE_URL=postgres://postgres:postgres@localhost/' >> $GITHUB_ENV - - - name: Install sqlite (Linux) - if: matrix.backend == 'sqlite' - run: | - sudo apt-get update - sudo apt-get install -y libsqlite3-dev - echo 'DATABASE_URL=/tmp/test.db' >> $GITHUB_ENV - - - name: Install mysql (Linux) - if: matrix.backend == 'mysql' - run: | - sudo systemctl start mysql.service - sudo apt-get update - sudo apt-get -y install libmysqlclient-dev - mysql -e "create database diesel_test; create database diesel_unit_test; grant all on \`diesel_%\`.* to 'root'@'localhost';" -uroot -proot - echo 'DATABASE_URL=mysql://root:root@localhost/diesel_test' >> $GITHUB_ENV - - - 
name: Install rust toolchain - uses: dtolnay/rust-toolchain@stable - - - name: Install critcmp - run: cargo +stable install critcmp - - - name: Benchmark changes - run: cargo +stable bench --manifest-path diesel_bench/Cargo.toml --no-default-features --features "${{matrix.backend}}" -- --save-baseline changes - - - name: Checkout master - run: | - git fetch origin - git reset --hard origin/master - - - name: Benchmark master - run: cargo +stable bench --manifest-path diesel_bench/Cargo.toml --no-default-features --features "${{matrix.backend}}" -- --save-baseline master - - - name: Critcmp - run: | - cd diesel_bench - critcmp master changes - echo "# ${{matrix.backend}}" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - critcmp master changes >> $GITHUB_STEP_SUMMARY - echo '```' >> $GITHUB_STEP_SUMMARY - - # This does not work due to github not allowing to post comments from forked repos - # - name: Post the output as comment - # uses: actions/github-script@v3 - # with: - # github-token: ${{secrets.GITHUB_TOKEN}} - # script: | - # const fs = require('fs'); - # const data = fs.readFileSync('diesel_bench/output.txt', 'utf8'); - - # github.issues.createComment({ - # issue_number: context.issue.number, - # owner: context.repo.owner, - # repo: context.repo.repo, - # body: data - # }) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eaff2a397dfd..5677f3d12c92 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,24 +21,21 @@ concurrency: jobs: check_and_test: name: Check - needs: [sqlite_bundled, rustfmt_and_clippy] + needs: [sqlite_bundled, rustfmt_and_clippy, postgres_bundled] strategy: fail-fast: false matrix: rust: ["stable", "beta", "nightly"] backend: ["postgres", "sqlite", "mysql"] - os: [ubuntu-latest, macos-latest, windows-2019] + os: [ubuntu-latest, macos-latest, macos-14, windows-2019] runs-on: ${{ matrix.os }} steps: - name: Checkout sources - uses: actions/checkout@v3 
+ uses: actions/checkout@v4 - name: Cache cargo registry - uses: actions/cache@v3 + uses: Swatinem/rust-cache@v2 with: - path: | - ~/.cargo/registry - ~/.cargo/git key: ${{ runner.os }}-${{ matrix.backend }}-cargo-${{ hashFiles('**/Cargo.toml') }} - name: Set environment variables @@ -121,7 +118,7 @@ jobs: echo "MYSQL_UNIT_TEST_DATABASE_URL=mysql://root:root@localhost/diesel_unit_test" >> $GITHUB_ENV - name: Install postgres (MacOS) - if: runner.os == 'macOS' && matrix.backend == 'postgres' + if: matrix.os == 'macos-latest' && matrix.backend == 'postgres' run: | initdb -D /usr/local/var/postgres pg_ctl -D /usr/local/var/postgres start @@ -129,6 +126,15 @@ jobs: createuser -s postgres echo "PG_DATABASE_URL=postgres://postgres@localhost/" >> $GITHUB_ENV echo "PG_EXAMPLE_DATABASE_URL=postgres://postgres@localhost/diesel_example" >> $GITHUB_ENV + - name: Install postgres (MacOS M1) + if: matrix.os == 'macos-14' && matrix.backend == 'postgres' + run: | + brew install postgresql + brew services start postgresql@14 + sleep 3 + createuser -s postgres + echo "PG_DATABASE_URL=postgres://postgres@localhost/" >> $GITHUB_ENV + echo "PG_EXAMPLE_DATABASE_URL=postgres://postgres@localhost/diesel_example" >> $GITHUB_ENV - name: Install sqlite (MacOS) if: runner.os == 'macOS' && matrix.backend == 'sqlite' @@ -137,7 +143,7 @@ jobs: echo "SQLITE_DATABASE_URL=/tmp/test.db" >> $GITHUB_ENV - name: Install mysql (MacOS) - if: runner.os == 'macOS' && matrix.backend == 'mysql' + if: matrix.os == 'macos-latest' && matrix.backend == 'mysql' run: | brew install mariadb@10.5 /usr/local/opt/mariadb@10.5/bin/mysql_install_db @@ -149,6 +155,20 @@ jobs: echo "MYSQL_UNIT_TEST_DATABASE_URL=mysql://runner@localhost/diesel_unit_test" >> $GITHUB_ENV echo "MYSQLCLIENT_LIB_DIR=/usr/local/opt/mariadb@10.5/lib" >> $GITHUB_ENV + - name: Install mysql (MacOS M1) + if: matrix.os == 'macos-14' && matrix.backend == 'mysql' + run: | + brew install mariadb@10.5 + ls /opt/homebrew/opt/mariadb@10.5 + 
/opt/homebrew/opt/mariadb@10.5/bin/mysql_install_db + /opt/homebrew/opt/mariadb@10.5/bin/mysql.server start + sleep 3 + /opt/homebrew/opt/mariadb@10.5/bin/mysql -e "create database diesel_test; create database diesel_unit_test; grant all on \`diesel_%\`.* to 'runner'@'localhost';" -urunner + echo "MYSQL_DATABASE_URL=mysql://runner@localhost/diesel_test" >> $GITHUB_ENV + echo "MYSQL_EXAMPLE_DATABASE_URL=mysql://runner@localhost/diesel_example" >> $GITHUB_ENV + echo "MYSQL_UNIT_TEST_DATABASE_URL=mysql://runner@localhost/diesel_unit_test" >> $GITHUB_ENV + echo "MYSQLCLIENT_LIB_DIR=/opt/homebrew/opt/mariadb@10.5/lib" >> $GITHUB_ENV + - name: Install sqlite (Windows) if: runner.os == 'Windows' && matrix.backend == 'sqlite' shell: cmd @@ -240,7 +260,7 @@ jobs: env: BACKEND: ${{ matrix.backend }} run: | - (cd examples/${{ matrix.backend }} && rustup run ${{ matrix.rust }} bash test_all) + (cd examples/${{ matrix.backend }} && rustup run ${{ matrix.rust }} bash test_all) - name: Test migrations-internals shell: bash @@ -300,16 +320,13 @@ jobs: name: Compiletests runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: toolchain: nightly-2023-09-21 - name: Cache cargo registry - uses: actions/cache@v3 + uses: Swatinem/rust-cache@v2 with: - path: | - ~/.cargo/registry - ~/.cargo/git key: compile_test-cargo-${{ hashFiles('**/Cargo.toml') }} - name: Install dependencies @@ -325,16 +342,13 @@ jobs: name: Check rustfmt style && run clippy runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@stable with: components: clippy, rustfmt - name: Cache cargo registry - uses: actions/cache@v3 + uses: Swatinem/rust-cache@v2 with: - path: | - ~/.cargo/registry - ~/.cargo/git key: clippy-cargo-${{ hashFiles('**/Cargo.toml') }} - name: Install dependencies @@ -405,16 +419,14 @@ jobs: name: Check sqlite bundled + Sqlite with asan runs-on: 
ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: dtolnay/rust-toolchain@nightly + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@master with: + toolchain: nightly components: "rust-src" - name: Cache cargo registry - uses: actions/cache@v3 + uses: Swatinem/rust-cache@v2 with: - path: | - ~/.cargo/registry - ~/.cargo/git key: sqlite_bundled-cargo-${{ hashFiles('**/Cargo.toml') }} - name: Test diesel-cli @@ -432,22 +444,57 @@ jobs: RUSTFLAGS: -Zsanitizer=address ASAN_OPTIONS: detect_stack_use_after_return=1 run: cargo +nightly -Z build-std test --manifest-path diesel/Cargo.toml --no-default-features --features "sqlite extras libsqlite3-sys libsqlite3-sys/bundled libsqlite3-sys/with-asan" --target x86_64-unknown-linux-gnu + postgres_bundled: + name: Check postgres bundled + Postgres with asan + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@nightly + with: + components: "rust-src" + - name: Cache cargo registry + uses: Swatinem/rust-cache@v2 + with: + key: postgres_bundled-cargo-${{ hashFiles('**/Cargo.toml') }} + - name: Install postgres (Linux) + run: | + sudo apt-get update + sudo apt-get install -y libpq-dev postgresql valgrind + echo "host all all 127.0.0.1/32 md5" | sudo tee -a /etc/postgresql/14/main/pg_hba.conf + sudo service postgresql restart && sleep 3 + sudo -u postgres psql -c "ALTER USER postgres PASSWORD 'postgres';" + sudo service postgresql restart && sleep 3 + echo "PG_DATABASE_URL=postgres://postgres:postgres@localhost/" >> $GITHUB_ENV + echo $PG_DATABASE_URL + + - name: Test diesel-cli + run: cargo +nightly test --manifest-path diesel_cli/Cargo.toml --no-default-features --features "postgres-bundled" + + - name: Run diesel_tests with ASAN enabled + env: + RUSTFLAGS: -Zsanitizer=address + ASAN_OPTIONS: detect_stack_use_after_return=1 + run: cargo +nightly -Z build-std test --manifest-path diesel_tests/Cargo.toml --no-default-features --features "postgres pq-sys pq-sys/bundled 
pq-src/with-asan" --target x86_64-unknown-linux-gnu + + - name: Run diesel tests with ASAN enabled + env: + RUSTDOCFLAGS: -Zsanitizer=address + RUSTFLAGS: -Zsanitizer=address + ASAN_OPTIONS: detect_stack_use_after_return=1 + run: cargo +nightly -Z build-std test --manifest-path diesel/Cargo.toml --no-default-features --features "postgres pq-sys pq-sys/bundled pq-src/with-asan" --target x86_64-unknown-linux-gnu minimal_rust_version: name: Check Minimal supported rust version (1.70.0) runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@1.70.0 - uses: dtolnay/rust-toolchain@nightly - uses: taiki-e/install-action@cargo-hack - uses: taiki-e/install-action@cargo-minimal-versions - name: Cache cargo registry - uses: actions/cache@v3 + uses: Swatinem/rust-cache@v2 with: - path: | - ~/.cargo/registry - ~/.cargo/git key: minimal_rust_version-cargo-${{ hashFiles('**/Cargo.toml') }} - name: Install dependencies run: | @@ -462,7 +509,7 @@ jobs: - name: Check diesel_migrations run: cargo +1.70.0 minimal-versions check -p diesel_migrations --all-features - name: Check diesel_cli - run: cargo +1.70.0 minimal-versions check -p diesel_cli --all-features + run: cargo +1.70.0 minimal-versions check -p diesel_cli --features "default sqlite-bundled" typos: name: Spell Check with Typos @@ -471,7 +518,7 @@ jobs: steps: - name: Checkout Actions Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Check the spelling of the files in our repo uses: crate-ci/typos@master diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index e54b45f7967e..1a939352290d 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -14,13 +14,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout sources - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Cache cargo registry - uses: actions/cache@v3 + uses: Swatinem/rust-cache@v2 with: - path: | - ~/.cargo/registry - 
~/.cargo/git key: cargo-doc-cargo-${{ hashFiles('**/Cargo.toml') }} - name: Get the branch name id: current_branch @@ -41,7 +38,7 @@ jobs: - name: Publish documentation if: success() - uses: JamesIves/github-pages-deploy-action@v4.2.5 + uses: JamesIves/github-pages-deploy-action@v4 with: token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages # The branch the action should deploy to. diff --git a/.github/workflows/metrics.yml b/.github/workflows/metrics.yml index a810a5e647bb..dc61e63eb680 100644 --- a/.github/workflows/metrics.yml +++ b/.github/workflows/metrics.yml @@ -11,6 +11,7 @@ env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 RUSTUP_MAX_RETRIES: 10 + RUSTFLAGS: "-C target-cpu=native" jobs: metrics: @@ -21,27 +22,35 @@ jobs: backend: ["postgres", "sqlite", "mysql"] steps: - name: Checkout sources - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: cache - uses: actions/cache@v3 + uses: Swatinem/rust-cache@v2 with: - path: | - ~/.cargo/registry - ~/.cargo/git - diesel_bench/target key: metrics-${{matrix.backend}}-cargo-${{ hashFiles('diesel_bench/Cargo.toml')}} - name: Install postgres (Linux) if: matrix.backend == 'postgres' + env: + PG_VERSION: 16 run: | sudo apt-get update - sudo apt-get install -y libpq-dev postgresql - echo "host all all 127.0.0.1/32 md5" > sudo tee -a /etc/postgresql/10/main/pg_hba.conf - sudo service postgresql restart && sleep 3 + sudo DEBIAN_FRONTEND=noninteractive apt-get --purge remove postgresql\* -y + sudo apt-get install gnupg2 -y + curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc|sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg + sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' + sudo apt-get update + sudo apt-get install -y libpq-dev postgresql-$PG_VERSION + sudo tee /etc/postgresql/$PG_VERSION/main/pg_hba.conf <<'EOF' + local all postgres peer + local all all peer + host all all 127.0.0.1/32 trust + host all all 
::1/128 trust + EOF + sudo service postgresql start $PG_VERSION && sleep 3 sudo -u postgres psql -c "ALTER USER postgres PASSWORD 'postgres';" - sudo service postgresql restart && sleep 3 - echo 'DATABASE_URL=postgres://postgres:postgres@localhost/' >> $GITHUB_ENV + sudo service postgresql restart $PG_VERSION && sleep 3 + echo 'DATABASE_URL=postgres://postgres:postgres@localhost:5432/' >> $GITHUB_ENV - name: Install sqlite (Linux) if: matrix.backend == 'sqlite' @@ -65,15 +74,15 @@ jobs: - name: Run Benchmarks (Postgres) if: matrix.backend == 'postgres' - run: cargo +stable bench --manifest-path diesel_bench/Cargo.toml --no-default-features --features "postgres sqlx-bench sqlx/postgres rust_postgres futures sea-orm sea-orm/sqlx-postgres criterion/async_tokio quaint quaint/postgresql quaint/serde-support serde diesel-async diesel-async/postgres wtx" + run: cargo +stable bench --no-fail-fast --manifest-path diesel_bench/Cargo.toml --no-default-features --features "postgres sqlx-bench sqlx/postgres rust_postgres tokio_postgres futures futures-util sea-orm sea-orm/sqlx-postgres criterion/async_tokio serde diesel-async diesel-async/postgres wtx" - name: Run Benchmarks (Sqlite) if: matrix.backend == 'sqlite' - run: cargo +stable bench --manifest-path diesel_bench/Cargo.toml --no-default-features --features "sqlite sqlx-bench sqlx/sqlite tokio rusqlite futures sea-orm sea-orm/sqlx-sqlite criterion/async_tokio" + run: cargo +stable bench --no-fail-fast --manifest-path diesel_bench/Cargo.toml --no-default-features --features "sqlite sqlx-bench sqlx/sqlite tokio rusqlite futures sea-orm sea-orm/sqlx-sqlite criterion/async_tokio" - name: Run Benchmarks (Mysql) if: matrix.backend == 'mysql' - run: cargo +stable bench --manifest-path diesel_bench/Cargo.toml --no-default-features --features "mysql sqlx-bench sqlx/mysql tokio rustorm rustorm/with-mysql rustorm_dao rust_mysql futures sea-orm sea-orm/sqlx-mysql criterion/async_tokio quaint quaint/mysql quaint/serde-support serde 
diesel-async diesel-async/mysql" + run: cargo +stable bench --no-fail-fast --manifest-path diesel_bench/Cargo.toml --no-default-features --features "mysql sqlx-bench sqlx/mysql tokio rustorm rustorm/with-mysql rustorm_dao rust_mysql futures sea-orm sea-orm/sqlx-mysql criterion/async_tokio serde diesel-async diesel-async/mysql" - name: Push metrics env: diff --git a/.github/workflows/run_benches.yml b/.github/workflows/run_benches.yml new file mode 100644 index 000000000000..47986f625ce3 --- /dev/null +++ b/.github/workflows/run_benches.yml @@ -0,0 +1,102 @@ +on: + pull_request: + types: [labeled, opened, reopened, synchronize] + +name: Run and Cache Benchmarks + +jobs: + run_benchmarks: + if: github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'run-benchmarks') + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + backend: ["postgres", "sqlite", "mysql"] + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Install postgres (Linux) + if: matrix.backend == 'postgres' + env: + PG_VERSION: 16 + run: | + sudo apt-get update + sudo DEBIAN_FRONTEND=noninteractive apt-get --purge remove postgresql\* -y + sudo apt-get install gnupg2 -y + curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc|sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg + sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' + sudo apt-get update + sudo apt-get install -y libpq-dev postgresql-$PG_VERSION + sudo tee /etc/postgresql/$PG_VERSION/main/pg_hba.conf <<'EOF' + local all postgres peer + local all all peer + host all all 127.0.0.1/32 trust + host all all ::1/128 trust + EOF + sudo service postgresql start $PG_VERSION && sleep 3 + sudo -u postgres psql -c "ALTER USER postgres PASSWORD 'postgres';" + sudo service postgresql restart $PG_VERSION && sleep 3 + echo 'DATABASE_URL=postgres://postgres:postgres@localhost:5432/' >> $GITHUB_ENV 
+ + - name: Install sqlite (Linux) + if: matrix.backend == 'sqlite' + run: | + sudo apt-get update + sudo apt-get install -y libsqlite3-dev + echo 'DATABASE_URL=/tmp/test.db' >> $GITHUB_ENV + + - name: Install mysql (Linux) + if: matrix.backend == 'mysql' + run: | + sudo systemctl start mysql.service + sudo apt-get update + sudo apt-get -y install libmysqlclient-dev + mysql -e "create database diesel_test; create database diesel_unit_test; grant all on \`diesel_%\`.* to 'root'@'localhost';" -uroot -proot + echo 'DATABASE_URL=mysql://root:root@localhost/diesel_test' >> $GITHUB_ENV + + - name: Install rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Install critcmp + run: cargo +stable install critcmp + + - name: Benchmark PR ${{ matrix.backend }} + run: cargo +stable bench --manifest-path diesel_bench/Cargo.toml --no-default-features --features "${{ matrix.backend }}" -- --save-baseline changes > pr_${{ matrix.backend }}.txt + + - name: Upload PR ${{ matrix.backend }} Benchmark Results + uses: actions/upload-artifact@v4 + with: + name: pr_${{ matrix.backend }}.txt + path: ./pr_${{ matrix.backend }}.txt + + - name: Checkout base branch + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.base.sha }} + repository: ${{ github.event.pull_request.base.repo.full_name }} + + - name: Benchmark base ${{ matrix.backend }} + run: cargo +stable bench --manifest-path diesel_bench/Cargo.toml --no-default-features --features "${{ matrix.backend }}" -- --save-baseline master > base_${{ matrix.backend }}.txt + + - name: Upload base ${{ matrix.backend }} Benchmark Results + uses: actions/upload-artifact@v4 + with: + name: base_${{ matrix.backend }}.txt + path: ./base_${{ matrix.backend }}.txt + + - name: Upload GitHub Event + uses: actions/upload-artifact@v4 + with: + name: event_${{ matrix.backend }}.json + path: ${{ github.event_path }} + + - name: Critcmp + run: | + cd diesel_bench + critcmp master changes + echo "# ${{matrix.backend}}" >> 
$GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + critcmp master changes >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/track_benches.yml b/.github/workflows/track_benches.yml new file mode 100644 index 000000000000..c2a343e868d5 --- /dev/null +++ b/.github/workflows/track_benches.yml @@ -0,0 +1,100 @@ +on: + workflow_run: + workflows: [Run and Cache Benchmarks] + types: + - completed + +name: Track Benchmarks + +jobs: + track_benchmarks: + if: github.event.workflow_run.conclusion == 'success' + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + backend: ["postgres", "sqlite", "mysql"] + env: + BENCHER_PROJECT: diesel + BENCHER_ADAPTER: rust_criterion + BENCHER_TESTBED: ubuntu-latest-${{ matrix.backend }} + PR_BENCHMARK_RESULTS: pr_${{ matrix.backend }}.txt + BASE_BENCHMARK_RESULTS: base_${{ matrix.backend }}.txt + GITHUB_EVENT: event_${{ matrix.backend }}.json + # This is the confidence interval for the t-test Threshold + # Adjust this value to lower to make the test more sensitive to changes + # Adjust this value to higher to make the test less sensitive to changes + # https://bencher.dev/docs/explanation/thresholds/#t-test-threshold-upper-boundary + UPPER_BOUNDARY: 0.98 + steps: + - name: Download Benchmark Results + uses: actions/github-script@v6 + with: + script: | + async function downloadArtifact(artifactName) { + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: context.payload.workflow_run.id, + }); + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name == artifactName + })[0]; + if (!matchArtifact) { + core.setFailed(`Failed to find artifact: ${artifactName}`); + } + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + 
archive_format: 'zip', + }); + let fs = require('fs'); + fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/${artifactName}.zip`, Buffer.from(download.data)); + } + await downloadArtifact(process.env.PR_BENCHMARK_RESULTS); + await downloadArtifact(process.env.BASE_BENCHMARK_RESULTS); + await downloadArtifact(process.env.GITHUB_EVENT); + - name: Unzip Benchmark Results + run: | + unzip $PR_BENCHMARK_RESULTS.zip + unzip $BASE_BENCHMARK_RESULTS.zip + unzip $GITHUB_EVENT.zip + - name: Export GitHub Event Data + uses: actions/github-script@v6 + with: + script: | + let fs = require('fs'); + let githubEvent = JSON.parse(fs.readFileSync("event.json", {encoding: 'utf8'})); + console.log(githubEvent); + core.exportVariable("PR_HEAD", `${githubEvent.pull_request.head.ref}-${githubEvent.pull_request.head.sha.slice(0, 8)}`); + core.exportVariable("PR_ID", `${githubEvent.pull_request.head.ref}/${process.env.BENCHER_TESTBED}/${process.env.BENCHER_ADAPTER}`); + core.exportVariable("PR_NUMBER", githubEvent.number); + - uses: bencherdev/bencher@main + - name: Track base Benchmarks + run: | + bencher run \ + --if-branch '${{ env.PR_HEAD }}' \ + --else-branch \ + --token "${{ secrets.BENCHER_API_TOKEN }}" \ + --file "$BASE_BENCHMARK_RESULTS" + - name: Create PR threshold + run: | + bencher threshold create \ + --project "$BENCHER_PROJECT" \ + --branch '${{ env.PR_HEAD }}' \ + --testbed "$BENCHER_TESTBED" \ + --measure latency \ + --test t \ + --upper-boundary ${{ env.UPPER_BOUNDARY }} \ + --token "${{ secrets.BENCHER_API_TOKEN }}" + - name: Track PR Benchmarks + run: | + bencher run \ + --branch '${{ env.PR_HEAD }}' \ + --token "${{ secrets.BENCHER_API_TOKEN }}" \ + --ci-id '${{ env.PR_ID }}' \ + --ci-number '${{ env.PR_NUMBER }}' \ + --github-actions "${{ secrets.GITHUB_TOKEN }}" \ + --err \ + --file "$PR_BENCHMARK_RESULTS" diff --git a/CHANGELOG.md b/CHANGELOG.md index d8ba5dfd53d1..2402a513412e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,9 +3,9 @@ All user visible changes to 
this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/), as described for Rust libraries in [RFC #1105](https://github.com/rust-lang/rfcs/blob/master/text/1105-api-evolution.md) -For any named minimal supported Rust version we guarantee that it is possible to build Diesel with the -default features enabled using some set of dependencies. Those set of dependencies is not necessarily -an up to date version of the specific dependency. We check this by using the unstable `-Z minimal-version` cargo flag. +For any named minimal supported Rust version we guarantee that it is possible to build Diesel with the +default features enabled using some set of dependencies. Those set of dependencies is not necessarily +an up to date version of the specific dependency. We check this by using the unstable `-Z minimal-version` cargo flag. Increasing the minimal supported Rust version will always be coupled at least with a minor release. ## Unreleased @@ -18,6 +18,9 @@ Increasing the minimal supported Rust version will always be coupled at least wi * Added an optional `#[diesel(skip_insertion)]` field attribute to the `Insertable` derive macro, allowing fields which map to generated columns to be skipped during insertion. * Support for connection instrumentation. This allows to inspect any query run by your application * Logging in diesel-cli +* Support for libsqlite3-sys 0.28 +* Add `sqlite-integer-primary-key-is-bigint` configuration option, usable with SQLite 3.37 or above, allowing to use `BigInt` for `INTEGER PRIMARY KEY` columns in SQLite for tables without the `WITHOUT ROWID` attribute ([SQLite doc](https://www.sqlite.org/lang_createtable.html#rowid)). +* Support for multiple `print_schema` entry in `diesel.toml` (e.g. 
`[print_schema.user1]`), which allows generating multiple schema.rs files ### Changed @@ -26,7 +29,7 @@ Increasing the minimal supported Rust version will always be coupled at least wi ## [2.1.0] 2023-05-26 -### Changed +### Changed * The minimal officially supported rustc version is now 1.65.0 @@ -48,7 +51,7 @@ Increasing the minimal supported Rust version will always be coupled at least wi ## [2.0.4] 2023-04-18 -## Fixed +## Fixed * Workaround the missing name resolution in rust-analyzer. This should fix type inference for some diesel queries. (It remains broken for queries containing `.filter()`/`.inner_join()`/`.left_join()`. These require fixes in rust-analyzer itself) * Fixed a bug that could lead to inserting null values instead of empty values for custom sqlite types @@ -59,9 +62,9 @@ Increasing the minimal supported Rust version will always be coupled at least wi * Support for `libsqlite3-sys` 0.26 -## [diesel_derives 2.0.2] 2023-03-13 +## [diesel_derives 2.0.2] 2023-03-13 -## Fixed +## Fixed * Fixing the fallout of a breaking change from `quote` by not using their internal API @@ -130,7 +133,7 @@ Increasing the minimal supported Rust version will always be coupled at least wi in such a way to support constructing a dynamic value depending on this type. * Added a `without-deprecated` feature that unconditionally disables deprecated items. - Use this feature flag to verify that none of your dependencies is setting + Use this feature flag to verify that none of your dependencies is setting the `with-deprecated` flag internally. * Added support for PostgreSQL's `SIMILAR TO` and `NOT SIMILAR TO`. @@ -154,7 +157,7 @@ Increasing the minimal supported Rust version will always be coupled at least wi * Diesel CLI will now generate SQL type definitions for SQL types that are not supported by diesel out of the box. It's possible to disable this behavior via the `generate_missing_sql_type_definitions` config option. 
-* Added an option to `#[derive(Insertable)]` that let you insert `NULL` values instead of `DEFAULT` values for `Option` +* Added an option to `#[derive(Insertable)]` that let you insert `NULL` values instead of `DEFAULT` values for `Option` * Added support for all the derive attributes being inside `#[diesel(...)]` @@ -259,12 +262,12 @@ Increasing the minimal supported Rust version will always be coupled at least wi card implementations for types implementing `Queryable` or `QueryableByName` so non generic code does not require any change. For generic code you likely need to replace a trait bound on `Queryable` with a trait bound on `FromSqlRow` - and a bound to `QueryableByName` with `FromSqlRow`. + and a bound to `QueryableByName` with `FromSqlRow`. * CLI flags of `only-tables` and `except-tables` are now interpreted as regular expressions. Similarly, `only_tables` and `except_tables` in `diesel.toml` are treated as regular expressions. -* Now you can sort column fields by name with the `column-sorting` option. +* Now you can sort column fields by name with the `column-sorting` option. It can be set to either `ordinal_position` (default) or `name`. This ensures stable sorting even if columns are removed and re-added. @@ -277,25 +280,25 @@ Increasing the minimal supported Rust version will always be coupled at least wi * `TypeMetadata::MetadataLookup` is now `?Sized`. -* Multiple implementations of `Connection` are now possible +* Multiple implementations of `Connection` are now possible because of the new `PgMetadataLookup` trait. * For the `Pg` backend, `TypeMetadata::MetadataLookup` has changed to `dyn PgMetadataLookup`. -* Diesel's migration framework was rewritten from the ground. Existing migrations continue to +* Diesel's migration framework was rewritten from the ground. Existing migrations continue to be compatible with the rewrite, but code calling into `diesel_migrations` requires an update. See the [migration guide](2-0-migration) for details. 
* `eq_any()` now emits a `= ANY()` expression for the postgresql backend instead of `IN()` * `ne_all()` now emits a `!= ALL()` expression for the postgresql backend instead of `NOT IN()` -* The sqlite backend now uses a single batch insert statement if there are now default values present +* The sqlite backend now uses a single batch insert statement if there are now default values present in the values clause * The MySQL connection is using the CLIENT_FOUND_ROWS from now on. This means that updating rows without changing any values will return the number of matched rows (like most other SQL servers do), as opposed to the number of changed rows. -* The definition of `ToSql::to_sql` and `QueryFragment::walk_ast` has changed to allow serializing values without +* The definition of `ToSql::to_sql` and `QueryFragment::walk_ast` has changed to allow serializing values without copying the value itself. This is useful for database backends like sqlite where you can directly share a buffer - with the database. Beside of the changed signature, existing impls of this trait should remain unchanged in almost + with the database. Beside of the changed signature, existing impls of this trait should remain unchanged in almost all cases. * The `PIPES_AS_CONCAT` sql_mode is no longer set @@ -342,14 +345,14 @@ queries or set `PIPES_AS_CONCAT` manually. * We've refactored our type level representation of nullable values. This allowed us to fix multiple long standing bugs regarding the correct handling of nullable values in some corner cases (#104, #2274) - + * Parenthesis are now inserted around all infix operations provided by diesel's `ExpressionMethods` traits * Queries containing a `distinct on` clause check now on compile time that a compatible order clause was set. * Implementations of custom SQLite SQL functions now check for panics -* `diesel print-schema` now generates `Array>` rather than `Array` for Postgres Array types. 
Existence of +* `diesel print-schema` now generates `Array>` rather than `Array` for Postgres Array types. Existence of `NULL` values in database arrays would previously result in deserialization errors. Non-nullable arrays are now opt in (by schema patching). @@ -363,8 +366,8 @@ queries or set `PIPES_AS_CONCAT` manually. * `diesel::pg::upsert` has been deprecated to support upsert queries on more than one backend. Please use `diesel::upsert` instead. - -* `diesel::dsl::any` and `diesel::dsl::all` are now deprecated in + +* `diesel::dsl::any` and `diesel::dsl::all` are now deprecated in favour of `ExpressionMethods::eq_any()` and `ExpressionMethods::ne_all()` diff --git a/diesel/Cargo.toml b/diesel/Cargo.toml index e23bacc0efdf..c1545c56e0c9 100644 --- a/diesel/Cargo.toml +++ b/diesel/Cargo.toml @@ -16,9 +16,10 @@ rust-version.workspace = true byteorder = { version = "1.0", optional = true } chrono = { version = "0.4.20", optional = true, default-features = false, features = ["clock", "std"] } libc = { version = "0.2.0", optional = true } -libsqlite3-sys = { version = ">=0.17.2, <0.28.0", optional = true, features = ["bundled_bindings"] } +libsqlite3-sys = { version = ">=0.17.2, <0.29.0", optional = true, features = ["bundled_bindings"] } mysqlclient-sys = { version = "0.2.5", optional = true } -pq-sys = { version = "0.4.0", optional = true } +pq-sys = { version = ">=0.4.0, <0.6.0", optional = true } +pq-src = { version = "0.1", optional = true } quickcheck = { version = "1.0.3", optional = true } serde_json = { version = ">=0.8.0, <2.0", optional = true } url = { version = "2.1.0", optional = true } diff --git a/diesel/src/associations/belongs_to.rs b/diesel/src/associations/belongs_to.rs index 470dc637891a..cf188fdd5d3d 100644 --- a/diesel/src/associations/belongs_to.rs +++ b/diesel/src/associations/belongs_to.rs @@ -1,4 +1,4 @@ -use super::{HasTable, Identifiable}; +use super::HasTable; use crate::dsl::{Eq, EqAny, Filter, FindBy}; use 
crate::expression::array_comparison::AsInExpression; use crate::expression::AsExpression; diff --git a/diesel/src/connection/mod.rs b/diesel/src/connection/mod.rs index a0fe95c6963b..2ad7df2ba1b8 100644 --- a/diesel/src/connection/mod.rs +++ b/diesel/src/connection/mod.rs @@ -392,6 +392,7 @@ where &mut self, ) -> &mut >::TransactionStateData; + /// Get the instrumentation instance stored in this connection #[diesel_derives::__diesel_public_if( feature = "i-implement-a-third-party-backend-and-opt-into-breaking-changes" )] diff --git a/diesel/src/connection/statement_cache.rs b/diesel/src/connection/statement_cache.rs index 2d4cd7ef4a18..af6d2968d5b9 100644 --- a/diesel/src/connection/statement_cache.rs +++ b/diesel/src/connection/statement_cache.rs @@ -255,7 +255,9 @@ where doc(cfg(feature = "i-implement-a-third-party-backend-and-opt-into-breaking-changes")) )] pub trait QueryFragmentForCachedStatement { + /// Convert the query fragment into a SQL string for the given backend fn construct_sql(&self, backend: &DB) -> QueryResult; + /// Check whether it's safe to cache the query fn is_safe_to_cache_prepared(&self, backend: &DB) -> QueryResult; } impl QueryFragmentForCachedStatement for T @@ -269,6 +271,7 @@ where self.to_sql(&mut query_builder, backend)?; Ok(query_builder.finish()) } + fn is_safe_to_cache_prepared(&self, backend: &DB) -> QueryResult { >::is_safe_to_cache_prepared(self, backend) } diff --git a/diesel/src/expression/assume_not_null.rs b/diesel/src/expression/assume_not_null.rs index ad5f9a9d0b54..834e47f4b0fb 100644 --- a/diesel/src/expression/assume_not_null.rs +++ b/diesel/src/expression/assume_not_null.rs @@ -1,12 +1,10 @@ -use crate::backend::Backend; -use crate::expression::TypedExpressionType; use crate::expression::*; use crate::query_builder::*; use crate::query_source::joins::ToInnerJoin; use crate::result::QueryResult; use crate::sql_types::{DieselNumericOps, IntoNotNullable}; -#[derive(Debug, Copy, Clone, DieselNumericOps, ValidGrouping)] 
+#[derive(Default, Debug, Copy, Clone, DieselNumericOps, ValidGrouping)] pub struct AssumeNotNull(T); impl AssumeNotNull { diff --git a/diesel/src/expression/bound.rs b/diesel/src/expression/bound.rs index 26b53d5c0a65..6f48f0482b5b 100644 --- a/diesel/src/expression/bound.rs +++ b/diesel/src/expression/bound.rs @@ -1,11 +1,10 @@ use std::marker::PhantomData; use super::*; -use crate::backend::Backend; use crate::query_builder::*; use crate::result::QueryResult; use crate::serialize::ToSql; -use crate::sql_types::{DieselNumericOps, HasSqlType, SqlType}; +use crate::sql_types::DieselNumericOps; #[doc(hidden)] // This is used by the `AsExpression` derive #[derive(Debug, Clone, Copy, DieselNumericOps)] diff --git a/diesel/src/expression/coerce.rs b/diesel/src/expression/coerce.rs index 57f32771516e..9291722637df 100644 --- a/diesel/src/expression/coerce.rs +++ b/diesel/src/expression/coerce.rs @@ -1,10 +1,8 @@ -use std::marker::PhantomData; - -use crate::backend::Backend; use crate::expression::*; use crate::query_builder::*; use crate::result::QueryResult; -use crate::sql_types::{DieselNumericOps, SqlType}; +use crate::sql_types::DieselNumericOps; +use std::marker::PhantomData; #[derive(Debug, Copy, Clone, QueryId, DieselNumericOps)] #[doc(hidden)] diff --git a/diesel/src/expression/nullable.rs b/diesel/src/expression/nullable.rs index 8912b002a6e9..5015a3b6e551 100644 --- a/diesel/src/expression/nullable.rs +++ b/diesel/src/expression/nullable.rs @@ -1,5 +1,4 @@ -use crate::backend::{Backend, DieselReserveSpecialization}; -use crate::expression::TypedExpressionType; +use crate::backend::DieselReserveSpecialization; use crate::expression::*; use crate::query_builder::*; use crate::query_source::joins::ToInnerJoin; diff --git a/diesel/src/expression/sql_literal.rs b/diesel/src/expression/sql_literal.rs index ff1ea6199414..75e8dc76cf6a 100644 --- a/diesel/src/expression/sql_literal.rs +++ b/diesel/src/expression/sql_literal.rs @@ -1,11 +1,10 @@ use 
std::marker::PhantomData; -use crate::backend::Backend; use crate::expression::*; use crate::query_builder::*; use crate::query_dsl::RunQueryDsl; use crate::result::QueryResult; -use crate::sql_types::{DieselNumericOps, SqlType}; +use crate::sql_types::DieselNumericOps; #[derive(Debug, Clone, DieselNumericOps)] #[must_use = "Queries are only executed when calling `load`, `get_result`, or similar."] diff --git a/diesel/src/expression/subselect.rs b/diesel/src/expression/subselect.rs index 2f65d50b935f..9bff4424a9f2 100644 --- a/diesel/src/expression/subselect.rs +++ b/diesel/src/expression/subselect.rs @@ -4,7 +4,6 @@ use crate::expression::array_comparison::MaybeEmpty; use crate::expression::*; use crate::query_builder::*; use crate::result::QueryResult; -use crate::sql_types::SqlType; #[derive(Debug, Copy, Clone, QueryId)] pub struct Subselect { diff --git a/diesel/src/internal/table_macro.rs b/diesel/src/internal/table_macro.rs index 30ab0084ad54..52c2105a3d59 100644 --- a/diesel/src/internal/table_macro.rs +++ b/diesel/src/internal/table_macro.rs @@ -1,6 +1,9 @@ #[doc(hidden)] pub use crate::expression::nullable::Nullable as NullableExpression; #[doc(hidden)] +#[cfg(feature = "postgres_backend")] +pub use crate::pg::query_builder::tablesample::TablesampleMethod; +#[doc(hidden)] pub use crate::query_builder::from_clause::{FromClause, NoFromClause}; #[doc(hidden)] pub use crate::query_builder::nodes::{ diff --git a/diesel/src/lib.rs b/diesel/src/lib.rs index 682ebd73683d..8d7d670cbc85 100644 --- a/diesel/src/lib.rs +++ b/diesel/src/lib.rs @@ -334,6 +334,14 @@ pub mod dsl { #[doc(inline)] pub use diesel_derives::auto_type; + + #[cfg(feature = "postgres_backend")] + #[doc(inline)] + pub use crate::pg::expression::extensions::OnlyDsl; + + #[cfg(feature = "postgres_backend")] + #[doc(inline)] + pub use crate::pg::expression::extensions::TablesampleDsl; } pub mod helper_types { diff --git a/diesel/src/macros/mod.rs b/diesel/src/macros/mod.rs index 
79d02859dfd5..8290d0517398 100644 --- a/diesel/src/macros/mod.rs +++ b/diesel/src/macros/mod.rs @@ -243,6 +243,34 @@ macro_rules! __diesel_internal_backend_specific_allow_tables_to_appear_in_same_q for $left::table { } + impl $crate::query_source::TableNotEqual<$left::table> + for $crate::query_builder::Tablesample<$right::table, TSM> + where + TSM: $crate::internal::table_macro::TablesampleMethod, + { + } + impl $crate::query_source::TableNotEqual<$right::table> + for $crate::query_builder::Tablesample<$left::table, TSM> + where + TSM: $crate::internal::table_macro::TablesampleMethod, + { + } + impl + $crate::query_source::TableNotEqual< + $crate::query_builder::Tablesample<$left::table, TSM>, + > for $right::table + where + TSM: $crate::internal::table_macro::TablesampleMethod, + { + } + impl + $crate::query_source::TableNotEqual< + $crate::query_builder::Tablesample<$right::table, TSM>, + > for $left::table + where + TSM: $crate::internal::table_macro::TablesampleMethod, + { + } }; } #[doc(hidden)] diff --git a/diesel/src/mysql/connection/bind.rs b/diesel/src/mysql/connection/bind.rs index 7632e648d991..5ce7202e380f 100644 --- a/diesel/src/mysql/connection/bind.rs +++ b/diesel/src/mysql/connection/bind.rs @@ -734,7 +734,6 @@ fn known_buffer_size_for_ffi_type(tpe: ffi::enum_field_types) -> Option { #[cfg(test)] mod tests { - use super::MysqlValue; use super::*; use crate::connection::statement_cache::MaybeCached; use crate::deserialize::FromSql; diff --git a/diesel/src/mysql/connection/mod.rs b/diesel/src/mysql/connection/mod.rs index 011558bdaed9..992d92c4f915 100644 --- a/diesel/src/mysql/connection/mod.rs +++ b/diesel/src/mysql/connection/mod.rs @@ -9,7 +9,6 @@ use self::stmt::Statement; use self::url::ConnectionOptions; use super::backend::Mysql; use crate::connection::instrumentation::DebugQuery; -use crate::connection::instrumentation::InstrumentationEvent; use crate::connection::instrumentation::StrQueryHelper; use 
crate::connection::statement_cache::{MaybeCached, StatementCache}; use crate::connection::*; diff --git a/diesel/src/mysql/connection/stmt/mod.rs b/diesel/src/mysql/connection/stmt/mod.rs index 3e22feeebfda..8bb527b34645 100644 --- a/diesel/src/mysql/connection/stmt/mod.rs +++ b/diesel/src/mysql/connection/stmt/mod.rs @@ -1,6 +1,5 @@ #![allow(unsafe_code)] // module uses ffi use mysqlclient_sys as ffi; -use std::convert::TryFrom; use std::ffi::CStr; use std::os::raw as libc; use std::ptr::NonNull; diff --git a/diesel/src/mysql/types/date_and_time/chrono.rs b/diesel/src/mysql/types/date_and_time/chrono.rs index 141ca4c4cd5f..786dcd39b9f3 100644 --- a/diesel/src/mysql/types/date_and_time/chrono.rs +++ b/diesel/src/mysql/types/date_and_time/chrono.rs @@ -32,6 +32,7 @@ impl ToSql for NaiveDateTime { hour: self.hour() as libc::c_uint, minute: self.minute() as libc::c_uint, second: self.second() as libc::c_uint, + #[allow(deprecated)] // otherwise we would need to bump our minimal chrono version second_part: libc::c_ulong::from(self.timestamp_subsec_micros()), neg: false, time_type: MysqlTimestampType::MYSQL_TIMESTAMP_DATETIME, @@ -162,11 +163,11 @@ mod tests { #[test] fn times_relative_to_now_encode_correctly() { let connection = &mut connection(); - let time = Utc::now().naive_utc() + Duration::days(1); + let time = Utc::now().naive_utc() + Duration::try_days(1).unwrap(); let query = select(now.lt(time)); assert!(query.get_result::(connection).unwrap()); - let time = Utc::now().naive_utc() - Duration::days(1); + let time = Utc::now().naive_utc() - Duration::try_days(1).unwrap(); let query = select(now.gt(time)); assert!(query.get_result::(connection).unwrap()); } diff --git a/diesel/src/mysql/types/date_and_time/time.rs b/diesel/src/mysql/types/date_and_time/time.rs index fcd7afc4b249..0877e8734b8c 100644 --- a/diesel/src/mysql/types/date_and_time/time.rs +++ b/diesel/src/mysql/types/date_and_time/time.rs @@ -1,4 +1,3 @@ -use std::convert::TryInto; use std::os::raw as 
libc; use time::{ Date as NaiveDate, Month, OffsetDateTime, PrimitiveDateTime, Time as NaiveTime, UtcOffset, diff --git a/diesel/src/mysql/types/primitives.rs b/diesel/src/mysql/types/primitives.rs index 87bd822f8ce4..cdedd0887d7b 100644 --- a/diesel/src/mysql/types/primitives.rs +++ b/diesel/src/mysql/types/primitives.rs @@ -3,7 +3,6 @@ use crate::mysql::{Mysql, MysqlValue, NumericRepresentation}; use crate::result::Error::DeserializationError; use crate::sql_types::{BigInt, Binary, Double, Float, Integer, SmallInt, Text}; use crate::Queryable; -use std::convert::TryInto; use std::error::Error; use std::str::{self, FromStr}; diff --git a/diesel/src/pg/connection/mod.rs b/diesel/src/pg/connection/mod.rs index a774292b1fe6..3166e5f32842 100644 --- a/diesel/src/pg/connection/mod.rs +++ b/diesel/src/pg/connection/mod.rs @@ -10,8 +10,8 @@ use self::raw::{PgTransactionStatus, RawConnection}; use self::result::PgResult; use self::stmt::Statement; use crate::connection::instrumentation::DebugQuery; +use crate::connection::instrumentation::Instrumentation; use crate::connection::instrumentation::StrQueryHelper; -use crate::connection::instrumentation::{Instrumentation, InstrumentationEvent}; use crate::connection::statement_cache::{MaybeCached, StatementCache}; use crate::connection::*; use crate::expression::QueryMetadata; diff --git a/diesel/src/pg/expression/extensions/interval_dsl.rs b/diesel/src/pg/expression/extensions/interval_dsl.rs index c9fb66eee254..6cf5df0d24d1 100644 --- a/diesel/src/pg/expression/extensions/interval_dsl.rs +++ b/diesel/src/pg/expression/extensions/interval_dsl.rs @@ -252,7 +252,6 @@ mod tests { use self::quickcheck::quickcheck; use super::*; - use crate::data_types::PgInterval; use crate::dsl::sql; use crate::prelude::*; use crate::test_helpers::*; diff --git a/diesel/src/pg/expression/extensions/mod.rs b/diesel/src/pg/expression/extensions/mod.rs index 5e808bd64dce..1fc9be7aec09 100644 --- a/diesel/src/pg/expression/extensions/mod.rs +++ 
b/diesel/src/pg/expression/extensions/mod.rs @@ -3,6 +3,8 @@ //! re-exported in `diesel::dsl` mod interval_dsl; mod only_dsl; +mod tablesample_dsl; pub use self::interval_dsl::IntervalDsl; pub use self::only_dsl::OnlyDsl; +pub use self::tablesample_dsl::TablesampleDsl; diff --git a/diesel/src/pg/expression/extensions/tablesample_dsl.rs b/diesel/src/pg/expression/extensions/tablesample_dsl.rs new file mode 100644 index 000000000000..e7c130fb9df3 --- /dev/null +++ b/diesel/src/pg/expression/extensions/tablesample_dsl.rs @@ -0,0 +1,72 @@ +use crate::pg::query_builder::tablesample::{BernoulliMethod, SystemMethod}; +use crate::query_builder::Tablesample; +use crate::Table; + +/// The `tablesample` method +/// +/// The `TABLESAMPLE` clause is used to select a randomly sampled subset of rows from a table. +/// +/// This is only implemented for the Postgres backend. While `TABLESAMPLE` is standardized in +/// SQL:2003, in practice each RDBMS seems to implement a superset of the SQL:2003 syntax, +/// supporting a wide variety of sampling methods. +/// +/// Calling this function on a table (`mytable.tablesample(...)`) will result in the SQL +/// `FROM mytable TABLESAMPLE ...` -- +/// `mytable.tablesample(...)` can be used just like any table in diesel since it implements +/// [Table](crate::Table). +/// +/// The `BernoulliMethod` and `SystemMethod` types can be used to indicate the sampling method for +/// a `TABLESAMPLE method(p)` clause where p is specified by the portion argument. The provided +/// percentage should be an integer between 0 and 100. +/// +/// To generate a `TABLESAMPLE ... REPEATABLE (f)` clause, you'll need to call +/// [`.with_seed(f)`](Tablesample::with_seed). 
+/// +/// Example: +/// +/// ```rust +/// # include!("../../../doctest_setup.rs"); +/// # use schema::{posts, users}; +/// # use diesel::dsl::*; +/// # fn main() { +/// # let connection = &mut establish_connection(); +/// let random_user_ids = users::table +/// .tablesample_bernoulli(10) +/// .select((users::id)) +/// .load::(connection); +/// # } +/// ``` +/// Selects the ids for a random 10 percent of users. +/// +/// It can also be used in inner joins: +/// +/// ```rust +/// # include!("../../../doctest_setup.rs"); +/// # use schema::{posts, users}; +/// # use diesel::dsl::*; +/// # fn main() { +/// # let connection = &mut establish_connection(); +/// # let _ = +/// users::table +/// .tablesample_system(10).with_seed(42.0) +/// .inner_join(posts::table) +/// .select((users::name, posts::title)) +/// .load::<(String, String)>(connection); +/// # } +/// ``` +/// That query selects all of the posts for all of the users in a random 10 percent storage pages, +/// returning the same results each time it is run due to the static seed of 42.0. +/// +pub trait TablesampleDsl: Table { + /// See the trait-level docs. + fn tablesample_bernoulli(self, portion: i16) -> Tablesample { + Tablesample::new(self, portion) + } + + /// See the trait-level docs. 
+ fn tablesample_system(self, portion: i16) -> Tablesample { + Tablesample::new(self, portion) + } +} + +impl TablesampleDsl for T {} diff --git a/diesel/src/pg/query_builder/mod.rs b/diesel/src/pg/query_builder/mod.rs index 3d56825cfee8..4de20c07b6fe 100644 --- a/diesel/src/pg/query_builder/mod.rs +++ b/diesel/src/pg/query_builder/mod.rs @@ -7,6 +7,7 @@ mod limit_offset; pub(crate) mod on_constraint; pub(crate) mod only; mod query_fragment_impls; +pub(crate) mod tablesample; pub use self::distinct_on::DistinctOnClause; pub use self::distinct_on::OrderDecorator; diff --git a/diesel/src/pg/query_builder/tablesample.rs b/diesel/src/pg/query_builder/tablesample.rs new file mode 100644 index 000000000000..dc21f46ae5e5 --- /dev/null +++ b/diesel/src/pg/query_builder/tablesample.rs @@ -0,0 +1,214 @@ +use crate::expression::{Expression, ValidGrouping}; +use crate::pg::Pg; +use crate::query_builder::{AsQuery, AstPass, FromClause, QueryFragment, QueryId, SelectStatement}; +use crate::query_source::QuerySource; +use crate::result::QueryResult; +use crate::sql_types::{Double, SmallInt}; +use crate::{JoinTo, SelectableExpression, Table}; +use std::marker::PhantomData; + +#[doc(hidden)] +pub trait TablesampleMethod: Clone { + fn method_name_sql() -> &'static str; +} + +#[derive(Clone, Copy, Debug)] +/// Used to specify the `BERNOULLI` sampling method. +pub struct BernoulliMethod; + +impl TablesampleMethod for BernoulliMethod { + fn method_name_sql() -> &'static str { + "BERNOULLI" + } +} + +#[derive(Clone, Copy, Debug)] +/// Used to specify the `SYSTEM` sampling method. +pub struct SystemMethod; + +impl TablesampleMethod for SystemMethod { + fn method_name_sql() -> &'static str { + "SYSTEM" + } +} + +/// Represents a query with a `TABLESAMPLE` clause. 
+#[derive(Debug, Clone, Copy)] +pub struct Tablesample +where + TSM: TablesampleMethod, +{ + source: S, + method: PhantomData, + portion: i16, + seed: Option, +} + +impl Tablesample +where + TSM: TablesampleMethod, +{ + pub(crate) fn new(source: S, portion: i16) -> Tablesample { + Tablesample { + source, + method: PhantomData, + portion, + seed: None, + } + } + + /// This method allows you to specify the random number generator seed to use in the sampling + /// method. This allows you to obtain repeatable results. + pub fn with_seed(self, seed: f64) -> Tablesample { + Tablesample { + source: self.source, + method: self.method, + portion: self.portion, + seed: Some(seed), + } + } +} + +impl QueryId for Tablesample +where + S: QueryId, + TSM: TablesampleMethod, +{ + type QueryId = (); + const HAS_STATIC_QUERY_ID: bool = false; +} + +impl QuerySource for Tablesample +where + S: Table + Clone, + TSM: TablesampleMethod, + ::DefaultSelection: + ValidGrouping<()> + SelectableExpression>, +{ + type FromClause = Self; + type DefaultSelection = ::DefaultSelection; + + fn from_clause(&self) -> Self::FromClause { + self.clone() + } + + fn default_selection(&self) -> Self::DefaultSelection { + self.source.default_selection() + } +} + +impl QueryFragment for Tablesample +where + S: QueryFragment, + TSM: TablesampleMethod, +{ + fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> QueryResult<()> { + self.source.walk_ast(out.reborrow())?; + out.push_sql(" TABLESAMPLE "); + out.push_sql(TSM::method_name_sql()); + out.push_sql("("); + out.push_bind_param::(&self.portion)?; + out.push_sql(")"); + if let Some(f) = &self.seed { + out.push_sql(" REPEATABLE("); + out.push_bind_param::(f)?; + out.push_sql(")"); + } + Ok(()) + } +} + +impl AsQuery for Tablesample +where + S: Table + Clone, + TSM: TablesampleMethod, + ::DefaultSelection: + ValidGrouping<()> + SelectableExpression>, +{ + type SqlType = <::DefaultSelection as Expression>::SqlType; + type Query = SelectStatement>; + fn 
as_query(self) -> Self::Query { + SelectStatement::simple(self) + } +} + +impl JoinTo for Tablesample +where + S: JoinTo, + T: Table, + S: Table, + TSM: TablesampleMethod, +{ + type FromClause = >::FromClause; + type OnClause = >::OnClause; + + fn join_target(rhs: T) -> (Self::FromClause, Self::OnClause) { + >::join_target(rhs) + } +} + +impl Table for Tablesample +where + S: Table + Clone + AsQuery, + TSM: TablesampleMethod, + + ::PrimaryKey: SelectableExpression>, + ::AllColumns: SelectableExpression>, + ::DefaultSelection: + ValidGrouping<()> + SelectableExpression>, +{ + type PrimaryKey = ::PrimaryKey; + type AllColumns = ::AllColumns; + + fn primary_key(&self) -> Self::PrimaryKey { + self.source.primary_key() + } + + fn all_columns() -> Self::AllColumns { + S::all_columns() + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::backend::Backend; + use crate::pg::Pg; + use crate::query_builder::QueryBuilder; + use diesel::dsl::*; + use diesel::*; + + macro_rules! assert_sql { + ($query:expr, $sql:expr) => { + let mut query_builder = ::QueryBuilder::default(); + $query.to_sql(&mut query_builder, &Pg).unwrap(); + let sql = query_builder.finish(); + assert_eq!(sql, $sql); + }; + } + + table! 
{ + users { + id -> Integer, + name -> VarChar, + } + } + + #[test] + fn test_generated_tablesample_sql() { + assert_sql!( + users::table.tablesample_bernoulli(10), + "\"users\" TABLESAMPLE BERNOULLI($1)" + ); + + assert_sql!( + users::table.tablesample_system(10), + "\"users\" TABLESAMPLE SYSTEM($1)" + ); + + assert_sql!( + users::table.tablesample_system(10).with_seed(42.0), + "\"users\" TABLESAMPLE SYSTEM($1) REPEATABLE($2)" + ); + } +} diff --git a/diesel/src/pg/types/date_and_time/chrono.rs b/diesel/src/pg/types/date_and_time/chrono.rs index 6dc9bef95485..38bddfdad363 100644 --- a/diesel/src/pg/types/date_and_time/chrono.rs +++ b/diesel/src/pg/types/date_and_time/chrono.rs @@ -124,7 +124,9 @@ impl ToSql for NaiveDate { impl FromSql for NaiveDate { fn from_sql(bytes: PgValue<'_>) -> deserialize::Result { let PgDate(offset) = FromSql::::from_sql(bytes)?; - match pg_epoch_date().checked_add_signed(Duration::days(i64::from(offset))) { + #[allow(deprecated)] // otherwise we would need to bump our minimal chrono version + let duration = Duration::days(i64::from(offset)); + match pg_epoch_date().checked_add_signed(duration) { Some(date) => Ok(date), None => { let error_message = format!( @@ -205,11 +207,11 @@ mod tests { #[test] fn times_relative_to_now_encode_correctly() { let connection = &mut connection(); - let time = Utc::now().naive_utc() + Duration::seconds(60); + let time = Utc::now().naive_utc() + Duration::try_seconds(60).unwrap(); let query = select(now.at_time_zone("utc").lt(time)); assert!(query.get_result::(connection).unwrap()); - let time = Utc::now().naive_utc() - Duration::seconds(60); + let time = Utc::now().naive_utc() - Duration::try_seconds(60).unwrap(); let query = select(now.at_time_zone("utc").gt(time)); assert!(query.get_result::(connection).unwrap()); } @@ -280,8 +282,8 @@ mod tests { let query = select(sql::("'J0'::date").eq(julian_epoch)); assert!(query.get_result::(connection).unwrap()); - let max_date = NaiveDate::MAX; - let query = 
select(sql::("'262143-12-31'::date").eq(max_date)); + let max_date = NaiveDate::from_ymd_opt(262142, 12, 31).unwrap(); + let query = select(sql::("'262142-12-31'::date").eq(max_date)); assert!(query.get_result::(connection).unwrap()); let january_first_2018 = NaiveDate::from_ymd_opt(2018, 1, 1).unwrap(); @@ -311,8 +313,8 @@ mod tests { let query = select(sql::("'J0'::date")); assert_eq!(Ok(julian_epoch), query.get_result::(connection)); - let max_date = NaiveDate::MAX; - let query = select(sql::("'262143-12-31'::date")); + let max_date = NaiveDate::from_ymd_opt(262142, 12, 31).unwrap(); + let query = select(sql::("'262142-12-31'::date")); assert_eq!(Ok(max_date), query.get_result::(connection)); let january_first_2018 = NaiveDate::from_ymd_opt(2018, 1, 1).unwrap(); diff --git a/diesel/src/pg/types/floats/mod.rs b/diesel/src/pg/types/floats/mod.rs index e5810b7837a3..c5ee3bfb9b2f 100644 --- a/diesel/src/pg/types/floats/mod.rs +++ b/diesel/src/pg/types/floats/mod.rs @@ -37,6 +37,7 @@ pub enum PgNumeric { } #[derive(Debug, Clone, Copy)] +#[allow(dead_code)] // that's used by debug in the error impl struct InvalidNumericSign(u16); impl ::std::fmt::Display for InvalidNumericSign { @@ -62,14 +63,14 @@ impl FromSql for PgNumeric { match sign { 0 => Ok(PgNumeric::Positive { - weight: weight, - scale: scale, - digits: digits, + weight, + scale, + digits, }), 0x4000 => Ok(PgNumeric::Negative { - weight: weight, - scale: scale, - digits: digits, + weight, + scale, + digits, }), 0xC000 => Ok(PgNumeric::NaN), invalid => Err(Box::new(InvalidNumericSign(invalid))), diff --git a/diesel/src/pg/types/json.rs b/diesel/src/pg/types/json.rs index 059c86341ae6..9f43addf5e2e 100644 --- a/diesel/src/pg/types/json.rs +++ b/diesel/src/pg/types/json.rs @@ -46,9 +46,13 @@ impl ToSql for serde_json::Value { } } -#[cfg(tests)] +#[cfg(test)] mod tests { - use crate::query_builder::bind_types::ByteWrapper; + use crate::deserialize::FromSql; + use crate::pg::{Pg, PgValue}; + use 
crate::query_builder::bind_collector::ByteWrapper; + use crate::serialize::{Output, ToSql}; + use crate::sql_types; #[test] fn json_to_sql() { diff --git a/diesel/src/pg/types/mac_addr.rs b/diesel/src/pg/types/mac_addr.rs index f28a84853492..ce8fb822e35e 100644 --- a/diesel/src/pg/types/mac_addr.rs +++ b/diesel/src/pg/types/mac_addr.rs @@ -1,4 +1,3 @@ -use std::convert::TryInto; use std::io::prelude::*; use crate::deserialize::{self, FromSql}; diff --git a/diesel/src/pg/types/mod.rs b/diesel/src/pg/types/mod.rs index 7d28e169dec3..8d3779c6c54c 100644 --- a/diesel/src/pg/types/mod.rs +++ b/diesel/src/pg/types/mod.rs @@ -566,7 +566,7 @@ pub mod sql_types { mod ops { use super::sql_types::*; use crate::sql_types::ops::*; - use crate::sql_types::{Bigint, Cidr, Inet, Interval}; + use crate::sql_types::{Bigint, Interval}; impl Add for Timestamptz { type Rhs = Interval; diff --git a/diesel/src/pg/types/numeric.rs b/diesel/src/pg/types/numeric.rs index 7b4008731a4c..e4f0e177fbc2 100644 --- a/diesel/src/pg/types/numeric.rs +++ b/diesel/src/pg/types/numeric.rs @@ -16,7 +16,6 @@ mod bigdecimal { use crate::serialize::{self, Output, ToSql}; use crate::sql_types::Numeric; - use std::convert::{TryFrom, TryInto}; use std::error::Error; /// Iterator over the digits of a big uint in base 10k. 
diff --git a/diesel/src/query_builder/bind_collector.rs b/diesel/src/query_builder/bind_collector.rs index bfe470715b1b..7db821a75a98 100644 --- a/diesel/src/query_builder/bind_collector.rs +++ b/diesel/src/query_builder/bind_collector.rs @@ -32,6 +32,18 @@ pub trait BindCollector<'a, DB: TypeMetadata>: Sized { where DB: Backend + HasSqlType, U: ToSql + ?Sized + 'a; + + /// Push a null value with the given type information onto the bind collector + /// + // For backward compatibility reasons we provide a default implementation + // but custom backends that want to support `#[derive(MultiConnection)]` + // need to provide a customized implementation of this function + #[diesel_derives::__diesel_public_if( + feature = "i-implement-a-third-party-backend-and-opt-into-breaking-changes" + )] + fn push_null_value(&mut self, _metadata: DB::TypeMetadata) -> QueryResult<()> { + Ok(()) + } } #[derive(Debug)] @@ -105,6 +117,12 @@ where self.metadata.push(metadata); Ok(()) } + + fn push_null_value(&mut self, metadata: DB::TypeMetadata) -> QueryResult<()> { + self.metadata.push(metadata); + self.binds.push(None); + Ok(()) + } } // This is private for now as we may want to add `Into` impls for the wrapper type diff --git a/diesel/src/query_builder/combination_clause.rs b/diesel/src/query_builder/combination_clause.rs index cb4b6240403c..67b2ebfca804 100644 --- a/diesel/src/query_builder/combination_clause.rs +++ b/diesel/src/query_builder/combination_clause.rs @@ -238,8 +238,6 @@ pub struct ParenthesisWrapper(T); mod postgres { use super::*; use crate::pg::Pg; - use crate::query_builder::{AstPass, QueryFragment}; - use crate::QueryResult; impl> QueryFragment for ParenthesisWrapper { fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Pg>) -> QueryResult<()> { @@ -262,8 +260,6 @@ mod postgres { mod mysql { use super::*; use crate::mysql::Mysql; - use crate::query_builder::{AstPass, QueryFragment}; - use crate::QueryResult; impl> QueryFragment for ParenthesisWrapper { fn 
walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Mysql>) -> QueryResult<()> { @@ -281,9 +277,7 @@ mod mysql { #[cfg(feature = "sqlite")] mod sqlite { use super::*; - use crate::query_builder::{AstPass, QueryFragment}; use crate::sqlite::Sqlite; - use crate::QueryResult; impl> QueryFragment for ParenthesisWrapper { fn walk_ast<'b>(&'b self, mut out: AstPass<'_, 'b, Sqlite>) -> QueryResult<()> { diff --git a/diesel/src/query_builder/delete_statement/mod.rs b/diesel/src/query_builder/delete_statement/mod.rs index e35a0cf70ec6..f69456f8a102 100644 --- a/diesel/src/query_builder/delete_statement/mod.rs +++ b/diesel/src/query_builder/delete_statement/mod.rs @@ -1,4 +1,4 @@ -use crate::backend::{Backend, DieselReserveSpecialization}; +use crate::backend::DieselReserveSpecialization; use crate::dsl::{Filter, IntoBoxed, OrFilter}; use crate::expression::{AppearsOnTable, SelectableExpression}; use crate::query_builder::returning_clause::*; @@ -7,9 +7,6 @@ use crate::query_builder::*; use crate::query_dsl::methods::{BoxedDsl, FilterDsl, OrFilterDsl}; use crate::query_dsl::RunQueryDsl; use crate::query_source::{QuerySource, Table}; -use crate::result::QueryResult; - -use super::from_clause::FromClause; #[must_use = "Queries are only executed when calling `load`, `get_result` or similar."] /// Represents a SQL `DELETE` statement. 
diff --git a/diesel/src/query_builder/distinct_clause.rs b/diesel/src/query_builder/distinct_clause.rs index a772afc308bd..297c90ab0ecb 100644 --- a/diesel/src/query_builder/distinct_clause.rs +++ b/diesel/src/query_builder/distinct_clause.rs @@ -1,7 +1,6 @@ -use crate::backend::{Backend, DieselReserveSpecialization}; +use crate::backend::DieselReserveSpecialization; use crate::query_builder::*; use crate::query_dsl::order_dsl::ValidOrderingForDistinct; -use crate::result::QueryResult; #[derive(Debug, Clone, Copy, QueryId)] pub struct NoDistinctClause; diff --git a/diesel/src/query_builder/insert_statement/column_list.rs b/diesel/src/query_builder/insert_statement/column_list.rs index 19f3e56a887f..ec11a6b2c405 100644 --- a/diesel/src/query_builder/insert_statement/column_list.rs +++ b/diesel/src/query_builder/insert_statement/column_list.rs @@ -1,7 +1,5 @@ -use crate::backend::Backend; use crate::query_builder::*; use crate::query_source::Column; -use crate::result::QueryResult; /// Represents the column list for use in an insert statement. 
/// diff --git a/diesel/src/query_builder/insert_statement/insert_from_select.rs b/diesel/src/query_builder/insert_statement/insert_from_select.rs index 49aa8c39c721..b49d25f6e979 100644 --- a/diesel/src/query_builder/insert_statement/insert_from_select.rs +++ b/diesel/src/query_builder/insert_statement/insert_from_select.rs @@ -1,4 +1,3 @@ -use crate::backend::Backend; use crate::expression::{Expression, NonAggregate, SelectableExpression}; use crate::insertable::*; use crate::query_builder::*; diff --git a/diesel/src/query_builder/insert_statement/insert_with_default_for_sqlite.rs b/diesel/src/query_builder/insert_statement/insert_with_default_for_sqlite.rs index 181ea4ed44ef..ba477d9b50f7 100644 --- a/diesel/src/query_builder/insert_statement/insert_with_default_for_sqlite.rs +++ b/diesel/src/query_builder/insert_statement/insert_with_default_for_sqlite.rs @@ -1,5 +1,4 @@ use super::{BatchInsert, InsertStatement}; -use crate::connection::Connection; use crate::insertable::InsertValues; use crate::insertable::{CanInsertInSingleQuery, ColumnInsertValue, DefaultableColumnInsertValue}; use crate::prelude::*; @@ -7,7 +6,6 @@ use crate::query_builder::{AstPass, QueryId, ValuesClause}; use crate::query_builder::{DebugQuery, QueryFragment}; use crate::query_dsl::methods::ExecuteDsl; use crate::sqlite::Sqlite; -use crate::{QueryResult, Table}; use std::fmt::{self, Debug, Display}; pub trait DebugQueryHelper { diff --git a/diesel/src/query_builder/insert_statement/mod.rs b/diesel/src/query_builder/insert_statement/mod.rs index f317b03f9001..fd992de0c27b 100644 --- a/diesel/src/query_builder/insert_statement/mod.rs +++ b/diesel/src/query_builder/insert_statement/mod.rs @@ -8,14 +8,13 @@ pub(crate) use self::insert_from_select::InsertFromSelect; pub(crate) use self::private::{Insert, InsertOrIgnore, Replace}; use super::returning_clause::*; -use crate::backend::{sql_dialect, Backend, DieselReserveSpecialization, SqlDialect}; +use crate::backend::{sql_dialect, 
DieselReserveSpecialization, SqlDialect}; use crate::expression::grouped::Grouped; use crate::expression::operators::Eq; use crate::expression::{Expression, NonAggregate, SelectableExpression}; use crate::query_builder::*; use crate::query_dsl::RunQueryDsl; use crate::query_source::{Column, Table}; -use crate::result::QueryResult; use crate::{insertable::*, QuerySource}; use std::marker::PhantomData; @@ -283,8 +282,12 @@ impl InsertStatement { /// let inserted_names = diesel::insert_into(users) /// .values(&vec![name.eq("Timmy"), name.eq("Jimmy")]) /// .returning(name) - /// .get_results(connection); - /// assert_eq!(Ok(vec!["Timmy".to_string(), "Jimmy".to_string()]), inserted_names); + /// .get_results(connection) + /// .unwrap(); + /// // Note that the returned order is not guaranteed to be preserved + /// assert_eq!(inserted_names.len(), 2); + /// assert!(inserted_names.contains(&"Timmy".to_string())); + /// assert!(inserted_names.contains(&"Jimmy".to_string())); /// # } /// # #[cfg(not(feature = "postgres"))] /// # fn main() {} diff --git a/diesel/src/query_builder/mod.rs b/diesel/src/query_builder/mod.rs index 6da06828c1bb..6ce2750cbbf1 100644 --- a/diesel/src/query_builder/mod.rs +++ b/diesel/src/query_builder/mod.rs @@ -120,6 +120,9 @@ pub(crate) use self::insert_statement::ColumnList; #[cfg(feature = "postgres_backend")] pub use crate::pg::query_builder::only::Only; +#[cfg(feature = "postgres_backend")] +pub use crate::pg::query_builder::tablesample::{Tablesample, TablesampleMethod}; + use crate::backend::Backend; use crate::result::QueryResult; use std::error::Error; diff --git a/diesel/src/query_builder/nodes/mod.rs b/diesel/src/query_builder/nodes/mod.rs index 71f152b8e390..2fb676bd7f8a 100644 --- a/diesel/src/query_builder/nodes/mod.rs +++ b/diesel/src/query_builder/nodes/mod.rs @@ -1,6 +1,5 @@ -use crate::backend::{Backend, DieselReserveSpecialization}; +use crate::backend::DieselReserveSpecialization; use crate::query_builder::*; -use 
crate::result::QueryResult; use std::marker::PhantomData; #[doc(hidden)] // used by the table macro diff --git a/diesel/src/query_builder/select_clause.rs b/diesel/src/query_builder/select_clause.rs index bcbed30fe3a8..006355c819b6 100644 --- a/diesel/src/query_builder/select_clause.rs +++ b/diesel/src/query_builder/select_clause.rs @@ -1,5 +1,4 @@ use super::from_clause::AsQuerySource; -use crate::backend::Backend; use crate::expression::{Expression, SelectableExpression}; use crate::query_builder::*; use crate::query_source::QuerySource; diff --git a/diesel/src/query_builder/select_statement/boxed.rs b/diesel/src/query_builder/select_statement/boxed.rs index ad56a7f8cae7..d0840026f856 100644 --- a/diesel/src/query_builder/select_statement/boxed.rs +++ b/diesel/src/query_builder/select_statement/boxed.rs @@ -1,13 +1,12 @@ use std::marker::PhantomData; -use crate::backend::{sql_dialect, Backend, DieselReserveSpecialization}; +use crate::backend::{sql_dialect, DieselReserveSpecialization}; use crate::dsl::AsExprOf; use crate::expression::subselect::ValidSubselect; use crate::expression::*; use crate::insertable::Insertable; use crate::query_builder::combination_clause::*; use crate::query_builder::distinct_clause::DistinctClause; -use crate::query_builder::from_clause::FromClause; use crate::query_builder::group_by_clause::ValidGroupByClause; use crate::query_builder::having_clause::HavingClause; use crate::query_builder::insert_statement::InsertFromSelect; @@ -21,7 +20,6 @@ use crate::query_dsl::methods::*; use crate::query_dsl::*; use crate::query_source::joins::*; use crate::query_source::{QuerySource, Table}; -use crate::result::QueryResult; use crate::sql_types::{BigInt, BoolOrNullableBool, IntoNullable}; // This is used by the table macro internally diff --git a/diesel/src/query_builder/select_statement/dsl_impls.rs b/diesel/src/query_builder/select_statement/dsl_impls.rs index b503264a4288..f7c27dc61398 100644 --- 
a/diesel/src/query_builder/select_statement/dsl_impls.rs +++ b/diesel/src/query_builder/select_statement/dsl_impls.rs @@ -23,7 +23,6 @@ use crate::query_builder::NoFromClause; use crate::query_builder::{ AsQuery, IntoBoxedClause, Query, QueryFragment, SelectQuery, SelectStatement, }; -use crate::query_dsl::boxed_dsl::BoxedDsl; use crate::query_dsl::methods::*; use crate::query_dsl::order_dsl::ValidOrderingForDistinct; use crate::query_dsl::*; diff --git a/diesel/src/query_builder/update_statement/changeset.rs b/diesel/src/query_builder/update_statement/changeset.rs index 518e98795018..b3349b372c30 100644 --- a/diesel/src/query_builder/update_statement/changeset.rs +++ b/diesel/src/query_builder/update_statement/changeset.rs @@ -1,10 +1,9 @@ -use crate::backend::{Backend, DieselReserveSpecialization}; +use crate::backend::DieselReserveSpecialization; use crate::expression::grouped::Grouped; use crate::expression::operators::Eq; use crate::expression::AppearsOnTable; use crate::query_builder::*; use crate::query_source::{Column, QuerySource}; -use crate::result::QueryResult; use crate::Table; /// Types which can be passed to diff --git a/diesel/src/query_builder/update_statement/mod.rs b/diesel/src/query_builder/update_statement/mod.rs index 2384826d8ae8..b234d35cc03a 100644 --- a/diesel/src/query_builder/update_statement/mod.rs +++ b/diesel/src/query_builder/update_statement/mod.rs @@ -1,9 +1,7 @@ pub(crate) mod changeset; pub(super) mod target; -use self::target::UpdateTarget; - -use crate::backend::{Backend, DieselReserveSpecialization}; +use crate::backend::DieselReserveSpecialization; use crate::dsl::{Filter, IntoBoxed}; use crate::expression::{ is_aggregate, AppearsOnTable, Expression, MixedAggregates, SelectableExpression, ValidGrouping, @@ -14,7 +12,6 @@ use crate::query_dsl::methods::{BoxedDsl, FilterDsl}; use crate::query_dsl::RunQueryDsl; use crate::query_source::Table; use crate::result::Error::QueryBuilderError; -use crate::result::QueryResult; use 
crate::{query_builder::*, QuerySource}; pub(crate) use self::private::UpdateAutoTypeHelper; diff --git a/diesel/src/query_builder/upsert/on_conflict_actions.rs b/diesel/src/query_builder/upsert/on_conflict_actions.rs index 452b054eba56..4f3e144ac147 100644 --- a/diesel/src/query_builder/upsert/on_conflict_actions.rs +++ b/diesel/src/query_builder/upsert/on_conflict_actions.rs @@ -1,11 +1,9 @@ use std::marker::PhantomData; use crate::backend::sql_dialect::on_conflict_clause; -use crate::backend::Backend; use crate::expression::{AppearsOnTable, Expression}; use crate::query_builder::*; use crate::query_source::*; -use crate::result::QueryResult; #[doc(hidden)] #[derive(Debug, Clone, Copy, QueryId)] diff --git a/diesel/src/query_builder/upsert/on_conflict_clause.rs b/diesel/src/query_builder/upsert/on_conflict_clause.rs index 99b940d62559..81d1f787d762 100644 --- a/diesel/src/query_builder/upsert/on_conflict_clause.rs +++ b/diesel/src/query_builder/upsert/on_conflict_clause.rs @@ -1,11 +1,9 @@ use super::on_conflict_actions::*; use super::on_conflict_target::*; use crate::backend::sql_dialect; -use crate::backend::Backend; use crate::insertable::*; use crate::query_builder::where_clause::{NoWhereClause, WhereClause}; use crate::query_builder::*; -use crate::result::QueryResult; #[doc(hidden)] #[derive(Debug, Clone, Copy)] diff --git a/diesel/src/query_builder/upsert/on_conflict_target.rs b/diesel/src/query_builder/upsert/on_conflict_target.rs index 7592acd9e2c3..125cda7bdaca 100644 --- a/diesel/src/query_builder/upsert/on_conflict_target.rs +++ b/diesel/src/query_builder/upsert/on_conflict_target.rs @@ -1,8 +1,7 @@ -use crate::backend::{sql_dialect, Backend}; +use crate::backend::sql_dialect; use crate::expression::SqlLiteral; use crate::query_builder::*; use crate::query_source::Column; -use crate::result::QueryResult; #[doc(hidden)] pub trait OnConflictTarget {} diff --git a/diesel/src/query_builder/where_clause.rs b/diesel/src/query_builder/where_clause.rs index 
f344db814dce..e8ee69c906f5 100644 --- a/diesel/src/query_builder/where_clause.rs +++ b/diesel/src/query_builder/where_clause.rs @@ -1,10 +1,9 @@ use super::from_clause::AsQuerySource; use super::*; -use crate::backend::{Backend, DieselReserveSpecialization}; +use crate::backend::DieselReserveSpecialization; use crate::expression::grouped::Grouped; use crate::expression::operators::{And, Or}; use crate::expression::*; -use crate::result::QueryResult; use crate::sql_types::BoolOrNullableBool; /// Add `Predicate` to the current `WHERE` clause, joining with `AND` if diff --git a/diesel/src/query_source/joins.rs b/diesel/src/query_source/joins.rs index 0d0cf7495d1f..426dec14fb1c 100644 --- a/diesel/src/query_source/joins.rs +++ b/diesel/src/query_source/joins.rs @@ -1,13 +1,11 @@ -use super::{AppearsInFromClause, Plus, QuerySource}; +use super::{AppearsInFromClause, Plus}; use crate::backend::Backend; use crate::backend::DieselReserveSpecialization; use crate::expression::grouped::Grouped; use crate::expression::nullable::Nullable; -use crate::expression::SelectableExpression; use crate::prelude::*; use crate::query_builder::*; use crate::query_dsl::InternalJoinDsl; -use crate::result::QueryResult; use crate::sql_types::BoolOrNullableBool; use crate::util::TupleAppend; diff --git a/diesel/src/r2d2.rs b/diesel/src/r2d2.rs index 79b8d0431e58..5c656a18b846 100644 --- a/diesel/src/r2d2.rs +++ b/diesel/src/r2d2.rs @@ -100,7 +100,6 @@ pub use r2d2::*; /// [`r2d2::Pool`]: r2d2::Pool pub type PoolError = r2d2::Error; -use std::convert::Into; use std::fmt; use std::marker::PhantomData; diff --git a/diesel/src/result.rs b/diesel/src/result.rs index 9dd778a6130f..71a642d32d62 100644 --- a/diesel/src/result.rs +++ b/diesel/src/result.rs @@ -1,6 +1,5 @@ //! Errors, type aliases, and functions related to working with `Result`. 
-use std::convert::From; use std::error::Error as StdError; use std::ffi::NulError; use std::fmt::{self, Display}; diff --git a/diesel/src/sqlite/connection/bind_collector.rs b/diesel/src/sqlite/connection/bind_collector.rs index 8cce52c35ed3..e276c1b5b0c1 100644 --- a/diesel/src/sqlite/connection/bind_collector.rs +++ b/diesel/src/sqlite/connection/bind_collector.rs @@ -194,4 +194,9 @@ impl<'a> BindCollector<'a, Sqlite> for SqliteBindCollector<'a> { )); Ok(()) } + + fn push_null_value(&mut self, metadata: SqliteType) -> QueryResult<()> { + self.binds.push((InternalSqliteBindValue::Null, metadata)); + Ok(()) + } } diff --git a/diesel/src/sqlite/connection/mod.rs b/diesel/src/sqlite/connection/mod.rs index e2b058d844c5..8151ee0d12cb 100644 --- a/diesel/src/sqlite/connection/mod.rs +++ b/diesel/src/sqlite/connection/mod.rs @@ -639,7 +639,7 @@ mod tests { #[test] fn register_custom_function() { let connection = &mut SqliteConnection::establish(":memory:").unwrap(); - fun_case_internals::register_impl(connection, |x: String| { + fun_case_utils::register_impl(connection, |x: String| { x.chars() .enumerate() .map(|(i, c)| { @@ -664,7 +664,7 @@ mod tests { #[test] fn register_multiarg_function() { let connection = &mut SqliteConnection::establish(":memory:").unwrap(); - my_add_internals::register_impl(connection, |x: i32, y: i32| x + y).unwrap(); + my_add_utils::register_impl(connection, |x: i32, y: i32| x + y).unwrap(); let added = crate::select(my_add(1, 2)).get_result::(connection); assert_eq!(Ok(3), added); @@ -675,7 +675,7 @@ mod tests { #[test] fn register_noarg_function() { let connection = &mut SqliteConnection::establish(":memory:").unwrap(); - answer_internals::register_impl(connection, || 42).unwrap(); + answer_utils::register_impl(connection, || 42).unwrap(); let answer = crate::select(answer()).get_result::(connection); assert_eq!(Ok(42), answer); @@ -684,7 +684,7 @@ mod tests { #[test] fn register_nondeterministic_noarg_function() { let connection = &mut 
SqliteConnection::establish(":memory:").unwrap(); - answer_internals::register_nondeterministic_impl(connection, || 42).unwrap(); + answer_utils::register_nondeterministic_impl(connection, || 42).unwrap(); let answer = crate::select(answer()).get_result::(connection); assert_eq!(Ok(42), answer); @@ -696,7 +696,7 @@ mod tests { fn register_nondeterministic_function() { let connection = &mut SqliteConnection::establish(":memory:").unwrap(); let mut y = 0; - add_counter_internals::register_nondeterministic_impl(connection, move |x: i32| { + add_counter_utils::register_nondeterministic_impl(connection, move |x: i32| { y += 1; x + y }) @@ -707,8 +707,6 @@ mod tests { assert_eq!(Ok((2, 3, 4)), added); } - use crate::sqlite::SqliteAggregateFunction; - define_sql_function! { #[aggregate] fn my_sum(expr: Integer) -> Integer; @@ -752,7 +750,7 @@ mod tests { .execute(connection) .unwrap(); - my_sum_internals::register_impl::(connection).unwrap(); + my_sum_utils::register_impl::(connection).unwrap(); let result = my_sum_example .select(my_sum(value)) @@ -771,7 +769,7 @@ mod tests { .execute(connection) .unwrap(); - my_sum_internals::register_impl::(connection).unwrap(); + my_sum_utils::register_impl::(connection).unwrap(); let result = my_sum_example .select(my_sum(value)) @@ -843,7 +841,7 @@ mod tests { .execute(connection) .unwrap(); - range_max_internals::register_impl::, _, _, _>(connection).unwrap(); + range_max_utils::register_impl::, _, _, _>(connection).unwrap(); let result = range_max_example .select(range_max(value1, value2, value3)) .get_result::>(connection) diff --git a/diesel/src/sqlite/connection/raw.rs b/diesel/src/sqlite/connection/raw.rs index 9f31a58106a4..bf5e910d3fab 100644 --- a/diesel/src/sqlite/connection/raw.rs +++ b/diesel/src/sqlite/connection/raw.rs @@ -261,7 +261,7 @@ impl Drop for RawConnection { enum SqliteCallbackError { Abort(&'static str), DieselError(crate::result::Error), - Panic(Box, String), + Panic(String), } impl SqliteCallbackError { @@ 
-273,7 +273,7 @@ impl SqliteCallbackError { s = e.to_string(); &s } - SqliteCallbackError::Panic(_, msg) => msg, + SqliteCallbackError::Panic(msg) => msg, }; unsafe { context_error_str(ctx, msg); @@ -347,12 +347,7 @@ extern "C" fn run_custom_function( } Ok(()) }) - .unwrap_or_else(|p| { - Err(SqliteCallbackError::Panic( - p, - data_ptr.function_name.clone(), - )) - }); + .unwrap_or_else(|p| Err(SqliteCallbackError::Panic(data_ptr.function_name.clone()))); if let Err(e) = result { e.emit(ctx); } @@ -383,10 +378,10 @@ extern "C" fn run_aggregator_step_function(ctx, args) }) .unwrap_or_else(|e| { - Err(SqliteCallbackError::Panic( - e, - format!("{}::step() panicked", std::any::type_name::()), - )) + Err(SqliteCallbackError::Panic(format!( + "{}::step() panicked", + std::any::type_name::() + ))) }); match result { @@ -496,11 +491,11 @@ extern "C" fn run_aggregator_final_function()), - )) + .unwrap_or_else(|_e| { + Err(SqliteCallbackError::Panic(format!( + "{}::finalize() panicked", + std::any::type_name::() + ))) }); if let Err(e) = result { e.emit(ctx); @@ -570,7 +565,6 @@ where }) .unwrap_or_else(|p| { Err(SqliteCallbackError::Panic( - p, user_ptr .map(|u| u.collation_name.clone()) .unwrap_or_default(), @@ -601,7 +595,7 @@ where ); std::process::abort() } - Err(SqliteCallbackError::Panic(_, msg)) => { + Err(SqliteCallbackError::Panic(msg)) => { eprintln!("Collation function {} panicked", msg); std::process::abort() } diff --git a/diesel/src/sqlite/connection/row.rs b/diesel/src/sqlite/connection/row.rs index d5a86de42fd5..585e0d69096c 100644 --- a/diesel/src/sqlite/connection/row.rs +++ b/diesel/src/sqlite/connection/row.rs @@ -1,5 +1,4 @@ use std::cell::{Ref, RefCell}; -use std::convert::TryFrom; use std::rc::Rc; use super::sqlite_value::{OwnedSqliteValue, SqliteValue}; diff --git a/diesel/src/sqlite/connection/stmt.rs b/diesel/src/sqlite/connection/stmt.rs index 07aee05bce60..d17224df5ba6 100644 --- a/diesel/src/sqlite/connection/stmt.rs +++ 
b/diesel/src/sqlite/connection/stmt.rs @@ -517,7 +517,6 @@ impl<'stmt, 'query> StatementUse<'stmt, 'query> { mod tests { use crate::prelude::*; use crate::sql_types::Text; - use crate::SqliteConnection; // this is a regression test for // https://github.com/diesel-rs/diesel/issues/3558 diff --git a/diesel/src/sqlite/types/date_and_time/chrono.rs b/diesel/src/sqlite/types/date_and_time/chrono.rs index 679dd90dd767..a8df1a0099e8 100644 --- a/diesel/src/sqlite/types/date_and_time/chrono.rs +++ b/diesel/src/sqlite/types/date_and_time/chrono.rs @@ -75,6 +75,7 @@ fn parse_julian(julian_days: f64) -> Option { let timestamp = (julian_days - EPOCH_IN_JULIAN_DAYS) * SECONDS_IN_DAY; let seconds = timestamp as i64; let nanos = (timestamp.fract() * 1E9) as u32; + #[allow(deprecated)] // otherwise we would need to bump our minimal chrono version NaiveDateTime::from_timestamp_opt(seconds, nanos) } @@ -325,11 +326,11 @@ mod tests { #[test] fn times_relative_to_now_encode_correctly() { let connection = &mut connection(); - let time = Utc::now().naive_utc() + Duration::seconds(60); + let time = Utc::now().naive_utc() + Duration::try_seconds(60).unwrap(); let query = select(now.lt(time)); assert_eq!(Ok(true), query.get_result(connection)); - let time = Utc::now().naive_utc() - Duration::seconds(600); + let time = Utc::now().naive_utc() - Duration::try_seconds(600).unwrap(); let query = select(now.gt(time)); assert_eq!(Ok(true), query.get_result(connection)); } diff --git a/diesel/src/type_impls/primitives.rs b/diesel/src/type_impls/primitives.rs index 470ffdd06b14..27b200f5bb10 100644 --- a/diesel/src/type_impls/primitives.rs +++ b/diesel/src/type_impls/primitives.rs @@ -8,6 +8,8 @@ use crate::serialize::{self, IsNull, Output, ToSql}; use crate::sql_types::{ self, BigInt, Binary, Bool, Double, Float, Integer, SingleValue, SmallInt, Text, }; +use std::borrow::Cow; +use std::fmt; #[allow(dead_code)] mod foreign_impls { @@ -201,8 +203,6 @@ where } } -use std::borrow::{Cow, ToOwned}; 
-use std::fmt; impl<'a, T: ?Sized, ST, DB> ToSql for Cow<'a, T> where T: 'a + ToOwned + ToSql, diff --git a/diesel_bench/Cargo.toml b/diesel_bench/Cargo.toml index f0593b304390..990d2d1c93ad 100644 --- a/diesel_bench/Cargo.toml +++ b/diesel_bench/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "diesel_bench" version = "0.1.0" -edition = "2018" +edition = "2021" build = "build.rs" autobenches = false @@ -10,22 +10,38 @@ autobenches = false [dependencies] dotenvy = "0.15" -criterion = {version = "0.5", default-features = false, features = ["csv_output", "cargo_bench_support"]} -sqlx = {version = "0.7", features = ["runtime-tokio-rustls"], optional = true} -tokio = {version = "1", optional = true} -rusqlite = {version = "0.29", optional = true} -rust_postgres = {version = "0.19.7", optional = true, package = "postgres"} -rust_mysql = {version = "23.0", optional = true, package = "mysql"} -rustorm = {version = "0.20", optional = true} -rustorm_dao = {version = "0.20", optional = true} -quaint = {version = "=0.2.0-alpha.13", optional = true, features = ["uuid"]} -serde = {version = "1", optional = true, features = ["derive"]} -sea-orm = { git = "https://github.com/SeaQL/sea-orm/", branch = "master", optional = true, features = ["runtime-tokio-rustls"]} -futures = {version = "0.3", optional = true} -diesel-async = {version = "0.4", optional = true, default-features = false} -criterion-perf-events = { version = "0.4", optional = true} -perfcnt = {version = "0.8", optional = true} -wtx = { default-features = false, features = ["atoi", "postgres", "simdutf8", "std", "tokio"], optional = true, version = "0.12" } +criterion = { version = "0.5", default-features = false, features = [ + "csv_output", + "cargo_bench_support", +] } +sqlx = { version = "0.7.3", features = [ + "runtime-tokio-rustls", +], optional = true } +tokio = { version = "1", optional = true, features = ["rt-multi-thread"] } +rusqlite = { version = "0.30", optional = true } +rust_postgres = { version = "0.19.7", 
optional = true, package = "postgres" } +tokio_postgres = { version = "0.7.10", optional = true, package = "tokio-postgres" } +rust_mysql = { version = "23.0", optional = true, package = "mysql" } +rustorm = { version = "0.20", optional = true } +rustorm_dao = { version = "0.20", optional = true } +quaint = { version = "0.2.0-alpha.13", optional = true, default-features = false } +serde = { version = "1", optional = true, features = ["derive"] } +sea-orm = { version = "0.12.14", optional = true, features = [ + "runtime-tokio-rustls", +] } +futures = { version = "0.3", optional = true } +futures-util = { version = "0.3", optional = true } +diesel-async = { version = "0.4.1", optional = true, default-features = false } +criterion-perf-events = { version = "0.4", optional = true } +perfcnt = { version = "0.8", optional = true } +wtx = { default-features = false, features = [ + "atoi", + "memchr", + "postgres", + "simdutf8", + "std", + "tokio", +], optional = true, version = "0.14" } [dependencies.diesel] path = "../diesel" @@ -53,8 +69,11 @@ sqlx-bench = ["sqlx", "tokio", "sqlx/runtime-tokio-rustls"] instruction_count = ["criterion-perf-events", "perfcnt"] fast_run = [] +[profile.release] +lto = true +codegen-units = 1 [patch.crates-io] -quaint = {git = "https://github.com/prisma/prisma-engines", rev = "8f088bb"} -diesel-async = { git = "https://github.com/weiznich/diesel_async", rev = "017ebe2"} -diesel = { path = "../diesel"} +quaint = { git = "https://github.com/prisma/prisma-engines", branch = "main" } +diesel-async = { git = "https://github.com/weiznich/diesel_async", branch = "main" } +diesel = { path = "../diesel" } diff --git a/diesel_bench/benches/lib.rs b/diesel_bench/benches/lib.rs index 247b7dcd9e48..b2461463cd8c 100644 --- a/diesel_bench/benches/lib.rs +++ b/diesel_bench/benches/lib.rs @@ -5,6 +5,8 @@ mod diesel_benches; mod mysql_benches; #[cfg(all(feature = "postgres", feature = "rust_postgres"))] mod postgres_benches; +#[cfg(all(feature = "postgres", 
feature = "tokio_postgres"))] +mod tokio_postgres_benches; #[cfg(feature = "quaint")] mod quaint_benches; #[cfg(all(feature = "rusqlite", feature = "sqlite"))] @@ -142,6 +144,16 @@ fn bench_trivial_query(c: &mut CriterionType) { crate::postgres_benches::bench_trivial_query_by_name(b, *i); }); + #[cfg(all(feature = "postgres", feature = "tokio_postgres"))] + group.bench_with_input(BenchmarkId::new("tokio_postgres_by_id", size), size, |b, i| { + crate::tokio_postgres_benches::bench_trivial_query_by_id(b, *i); + }); + + #[cfg(all(feature = "postgres", feature = "tokio_postgres"))] + group.bench_with_input(BenchmarkId::new("tokio_postgres_by_name", size), size, |b, i| { + crate::tokio_postgres_benches::bench_trivial_query_by_name(b, *i); + }); + #[cfg(all(feature = "mysql", feature = "rust_mysql"))] group.bench_with_input(BenchmarkId::new("mysql_by_id", size), size, |b, i| { crate::mysql_benches::bench_trivial_query_by_id(b, *i); @@ -245,6 +257,16 @@ fn bench_medium_complex_query(c: &mut CriterionType) { crate::postgres_benches::bench_medium_complex_query_by_name(b, *i); }); + #[cfg(all(feature = "postgres", feature = "tokio_postgres"))] + group.bench_with_input(BenchmarkId::new("tokio_postgres_by_id", size), size, |b, i| { + crate::tokio_postgres_benches::bench_medium_complex_query_by_id(b, *i); + }); + + #[cfg(all(feature = "postgres", feature = "tokio_postgres"))] + group.bench_with_input(BenchmarkId::new("tokio_postgres_by_name", size), size, |b, i| { + crate::tokio_postgres_benches::bench_medium_complex_query_by_name(b, *i); + }); + #[cfg(all(feature = "mysql", feature = "rust_mysql"))] group.bench_with_input(BenchmarkId::new("mysql_by_id", size), size, |b, i| { crate::mysql_benches::bench_medium_complex_query_by_id(b, *i); @@ -287,6 +309,11 @@ fn bench_loading_associations_sequentially(c: &mut CriterionType) { crate::postgres_benches::loading_associations_sequentially(b) }); + #[cfg(all(feature = "postgres", feature = "tokio_postgres"))] + 
group.bench_function("tokio_postgres", |b| { + crate::tokio_postgres_benches::loading_associations_sequentially(b) + }); + #[cfg(all(feature = "sqlite", feature = "rusqlite"))] group.bench_function("rusqlite", |b| { crate::rusqlite_benches::loading_associations_sequentially(b) @@ -358,6 +385,11 @@ fn bench_insert(c: &mut CriterionType) { crate::postgres_benches::bench_insert(b, *i); }); + #[cfg(all(feature = "postgres", feature = "tokio_postgres"))] + group.bench_with_input(BenchmarkId::new("tokio_postgres", size), size, |b, i| { + crate::tokio_postgres_benches::bench_insert(b, *i); + }); + #[cfg(all(feature = "sqlite", feature = "rusqlite"))] group.bench_with_input(BenchmarkId::new("rusqlite", size), size, |b, i| { crate::rusqlite_benches::bench_insert(b, *i); diff --git a/diesel_bench/benches/tokio_postgres_benches.rs b/diesel_bench/benches/tokio_postgres_benches.rs new file mode 100644 index 000000000000..57d47765ee40 --- /dev/null +++ b/diesel_bench/benches/tokio_postgres_benches.rs @@ -0,0 +1,458 @@ +use super::Bencher; +use std::collections::HashMap; +use tokio_postgres::{types::ToSql, Client, NoTls}; +use tokio::runtime::Runtime; +use futures_util::stream::StreamExt; + +const NO_PARAMS: Vec<&dyn ToSql> = Vec::new(); + +pub struct User { + pub id: i32, + pub name: String, + pub hair_color: Option, +} + +pub struct Post { + pub id: i32, + pub user_id: i32, + pub title: String, + pub body: Option, +} + +pub struct Comment { + pub id: i32, + pub post_id: i32, + pub text: String, +} + +async fn connection() -> Client { + dotenvy::dotenv().ok(); + let connection_url = dotenvy::var("POSTGRES_DATABASE_URL") + .or_else(|_| dotenvy::var("DATABASE_URL")) + .expect("DATABASE_URL must be set in order to run tests"); + let (client, connection) = tokio_postgres::connect(&connection_url, NoTls).await.unwrap(); + + // The connection object performs the actual communication with the database, + // so spawn it off to run on its own. 
+ tokio::spawn(async move { + if let Err(e) = connection.await { + eprintln!("connection error: {}", e); + } + }); + + client + .execute("TRUNCATE TABLE comments CASCADE", &[]) + .await + .unwrap(); + client + .execute("TRUNCATE TABLE posts CASCADE", &[]) + .await + .unwrap(); + client + .execute("TRUNCATE TABLE users CASCADE", &[]) + .await + .unwrap(); + + client +} + +async fn insert_users( + size: usize, + client: &Client, + hair_color_init: impl Fn(usize) -> Option, +) { + let mut query = String::from("INSERT INTO users (name, hair_color) VALUES"); + + let mut params = Vec::with_capacity(2 * size); + + for x in 0..size { + query += &format!( + "{} (${}, ${})", + if x == 0 { "" } else { "," }, + 2 * x + 1, + 2 * x + 2 + ); + params.push((format!("User {}", x), hair_color_init(x))); + } + + let params = params + .iter() + .flat_map(|(a, b)| vec![a as _, b as _]) + .collect::>(); + + client.execute(&query as &str, ¶ms).await.unwrap(); +} + +pub fn bench_trivial_query_by_id(b: &mut Bencher, size: usize) { + let runtime = Runtime::new().expect("Failed to create runtime"); + let (client, query) = runtime.block_on(async { + let client = connection().await; + insert_users(size, &client, |_| None).await; + let query = client + .prepare("SELECT id, name, hair_color FROM users") + .await + .unwrap(); + (client, query) + }); + + b.iter(|| { + runtime.block_on(async { + client + .query_raw(&query, NO_PARAMS) + .await + .unwrap() + .map(|row| { + let row = row.unwrap(); + User { + id: row.get(0), + name: row.get(1), + hair_color: row.get(2), + } + }) + .collect::>() + .await + }) + }) +} + +pub fn bench_trivial_query_by_name(b: &mut Bencher, size: usize) { + let runtime = Runtime::new().expect("Failed to create runtime"); + let (client, query) = runtime.block_on(async { + let client = connection().await; + insert_users(size, &client, |_| None).await; + + let query = client + .prepare("SELECT id, name, hair_color FROM users") + .await + .unwrap(); + (client, query) + }); + + 
b.iter(|| { + runtime.block_on(async { + client + .query_raw(&query, NO_PARAMS) + .await + .unwrap() + .map(|row| { + let row = row.unwrap(); + User { + id: row.get("id"), + name: row.get("name"), + hair_color: row.get("hair_color"), + } + }) + .collect::>() + .await + }) + }) +} + +pub fn bench_medium_complex_query_by_id(b: &mut Bencher, size: usize) { + let runtime = Runtime::new().expect("Failed to create runtime"); + let (client, query) = runtime.block_on(async { + let client = connection().await; + insert_users(size, &client, |i| { + Some(if i % 2 == 0 { "black" } else { "brown" }.into()) + }) + .await; + + let query = client + .prepare( + "SELECT u.id, u.name, u.hair_color, p.id, p.user_id, p.title, p.body \ + FROM users as u LEFT JOIN posts as p on u.id = p.user_id WHERE u.hair_color = $1", + ) + .await + .unwrap(); + (client, query) + }); + + b.iter(|| { + runtime.block_on(async { + client + .query_raw(&query, &[&"black"]) + .await + .unwrap() + .map(|row| { + let row = row.unwrap(); + let user = User { + id: row.get(0), + name: row.get(1), + hair_color: row.get(2), + }; + let post = if let Some(id) = row.get(3) { + Some(Post { + id, + user_id: row.get(4), + title: row.get(5), + body: row.get(6), + }) + } else { + None + }; + (user, post) + }) + .collect::>() + .await + }) + }) +} + +pub fn bench_medium_complex_query_by_name(b: &mut Bencher, size: usize) { + let runtime = Runtime::new().expect("Failed to create runtime"); + let (client, query) = runtime.block_on(async { + let client = connection().await; + insert_users(size, &client, |i| { + Some(if i % 2 == 0 { "black" } else { "brown" }.into()) + }).await; + + let query = client + .prepare( + "SELECT u.id as myuser_id, u.name, u.hair_color, p.id as post_id, p.user_id, p.title, p.body \ + FROM users as u LEFT JOIN posts as p on u.id = p.user_id", + ) + .await + .unwrap(); + (client, query) + }); + + b.iter(|| { + runtime.block_on(async { + client + .query_raw(&query, NO_PARAMS) + .await + .unwrap() + 
.map(|row| { + let row = row.unwrap(); + let user = User { + id: row.get("myuser_id"), + name: row.get("name"), + hair_color: row.get("hair_color"), + }; + let post = if let Some(id) = row.get("post_id") { + Some(Post { + id, + user_id: row.get("user_id"), + title: row.get("title"), + body: row.get("body"), + }) + } else { + None + }; + (user, post) + }) + .collect::>() + .await + }) + }) +} + +pub fn bench_insert(b: &mut Bencher, size: usize) { + let runtime = Runtime::new().expect("Failed to create runtime"); + let client = runtime.block_on(async { + let client = connection().await; + client + }); + + b.iter(|| { + runtime.block_on(async { + insert_users(size, &client, |_| Some(String::from("hair_color"))).await; + }) + }) +} + +pub fn loading_associations_sequentially(b: &mut Bencher) { + let runtime = Runtime::new().expect("Failed to create runtime"); + let (client, user_query) = runtime.block_on(async { + let client = connection().await; + insert_users(100, &client, |i| { + Some(if i % 2 == 0 { + String::from("black") + } else { + String::from("brown") + }) + }) + .await; + + let user_ids = client + .query_raw("SELECT id FROM users", NO_PARAMS) + .await + .unwrap() + .map(|row| row.unwrap().get::<&str, i32>("id")) + .collect::>() + .await; + + let data = user_ids + .iter() + .flat_map(|user_id| { + (0..10).map(move |i| (format!("Post {} by user {}", i, user_id), user_id, None)) + }) + .collect::>(); + + let mut insert_query = String::from("INSERT INTO posts(title, user_id, body) VALUES"); + + for x in 0..data.len() { + insert_query += &format!( + "{} (${}, ${}, ${})", + if x == 0 { "" } else { "," }, + 3 * x + 1, + 3 * x + 2, + 3 * x + 3 + ); + } + + let data = data + .iter() + .flat_map(|(title, user_id, body): &(_, _, Option)| { + vec![title as &(dyn ToSql + Sync), user_id as _, body as _] + }) + .collect::>(); + + client.execute(&insert_query as &str, &data).await.unwrap(); + + let all_posts = client + .query_raw("SELECT id FROM posts", NO_PARAMS) + .await 
+ .unwrap() + .map(|row| row.unwrap().get::<&str, i32>("id")) + .collect::>() + .await; + + let data = all_posts + .iter() + .flat_map(|post_id| { + (0..10).map(move |i| (format!("Comment {} on post {}", i, post_id), post_id)) + }) + .collect::>(); + + let mut insert_query = String::from("INSERT INTO comments(text, post_id) VALUES"); + + for x in 0..data.len() { + insert_query += &format!( + "{} (${}, ${})", + if x == 0 { "" } else { "," }, + 2 * x + 1, + 2 * x + 2, + ); + } + + let data = data + .iter() + .flat_map(|(title, post_id)| vec![title as _, post_id as _]) + .collect::>(); + + client.execute(&insert_query as &str, &data).await.unwrap(); + + let user_query = client + .prepare("SELECT id, name, hair_color FROM users") + .await + .unwrap(); + + (client, user_query) + }); + + b.iter(|| { + runtime.block_on(async { + let users = client + .query_raw(&user_query, NO_PARAMS) + .await + .unwrap() + .map(|row| { + let row = row.unwrap(); + User { + id: row.get("id"), + name: row.get("name"), + hair_color: row.get("hair_color"), + } + }) + .collect::>() + .await; + + let mut posts_query = + String::from("SELECT id, title, user_id, body FROM posts WHERE user_id IN("); + + let user_ids = users + .iter() + .enumerate() + .map(|(i, &User { id, .. })| { + posts_query += &format!("{}${}", if i == 0 { "" } else { "," }, i + 1); + id + }) + .collect::>(); + + posts_query += ")"; + + let posts = client + .query_raw(&posts_query as &str, user_ids) + .await + .unwrap() + .map(|row| { + let row = row.unwrap(); + Post { + id: row.get("id"), + user_id: row.get("user_id"), + title: row.get("title"), + body: row.get("body"), + } + }) + .collect::>() + .await; + + let mut comments_query = + String::from("SELECT id, post_id, text FROM comments WHERE post_id IN("); + + let post_ids = posts + .iter() + .enumerate() + .map(|(i, &Post { id, .. 
})| { + comments_query += &format!("{}${}", if i == 0 { "" } else { "," }, i + 1); + id + }) + .collect::>(); + + comments_query += ")"; + + let comments = client + .query_raw(&comments_query as &str, post_ids) + .await + .unwrap() + .map(|row| { + let row = row.unwrap(); + Comment { + id: row.get("id"), + post_id: row.get("post_id"), + text: row.get("text"), + } + }) + .collect::>() + .await; + + let mut posts = posts + .into_iter() + .map(|p| (p.id, (p, Vec::new()))) + .collect::>(); + + let mut users = users + .into_iter() + .map(|u| (u.id, (u, Vec::new()))) + .collect::>(); + + for comment in comments { + posts.get_mut(&comment.post_id).unwrap().1.push(comment); + } + + for (_, post_with_comments) in posts { + users + .get_mut(&post_with_comments.0.user_id) + .unwrap() + .1 + .push(post_with_comments); + } + + users + .into_iter() + .map(|(_, users_with_post_and_comment)| users_with_post_and_comment) + .collect::)>)>>() + }) + }) +} diff --git a/diesel_bench/benches/wtx.rs b/diesel_bench/benches/wtx.rs index 6f04efbaf8e8..cd4e86b80bcc 100644 --- a/diesel_bench/benches/wtx.rs +++ b/diesel_bench/benches/wtx.rs @@ -1,13 +1,12 @@ use crate::Bencher; -use std::collections::HashMap; -use std::fmt::Write; +use std::{collections::HashMap, fmt::Write}; use tokio::{net::TcpStream, runtime::Runtime}; use wtx::{ database::{ client::postgres::{Config, Executor, ExecutorBuffer}, - Encode, Executor as _, Record as _, + Executor as _, Record as _, }, - misc::UriPartsRef, + misc::{Either, UriRef}, rng::StdRng, }; @@ -85,21 +84,15 @@ pub fn bench_loading_associations_sequentially(b: &mut Bencher) { let mut posts_query = String::from("SELECT id, title, user_id, body FROM posts WHERE user_id IN("); - let mut users_ids = Vec::with_capacity(LEN); - concat( - users.iter().enumerate(), - &mut posts_query, - |local_str, (idx, &User { id, .. 
})| { - local_str.write_fmt(format_args!("${}", idx + 1)).unwrap(); - users_ids.push(id); - }, - ); - posts_query += ")"; + concat(0..users.len(), &mut posts_query, |local_str, idx| { + local_str.write_fmt(format_args!("${}", idx + 1)).unwrap(); + }); + posts_query.push(')'); let mut posts = Vec::with_capacity(LEN); - conn.fetch_many_with_stmt::( + conn.fetch_many_with_stmt( posts_query.as_str(), - users_ids.as_slice(), + &mut users.iter().map(|user| user.id), |record| { posts.push(Post { body: record.decode_opt(3).unwrap(), @@ -115,21 +108,15 @@ pub fn bench_loading_associations_sequentially(b: &mut Bencher) { let mut comments_query = String::from("SELECT id, post_id, text FROM comments WHERE post_id IN("); - let mut posts_ids = Vec::with_capacity(LEN); - concat( - posts.iter().enumerate(), - &mut comments_query, - |local_str, (idx, &Post { id, .. })| { - local_str.write_fmt(format_args!("${}", idx + 1)).unwrap(); - posts_ids.push(id); - }, - ); - comments_query += ")"; + concat(0..posts.len(), &mut comments_query, |local_str, idx| { + local_str.write_fmt(format_args!("${}", idx + 1)).unwrap(); + }); + comments_query.push(')'); let mut comments = Vec::with_capacity(LEN); - conn.fetch_many_with_stmt::( + conn.fetch_many_with_stmt( comments_query.as_str(), - posts_ids.as_slice(), + &mut posts.iter().map(|post| post.id), |record| { comments.push(Comment { id: record.decode(0).unwrap(), @@ -197,7 +184,7 @@ pub fn bench_medium_complex_query(b: &mut Bencher, size: usize) { b.iter(|| { runtime.block_on(async { let mut _rslt = Vec::with_capacity(size); - conn.fetch_many_with_stmt::(stmt_hash, ("black",), |record| { + conn.fetch_many_with_stmt(stmt_hash, ("black",), |record| { let user = User { id: record.decode(0).unwrap(), name: record.decode(1).unwrap(), @@ -237,8 +224,8 @@ pub fn bench_trivial_query(b: &mut Bencher, size: usize) { (conn, stmt_hash) }); b.iter(|| { - let mut users = Vec::with_capacity(size); runtime.block_on(async { + let mut users = 
Vec::with_capacity(size); conn.fetch_many_with_stmt(stmt_hash, (), |record| { users.push(User { id: record.decode(0).unwrap(), @@ -266,35 +253,32 @@ where } } -async fn connection() -> Executor { +async fn connection() -> Executor { dotenvy::dotenv().ok(); let url = dotenvy::var("POSTGRES_DATABASE_URL") .or_else(|_| dotenvy::var("DATABASE_URL")) .expect("DATABASE_URL must be set in order to run tests"); - let up = UriPartsRef::new(url.as_str()); + let uri = UriRef::new(url.as_str()); let mut rng = StdRng::default(); let mut conn = Executor::connect( - &Config::from_uri_parts(&up).unwrap(), + &Config::from_uri(&uri).unwrap(), ExecutorBuffer::with_default_params(&mut rng), &mut rng, - TcpStream::connect(up.host()).await.unwrap(), + TcpStream::connect(uri.host()).await.unwrap(), + ) + .await + .unwrap(); + conn.execute( + "TRUNCATE TABLE comments CASCADE;TRUNCATE TABLE posts CASCADE;TRUNCATE TABLE users CASCADE", + |_| {}, ) .await .unwrap(); - conn.execute("TRUNCATE TABLE comments CASCADE", |_| {}) - .await - .unwrap(); - conn.execute("TRUNCATE TABLE posts CASCADE", |_| {}) - .await - .unwrap(); - conn.execute("TRUNCATE TABLE users CASCADE", |_| {}) - .await - .unwrap(); conn } -async fn insert_posts(conn: &mut Executor) { - let mut users_ids = Vec::with_capacity(N); +async fn insert_posts(conn: &mut Executor) { + let mut users_ids: Vec = Vec::with_capacity(N); conn.fetch_many_with_stmt("SELECT id FROM users", (), |record| { users_ids.push(record.decode(0).unwrap()); Ok(()) @@ -305,11 +289,17 @@ async fn insert_posts(conn: &mut Executor)), + ] + }) }) .collect::>(); - let mut insert_stmt = String::from("INSERT INTO posts(title, user_id, body) VALUES"); + let mut insert_stmt = String::from("INSERT INTO posts(title, user_id, body) VALUES "); concat( 0..params.len(), &mut insert_stmt, @@ -325,45 +315,30 @@ async fn insert_posts(conn: &mut Executor)| { - let a: &dyn Encode<_, _> = &el.0; - let b: &dyn Encode<_, _> = &el.1; - let c: &dyn Encode<_, _> = &el.2; - [a, b, c] 
- }) - .collect::>>(); - - conn.execute_with_stmt::(insert_stmt.as_str(), params_ref.as_slice()) + conn.execute_with_stmt(insert_stmt.as_str(), &mut params.into_iter().flatten()) .await .unwrap(); } async fn insert_users( - conn: &mut Executor, + conn: &mut Executor, hair_color_init: impl Fn(usize) -> Option<&'static str>, ) { let mut query = String::from("INSERT INTO users (name, hair_color) VALUES"); - let mut params = Vec::with_capacity(2 * N); - concat(0..N, &mut query, |local_query, idx| { local_query .write_fmt(format_args!("(${}, ${})", 2 * idx + 1, 2 * idx + 2)) .unwrap(); - params.push((format!("User {idx}"), hair_color_init(idx))); }); - let params_ref = params - .iter() - .flat_map(|el: &(String, Option<&'static str>)| { - let a: &dyn Encode<_, _> = &el.0; - let b: &dyn Encode<_, _> = &el.1; - [a, b] - }) - .collect::>(); + let mut params = (0..N).into_iter().flat_map(|idx| { + [ + Either::Left(format!("User {idx}")), + Either::Right(hair_color_init(idx)), + ] + }); - conn.execute_with_stmt::(query.as_str(), params_ref.as_slice()) + conn.execute_with_stmt(query.as_str(), &mut params) .await .unwrap(); } diff --git a/diesel_cli/Cargo.toml b/diesel_cli/Cargo.toml index f632665b26ef..87e24064d5d1 100644 --- a/diesel_cli/Cargo.toml +++ b/diesel_cli/Cargo.toml @@ -23,19 +23,23 @@ path = "src/main.rs" doc = false [dependencies] -chrono = { version = "0.4.20", default-features = false, features = ["clock", "std"] } -clap = { version = "4.0.2", features = ["cargo", "string"] } +chrono = { version = "0.4.20", default-features = false, features = [ + "clock", + "std", +] } +clap = { version = "4.4.14", features = ["cargo", "string"] } clap_complete = "4" dotenvy = "0.15" heck = "0.4.0" -serde = { version = "1.0.0", features = ["derive"] } +serde = { version = "1.0.193", features = ["derive", "std"] } toml = "0.8" url = { version = "2.2.2" } -libsqlite3-sys = { version = ">=0.17.2, <0.28.0", optional = true } +libsqlite3-sys = { version = ">=0.17.2, <0.29.0", 
optional = true } +pq-sys = { version = ">=0.4, <0.6.0", optional = true } diffy = "0.3.0" regex = "1.0.6" serde_regex = "1.1" -diesel_table_macro_syntax = {version = "0.1", path = "../diesel_table_macro_syntax"} +diesel_table_macro_syntax = { version = "0.1", path = "../diesel_table_macro_syntax" } syn = { version = "2", features = ["visit"] } tracing = "0.1" tracing-subscriber = { version = "0.3.10", features = ["env-filter"] } @@ -61,13 +65,14 @@ postgres = ["diesel/postgres", "uses_information_schema"] sqlite = ["diesel/sqlite"] mysql = ["diesel/mysql", "uses_information_schema"] sqlite-bundled = ["sqlite", "libsqlite3-sys/bundled"] +postgres-bundled = ["postgres", "pq-sys/bundled"] uses_information_schema = [] [[test]] name = "tests" [package.metadata.dist] -features = ["sqlite-bundled"] +features = ["sqlite-bundled", "postgres-bundled"] [package.metadata.dist.dependencies.apt] libpq-dev = '*' diff --git a/diesel_cli/src/cli.rs b/diesel_cli/src/cli.rs index 25bf28de6028..a160a3e0bf9e 100644 --- a/diesel_cli/src/cli.rs +++ b/diesel_cli/src/cli.rs @@ -6,6 +6,13 @@ use clap_complete::Shell; use crate::print_schema; +fn position_sensitive_flag(arg: Arg) -> Arg { + arg.num_args(0) + .value_parser(clap::value_parser!(bool)) + .default_missing_value("true") + .default_value("false") +} + pub fn build_cli() -> Command { let database_arg = Arg::new("DATABASE_URL") .long("database-url") @@ -148,6 +155,18 @@ pub fn build_cli() -> Command { .num_args(0..=1) .require_equals(true), ) + .arg( + Arg::new("sqlite-integer-primary-key-is-bigint") + .long("sqlite-integer-primary-key-is-bigint") + .requires("SCHEMA_RS") + .action(ArgAction::SetTrue) + .help( + "For SQLite 3.37 and above, detect `INTEGER PRIMARY KEY` columns as `BigInt`, \ + when the table isn't declared with `WITHOUT ROWID`.\n\ + See https://www.sqlite.org/lang_createtable.html#rowid for more information.\n\ + Only used with the `--diff-schema` argument." 
+ ), + ) .arg( Arg::new("table-name") .index(2) @@ -156,20 +175,24 @@ pub fn build_cli() -> Command { .help("Table names to filter."), ) .arg( - Arg::new("only-tables") + position_sensitive_flag(Arg::new("only-tables")) .short('o') .long("only-tables") - .action(ArgAction::SetTrue) - .help("Only include tables from table-name that matches regexp.") - .conflicts_with("except-tables"), + .action(ArgAction::Append) + .help("Only include tables from table-name that matches regexp."), ) .arg( - Arg::new("except-tables") + position_sensitive_flag(Arg::new("except-tables")) .short('e') .long("except-tables") - .action(ArgAction::SetTrue) - .help("Exclude tables from table-name that matches regex.") - .conflicts_with("only-tables"), + .action(ArgAction::Append) + .help("Exclude tables from table-name that matches regex."), + ) + .arg( + Arg::new("schema-key") + .long("schema-key") + .action(clap::ArgAction::Append) + .help("select schema key from diesel.toml, use 'default' for print_schema without key."), ), ) .subcommand_required(true) @@ -226,25 +249,23 @@ pub fn build_cli() -> Command { .help("Table names to filter."), ) .arg( - Arg::new("only-tables") + position_sensitive_flag(Arg::new("only-tables")) .short('o') .long("only-tables") - .action(ArgAction::SetTrue) + .action(ArgAction::Append) .help("Only include tables from table-name that matches regexp.") - .conflicts_with("except-tables"), ) .arg( - Arg::new("except-tables") + position_sensitive_flag(Arg::new("except-tables")) .short('e') .long("except-tables") - .action(ArgAction::SetTrue) + .action(ArgAction::Append) .help("Exclude tables from table-name that matches regex.") - .conflicts_with("only-tables"), ) .arg( - Arg::new("with-docs") + position_sensitive_flag(Arg::new("with-docs")) .long("with-docs") - .action(ArgAction::SetTrue) + .action(ArgAction::Append) .help("Render documentation comments for tables and columns."), ) .arg( @@ -252,6 +273,7 @@ pub fn build_cli() -> Command { .long("with-docs-config") 
.help("Render documentation comments for tables and columns.") .num_args(1) + .action(ArgAction::Append) .value_parser(PossibleValuesParser::new(print_schema::DocConfig::VARIANTS_STR)), ) .arg( @@ -259,12 +281,14 @@ pub fn build_cli() -> Command { .long("column-sorting") .help("Sort order for table columns.") .num_args(1) + .action(ArgAction::Append) .value_parser(PossibleValuesParser::new(["ordinal_position", "name"])), ) .arg( Arg::new("patch-file") .long("patch-file") .num_args(1) + .action(ArgAction::Append) .value_parser(clap::value_parser!(std::path::PathBuf)) .help("A unified diff file to be applied to the final schema."), ) @@ -272,14 +296,15 @@ pub fn build_cli() -> Command { Arg::new("import-types") .long("import-types") .num_args(1..) + .action(ArgAction::Append) .action(clap::ArgAction::Append) .number_of_values(1) .help("A list of types to import for every table, separated by commas."), ) .arg( - Arg::new("generate-custom-type-definitions") + position_sensitive_flag(Arg::new("generate-custom-type-definitions")) .long("no-generate-missing-sql-type-definitions") - .action(ArgAction::SetTrue) + .action(ArgAction::Append) .help("Generate SQL type definitions for types not provided by diesel"), ) .arg( @@ -289,6 +314,22 @@ pub fn build_cli() -> Command { .action(clap::ArgAction::Append) .number_of_values(1) .help("A list of derives to implement for every automatically generated SqlType in the schema, separated by commas."), + ) + .arg( + Arg::new("schema-key") + .long("schema-key") + .action(ArgAction::Append) + .default_values(["default"]) + .help("select schema key from diesel.toml, use 'default' for print_schema without key."), + ).arg( + position_sensitive_flag(Arg::new("sqlite-integer-primary-key-is-bigint")) + .long("sqlite-integer-primary-key-is-bigint") + .action(ArgAction::Append) + .help( + "For SQLite 3.37 and above, detect `INTEGER PRIMARY KEY` columns as `BigInt`, \ + when the table isn't declared with `WITHOUT ROWID`.\n\ + See 
https://www.sqlite.org/lang_createtable.html#rowid for more information." + ), ); let config_arg = Arg::new("CONFIG_FILE") diff --git a/diesel_cli/src/config.rs b/diesel_cli/src/config.rs index 7a886aef3d4d..adee31571385 100644 --- a/diesel_cli/src/config.rs +++ b/diesel_cli/src/config.rs @@ -1,25 +1,48 @@ +use super::find_project_root; +use crate::infer_schema_internals::TableName; +use crate::print_schema::ColumnSorting; +use crate::print_schema::{self, DocConfig}; use clap::ArgMatches; use serde::de::{self, MapAccess, Visitor}; use serde::{Deserialize, Deserializer}; use serde_regex::Serde as RegexWrapper; -use std::fs; +use std::collections::btree_map::Entry; +use std::collections::BTreeMap; +use std::ops::Bound; use std::path::{Path, PathBuf}; use std::{env, fmt}; - -use super::find_project_root; -use crate::infer_schema_internals::TableName; -use crate::print_schema; -use crate::print_schema::ColumnSorting; +use std::{fs, iter}; #[derive(Deserialize, Default, Debug)] #[serde(deny_unknown_fields)] pub struct Config { #[serde(default)] - pub print_schema: PrintSchema, + pub print_schema: RootPrintSchema, #[serde(default)] pub migrations_directory: Option, } +fn get_values_with_indices( + matches: &ArgMatches, + id: &str, +) -> Result>, crate::errors::Error> { + match matches.indices_of(id) { + Some(indices) => match matches.try_get_many::(id) { + Ok(Some(values)) => Ok(Some( + indices + .zip(values) + .map(|(index, value)| (index, value.clone())) + .collect(), + )), + Ok(None) => { + unreachable!("`ids` only reports what is present") + } + Err(e) => Err(e.into()), + }, + None => Ok(None), + } +} + impl Config { pub fn file_path(matches: &ArgMatches) -> PathBuf { matches @@ -53,23 +76,326 @@ impl Config { } pub fn set_filter(mut self, matches: &ArgMatches) -> Result { - let table_names = matches - .get_many::("table-name") - .unwrap_or_default() - .map(|table_name_regex| regex::Regex::new(table_name_regex).map(Into::into)) - .collect::, _>>()?; + if 
self.print_schema.has_multiple_schema { + let selected_schema_keys = + get_values_with_indices::(matches, "schema-key")?.unwrap_or_default(); + let table_names_with_indices = + get_values_with_indices::(matches, "table-name")?; + let only_tables_with_indices = get_values_with_indices::(matches, "only-tables")?; + let except_tables_with_indices = + get_values_with_indices::(matches, "except-tables")?; - if matches.get_flag("only-tables") { - self.print_schema.filter = Filtering::OnlyTables(table_names) - } else if matches.get_flag("except-tables") { - self.print_schema.filter = Filtering::ExceptTables(table_names) + for (key, boundary) in selected_schema_keys.values().cloned().zip( + selected_schema_keys + .keys() + .cloned() + .map(Bound::Included) + .zip( + selected_schema_keys + .keys() + .cloned() + .skip(1) + .map(Bound::Excluded) + .chain(iter::once(Bound::Unbounded)), + ), + ) { + let print_schema = self + .print_schema + .all_configs + .get_mut(&key) + .ok_or(crate::errors::Error::NoSchemaKeyFound(key))?; + if let Some(table_names_with_indices) = table_names_with_indices.clone() { + let table_names = table_names_with_indices + .range(boundary) + .map(|(_, v)| v.clone()) + .map(|table_name_regex| { + regex::Regex::new(&table_name_regex).map(Into::into) + }) + .collect::, _>>()?; + if table_names.is_empty() { + continue; + } + if only_tables_with_indices + .clone() + .and_then(|only_tables_with_indices| { + only_tables_with_indices + .range(boundary) + .nth(0) + .map(|v| *v.1) + }) + .unwrap_or(false) + { + print_schema.filter = Filtering::OnlyTables(table_names.clone()); + } + if except_tables_with_indices + .clone() + .and_then(|except_tables_with_indices| { + except_tables_with_indices + .range(boundary) + .nth(0) + .map(|v| *v.1) + }) + .unwrap_or(false) + { + print_schema.filter = Filtering::ExceptTables(table_names); + } + } + } + } else { + let print_schema = self + .print_schema + .all_configs + .entry("default".to_string()) + 
.or_insert(PrintSchema::default().set_filter(matches)?); + let print_schema = print_schema.clone().set_filter(matches)?; + self.print_schema + .all_configs + .entry("default".to_string()) + .and_modify(|v| *v = print_schema); } + Ok(self) + } + + pub fn update_config(mut self, matches: &ArgMatches) -> Result { + if self.print_schema.has_multiple_schema { + if let Some(selected_schema_keys) = + get_values_with_indices::(matches, "schema-key")? + { + let schema_with_indices = get_values_with_indices::(matches, "schema")?; + let with_docs_with_indices = get_values_with_indices::(matches, "with-docs")?; + let with_docs_config_with_indices = + get_values_with_indices::(matches, "with-docs-config")?; + let patch_file_with_indices = + get_values_with_indices::(matches, "patch-file")?; + let column_sorting_with_indices = + get_values_with_indices::(matches, "column-sorting")?; + let import_types_with_indices = + get_values_with_indices::(matches, "import-types")?; + let generate_custom_type_definitions_with_indices = + get_values_with_indices::(matches, "generate-custom-type-definitions")?; + let custom_type_derives_with_indices = + get_values_with_indices::(matches, "custom-type-derives")?; + let sqlite_integer_primary_key_is_bigint_with_indices = + get_values_with_indices::( + matches, + "sqlite-integer-primary-key-is-bigint", + )?; + for (key, boundary) in selected_schema_keys.values().cloned().zip( + selected_schema_keys + .keys() + .cloned() + .map(Bound::Included) + .zip( + selected_schema_keys + .keys() + .cloned() + .skip(1) + .map(Bound::Excluded) + .chain(iter::once(Bound::Unbounded)), + ), + ) { + let print_schema = self + .print_schema + .all_configs + .get_mut(&key) + .ok_or(crate::errors::Error::NoSchemaKeyFound(key))?; + if let Some(schema) = schema_with_indices + .clone() + .and_then(|v| v.range(boundary).nth(0).map(|v| v.1.clone())) + { + print_schema.schema = Some(schema) + } + if with_docs_with_indices + .clone() + .and_then(|with_docs_with_indices| { + 
with_docs_with_indices.range(boundary).nth(0).map(|v| *v.1) + }) + .unwrap_or(false) + { + print_schema.with_docs = + DocConfig::DatabaseCommentsFallbackToAutoGeneratedDocComment; + } + + if let Some(doc_config) = with_docs_config_with_indices + .clone() + .and_then(|v| v.range(boundary).nth(0).map(|v| v.1.clone())) + { + print_schema.with_docs = doc_config.parse().map_err(|_| { + crate::errors::Error::UnsupportedFeature(format!( + "Invalid documentation config mode: {doc_config}" + )) + })?; + } + + if let Some(sorting) = column_sorting_with_indices + .clone() + .and_then(|v| v.range(boundary).nth(0).map(|v| v.1.clone())) + { + match sorting.as_str() { + "ordinal_position" => { + print_schema.column_sorting = ColumnSorting::OrdinalPosition + } + "name" => print_schema.column_sorting = ColumnSorting::Name, + _ => { + return Err(crate::errors::Error::UnsupportedFeature(format!( + "Invalid column sorting mode: {sorting}" + ))) + } + } + } + + if let Some(patch_file) = patch_file_with_indices + .clone() + .and_then(|v| v.range(boundary).nth(0).map(|v| v.1.clone())) + { + print_schema.patch_file = Some(patch_file); + } + + let import_types = import_types_with_indices + .clone() + .map(|v| v.range(boundary).map(|v| v.1.clone()).collect()) + .unwrap_or(vec![]); + if !import_types.is_empty() { + print_schema.import_types = Some(import_types); + } + + if generate_custom_type_definitions_with_indices + .clone() + .and_then(|generate_custom_type_definitions_with_indices| { + generate_custom_type_definitions_with_indices + .range(boundary) + .nth(0) + .map(|v| *v.1) + }) + .unwrap_or(false) + { + print_schema.generate_missing_sql_type_definitions = Some(false) + } + + let custom_type_derives = custom_type_derives_with_indices + .clone() + .map(|v| v.range(boundary).map(|v| v.1.clone()).collect()) + .unwrap_or(vec![]); + if !custom_type_derives.is_empty() { + print_schema.custom_type_derives = Some(custom_type_derives); + } + if let Some(sqlite_integer_primary_key_is_bigint) = 
+ sqlite_integer_primary_key_is_bigint_with_indices + .clone() + .and_then(|with_docs_with_indices| { + with_docs_with_indices.range(boundary).nth(0).map(|v| *v.1) + }) + { + print_schema.sqlite_integer_primary_key_is_bigint = + Some(sqlite_integer_primary_key_is_bigint); + } + } + } + } else { + let config = match self.print_schema.all_configs.entry("default".to_string()) { + Entry::Vacant(entry) => entry.insert(PrintSchema::default()), + Entry::Occupied(entry) => entry.into_mut(), + }; + if let Some(schema_name) = matches.get_one::("schema") { + config.schema = Some(schema_name.clone()) + } + if matches.get_flag("with-docs") { + config.with_docs = DocConfig::DatabaseCommentsFallbackToAutoGeneratedDocComment; + } + if let Some(doc_config) = matches.get_one::("with-docs-config") { + config.with_docs = doc_config.parse().map_err(|_| { + crate::errors::Error::UnsupportedFeature(format!( + "Invalid documentation config mode: {doc_config}" + )) + })?; + } + if let Some(sorting) = matches.get_one::("column-sorting") { + match sorting as &str { + "ordinal_position" => config.column_sorting = ColumnSorting::OrdinalPosition, + "name" => config.column_sorting = ColumnSorting::Name, + _ => { + return Err(crate::errors::Error::UnsupportedFeature(format!( + "Invalid column sorting mode: {sorting}" + ))) + } + } + } + + if let Some(path) = matches.get_one::("patch-file") { + config.patch_file = Some(path.clone()); + } + + if let Some(types) = matches.get_many("import-types") { + let types = types.cloned().collect(); + config.import_types = Some(types); + } + + if matches.get_flag("generate-custom-type-definitions") { + config.generate_missing_sql_type_definitions = Some(false); + } + + if let Some(derives) = matches.get_many("custom-type-derives") { + let derives = derives.cloned().collect(); + config.custom_type_derives = Some(derives); + } + if matches.get_flag("sqlite-integer-primary-key-is-bigint") { + config.sqlite_integer_primary_key_is_bigint = Some(true); + } + } 
Ok(self) } } -#[derive(Default, Deserialize, Debug)] +#[derive(Default, Clone, Debug)] +pub struct RootPrintSchema { + has_multiple_schema: bool, + pub all_configs: BTreeMap, +} + +impl<'de> Deserialize<'de> for RootPrintSchema { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct Inner { + #[serde(flatten)] + default_config: PrintSchema, + #[serde(flatten)] + other_configs: BTreeMap, + } + let Inner { + other_configs, + default_config, + } = Inner::deserialize(deserializer)?; + if other_configs.is_empty() { + Ok(RootPrintSchema { + has_multiple_schema: false, + all_configs: BTreeMap::from([("default".into(), default_config)]), + }) + } else { + let mut other_configs = other_configs; + other_configs + .entry("default".to_string()) + .or_insert(default_config); + Ok(RootPrintSchema { + all_configs: other_configs, + has_multiple_schema: true, + }) + } + } +} + +impl RootPrintSchema { + fn set_relative_path_base(&mut self, base: &Path) { + for config in self.all_configs.values_mut() { + config.set_relative_path_base(base); + } + } +} + +#[derive(Default, Deserialize, Clone, Debug)] #[serde(deny_unknown_fields)] pub struct PrintSchema { #[serde(default)] @@ -90,6 +416,8 @@ pub struct PrintSchema { pub generate_missing_sql_type_definitions: Option, #[serde(default)] pub custom_type_derives: Option>, + #[serde(default)] + pub sqlite_integer_primary_key_is_bigint: Option, } impl PrintSchema { @@ -135,6 +463,29 @@ impl PrintSchema { derives } } + + pub fn set_filter(mut self, matches: &ArgMatches) -> Result { + let table_names = matches + .get_many::("table-name") + .unwrap_or_default() + .map(|table_name_regex| regex::Regex::new(table_name_regex).map(Into::into)) + .collect::, _>>()?; + + if matches + .try_get_one::("only-tables")? + .cloned() + .unwrap_or(false) + { + self.filter = Filtering::OnlyTables(table_names) + } else if matches + .try_get_one::("except-tables")? 
+ .cloned() + .unwrap_or(false) + { + self.filter = Filtering::ExceptTables(table_names) + } + Ok(self) + } } #[derive(Default, Deserialize, Debug)] diff --git a/diesel_cli/src/errors.rs b/diesel_cli/src/errors.rs index 9bf7299c2226..4d6d48cd38e3 100644 --- a/diesel_cli/src/errors.rs +++ b/diesel_cli/src/errors.rs @@ -58,4 +58,8 @@ pub enum Error { ColumnLiteralParseError(syn::Error), #[error("Failed to parse database url: {0}")] UrlParsingError(#[from] url::ParseError), + #[error("Failed to parse CLI parameter: {0}")] + ClapMatchesError(#[from] clap::parser::MatchesError), + #[error("No `[print_schema.{0}]` entries in your diesel.toml")] + NoSchemaKeyFound(String), } diff --git a/diesel_cli/src/infer_schema_internals/inference.rs b/diesel_cli/src/infer_schema_internals/inference.rs index cf7650e0573c..2e95ebbb5c1f 100644 --- a/diesel_cli/src/infer_schema_internals/inference.rs +++ b/diesel_cli/src/infer_schema_internals/inference.rs @@ -3,7 +3,7 @@ use diesel::result::Error::NotFound; use super::data_structures::*; use super::table_data::*; -use crate::config::Filtering; +use crate::config::{Filtering, PrintSchema}; use crate::database::InferConnection; use crate::print_schema::{ColumnSorting, DocConfig}; @@ -185,10 +185,15 @@ fn get_column_information( fn determine_column_type( attr: &ColumnInformation, conn: &mut InferConnection, + #[allow(unused_variables)] table: &TableName, + #[allow(unused_variables)] primary_keys: &[String], + #[allow(unused_variables)] config: &PrintSchema, ) -> Result { match *conn { #[cfg(feature = "sqlite")] - InferConnection::Sqlite(_) => super::sqlite::determine_column_type(attr), + InferConnection::Sqlite(ref mut conn) => { + super::sqlite::determine_column_type(conn, attr, table, primary_keys, config) + } #[cfg(feature = "postgres")] InferConnection::Pg(ref mut conn) => { use crate::infer_schema_internals::information_schema::DefaultSchema; @@ -259,11 +264,10 @@ pub fn load_foreign_key_constraints( pub fn load_table_data( 
connection: &mut InferConnection, name: TableName, - column_sorting: &ColumnSorting, - with_docs: DocConfig, + config: &PrintSchema, ) -> Result { // No point in loading table comments if they are not going to be displayed - let table_comment = match with_docs { + let table_comment = match config.with_docs { DocConfig::NoDocComments => None, DocConfig::OnlyDatabaseComments | DocConfig::DatabaseCommentsFallbackToAutoGeneratedDocComment => { @@ -272,15 +276,11 @@ pub fn load_table_data( }; let primary_key = get_primary_keys(connection, &name)?; - let primary_key = primary_key - .iter() - .map(|k| rust_name_for_sql_name(k)) - .collect(); - let column_data = get_column_information(connection, &name, column_sorting)? + let column_data = get_column_information(connection, &name, &config.column_sorting)? .into_iter() .map(|c| { - let ty = determine_column_type(&c, connection)?; + let ty = determine_column_type(&c, connection, &name, &primary_key, config)?; let ColumnInformation { column_name, @@ -298,6 +298,11 @@ pub fn load_table_data( }) .collect::>()?; + let primary_key = primary_key + .iter() + .map(|k| rust_name_for_sql_name(k)) + .collect::>(); + Ok(TableData { name, primary_key, diff --git a/diesel_cli/src/infer_schema_internals/mysql.rs b/diesel_cli/src/infer_schema_internals/mysql.rs index c1ec114b05f7..0a46e460aad5 100644 --- a/diesel_cli/src/infer_schema_internals/mysql.rs +++ b/diesel_cli/src/infer_schema_internals/mysql.rs @@ -1,5 +1,5 @@ -use diesel::deserialize::{self, FromStaticSqlRow, Queryable}; -use diesel::mysql::{Mysql, MysqlConnection}; +use diesel::deserialize::FromStaticSqlRow; +use diesel::mysql::Mysql; use diesel::*; use heck::ToUpperCamelCase; use std::borrow::Cow; diff --git a/diesel_cli/src/infer_schema_internals/pg.rs b/diesel_cli/src/infer_schema_internals/pg.rs index 38f8bae1ed15..e991129aa592 100644 --- a/diesel_cli/src/infer_schema_internals/pg.rs +++ b/diesel_cli/src/infer_schema_internals/pg.rs @@ -3,7 +3,7 @@ use 
super::information_schema::DefaultSchema; use super::TableName; use crate::print_schema::ColumnSorting; use diesel::connection::DefaultLoadingMode; -use diesel::deserialize::{self, FromStaticSqlRow, Queryable}; +use diesel::deserialize::{self, FromStaticSqlRow}; use diesel::dsl::AsExprOf; use diesel::expression::AsExpression; use diesel::pg::Pg; diff --git a/diesel_cli/src/infer_schema_internals/sqlite.rs b/diesel_cli/src/infer_schema_internals/sqlite.rs index 3fc16779abdf..c89a9ffdfd56 100644 --- a/diesel_cli/src/infer_schema_internals/sqlite.rs +++ b/diesel_cli/src/infer_schema_internals/sqlite.rs @@ -1,4 +1,6 @@ -use diesel::deserialize::{self, Queryable}; +use std::fmt; + +use diesel::deserialize::Queryable; use diesel::dsl::sql; use diesel::row::NamedRow; use diesel::sql_types::{Bool, Text}; @@ -7,6 +9,7 @@ use diesel::*; use super::data_structures::*; use super::table_data::TableName; +use crate::config::PrintSchema; use crate::print_schema::ColumnSorting; table! { @@ -100,6 +103,12 @@ impl SqliteVersion { } } +impl fmt::Display for SqliteVersion { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}.{}.{}", self.major, self.minor, self.patch) + } +} + fn get_sqlite_version(conn: &mut SqliteConnection) -> QueryResult { let query = "SELECT sqlite_version()"; let result = sql::(query).load::(conn)?; @@ -114,6 +123,13 @@ fn get_sqlite_version(conn: &mut SqliteConnection) -> QueryResult Ok(SqliteVersion::new(parts[0], parts[1], parts[2])) } +// In sqlite the rowid is a signed 64-bit integer. +// See: https://sqlite.org/rowidtable.html +// We should use BigInt here but to avoid type problems with foreign keys to +// rowid columns this is for now not done. A patch can be used after the schema +// is generated to convert the columns to BigInt as needed. 
+const ROWID_TYPE_NAME: &str = "Integer"; + pub fn get_table_data( conn: &mut SqliteConnection, table: &TableName, @@ -134,6 +150,27 @@ pub fn get_table_data( // See: https://github.com/diesel-rs/diesel/issues/3579 as to why we use a direct // `sql_query` with `QueryableByName` instead of using `sql::`. let mut result = sql_query(query).load::(conn)?; + // Add implicit rowid primary key column if the only primary key is rowid + // and ensure that the rowid column uses the right type. + let primary_key = get_primary_keys(conn, table)?; + if primary_key.len() == 1 { + let primary_key = primary_key.first().expect("guaranteed to have one element"); + if !result.iter_mut().any(|x| &x.column_name == primary_key) { + // Prepend implicit rowid column for the rowid implicit primary key. + result.insert( + 0, + ColumnInformation { + column_name: String::from(primary_key), + type_name: String::from(ROWID_TYPE_NAME), + type_schema: None, + nullable: false, + max_length: None, + comment: None, + }, + ); + } + } + match column_sorting { ColumnSorting::OrdinalPosition => {} ColumnSorting::Name => { @@ -176,6 +213,55 @@ impl QueryableByName for PrimaryKeyInformation { } } +struct WithoutRowIdInformation { + name: String, + without_row_id: bool, +} + +impl QueryableByName for WithoutRowIdInformation { + fn build<'a>(row: &impl NamedRow<'a, Sqlite>) -> deserialize::Result { + Ok(Self { + name: NamedRow::get::(row, "name")?, + without_row_id: NamedRow::get::(row, "wr")?, + }) + } +} + +pub fn column_is_row_id( + conn: &mut SqliteConnection, + table: &TableName, + primary_keys: &[String], + column_name: &str, + type_name: &str, +) -> Result { + let sqlite_version = get_sqlite_version(conn)?; + if sqlite_version < SqliteVersion::new(3, 37, 0) { + return Err(crate::errors::Error::UnsupportedFeature(format!( + "Parameter `sqlite_integer_primary_key_is_bigint` needs SQLite 3.37 or above. \ + Your current SQLite version is {sqlite_version}." 
+ ))); + } + + if type_name != "integer" { + return Ok(false); + } + + if !matches!(primary_keys, [pk] if pk == column_name) { + return Ok(false); + } + + let table_list_query = format!("PRAGMA TABLE_LIST('{}')", &table.sql_name); + let table_list_results = sql_query(table_list_query).load::(conn)?; + + let res = table_list_results + .iter() + .find(|wr_info| wr_info.name == table.sql_name) + .map(|wr_info| !wr_info.without_row_id) + .unwrap_or_default(); + + Ok(res) +} + #[derive(Queryable)] struct ForeignKeyListRow { _id: i32, @@ -232,8 +318,14 @@ pub fn get_primary_keys( Ok(collected) } -#[tracing::instrument] -pub fn determine_column_type(attr: &ColumnInformation) -> Result { +#[tracing::instrument(skip(conn))] +pub fn determine_column_type( + conn: &mut SqliteConnection, + attr: &ColumnInformation, + table: &TableName, + primary_keys: &[String], + config: &PrintSchema, +) -> Result { let mut type_name = attr.type_name.to_lowercase(); if type_name == "generated always" { type_name.clear(); @@ -246,7 +338,17 @@ pub fn determine_column_type(attr: &ColumnInformation) -> Result>(); + + let on_column_types = column_infos + .iter() + .map(|column_info| { + ( + column_info.column_name.as_str(), + determine_column_type( + &mut conn, + column_info, + &table, + &primary_keys, + &PrintSchema { + sqlite_integer_primary_key_is_bigint: Some(true), + ..Default::default() + }, + ) + .unwrap() + .sql_name, + ) + }) + .collect::>(); + + assert_eq!( + (table_name, off_column_types), + (table_name, expected_off_types) + ); + + assert_eq!( + (table_name, on_column_types), + (table_name, expected_on_types) + ); + } +} diff --git a/diesel_cli/src/main.rs b/diesel_cli/src/main.rs index 045248292c85..778fe92e3ca1 100644 --- a/diesel_cli/src/main.rs +++ b/diesel_cli/src/main.rs @@ -96,8 +96,8 @@ where #[tracing::instrument] fn run_setup_command(matches: &ArgMatches) -> Result<(), crate::errors::Error> { - create_config_file(matches)?; let migrations_dir = 
create_migrations_dir(matches)?; + create_config_file(matches, &migrations_dir)?; database::setup_database(matches, &migrations_dir)?; Ok(()) @@ -136,12 +136,22 @@ fn create_migrations_dir(matches: &ArgMatches) -> Result Result<(), crate::errors::Error> { +fn create_config_file( + matches: &ArgMatches, + migrations_dir: &Path, +) -> Result<(), crate::errors::Error> { use std::io::Write; let path = Config::file_path(matches); if !path.exists() { + let source_content = include_str!("default_files/diesel.toml").to_string(); + // convert the path to a valid toml string (escaping backslashes on windows) + let migrations_dir_toml_string = migrations_dir.display().to_string().replace('\\', "\\\\"); + let modified_content = source_content.replace( + "dir = \"migrations\"", + &format!("dir = \"{}\"", migrations_dir_toml_string), + ); let mut file = fs::File::create(path)?; - file.write_all(include_bytes!("default_files/diesel.toml"))?; + file.write_all(modified_content.as_bytes())?; } Ok(()) @@ -183,7 +193,7 @@ fn generate_completions_command(matches: &ArgMatches) { /// Returns a `DatabaseError::ProjectRootNotFound` if no Cargo.toml is found. 
fn create_migrations_directory(path: &Path) -> Result { println!("Creating migrations directory at: {}", path.display()); - fs::create_dir(path)?; + fs::create_dir_all(path)?; fs::File::create(path.join(".keep"))?; Ok(path.to_owned()) } @@ -236,57 +246,15 @@ fn run_infer_schema(matches: &ArgMatches) -> Result<(), crate::errors::Error> { use crate::print_schema::*; tracing::info!("Infer schema"); - let mut conn = InferConnection::from_matches(matches)?; - let mut config = Config::read(matches)?.set_filter(matches)?.print_schema; - - if let Some(schema_name) = matches.get_one::("schema") { - config.schema = Some(schema_name.clone()) - } - - if matches.get_flag("with-docs") { - config.with_docs = DocConfig::DatabaseCommentsFallbackToAutoGeneratedDocComment; - } - - if let Some(doc_config) = matches.get_one::("with-docs-config") { - config.with_docs = doc_config.parse().map_err(|_| { - crate::errors::Error::UnsupportedFeature(format!( - "Invalid documentation config mode: {doc_config}" - )) - })?; - } - - if let Some(sorting) = matches.get_one::("column-sorting") { - match sorting as &str { - "ordinal_position" => config.column_sorting = ColumnSorting::OrdinalPosition, - "name" => config.column_sorting = ColumnSorting::Name, - _ => { - return Err(crate::errors::Error::UnsupportedFeature(format!( - "Invalid column sorting mode: {sorting}" - ))) - } - } - } - - if let Some(path) = matches.get_one::("patch-file") { - config.patch_file = Some(path.clone()); + let root_config = Config::read(matches)? + .set_filter(matches)? + .update_config(matches)? 
+ .print_schema; + for config in root_config.all_configs.values() { + run_print_schema(&mut conn, config, &mut stdout())?; } - if let Some(types) = matches.get_many("import-types") { - let types = types.cloned().collect(); - config.import_types = Some(types); - } - - if matches.get_flag("generate-custom-type-definitions") { - config.generate_missing_sql_type_definitions = Some(false); - } - - if let Some(derives) = matches.get_many("custom-type-derives") { - let derives = derives.cloned().collect(); - config.custom_type_derives = Some(derives); - } - - run_print_schema(&mut conn, &config, &mut stdout())?; Ok(()) } @@ -297,33 +265,36 @@ fn regenerate_schema_if_file_specified(matches: &ArgMatches) -> Result<(), crate tracing::debug!("Regenerate schema if required"); let config = Config::read(matches)?.print_schema; - if let Some(ref path) = config.file { - let mut connection = InferConnection::from_matches(matches)?; - if let Some(parent) = path.parent() { - fs::create_dir_all(parent)?; - } - - if matches.get_flag("LOCKED_SCHEMA") { - let mut buf = Vec::new(); - print_schema::run_print_schema(&mut connection, &config, &mut buf)?; - - let mut old_buf = Vec::new(); - let mut file = fs::File::open(path)?; - file.read_to_end(&mut old_buf)?; - - if buf != old_buf { - return Err(crate::errors::Error::SchemaWouldChange( - path.display().to_string(), - )); + for config in config.all_configs.values() { + if let Some(ref path) = config.file { + let mut connection = InferConnection::from_matches(matches)?; + if let Some(parent) = path.parent() { + fs::create_dir_all(parent)?; } - } else { - use std::io::Write; - let mut file = fs::File::create(path)?; - let schema = print_schema::output_schema(&mut connection, &config)?; - file.write_all(schema.as_bytes())?; + if matches.get_flag("LOCKED_SCHEMA") { + let mut buf = Vec::new(); + print_schema::run_print_schema(&mut connection, config, &mut buf)?; + + let mut old_buf = Vec::new(); + let mut file = fs::File::open(path)?; + 
file.read_to_end(&mut old_buf)?; + + if buf != old_buf { + return Err(crate::errors::Error::SchemaWouldChange( + path.display().to_string(), + )); + } + } else { + use std::io::Write; + + let mut file = fs::File::create(path)?; + let schema = print_schema::output_schema(&mut connection, config)?; + file.write_all(schema.as_bytes())?; + } } } + Ok(()) } diff --git a/diesel_cli/src/migrations/diff_schema.rs b/diesel_cli/src/migrations/diff_schema.rs index 08c05f15d52d..08bce704fb07 100644 --- a/diesel_cli/src/migrations/diff_schema.rs +++ b/diesel_cli/src/migrations/diff_schema.rs @@ -9,13 +9,13 @@ use std::io::Read; use std::path::Path; use syn::visit::Visit; -use crate::config::Config; +use crate::config::PrintSchema; use crate::database::InferConnection; use crate::infer_schema_internals::{ filter_table_names, load_table_names, ColumnDefinition, ColumnType, ForeignKeyConstraint, TableData, TableName, }; -use crate::print_schema::DocConfig; +use crate::print_schema::{ColumnSorting, DocConfig}; fn compatible_type_list() -> HashMap<&'static str, Vec<&'static str>> { let mut map = HashMap::new(); @@ -28,11 +28,11 @@ fn compatible_type_list() -> HashMap<&'static str, Vec<&'static str>> { #[tracing::instrument] pub fn generate_sql_based_on_diff_schema( - config: Config, + config: PrintSchema, matches: &ArgMatches, schema_file_path: &Path, ) -> Result<(String, String), crate::errors::Error> { - let config = config.set_filter(matches)?; + let mut config = config.set_filter(matches)?; let project_root = crate::find_project_root()?; @@ -46,10 +46,6 @@ pub fn generate_sql_based_on_diff_schema( tables_from_schema.visit_file(&syn_file); let mut conn = InferConnection::from_matches(matches)?; - let tables_from_database = filter_table_names( - load_table_names(&mut conn, None)?, - &config.print_schema.filter, - ); let foreign_keys = crate::infer_schema_internals::load_foreign_key_constraints(&mut conn, None)?; @@ -87,17 +83,28 @@ pub fn generate_sql_based_on_diff_schema( 
table_pk_key_list.insert(t.table_name.to_string(), keys); expected_schema_map.insert(t.table_name.to_string(), t); } + config.with_docs = DocConfig::NoDocComments; + config.column_sorting = ColumnSorting::OrdinalPosition; + + // Parameter `sqlite_integer_primary_key_is_bigint` is only used for a SQLite connection + match conn { + #[cfg(feature = "postgres")] + InferConnection::Pg(_) => config.sqlite_integer_primary_key_is_bigint = None, + #[cfg(feature = "sqlite")] + InferConnection::Sqlite(_) => (), + #[cfg(feature = "mysql")] + InferConnection::Mysql(_) => { + config.sqlite_integer_primary_key_is_bigint = None; + } + } let mut schema_diff = Vec::new(); - + let table_names = load_table_names(&mut conn, None)?; + let tables_from_database = filter_table_names(table_names.clone(), &config.filter); for table in tables_from_database { tracing::info!(?table, "Diff for existing table"); - let columns = crate::infer_schema_internals::load_table_data( - &mut conn, - table.clone(), - &crate::print_schema::ColumnSorting::OrdinalPosition, - DocConfig::NoDocComments, - )?; + let columns = + crate::infer_schema_internals::load_table_data(&mut conn, table.clone(), &config)?; if let Some(t) = expected_schema_map.remove(&table.sql_name.to_lowercase()) { tracing::info!(table = ?t.sql_name, "Table exists in schema.rs"); let mut primary_keys_in_db = @@ -208,19 +215,19 @@ pub fn generate_sql_based_on_diff_schema( #[cfg(feature = "postgres")] InferConnection::Pg(_) => { let mut qb = diesel::pg::PgQueryBuilder::default(); - diff.generate_up_sql(&mut qb)?; + diff.generate_up_sql(&mut qb, &config)?; qb.finish() } #[cfg(feature = "sqlite")] InferConnection::Sqlite(_) => { let mut qb = diesel::sqlite::SqliteQueryBuilder::default(); - diff.generate_up_sql(&mut qb)?; + diff.generate_up_sql(&mut qb, &config)?; qb.finish() } #[cfg(feature = "mysql")] InferConnection::Mysql(_) => { let mut qb = diesel::mysql::MysqlQueryBuilder::default(); - diff.generate_up_sql(&mut qb)?; + 
diff.generate_up_sql(&mut qb, &config)?; qb.finish() } }; @@ -229,19 +236,19 @@ pub fn generate_sql_based_on_diff_schema( #[cfg(feature = "postgres")] InferConnection::Pg(_) => { let mut qb = diesel::pg::PgQueryBuilder::default(); - diff.generate_down_sql(&mut qb)?; + diff.generate_down_sql(&mut qb, &config)?; qb.finish() } #[cfg(feature = "sqlite")] InferConnection::Sqlite(_) => { let mut qb = diesel::sqlite::SqliteQueryBuilder::default(); - diff.generate_down_sql(&mut qb)?; + diff.generate_down_sql(&mut qb, &config)?; qb.finish() } #[cfg(feature = "mysql")] InferConnection::Mysql(_) => { let mut qb = diesel::mysql::MysqlQueryBuilder::default(); - diff.generate_down_sql(&mut qb)?; + diff.generate_down_sql(&mut qb, &config)?; qb.finish() } }; @@ -311,6 +318,7 @@ impl SchemaDiff { fn generate_up_sql( &self, query_builder: &mut impl QueryBuilder, + config: &PrintSchema, ) -> Result<(), crate::errors::Error> where DB: Backend, @@ -352,12 +360,18 @@ impl SchemaDiff { ) }) .collect::>(); + + let sqlite_integer_primary_key_is_bigint = config + .sqlite_integer_primary_key_is_bigint + .unwrap_or_default(); + generate_create_table( query_builder, table, &column_data, &primary_keys, &foreign_keys, + sqlite_integer_primary_key_is_bigint, )?; } SchemaDiff::ChangeTable { @@ -390,7 +404,11 @@ impl SchemaDiff { Ok(()) } - fn generate_down_sql(&self, query_builder: &mut impl QueryBuilder) -> QueryResult<()> + fn generate_down_sql( + &self, + query_builder: &mut impl QueryBuilder, + config: &PrintSchema, + ) -> QueryResult<()> where DB: Backend, { @@ -411,12 +429,17 @@ impl SchemaDiff { }) .collect::>(); + let sqlite_integer_primary_key_is_bigint = config + .sqlite_integer_primary_key_is_bigint + .unwrap_or_default(); + generate_create_table( query_builder, &table.sql_name.to_lowercase(), &columns.column_data, &columns.primary_key, &fk, + sqlite_integer_primary_key_is_bigint, )?; } SchemaDiff::CreateTable { to_create, .. 
} => { @@ -428,6 +451,9 @@ removed_columns, changed_columns, } => { + // We don't need to check the `sqlite_integer_primary_key_is_bigint` parameter here + // since `ALTER TABLE` queries cannot modify primary key columns in SQLite. + // See https://www.sqlite.org/lang_altertable.html#alter_table_add_column for more information. for c in added_columns .iter() .chain(changed_columns.iter().map(|(_, b)| b)) @@ -498,6 +524,7 @@ fn generate_create_table( column_data: &[ColumnDefinition], primary_keys: &[String], foreign_keys: &[(String, String, String)], + sqlite_integer_primary_key_is_bigint: bool, ) -> QueryResult<()> where DB: Backend, @@ -514,11 +541,35 @@ where query_builder.push_sql(",\n"); } query_builder.push_sql("\t"); + + let is_only_primary_key = + primary_keys.contains(&column.rust_name) && primary_keys.len() == 1; + query_builder.push_identifier(&column.sql_name)?; - generate_column_type_name(query_builder, &column.ty); - if primary_keys.contains(&column.rust_name) && primary_keys.len() == 1 { + + // When the `sqlite_integer_primary_key_is_bigint` config parameter is used, + // if a column is the only primary key and its type is `BigInt`, + // we consider it equivalent to the `rowid` column in order to be compatible + // with the `print-schema` command using the same config parameter. + // See https://www.sqlite.org/lang_createtable.html#rowid for more information.
+ if sqlite_integer_primary_key_is_bigint + && is_only_primary_key + && column.ty.sql_name.eq_ignore_ascii_case("BigInt") + { + let ty = ColumnType { + rust_name: "Integer".into(), + sql_name: "Integer".into(), + ..column.ty.clone() + }; + generate_column_type_name(query_builder, &ty); + } else { + generate_column_type_name(query_builder, &column.ty); + } + + if is_only_primary_key { query_builder.push_sql(" PRIMARY KEY"); } + if let Some((table, _, pk)) = foreign_keys.iter().find(|(_, k, _)| k == &column.rust_name) { foreign_key_list.push((column, table, pk)); } diff --git a/diesel_cli/src/migrations/mod.rs b/diesel_cli/src/migrations/mod.rs index f4785795594b..8ed0810ca923 100644 --- a/diesel_cli/src/migrations/mod.rs +++ b/diesel_cli/src/migrations/mod.rs @@ -92,14 +92,32 @@ pub(super) fn run_migration_command(matches: &ArgMatches) -> Result<(), crate::e let (up_sql, down_sql) = if let Some(diff_schema) = args.get_one::("SCHEMA_RS") { let config = Config::read(matches)?; + let mut print_schema = + if let Some(schema_key) = args.get_one::("schema-key") { + config + .print_schema + .all_configs + .get(schema_key) + .ok_or(crate::errors::Error::NoSchemaKeyFound(schema_key.clone()))? + } else { + config + .print_schema + .all_configs + .get("default") + .ok_or(crate::errors::Error::NoSchemaKeyFound("default".into()))? + } + .clone(); let diff_schema = if diff_schema == "NOT_SET" { - config.print_schema.file.clone() + print_schema.file.clone() } else { Some(PathBuf::from(diff_schema)) }; + if args.get_flag("sqlite-integer-primary-key-is-bigint") { + print_schema.sqlite_integer_primary_key_is_bigint = Some(true); + } if let Some(diff_schema) = diff_schema { self::diff_schema::generate_sql_based_on_diff_schema( - config, + print_schema, args, &diff_schema, )? 
diff --git a/diesel_cli/src/print_schema.rs b/diesel_cli/src/print_schema.rs index 5217192dd345..3f9deda7bf74 100644 --- a/diesel_cli/src/print_schema.rs +++ b/diesel_cli/src/print_schema.rs @@ -10,7 +10,7 @@ use std::io::Write as IoWrite; const SCHEMA_HEADER: &str = "// @generated automatically by Diesel CLI.\n"; /// How to sort columns when querying the table schema. -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, Clone, Copy)] pub enum ColumnSorting { /// Order by ordinal position #[serde(rename = "ordinal_position")] @@ -157,7 +157,7 @@ pub fn output_schema( remove_unsafe_foreign_keys_for_codegen(connection, &foreign_keys, &table_names); let table_data = table_names .into_iter() - .map(|t| load_table_data(connection, t, &config.column_sorting, config.with_docs)) + .map(|t| load_table_data(connection, t, config)) .collect::, crate::errors::Error>>()?; let mut out = String::new(); diff --git a/diesel_cli/tests/database_setup.rs b/diesel_cli/tests/database_setup.rs index ffedc1630a20..947ee706b149 100644 --- a/diesel_cli/tests/database_setup.rs +++ b/diesel_cli/tests/database_setup.rs @@ -152,6 +152,39 @@ fn database_setup_respects_migration_dir_by_arg() { assert!(db.table_exists("users")); } +#[test] +fn database_setup_respects_migration_nested_dir_by_arg() { + let p = project("database_setup_respects_migration_nested_dir_by_arg") + .folder("foo/bar") + .build(); + let db = database(&p.database_url()); + + p.create_migration_in_directory( + "foo/bar", + "12345_create_users_table", + "CREATE TABLE users ( id INTEGER )", + Some("DROP TABLE users"), + None, + ); + + // sanity check + assert!(!db.exists()); + + let result = p + .command("database") + .arg("setup") + .arg("--migration-dir=foo/bar") + .run(); + + assert!(result.is_success(), "Result was unsuccessful {:?}", result); + assert!( + result.stdout().contains("Running migration 12345"), + "Unexpected stdout {}", + result.stdout() + ); + assert!(db.table_exists("users")); +} 
+ #[test] fn database_setup_respects_migration_dir_by_env() { let p = project("database_setup_respects_migration_dir_by_env") diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/down.sql/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/down.sql/expected.snap new file mode 100644 index 000000000000..6bf6dc40cefc --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/down.sql/expected.snap @@ -0,0 +1,7 @@ +--- +source: diesel_cli/tests/migration_generate.rs +description: "Test: diff_add_table" +--- +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS `users`; + diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/initial_schema.sql b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/initial_schema.sql new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/initial_schema.sql @@ -0,0 +1 @@ + diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/schema_out.rs/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/schema_out.rs/expected.snap new file mode 100644 index 000000000000..a7c2e4fa010d --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/schema_out.rs/expected.snap @@ -0,0 +1,12 @@ +--- +source: diesel_cli/tests/migration_generate.rs +description: "Test: diff_add_table" +--- +// @generated automatically by Diesel CLI. + +diesel::table! 
{ + users (id) { + id -> Bigint, + name -> Text, + } +} diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/up.sql/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/up.sql/expected.snap new file mode 100644 index 000000000000..f384ae4bfca8 --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/mysql/up.sql/expected.snap @@ -0,0 +1,9 @@ +--- +source: diesel_cli/tests/migration_generate.rs +description: "Test: diff_add_table" +--- +-- Your SQL goes here +CREATE TABLE `users`( + `id` BIGINT NOT NULL PRIMARY KEY, + `name` TEXT NOT NULL +); diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/down.sql/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/down.sql/expected.snap new file mode 100644 index 000000000000..432f142620b2 --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/down.sql/expected.snap @@ -0,0 +1,8 @@ +--- +source: diesel_cli/tests/migration_generate.rs +assertion_line: 300 +description: "Test: diff_add_table" +--- +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS "users"; + diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/initial_schema.sql b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/initial_schema.sql new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/initial_schema.sql @@ -0,0 +1 @@ + diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/schema_out.rs/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/schema_out.rs/expected.snap new file mode 100644 index 000000000000..cb7df36b6aec --- /dev/null +++ 
b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/schema_out.rs/expected.snap @@ -0,0 +1,13 @@ +--- +source: diesel_cli/tests/migration_generate.rs +assertion_line: 323 +description: "Test: diff_add_table" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + users (id) { + id -> Int8, + name -> Text, + } +} diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/up.sql/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/up.sql/expected.snap new file mode 100644 index 000000000000..561ed4c34775 --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/postgres/up.sql/expected.snap @@ -0,0 +1,10 @@ +--- +source: diesel_cli/tests/migration_generate.rs +assertion_line: 295 +description: "Test: diff_add_table" +--- +-- Your SQL goes here +CREATE TABLE "users"( + "id" BIGINT NOT NULL PRIMARY KEY, + "name" TEXT NOT NULL +); diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/schema.rs b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/schema.rs new file mode 100644 index 000000000000..bcb1d8b16565 --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/schema.rs @@ -0,0 +1,6 @@ +table! 
{ + users { + id -> BigInt, + name -> Text, + } +} diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/down.sql/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/down.sql/expected.snap new file mode 100644 index 000000000000..6bf6dc40cefc --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/down.sql/expected.snap @@ -0,0 +1,7 @@ +--- +source: diesel_cli/tests/migration_generate.rs +description: "Test: diff_add_table" +--- +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS `users`; + diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/initial_schema.sql b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/initial_schema.sql new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/initial_schema.sql @@ -0,0 +1 @@ + diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/schema_out.rs/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/schema_out.rs/expected.snap new file mode 100644 index 000000000000..0f8d10baa63e --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/schema_out.rs/expected.snap @@ -0,0 +1,12 @@ +--- +source: diesel_cli/tests/migration_generate.rs +description: "Test: diff_add_table" +--- +// @generated automatically by Diesel CLI. + +diesel::table! 
{ + users (id) { + id -> BigInt, + name -> Text, + } +} diff --git a/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/up.sql/expected.snap b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/up.sql/expected.snap new file mode 100644 index 000000000000..a53abfe52623 --- /dev/null +++ b/diesel_cli/tests/generate_migrations/diff_add_table_sqlite_rowid_column/sqlite/up.sql/expected.snap @@ -0,0 +1,10 @@ +--- +source: diesel_cli/tests/migration_generate.rs +description: "Test: diff_add_table" +--- +-- Your SQL goes here +CREATE TABLE `users`( + `id` INTEGER NOT NULL PRIMARY KEY, + `name` TEXT NOT NULL +); + diff --git a/diesel_cli/tests/migration_generate.rs b/diesel_cli/tests/migration_generate.rs index 1c6deafa663b..fd2169b24615 100644 --- a/diesel_cli/tests/migration_generate.rs +++ b/diesel_cli/tests/migration_generate.rs @@ -220,6 +220,14 @@ fn migration_generate_from_diff_add_table() { test_generate_migration("diff_add_table", Vec::new()); } +#[test] +fn migration_generate_from_diff_add_table_sqlite_rowid_column() { + test_generate_migration( + "diff_add_table_sqlite_rowid_column", + vec!["--sqlite-integer-primary-key-is-bigint"], + ); +} + #[test] fn migration_generate_from_diff_drop_alter_table_add_column() { test_generate_migration("diff_alter_table_add_column", Vec::new()); diff --git a/diesel_cli/tests/print_schema.rs b/diesel_cli/tests/print_schema.rs index cdbdd16cafd1..1d660d8a28f5 100644 --- a/diesel_cli/tests/print_schema.rs +++ b/diesel_cli/tests/print_schema.rs @@ -250,6 +250,15 @@ fn print_schema_generated_columns_with_generated_always() { test_print_schema("print_schema_generated_columns_generated_always", vec![]) } +#[test] +#[cfg(feature = "sqlite")] +fn print_schema_sqlite_rowid_column() { + test_print_schema( + "print_schema_sqlite_rowid_column", + vec!["--sqlite-integer-primary-key-is-bigint"], + ) +} + #[test] #[cfg(feature = "postgres")] fn print_schema_multiple_annotations() { @@ 
-268,6 +277,12 @@ fn print_schema_sqlite_implicit_foreign_key_reference() { test_print_schema("print_schema_sqlite_implicit_foreign_key_reference", vec![]); } +#[test] +#[cfg(feature = "sqlite")] +fn print_schema_sqlite_without_explicit_primary_key() { + test_print_schema("print_schema_sqlite_without_explicit_primary_key", vec![]) +} + #[test] #[cfg(feature = "postgres")] fn print_schema_respects_type_name_case() { @@ -340,6 +355,27 @@ fn print_schema_quoted_schema_and_table_name() { ) } +#[test] +fn print_schema_with_multiple_schema() { + test_multiple_print_schema( + "print_schema_with_multiple_schema", + vec![ + "--schema-key", + "default", + "--schema-key", + "user1", + "-o", + "users1", + "--with-docs", + "--schema-key", + "user2", + "-o", + "users2", + "--with-docs", + ], + ) +} + #[cfg(feature = "sqlite")] const BACKEND: &str = "sqlite"; #[cfg(feature = "postgres")] @@ -356,6 +392,45 @@ fn backend_file_path(test_name: &str, file: &str) -> PathBuf { .join(file) } +fn test_multiple_print_schema(test_name: &str, args: Vec<&str>) { + let test_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("tests") + .join("print_schema") + .join(test_name); + let p = project(test_name) + .file( + "diesel.toml", + r#" + [print_schema.user1] + [print_schema.user2] + "#, + ) + .build(); + let db = database(&p.database_url()); + + p.command("setup").run(); + + let schema = read_file(&backend_file_path(test_name, "schema.sql")); + db.execute(&schema); + + let result = p.command("print-schema").args(args).run(); + + assert!(result.is_success(), "Result was unsuccessful {:?}", result); + + let result = result.stdout().replace("\r\n", "\n"); + + let mut setting = insta::Settings::new(); + setting.set_snapshot_path(backend_file_path(test_name, "")); + setting.set_omit_expression(true); + setting.set_description(format!("Test: {test_name}")); + setting.set_prepend_module_to_snapshot(false); + + setting.bind(|| { + insta::assert_snapshot!("expected", result); + 
test_multiple_print_schema_config(test_name, &test_path, schema); + }); +} + fn test_print_schema(test_name: &str, args: Vec<&str>) { let test_path = Path::new(env!("CARGO_MANIFEST_DIR")) .join("tests") @@ -416,6 +491,35 @@ fn test_print_schema_config(test_name: &str, test_path: &Path, schema: String) { insta::assert_snapshot!("expected", result); } +fn test_multiple_print_schema_config(test_name: &str, test_path: &Path, schema: String) { + let config = read_file(&test_path.join("diesel.toml")); + let mut p = project(&format!("{}_config", test_name)).file("diesel.toml", &config); + + let patch_file = backend_file_path(test_name, "schema.patch"); + if patch_file.exists() { + let patch_contents = read_file(&patch_file); + p = p.file("schema.patch", &patch_contents); + } + + let p = p.build(); + + p.command("setup").run(); + p.create_migration("12345_create_schema", &schema, None, None); + let result = p.command("migration").arg("run").run(); + assert!(result.is_success(), "Result was unsuccessful {:?}", result); + + let schema = p.file_contents("src/schema1.rs").replace("\r\n", "\n"); + insta::assert_snapshot!("expected_1", schema); + let schema = p.file_contents("src/schema2.rs").replace("\r\n", "\n"); + insta::assert_snapshot!("expected_2", schema); + + let result = p.command("print-schema").run(); + assert!(result.is_success(), "Result was unsuccessful {:?}", result); + + let result = result.stdout().replace("\r\n", "\n"); + insta::assert_snapshot!("expected", result); +} + fn read_file(path: &Path) -> String { let mut file = File::open(path).expect(&format!("Could not open {}", path.display())); let mut string = String::new(); diff --git a/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/diesel.toml b/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/diesel.toml new file mode 100644 index 000000000000..e914c1be0c32 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/diesel.toml @@ -0,0 +1,4 @@ 
+[print_schema] +file = "src/schema.rs" +with_docs = false +sqlite_integer_primary_key_is_bigint = true diff --git a/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/sqlite/expected.snap b/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/sqlite/expected.snap new file mode 100644 index 000000000000..543c6a8a50df --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/sqlite/expected.snap @@ -0,0 +1,30 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_sqlite_rowid_column" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + users1 (id) { + id -> BigInt, + } +} + +diesel::table! { + users2 (id) { + id -> Integer, + } +} + +diesel::table! { + users3 (rowid) { + rowid -> BigInt, + name -> Text, + } +} + +diesel::allow_tables_to_appear_in_same_query!( + users1, + users2, + users3, +); diff --git a/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/sqlite/schema.sql b/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/sqlite/schema.sql new file mode 100644 index 000000000000..21c107a7a110 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_sqlite_rowid_column/sqlite/schema.sql @@ -0,0 +1,3 @@ +CREATE TABLE users1 (id INTEGER NOT NULL PRIMARY KEY); +CREATE TABLE users2 (id INTEGER NOT NULL PRIMARY KEY) WITHOUT ROWID; +CREATE TABLE users3 (name TEXT NOT NULL); diff --git a/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/expected.snap b/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/expected.snap index 98bf663e0820..20ce77fb7a64 100644 --- a/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/expected.snap +++ b/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/expected.snap @@ -7,23 +7,85 @@ description: "Test: print_schema_sqlite_without_explicit_primary_key" diesel::table! 
{ no_explicit (rowid) { rowid -> Integer, - name -> Text, + name -> Nullable, + } +} + +diesel::table! { + with_explicit_aliased_rowid (id) { + id -> Nullable, + name -> Nullable, + } +} + +diesel::table! { + with_explicit_aliased_rowid_not_null (id) { + id -> Integer, + name -> Nullable, + } +} + +diesel::table! { + with_explicit_pk_rowid (rowid) { + rowid -> Nullable, + name -> Nullable, + } +} + +diesel::table! { + with_explicit_pk_rowid_autoincrement (rowid) { + rowid -> Nullable, + name -> Nullable, + } +} + +diesel::table! { + with_explicit_pk_rowid_autoincrement_not_null (rowid) { + rowid -> Integer, + name -> Nullable, + } +} + +diesel::table! { + with_explicit_pk_rowid_not_null (rowid) { + rowid -> Integer, + name -> Nullable, } } diesel::table! { with_explicit_rowid (oid) { oid -> Integer, - name -> Text, - rowid -> Text, + name -> Nullable, + rowid -> Nullable, } } diesel::table! { with_explicit_rowid_oid (_rowid_) { _rowid_ -> Integer, - name -> Text, - rowid -> Text, - oid -> Text, + name -> Nullable, + rowid -> Nullable, + oid -> Nullable, } } + +diesel::table! 
{ + without_rowid (word) { + word -> Text, + cnt -> Nullable, + } +} + +diesel::allow_tables_to_appear_in_same_query!( + no_explicit, + with_explicit_aliased_rowid, + with_explicit_aliased_rowid_not_null, + with_explicit_pk_rowid, + with_explicit_pk_rowid_autoincrement, + with_explicit_pk_rowid_autoincrement_not_null, + with_explicit_pk_rowid_not_null, + with_explicit_rowid, + with_explicit_rowid_oid, + without_rowid, +); diff --git a/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/schema.sql b/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/schema.sql index c9d457fdaf34..d4824367defe 100644 --- a/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/schema.sql +++ b/diesel_cli/tests/print_schema/print_schema_sqlite_without_explicit_primary_key/sqlite/schema.sql @@ -12,3 +12,38 @@ CREATE TABLE with_explicit_rowid_oid ( rowid TEXT, oid TEXT ); + +CREATE TABLE with_explicit_pk_rowid ( + rowid INTEGER PRIMARY KEY, + name TEXT +); + +CREATE TABLE with_explicit_pk_rowid_autoincrement ( + rowid INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT +); + +CREATE TABLE with_explicit_pk_rowid_not_null ( + rowid INTEGER PRIMARY KEY NOT NULL, + name TEXT +); + +CREATE TABLE with_explicit_pk_rowid_autoincrement_not_null ( + rowid INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + name TEXT +); + +CREATE TABLE with_explicit_aliased_rowid ( + id INTEGER PRIMARY KEY, + name TEXT +); + +CREATE TABLE with_explicit_aliased_rowid_not_null ( + id INTEGER PRIMARY KEY NOT NULL, + name TEXT +); + +CREATE TABLE without_rowid ( + word TEXT PRIMARY KEY, + cnt INTEGER +) WITHOUT ROWID; diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/diesel.toml b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/diesel.toml new file mode 100644 index 000000000000..26ed7123ab14 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/diesel.toml 
@@ -0,0 +1,10 @@ +[print_schema.user1] +file = "src/schema1.rs" +with_docs = true +filter = { only_tables = ["users1"] } + + +[print_schema.user2] +file = "src/schema2.rs" +with_docs = true +filter = { only_tables = ["users2"] } diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected.snap new file mode 100644 index 000000000000..64982a12d7fc --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected.snap @@ -0,0 +1,54 @@ +--- +source: diesel_cli/tests/print_schema.rs +assertion_line: 414 +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + users1 (id) { + id -> Integer, + } +} + +diesel::table! { + users2 (id) { + id -> Integer, + } +} + +diesel::allow_tables_to_appear_in_same_query!( + users1, + users2, +); +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users1` table. + /// + /// (Automatically generated by Diesel.) + users1 (id) { + /// The `id` column of the `users1` table. + /// + /// Its SQL type is `Integer`. + /// + /// (Automatically generated by Diesel.) + id -> Integer, + } +} +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users2` table. + /// + /// (Automatically generated by Diesel.) + users2 (id) { + /// The `id` column of the `users2` table. + /// + /// Its SQL type is `Integer`. + /// + /// (Automatically generated by Diesel.) 
+ id -> Integer, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected_1.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected_1.snap new file mode 100644 index 000000000000..2bfe57e806dd --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected_1.snap @@ -0,0 +1,20 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users1` table. + /// + /// (Automatically generated by Diesel.) + users1 (id) { + /// The `id` column of the `users1` table. + /// + /// Its SQL type is `Integer`. + /// + /// (Automatically generated by Diesel.) + id -> Integer, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected_2.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected_2.snap new file mode 100644 index 000000000000..54cef0d71d88 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/expected_2.snap @@ -0,0 +1,20 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users2` table. + /// + /// (Automatically generated by Diesel.) + users2 (id) { + /// The `id` column of the `users2` table. + /// + /// Its SQL type is `Integer`. + /// + /// (Automatically generated by Diesel.) 
+ id -> Integer, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/schema.sql b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/schema.sql new file mode 100644 index 000000000000..556e9f7b54e8 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/mysql/schema.sql @@ -0,0 +1,2 @@ +CREATE TABLE users1 (id INTEGER PRIMARY KEY); +CREATE TABLE users2 (id INTEGER PRIMARY KEY); diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected.snap new file mode 100644 index 000000000000..4cf2e0348a4d --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected.snap @@ -0,0 +1,53 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + users1 (id) { + id -> Int4, + } +} + +diesel::table! { + users2 (id) { + id -> Int4, + } +} + +diesel::allow_tables_to_appear_in_same_query!( + users1, + users2, +); +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users1` table. + /// + /// (Automatically generated by Diesel.) + users1 (id) { + /// The `id` column of the `users1` table. + /// + /// Its SQL type is `Int4`. + /// + /// (Automatically generated by Diesel.) + id -> Int4, + } +} +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users2` table. + /// + /// (Automatically generated by Diesel.) + users2 (id) { + /// The `id` column of the `users2` table. + /// + /// Its SQL type is `Int4`. + /// + /// (Automatically generated by Diesel.) 
+ id -> Int4, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected_1.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected_1.snap new file mode 100644 index 000000000000..60d6577fe201 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected_1.snap @@ -0,0 +1,20 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users1` table. + /// + /// (Automatically generated by Diesel.) + users1 (id) { + /// The `id` column of the `users1` table. + /// + /// Its SQL type is `Int4`. + /// + /// (Automatically generated by Diesel.) + id -> Int4, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected_2.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected_2.snap new file mode 100644 index 000000000000..2283da73a569 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/expected_2.snap @@ -0,0 +1,20 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users2` table. + /// + /// (Automatically generated by Diesel.) + users2 (id) { + /// The `id` column of the `users2` table. + /// + /// Its SQL type is `Int4`. + /// + /// (Automatically generated by Diesel.) 
+ id -> Int4, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/schema.sql b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/schema.sql new file mode 100644 index 000000000000..37d66071ac40 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/postgres/schema.sql @@ -0,0 +1,2 @@ +CREATE TABLE users1 (id SERIAL PRIMARY KEY); +CREATE TABLE users2 (id SERIAL PRIMARY KEY); diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected.snap new file mode 100644 index 000000000000..8dc866e3d2c0 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected.snap @@ -0,0 +1,53 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + users1 (id) { + id -> Nullable, + } +} + +diesel::table! { + users2 (id) { + id -> Nullable, + } +} + +diesel::allow_tables_to_appear_in_same_query!( + users1, + users2, +); +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users1` table. + /// + /// (Automatically generated by Diesel.) + users1 (id) { + /// The `id` column of the `users1` table. + /// + /// Its SQL type is `Nullable`. + /// + /// (Automatically generated by Diesel.) + id -> Nullable, + } +} +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users2` table. + /// + /// (Automatically generated by Diesel.) + users2 (id) { + /// The `id` column of the `users2` table. + /// + /// Its SQL type is `Nullable`. + /// + /// (Automatically generated by Diesel.) 
+ id -> Nullable, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected_1.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected_1.snap new file mode 100644 index 000000000000..0c5b75e16470 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected_1.snap @@ -0,0 +1,20 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users1` table. + /// + /// (Automatically generated by Diesel.) + users1 (id) { + /// The `id` column of the `users1` table. + /// + /// Its SQL type is `Nullable`. + /// + /// (Automatically generated by Diesel.) + id -> Nullable, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected_2.snap b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected_2.snap new file mode 100644 index 000000000000..0b25a7b13fd7 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/expected_2.snap @@ -0,0 +1,20 @@ +--- +source: diesel_cli/tests/print_schema.rs +description: "Test: print_schema_with_multiple_schema" +--- +// @generated automatically by Diesel CLI. + +diesel::table! { + /// Representation of the `users2` table. + /// + /// (Automatically generated by Diesel.) + users2 (id) { + /// The `id` column of the `users2` table. + /// + /// Its SQL type is `Nullable`. + /// + /// (Automatically generated by Diesel.) 
+ id -> Nullable, + } +} + diff --git a/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/schema.sql b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/schema.sql new file mode 100644 index 000000000000..556e9f7b54e8 --- /dev/null +++ b/diesel_cli/tests/print_schema/print_schema_with_multiple_schema/sqlite/schema.sql @@ -0,0 +1,2 @@ +CREATE TABLE users1 (id INTEGER PRIMARY KEY); +CREATE TABLE users2 (id INTEGER PRIMARY KEY); diff --git a/diesel_cli/tests/setup.rs b/diesel_cli/tests/setup.rs index d0bab07b5b17..a65a0d39ff28 100644 --- a/diesel_cli/tests/setup.rs +++ b/diesel_cli/tests/setup.rs @@ -170,6 +170,41 @@ fn setup_works_with_migration_dir_by_arg() { assert!(p.has_file("foo")); } +#[test] +fn setup_writes_migration_dir_by_arg_to_config_file() { + let p = project("setup_writes_migration_dir_by_arg_to_config_file").build(); + + // make sure the project builder doesn't create it for us + assert!(!p.has_file("migrations")); + assert!(!p.has_file("foo")); + + let result = p.command("setup").arg("--migration-dir=foo").run(); + + assert!(result.is_success(), "Result was unsuccessful {:?}", result); + assert!(!p.has_file("migrations")); + assert!(p.has_file("foo")); + assert!(p.file_contents("diesel.toml").contains("dir = \"foo\"")); +} + +#[test] +#[cfg(windows)] +fn setup_writes_migration_dir_by_arg_to_config_file_win() { + let p = project("setup_writes_migration_dir_by_arg_to_config_file_win").build(); + + // make sure the project builder doesn't create it for us + assert!(!p.has_file("migrations")); + assert!(!p.has_file("foo")); + + let result = p.command("setup").arg("--migration-dir=foo\\bar").run(); + + assert!(result.is_success(), "Result was unsuccessful {:?}", result); + assert!(!p.has_file("migrations")); + assert!(p.has_file("foo")); + assert!(p + .file_contents("diesel.toml") + .contains("dir = \"foo\\\\bar\"")); +} + #[test] fn setup_works_with_migration_dir_by_env() { let p = 
project("setup_works_with_migration_dir_by_env").build(); diff --git a/diesel_cli/tests/support/project_builder.rs b/diesel_cli/tests/support/project_builder.rs index c1199b1aba17..5110e409c599 100644 --- a/diesel_cli/tests/support/project_builder.rs +++ b/diesel_cli/tests/support/project_builder.rs @@ -45,7 +45,7 @@ impl ProjectBuilder { File::create(tempdir.path().join("Cargo.toml")).unwrap(); for folder in self.folders { - fs::create_dir(tempdir.path().join(folder)).unwrap(); + fs::create_dir_all(tempdir.path().join(folder)).unwrap(); } for (file, contents) in self.files { diff --git a/diesel_compile_tests/tests/fail/aggregate_expression_requires_column_from_same_table.stderr b/diesel_compile_tests/tests/fail/aggregate_expression_requires_column_from_same_table.stderr index 0f9dfcb21c34..5a4d9ceff83c 100644 --- a/diesel_compile_tests/tests/fail/aggregate_expression_requires_column_from_same_table.stderr +++ b/diesel_compile_tests/tests/fail/aggregate_expression_requires_column_from_same_table.stderr @@ -12,6 +12,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `diesel::expression::functions::aggregate_folding::sum_utils::sum` to implement `SelectableExpression` = note: required for `SelectStatement>` to implement `SelectDsl>` @@ -40,10 +41,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> tests/fail/aggregate_expression_requires_column_from_same_table.rs:14:9 @@ -69,6 +73,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `diesel::expression::functions::aggregate_folding::avg_utils::avg` to implement 
`SelectableExpression` = note: required for `SelectStatement>` to implement `SelectDsl>` @@ -97,10 +102,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> tests/fail/aggregate_expression_requires_column_from_same_table.rs:14:9 @@ -126,6 +134,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `diesel::expression::functions::aggregate_ordering::max_utils::max` to implement `SelectableExpression` = note: required for `SelectStatement>` to implement `SelectDsl>` @@ -154,10 +163,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> tests/fail/aggregate_expression_requires_column_from_same_table.rs:14:9 @@ -183,6 +195,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `diesel::expression::functions::aggregate_ordering::min_utils::min` to implement `SelectableExpression` = note: required for `SelectStatement>` to implement `SelectDsl>` @@ -211,10 +224,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for 
`posts::columns::id` to implement `AppearsOnTable` --> tests/fail/aggregate_expression_requires_column_from_same_table.rs:14:9 diff --git a/diesel_compile_tests/tests/fail/any_is_only_selectable_if_inner_expr_is_selectable.stderr b/diesel_compile_tests/tests/fail/any_is_only_selectable_if_inner_expr_is_selectable.stderr index ed4f8e58c7cd..430d972d9796 100644 --- a/diesel_compile_tests/tests/fail/any_is_only_selectable_if_inner_expr_is_selectable.stderr +++ b/diesel_compile_tests/tests/fail/any_is_only_selectable_if_inner_expr_is_selectable.stderr @@ -44,10 +44,13 @@ error[E0277]: the trait bound `stuff::table: TableNotEqual` i = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `stuff::table` to implement `AppearsInFromClause` note: required for `more_stuff::columns::names` to implement `AppearsOnTable` --> tests/fail/any_is_only_selectable_if_inner_expr_is_selectable.rs:15:9 diff --git a/diesel_compile_tests/tests/fail/boxed_queries_and_group_by.stderr b/diesel_compile_tests/tests/fail/boxed_queries_and_group_by.stderr index ade1e6ff7790..4295a3d0caf6 100644 --- a/diesel_compile_tests/tests/fail/boxed_queries_and_group_by.stderr +++ b/diesel_compile_tests/tests/fail/boxed_queries_and_group_by.stderr @@ -64,6 +64,7 @@ error[E0277]: the trait bound `BoxedSelectStatement<'_, diesel::sql_types::Text, posts::table users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `BoxedSelectStatement<'_, diesel::sql_types::Text, FromClause, _, users::columns::name>` to implement `JoinTo>` @@ -128,6 +129,7 @@ error[E0277]: the trait bound `BoxedSelectStatement<'_, (diesel::sql_types::Inte posts::table users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `BoxedSelectStatement<'_, 
(diesel::sql_types::Integer, diesel::sql_types::Text), FromClause, _>` to implement `GroupByDsl<_>` diff --git a/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_filter.stderr b/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_filter.stderr index c58a33e4d22f..8e5969588eb6 100644 --- a/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_filter.stderr +++ b/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_filter.stderr @@ -22,10 +22,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::title` to implement `AppearsOnTable` --> tests/fail/boxed_queries_require_selectable_expression_for_filter.rs:16:9 diff --git a/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_order.stderr b/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_order.stderr index 9ce57747927c..429b10864f2d 100644 --- a/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_order.stderr +++ b/diesel_compile_tests/tests/fail/boxed_queries_require_selectable_expression_for_order.stderr @@ -22,10 +22,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::title` to implement `AppearsOnTable` --> tests/fail/boxed_queries_require_selectable_expression_for_order.rs:16:9 diff --git 
a/diesel_compile_tests/tests/fail/cannot_join_to_non_joinable_table.stderr b/diesel_compile_tests/tests/fail/cannot_join_to_non_joinable_table.stderr index 5cc00c7e2da3..df93dffd6f4e 100644 --- a/diesel_compile_tests/tests/fail/cannot_join_to_non_joinable_table.stderr +++ b/diesel_compile_tests/tests/fail/cannot_join_to_non_joinable_table.stderr @@ -13,6 +13,7 @@ error[E0277]: the trait bound `users::table: JoinTo` is not satisf >> >> >> + >> = note: required for `users::table` to implement `JoinWithImplicitOnClause` note: required by a bound in `inner_join` --> $DIESEL/src/query_dsl/mod.rs @@ -38,6 +39,7 @@ error[E0277]: the trait bound `users::table: JoinTo` is not satisf >> >> >> + >> = note: required for `users::table` to implement `JoinWithImplicitOnClause` note: required by a bound in `left_outer_join` --> $DIESEL/src/query_dsl/mod.rs @@ -62,5 +64,6 @@ error[E0277]: the trait bound `posts::table: JoinTo` is not satisf >> >> >> + >> = note: required for `query_source::joins::Join` to implement `JoinTo` = note: required for `users::table` to implement `JoinWithImplicitOnClause, diesel::expression::grouped::Grouped, NullableExpression>>>>>, Inner>` diff --git a/diesel_compile_tests/tests/fail/custom_returning_requires_selectable_expression.stderr b/diesel_compile_tests/tests/fail/custom_returning_requires_selectable_expression.stderr index 0f5c8f88e388..d7be57379378 100644 --- a/diesel_compile_tests/tests/fail/custom_returning_requires_selectable_expression.stderr +++ b/diesel_compile_tests/tests/fail/custom_returning_requires_selectable_expression.stderr @@ -14,6 +14,7 @@ error[E0277]: Cannot select `bad::columns::age` from `users::table` >>> >> >> + >> = note: required for `UpdateStatement>>>, Assign<..., ...>, ...>` to implement `Query` note: required by a bound in `UpdateStatement::::returning` --> $DIESEL/src/query_builder/update_statement/mod.rs @@ -40,6 +41,7 @@ error[E0277]: Cannot select `bad::columns::age` from `users::table` >>> >> >> + >> = note: 
required for `(users::columns::name, bad::columns::age)` to implement `SelectableExpression` = note: required for `InsertStatement,), ...>, ..., ...>` to implement `Query` note: required by a bound in `InsertStatement::::returning` @@ -88,10 +90,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not s = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `bad::columns::age` to implement `AppearsOnTable` --> tests/fail/custom_returning_requires_selectable_expression.rs:16:7 diff --git a/diesel_compile_tests/tests/fail/derive/aliases.stderr b/diesel_compile_tests/tests/fail/derive/aliases.stderr index 2241d8883cca..3614290b838a 100644 --- a/diesel_compile_tests/tests/fail/derive/aliases.stderr +++ b/diesel_compile_tests/tests/fail/derive/aliases.stderr @@ -115,6 +115,7 @@ error[E0277]: Cannot select `users::columns::id` from `Alias` >>> >> >> + >> = note: required for `SelectStatement>>` to implement `SelectDsl` = note: 1 redundant requirement hidden = note: required for `Alias` to implement `SelectDsl` @@ -143,6 +144,7 @@ error[E0277]: Cannot select `users::columns::id` from `Alias` >>> >> >> + >> = note: required for `diesel::query_builder::select_clause::SelectClause` to implement `diesel::query_builder::select_clause::SelectClauseExpression>>` = note: required for `SelectStatement>, diesel::query_builder::select_clause::SelectClause>` to implement `Query` = note: required for `SelectStatement>, diesel::query_builder::select_clause::SelectClause>` to implement `LoadQuery<'_, _, i32>` @@ -202,6 +204,7 @@ error[E0277]: the trait bound `users::table: JoinTo` is not satisfi >> >> >> + >> = note: required for `Alias` to implement `JoinTo` = note: required for `pets::table` to implement `JoinWithImplicitOnClause, Inner>` diff --git 
a/diesel_compile_tests/tests/fail/derive/bad_insertable.stderr b/diesel_compile_tests/tests/fail/derive/bad_insertable.stderr index dc45369c2656..a90105f72f76 100644 --- a/diesel_compile_tests/tests/fail/derive/bad_insertable.stderr +++ b/diesel_compile_tests/tests/fail/derive/bad_insertable.stderr @@ -5,9 +5,9 @@ error[E0277]: the trait bound `std::string::String: diesel::Expression` is not s | ^^ the trait `diesel::Expression` is not implemented for `std::string::String` | = help: the following other types implement trait `diesel::Expression`: + Box columns::name columns::id - Box columns::star diesel::expression::ops::numeric::Add diesel::expression::ops::numeric::Sub @@ -23,9 +23,9 @@ error[E0277]: the trait bound `i32: diesel::Expression` is not satisfied | ^^^^ the trait `diesel::Expression` is not implemented for `i32` | = help: the following other types implement trait `diesel::Expression`: + Box columns::name columns::id - Box columns::star diesel::expression::ops::numeric::Add diesel::expression::ops::numeric::Sub @@ -41,9 +41,9 @@ error[E0277]: the trait bound `std::string::String: diesel::Expression` is not s | ^^ the trait `diesel::Expression` is not implemented for `std::string::String` | = help: the following other types implement trait `diesel::Expression`: + Box columns::name columns::id - Box columns::star diesel::expression::ops::numeric::Add diesel::expression::ops::numeric::Sub @@ -60,9 +60,9 @@ error[E0277]: the trait bound `i32: diesel::Expression` is not satisfied | ^^^^ the trait `diesel::Expression` is not implemented for `i32` | = help: the following other types implement trait `diesel::Expression`: + Box columns::name columns::id - Box columns::star diesel::expression::ops::numeric::Add diesel::expression::ops::numeric::Sub diff --git a/diesel_compile_tests/tests/fail/distinct_on_allows_only_fields_of_table.stderr b/diesel_compile_tests/tests/fail/distinct_on_allows_only_fields_of_table.stderr index 827d88d42a39..efa783abe46c 100644 --- 
a/diesel_compile_tests/tests/fail/distinct_on_allows_only_fields_of_table.stderr +++ b/diesel_compile_tests/tests/fail/distinct_on_allows_only_fields_of_table.stderr @@ -14,6 +14,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `users::table` to implement `DistinctOnDsl` note: required by a bound in `diesel::QueryDsl::distinct_on` --> $DIESEL/src/query_dsl/mod.rs @@ -38,6 +39,7 @@ error[E0277]: Cannot select `users::columns::name` from `posts::table` >>> >> >> + >> = note: required for `(posts::columns::name, users::columns::name)` to implement `SelectableExpression` = note: required for `posts::table` to implement `DistinctOnDsl<(posts::columns::name, users::columns::name)>` @@ -66,10 +68,13 @@ error[E0277]: the trait bound `posts::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `posts::table` to implement `AppearsInFromClause` note: required for `users::columns::name` to implement `AppearsOnTable` --> tests/fail/distinct_on_allows_only_fields_of_table.rs:9:9 diff --git a/diesel_compile_tests/tests/fail/expressions_can_only_be_compared_for_equality_to_expressions_of_same_type.stderr b/diesel_compile_tests/tests/fail/expressions_can_only_be_compared_for_equality_to_expressions_of_same_type.stderr index fe2ef21c5c38..650714057816 100644 --- a/diesel_compile_tests/tests/fail/expressions_can_only_be_compared_for_equality_to_expressions_of_same_type.stderr +++ b/diesel_compile_tests/tests/fail/expressions_can_only_be_compared_for_equality_to_expressions_of_same_type.stderr @@ -6,9 +6,9 @@ error[E0277]: the trait bound `str: diesel::Expression` is not satisfied | = help: the following other types implement trait `diesel::Expression`: columns::name + Box columns::id columns::star - Box diesel::expression::ops::numeric::Add 
diesel::expression::ops::numeric::Sub diesel::expression::ops::numeric::Mul diff --git a/diesel_compile_tests/tests/fail/order_requires_column_from_same_table.stderr b/diesel_compile_tests/tests/fail/order_requires_column_from_same_table.stderr index db360413304e..f4029c462eaa 100644 --- a/diesel_compile_tests/tests/fail/order_requires_column_from_same_table.stderr +++ b/diesel_compile_tests/tests/fail/order_requires_column_from_same_table.stderr @@ -20,10 +20,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> tests/fail/order_requires_column_from_same_table.rs:13:9 diff --git a/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_mysql.rs b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_mysql.rs new file mode 100644 index 000000000000..18ce6ff2ae19 --- /dev/null +++ b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_mysql.rs @@ -0,0 +1,21 @@ +extern crate diesel; + +use diesel::dsl::*; +use diesel::*; + +table! 
{ + users { + id -> Integer, + name -> VarChar, + } +} + +fn main() { + use self::users::dsl::*; + let mut connection = MysqlConnection::establish("").unwrap(); + + let random_user_ids = users + .tablesample_system(10) + .with_seed(42.0) + .load::<(i32, String)>(&mut connection); +} diff --git a/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_mysql.stderr b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_mysql.stderr new file mode 100644 index 000000000000..756ff7f4e512 --- /dev/null +++ b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_mysql.stderr @@ -0,0 +1,21 @@ +error[E0277]: the trait bound `Tablesample: QueryFragment` is not satisfied + --> tests/fail/pg_specific_tablesample_cannot_be_used_on_mysql.rs:20:32 + | +20 | .load::<(i32, String)>(&mut connection); + | ---- ^^^^^^^^^^^^^^^ the trait `QueryFragment` is not implemented for `Tablesample` + | | + | required by a bound introduced by this call + | + = help: the trait `QueryFragment` is implemented for `Tablesample` + = note: required for `FromClause>` to implement `QueryFragment` + = note: 2 redundant requirements hidden + = note: required for `SelectStatement>>` to implement `QueryFragment` + = note: required for `Tablesample` to implement `LoadQuery<'_, diesel::MysqlConnection, (i32, std::string::String)>` +note: required by a bound in `diesel::RunQueryDsl::load` + --> $DIESEL/src/query_dsl/mod.rs + | + | fn load<'query, U>(self, conn: &mut Conn) -> QueryResult> + | ---- required by a bound in this associated function + | where + | Self: LoadQuery<'query, Conn, U>, + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `RunQueryDsl::load` diff --git a/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_sqlite.rs b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_sqlite.rs new file mode 100644 index 000000000000..1d2d76c716a0 --- /dev/null +++ 
b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_sqlite.rs @@ -0,0 +1,21 @@ +extern crate diesel; + +use diesel::dsl::*; +use diesel::*; + +table! { + users { + id -> Integer, + name -> VarChar, + } +} + +fn main() { + use self::users::dsl::*; + let mut connection = SqliteConnection::establish(":memory:").unwrap(); + + let random_user_ids = users + .tablesample_system(10) + .with_seed(42.0) + .load::<(i32, String)>(&mut connection); +} diff --git a/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_sqlite.stderr b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_sqlite.stderr new file mode 100644 index 000000000000..c72a5911e865 --- /dev/null +++ b/diesel_compile_tests/tests/fail/pg_specific_tablesample_cannot_be_used_on_sqlite.stderr @@ -0,0 +1,21 @@ +error[E0277]: the trait bound `Tablesample: QueryFragment` is not satisfied + --> tests/fail/pg_specific_tablesample_cannot_be_used_on_sqlite.rs:20:32 + | +20 | .load::<(i32, String)>(&mut connection); + | ---- ^^^^^^^^^^^^^^^ the trait `QueryFragment` is not implemented for `Tablesample` + | | + | required by a bound introduced by this call + | + = help: the trait `QueryFragment` is implemented for `Tablesample` + = note: required for `FromClause>` to implement `QueryFragment` + = note: 2 redundant requirements hidden + = note: required for `SelectStatement>>` to implement `QueryFragment` + = note: required for `Tablesample` to implement `LoadQuery<'_, diesel::SqliteConnection, (i32, std::string::String)>` +note: required by a bound in `diesel::RunQueryDsl::load` + --> $DIESEL/src/query_dsl/mod.rs + | + | fn load<'query, U>(self, conn: &mut Conn) -> QueryResult> + | ---- required by a bound in this associated function + | where + | Self: LoadQuery<'query, Conn, U>, + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `RunQueryDsl::load` diff --git a/diesel_compile_tests/tests/fail/pg_upsert_do_update_requires_valid_update.stderr 
b/diesel_compile_tests/tests/fail/pg_upsert_do_update_requires_valid_update.stderr index 6b7340d97207..cf3b662230d4 100644 --- a/diesel_compile_tests/tests/fail/pg_upsert_do_update_requires_valid_update.stderr +++ b/diesel_compile_tests/tests/fail/pg_upsert_do_update_requires_valid_update.stderr @@ -94,10 +94,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::title` to implement `AppearsOnTable` --> tests/fail/pg_upsert_do_update_requires_valid_update.rs:16:9 diff --git a/diesel_compile_tests/tests/fail/returning_clause_requires_selectable_expression.stderr b/diesel_compile_tests/tests/fail/returning_clause_requires_selectable_expression.stderr index 8bf34a4aee9f..9f7f8835752b 100644 --- a/diesel_compile_tests/tests/fail/returning_clause_requires_selectable_expression.stderr +++ b/diesel_compile_tests/tests/fail/returning_clause_requires_selectable_expression.stderr @@ -14,6 +14,7 @@ error[E0277]: Cannot select `non_users::columns::noname` from `users::table` >>> >> >> + >> note: required by a bound in `DeleteStatement::::returning` --> $DIESEL/src/query_builder/delete_statement/mod.rs | @@ -39,6 +40,7 @@ error[E0277]: Cannot select `non_users::columns::noname` from `users::table` >>> >> >> + >> = note: required for `InsertStatement,), ...>, ..., ...>` to implement `Query` note: required by a bound in `InsertStatement::::returning` --> $DIESEL/src/query_builder/insert_statement/mod.rs @@ -65,6 +67,7 @@ error[E0277]: Cannot select `non_users::columns::noname` from `users::table` >>> >> >> + >> = note: required for `UpdateStatement, Bound>, ...>` to implement `Query` note: required by a bound in `UpdateStatement::::returning` --> 
$DIESEL/src/query_builder/update_statement/mod.rs diff --git a/diesel_compile_tests/tests/fail/right_side_of_left_join_requires_nullable.stderr b/diesel_compile_tests/tests/fail/right_side_of_left_join_requires_nullable.stderr index 975359e80bec..0d8941f56475 100644 --- a/diesel_compile_tests/tests/fail/right_side_of_left_join_requires_nullable.stderr +++ b/diesel_compile_tests/tests/fail/right_side_of_left_join_requires_nullable.stderr @@ -27,6 +27,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -65,6 +66,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -131,6 +133,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -169,6 +172,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -235,6 +239,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -273,6 +278,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -339,6 +345,7 @@ error[E0277]: 
Cannot select `posts::columns::title` from `pets::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression, diesel::expression::grouped::Grouped, NullableExpression>>>>>, LeftOuter>>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -377,6 +384,7 @@ error[E0277]: Cannot select `posts::columns::title` from `pets::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression, diesel::expression::grouped::Grouped, NullableExpression>>>>>, LeftOuter>>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -443,6 +451,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | @@ -481,6 +490,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/right_side_of_left_join_requires_nullable.rs:17:9 | diff --git a/diesel_compile_tests/tests/fail/select_for_update_cannot_be_mixed_with_some_clauses.stderr b/diesel_compile_tests/tests/fail/select_for_update_cannot_be_mixed_with_some_clauses.stderr index 4f1b179197f9..6383ced39154 100644 --- a/diesel_compile_tests/tests/fail/select_for_update_cannot_be_mixed_with_some_clauses.stderr +++ b/diesel_compile_tests/tests/fail/select_for_update_cannot_be_mixed_with_some_clauses.stderr @@ -17,6 +17,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, ..., ..., ..., ..., ..., ..., ...>` to implement `DistinctDsl` @@ -48,6 +49,7 @@ error[E0277]: the trait bound `SelectStatement, 
diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, DistinctClause>` to implement `LockingDsl` @@ -79,6 +81,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, ..., ..., ..., ..., ..., ..., ...>` to implement `DistinctOnDsl<_>` @@ -127,6 +130,7 @@ error[E0277]: Cannot select `columns::id` from `SelectStatement>>> >> >> + >> = note: required for `SelectStatement, DefaultSelectClause>, ..., ..., ..., ..., ..., ..., ...>` to implement `DistinctOnDsl` note: required by a bound in `diesel::QueryDsl::distinct_on` --> $DIESEL/src/query_dsl/mod.rs @@ -156,6 +160,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, DistinctOnClause>` to implement `LockingDsl` @@ -187,6 +192,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, ..., ..., ..., ..., ..., ..., ...>` to implement `GroupByDsl<_>` @@ -218,6 +224,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, ..., 
..., ..., ..., ...>` to implement `LockingDsl` @@ -249,6 +256,7 @@ error[E0277]: the trait bound `BoxedSelectStatement<'_, (diesel::sql_types::Inte = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `BoxedSelectStatement<'_, (diesel::sql_types::Integer,), FromClause, _>` to implement `LockingDsl` @@ -280,6 +288,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, ..., ..., ..., ..., ..., ..., ...>` to implement `BoxedDsl<'_, _>` @@ -333,6 +342,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, ..., ..., ..., ..., ..., ..., ...>` to implement `GroupByDsl<_>` @@ -364,6 +374,7 @@ error[E0277]: the trait bound `SelectStatement, diesel: = help: the following other types implement trait `Table`: users::table Only + Tablesample pg::metadata_lookup::pg_namespace::table pg::metadata_lookup::pg_type::table = note: required for `SelectStatement, DefaultSelectClause>, ..., ..., ..., ..., ..., ...>` to implement `LockingDsl` diff --git a/diesel_compile_tests/tests/fail/select_requires_column_from_same_table.stderr b/diesel_compile_tests/tests/fail/select_requires_column_from_same_table.stderr index a3f8fcf51340..ac09ac3a58b7 100644 --- a/diesel_compile_tests/tests/fail/select_requires_column_from_same_table.stderr +++ b/diesel_compile_tests/tests/fail/select_requires_column_from_same_table.stderr @@ -14,6 +14,7 @@ error[E0277]: Cannot select `posts::columns::id` from 
`users::table` >>> >> >> + >> = note: required for `SelectStatement>` to implement `SelectDsl` = note: 1 redundant requirement hidden = note: required for `users::table` to implement `SelectDsl` diff --git a/diesel_compile_tests/tests/fail/selectable.stderr b/diesel_compile_tests/tests/fail/selectable.stderr index 0b78df3b1620..db8bcef3d6c8 100644 --- a/diesel_compile_tests/tests/fail/selectable.stderr +++ b/diesel_compile_tests/tests/fail/selectable.stderr @@ -54,6 +54,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> note: required for `posts::columns::id` to implement `SelectableExpression>` --> tests/fail/selectable.rs:15:9 | @@ -77,6 +78,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/selectable.rs:16:9 | @@ -129,6 +131,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> note: required for `posts::columns::id` to implement `SelectableExpression>` --> tests/fail/selectable.rs:15:9 | @@ -164,6 +167,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> note: required for `posts::columns::title` to implement `SelectableExpression>` --> tests/fail/selectable.rs:16:9 | @@ -251,6 +255,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -280,6 +285,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to 
implement `SelectableExpression` @@ -337,6 +343,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -367,6 +374,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -426,6 +434,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -455,6 +464,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -512,6 +522,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -542,6 +553,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant 
requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -601,6 +613,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -629,6 +642,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -684,6 +698,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` @@ -714,6 +729,7 @@ error[E0277]: Cannot select `posts::columns::title` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::title)` to implement `SelectableExpression` = note: 2 redundant requirements hidden = note: required for `diesel::expression::select_by::SelectBy` to implement `SelectableExpression` diff --git a/diesel_compile_tests/tests/fail/selecting_multiple_columns_requires_all_must_be_from_selectable_table.stderr b/diesel_compile_tests/tests/fail/selecting_multiple_columns_requires_all_must_be_from_selectable_table.stderr index ac012d79de1f..eb77850042fc 100644 --- a/diesel_compile_tests/tests/fail/selecting_multiple_columns_requires_all_must_be_from_selectable_table.stderr +++ 
b/diesel_compile_tests/tests/fail/selecting_multiple_columns_requires_all_must_be_from_selectable_table.stderr @@ -12,6 +12,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::user_id)` to implement `SelectableExpression` = note: required for `SelectStatement>` to implement `SelectDsl<(posts::columns::id, posts::columns::user_id)>` @@ -29,6 +30,7 @@ error[E0277]: Cannot select `posts::columns::user_id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, posts::columns::user_id)` to implement `SelectableExpression` = note: required for `SelectStatement>` to implement `SelectDsl<(posts::columns::id, posts::columns::user_id)>` @@ -57,10 +59,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> tests/fail/selecting_multiple_columns_requires_all_must_be_from_selectable_table.rs:15:9 @@ -86,6 +91,7 @@ error[E0277]: Cannot select `posts::columns::id` from `users::table` >>> >> >> + >> = note: required for `(posts::columns::id, users::columns::name)` to implement `SelectableExpression` = note: required for `SelectStatement>` to implement `SelectDsl<(posts::columns::id, users::columns::name)>` @@ -114,10 +120,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> 
tests/fail/selecting_multiple_columns_requires_all_must_be_from_selectable_table.rs:15:9 diff --git a/diesel_compile_tests/tests/fail/update_requires_column_be_from_same_table.stderr b/diesel_compile_tests/tests/fail/update_requires_column_be_from_same_table.stderr index 20910ce77d18..d9dc72bc82fc 100644 --- a/diesel_compile_tests/tests/fail/update_requires_column_be_from_same_table.stderr +++ b/diesel_compile_tests/tests/fail/update_requires_column_be_from_same_table.stderr @@ -66,10 +66,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::title` to implement `AppearsOnTable` --> tests/fail/update_requires_column_be_from_same_table.rs:15:9 diff --git a/diesel_compile_tests/tests/fail/update_requires_valid_where_clause.stderr b/diesel_compile_tests/tests/fail/update_requires_valid_where_clause.stderr index 8b5b49db19bd..8df011166927 100644 --- a/diesel_compile_tests/tests/fail/update_requires_valid_where_clause.stderr +++ b/diesel_compile_tests/tests/fail/update_requires_valid_where_clause.stderr @@ -45,10 +45,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> tests/fail/update_requires_valid_where_clause.rs:13:9 @@ -83,10 +86,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as 
TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::id` to implement `AppearsOnTable` --> tests/fail/update_requires_valid_where_clause.rs:13:9 diff --git a/diesel_compile_tests/tests/fail/user_defined_functions_follow_same_selection_rules.stderr b/diesel_compile_tests/tests/fail/user_defined_functions_follow_same_selection_rules.stderr index 6393ac83363a..eca81fbfdb82 100644 --- a/diesel_compile_tests/tests/fail/user_defined_functions_follow_same_selection_rules.stderr +++ b/diesel_compile_tests/tests/fail/user_defined_functions_follow_same_selection_rules.stderr @@ -44,10 +44,13 @@ error[E0277]: the trait bound `users::table: TableNotEqual` is not = help: the following other types implement trait `TableNotEqual`: as TableNotEqual> as TableNotEqual> + as TableNotEqual> + as TableNotEqual> >> + >> > >> - > + and $N others = note: required for `users::table` to implement `AppearsInFromClause` note: required for `posts::columns::title` to implement `AppearsOnTable` --> tests/fail/user_defined_functions_follow_same_selection_rules.rs:16:9 diff --git a/diesel_derives/src/as_expression.rs b/diesel_derives/src/as_expression.rs index d6f820e3cfa1..19f133416dab 100644 --- a/diesel_derives/src/as_expression.rs +++ b/diesel_derives/src/as_expression.rs @@ -1,5 +1,6 @@ use proc_macro2::TokenStream; use quote::quote; +use syn::parse_quote; use syn::DeriveInput; use syn::Result; @@ -18,19 +19,29 @@ pub fn derive(item: DeriveInput) -> Result { let struct_ty = ty_for_foreign_derive(&item, &model)?; - let (impl_generics, ..) 
= item.generics.split_for_impl(); - let lifetimes = item.generics.lifetimes().collect::>(); - let ty_params = item.generics.type_params().collect::>(); - let const_params = item.generics.const_params().collect::>(); + // type generics are already handled by `ty_for_foreign_derive` + let (impl_generics_plain, _, where_clause_plain) = item.generics.split_for_impl(); + + let mut generics = item.generics.clone(); + generics.params.push(parse_quote!('__expr)); + + let (impl_generics, _, where_clause) = generics.split_for_impl(); + + let mut generics2 = generics.clone(); + generics2.params.push(parse_quote!('__expr2)); + let (impl_generics2, _, where_clause2) = generics2.split_for_impl(); let tokens = model.sql_types.iter().map(|sql_type| { - let lifetimes = &lifetimes; - let ty_params = &ty_params; - let const_params = &const_params; + + let mut to_sql_generics = item.generics.clone(); + to_sql_generics.params.push(parse_quote!(__DB)); + to_sql_generics.make_where_clause().predicates.push(parse_quote!(__DB: diesel::backend::Backend)); + to_sql_generics.make_where_clause().predicates.push(parse_quote!(Self: ToSql<#sql_type, __DB>)); + let (to_sql_impl_generics, _, to_sql_where_clause) = to_sql_generics.split_for_impl(); let tokens = quote!( - impl<'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression<#sql_type> - for &'expr #struct_ty + impl #impl_generics AsExpression<#sql_type> + for &'__expr #struct_ty #where_clause { type Expression = Bound<#sql_type, Self>; @@ -39,8 +50,8 @@ pub fn derive(item: DeriveInput) -> Result { } } - impl<'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression> - for &'expr #struct_ty + impl #impl_generics AsExpression> + for &'__expr #struct_ty #where_clause { type Expression = Bound, Self>; @@ -49,8 +60,8 @@ pub fn derive(item: DeriveInput) -> Result { } } - impl<'expr2, 'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression<#sql_type> - for &'expr2 &'expr #struct_ty + impl #impl_generics2 
AsExpression<#sql_type> + for &'__expr2 &'__expr #struct_ty #where_clause2 { type Expression = Bound<#sql_type, Self>; @@ -59,8 +70,8 @@ pub fn derive(item: DeriveInput) -> Result { } } - impl<'expr2, 'expr, #(#lifetimes,)* #(#ty_params,)* #(#const_params,)*> AsExpression> - for &'expr2 &'expr #struct_ty + impl #impl_generics2 AsExpression> + for &'__expr2 &'__expr #struct_ty #where_clause2 { type Expression = Bound, Self>; @@ -69,11 +80,8 @@ pub fn derive(item: DeriveInput) -> Result { } } - impl<#(#lifetimes,)* #(#ty_params,)* __DB, #(#const_params,)*> diesel::serialize::ToSql, __DB> - for #struct_ty - where - __DB: diesel::backend::Backend, - Self: ToSql<#sql_type, __DB>, + impl #to_sql_impl_generics diesel::serialize::ToSql, __DB> + for #struct_ty #to_sql_where_clause { fn to_sql<'__b>(&'__b self, out: &mut Output<'__b, '_, __DB>) -> serialize::Result { @@ -88,7 +96,7 @@ pub fn derive(item: DeriveInput) -> Result { quote!( #tokens - impl #impl_generics AsExpression<#sql_type> for #struct_ty { + impl #impl_generics_plain AsExpression<#sql_type> for #struct_ty #where_clause_plain { type Expression = Bound<#sql_type, Self>; fn as_expression(self) -> Self::Expression { @@ -96,7 +104,7 @@ pub fn derive(item: DeriveInput) -> Result { } } - impl #impl_generics AsExpression> for #struct_ty { + impl #impl_generics_plain AsExpression> for #struct_ty #where_clause_plain { type Expression = Bound, Self>; fn as_expression(self) -> Self::Expression { diff --git a/diesel_derives/src/from_sql_row.rs b/diesel_derives/src/from_sql_row.rs index d9cce837b650..3753b41cb88c 100644 --- a/diesel_derives/src/from_sql_row.rs +++ b/diesel_derives/src/from_sql_row.rs @@ -12,10 +12,9 @@ pub fn derive(mut item: DeriveInput) -> Result { let struct_ty = ty_for_foreign_derive(&item, &model)?; { - let where_clause = item - .generics - .where_clause - .get_or_insert(parse_quote!(where)); + item.generics.params.push(parse_quote!(__DB)); + item.generics.params.push(parse_quote!(__ST)); + let 
where_clause = item.generics.make_where_clause(); where_clause .predicates .push(parse_quote!(__DB: diesel::backend::Backend)); @@ -26,17 +25,13 @@ pub fn derive(mut item: DeriveInput) -> Result { .predicates .push(parse_quote!(Self: FromSql<__ST, __DB>)); } - let (_, _, where_clause) = item.generics.split_for_impl(); - - let lifetimes = item.generics.lifetimes().collect::>(); - let ty_params = item.generics.type_params().collect::>(); - let const_params = item.generics.const_params().collect::>(); + let (impl_generics, _, where_clause) = item.generics.split_for_impl(); Ok(wrap_in_dummy_mod(quote! { use diesel::deserialize::{self, FromSql, Queryable}; // Need to put __ST and __DB after lifetimes but before const params - impl<#(#lifetimes,)* __ST, __DB, #(#ty_params,)* #(#const_params,)*> Queryable<__ST, __DB> for #struct_ty + impl #impl_generics Queryable<__ST, __DB> for #struct_ty #where_clause { type Row = Self; diff --git a/diesel_derives/src/lib.rs b/diesel_derives/src/lib.rs index c16532bec75f..aa80ab08895f 100644 --- a/diesel_derives/src/lib.rs +++ b/diesel_derives/src/lib.rs @@ -1634,8 +1634,99 @@ pub fn table_proc(input: TokenStream) -> TokenStream { /// * `diesel::sql_types::Timestamp` /// /// Support for additional types can be added by providing manual implementations of -/// `HasSqlType`, `FromSql` and `ToSql` for the corresponding type + the generated -/// database backend. +/// `HasSqlType`, `FromSql` and `ToSql` for the corresponding type, all databases included +/// in your enum, and the backend generated by this derive called `MultiBackend`. 
+/// For example to support a custom enum `MyEnum` with the custom SQL type `MyInteger`: +/// ``` +/// extern crate diesel; +/// use diesel::backend::Backend; +/// use diesel::deserialize::{self, FromSql, FromSqlRow}; +/// use diesel::serialize::{self, IsNull, ToSql}; +/// use diesel::AsExpression; +/// use diesel::sql_types::{HasSqlType, SqlType}; +/// use diesel::prelude::*; +/// +/// #[derive(diesel::MultiConnection)] +/// pub enum AnyConnection { +/// # #[cfg(feature = "postgres")] +/// Postgresql(diesel::PgConnection), +/// # #[cfg(feature = "mysql")] +/// Mysql(diesel::MysqlConnection), +/// # #[cfg(feature = "sqlite")] +/// Sqlite(diesel::SqliteConnection), +/// } +/// +/// // defining an custom SQL type is optional +/// // you can also use types from `diesel::sql_types` +/// #[derive(Copy, Clone, Debug, SqlType)] +/// #[diesel(postgres_type(name = "Int4"))] +/// #[diesel(mysql_type(name = "Long"))] +/// #[diesel(sqlite_type(name = "Integer"))] +/// struct MyInteger; +/// +/// +/// // our custom enum +/// #[repr(i32)] +/// #[derive(Debug, Clone, Copy, AsExpression, FromSqlRow)] +/// #[diesel(sql_type = MyInteger)] +/// pub enum MyEnum { +/// A = 1, +/// B = 2, +/// } +/// +/// // The `MultiBackend` type is generated by `#[derive(diesel::MultiConnection)]` +/// // This part is only required if you define a custom sql type +/// impl HasSqlType for MultiBackend { +/// fn metadata(lookup: &mut Self::MetadataLookup) -> Self::TypeMetadata { +/// // The `lookup_sql_type` function is exposed by the `MultiBackend` type +/// MultiBackend::lookup_sql_type::(lookup) +/// } +/// } +/// +/// impl FromSql for MyEnum { +/// fn from_sql(bytes: ::RawValue<'_>) -> deserialize::Result { +/// // The `from_sql` function is exposed by the `RawValue` type of the +/// // `MultiBackend` type +/// // This requires a `FromSql` impl for each backend +/// bytes.from_sql::() +/// } +/// } +/// +/// impl ToSql for MyEnum { +/// fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, 
MultiBackend>) -> serialize::Result { +/// /// `set_value` expects a tuple consisting of the target SQL type +/// /// and self for `MultiBackend` +/// /// This requires a `ToSql` impl for each backend +/// out.set_value((MyInteger, self)); +/// Ok(IsNull::No) +/// } +/// } +/// # #[cfg(feature = "postgres")] +/// # impl ToSql for MyEnum { +/// # fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, diesel::pg::Pg>) -> serialize::Result { todo!() } +/// # } +/// # #[cfg(feature = "mysql")] +/// # impl ToSql for MyEnum { +/// # fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, diesel::mysql::Mysql>) -> serialize::Result { todo!() } +/// # } +/// # #[cfg(feature = "sqlite")] +/// # impl ToSql for MyEnum { +/// # fn to_sql<'b>(&'b self, out: &mut serialize::Output<'b, '_, diesel::sqlite::Sqlite>) -> serialize::Result { todo!() } +/// # } +/// # #[cfg(feature = "postgres")] +/// # impl FromSql for MyEnum { +/// # fn from_sql(bytes: ::RawValue<'_>) -> deserialize::Result { todo!() } +/// # } +/// # #[cfg(feature = "mysql")] +/// # impl FromSql for MyEnum { +/// # fn from_sql(bytes: ::RawValue<'_>) -> deserialize::Result { todo!() } +/// # } +/// # #[cfg(feature = "sqlite")] +/// # impl FromSql for MyEnum { +/// # fn from_sql(bytes: ::RawValue<'_>) -> deserialize::Result { todo!() } +/// # } +/// # fn main() {} +/// ``` #[proc_macro_derive(MultiConnection)] pub fn derive_multiconnection(input: TokenStream) -> TokenStream { multiconnection::derive(syn::parse_macro_input!(input)).into() diff --git a/diesel_derives/src/multiconnection.rs b/diesel_derives/src/multiconnection.rs index b4ba74214bda..cd2bde892e24 100644 --- a/diesel_derives/src/multiconnection.rs +++ b/diesel_derives/src/multiconnection.rs @@ -212,6 +212,13 @@ fn generate_connection_impl( } }); + let impl_begin_test_transaction = connection_types.iter().map(|c| { + let ident = c.name; + quote::quote! 
{ + Self::#ident(conn) => conn.begin_test_transaction() + } + }); + let r2d2_impl = if cfg!(feature = "r2d2") { let impl_ping_r2d2 = connection_types.iter().map(|c| { let ident = c.name; @@ -295,6 +302,9 @@ fn generate_connection_impl( let mut query_builder = self.query_builder.duplicate(); self.inner.to_sql(&mut query_builder, &self.backend)?; pass.push_sql(&query_builder.finish()); + if !self.inner.is_safe_to_cache_prepared(&self.backend)? { + pass.unsafe_to_cache_prepared(); + } if let Some((outer_collector, lookup)) = pass.bind_collector() { C::handle_inner_pass(outer_collector, lookup, &self.backend, &self.inner)?; } @@ -356,6 +366,12 @@ fn generate_connection_impl( #(#instrumentation_impl,)* } } + + fn begin_test_transaction(&mut self) -> diesel::QueryResult<()> { + match self { + #(#impl_begin_test_transaction,)* + } + } } impl LoadConnection for MultiConnection @@ -757,6 +773,18 @@ fn generate_bind_collector(connection_types: &[ConnectionVariant]) -> TokenStrea } }); + let push_null_to_inner_collector = connection_types + .iter() + .map(|c| { + let ident = c.name; + quote::quote! { + (Self::#ident(ref mut bc), super::backend::MultiTypeMetadata{ #ident: Some(metadata), .. }) => { + bc.push_null_value(metadata)?; + } + } + }) + .collect::>(); + let push_bound_value_super_traits = connection_types .iter() .map(|c| { @@ -948,20 +976,14 @@ fn generate_bind_collector(connection_types: &[ConnectionVariant]) -> TokenStrea // set the `inner` field of `BindValue` to something for the `None` // case. Therefore we need to handle that explicitly here. // - // We just use a specific sql + rust type here to workaround - // the fact that rustc is not able to see that the underlying DBMS - // must support that sql + rust type combination. 
All tested DBMS - // (postgres, sqlite, mysql, oracle) seems to not care about the - // actual type here and coerce null values to the "right" type - // anyway - BindValue { - inner: Some(InnerBindValue { - value: InnerBindValueKind::Null, - push_bound_value_to_collector: &PushBoundValueToCollectorImpl { - p: std::marker::PhantomData::<(diesel::sql_types::Integer, i32)> - } - }) + let metadata = >::metadata(metadata_lookup); + match (self, metadata) { + #(#push_null_to_inner_collector)* + _ => { + unreachable!("We have matching metadata") + }, } + return Ok(()); } else { out.into_inner() } @@ -972,6 +994,14 @@ fn generate_bind_collector(connection_types: &[ConnectionVariant]) -> TokenStrea Ok(()) } + + fn push_null_value(&mut self, metadata: super::backend::MultiTypeMetadata) -> diesel::QueryResult<()> { + match (self, metadata) { + #(#push_null_to_inner_collector)* + _ => unreachable!("We have matching metadata"), + } + Ok(()) + } } #(#to_sql_impls)* @@ -1368,8 +1398,8 @@ fn generate_backend(connection_types: &[ConnectionVariant]) -> TokenStream { let type_metadata_variants = connection_types.iter().map(|c| { let ident = c.name; let ty = c.ty; - quote::quote!{ - #ident(<<#ty as diesel::Connection>::Backend as diesel::sql_types::TypeMetadata>::TypeMetadata) + quote::quote! 
{ + pub(super) #ident: Option<<<#ty as diesel::Connection>::Backend as diesel::sql_types::TypeMetadata>::TypeMetadata> } }); @@ -1456,7 +1486,7 @@ fn generate_backend(connection_types: &[ConnectionVariant]) -> TokenStream { quote::quote!{ if let Some(lookup) = <#ty as diesel::internal::derives::multiconnection::MultiConnectionHelper>::from_any(lookup) { - return MultiTypeMetadata::#name(<<#ty as diesel::Connection>::Backend as diesel::sql_types::HasSqlType>::metadata(lookup)); + ret.#name = Some(<<#ty as diesel::Connection>::Backend as diesel::sql_types::HasSqlType>::metadata(lookup)); } } @@ -1480,8 +1510,9 @@ fn generate_backend(connection_types: &[ConnectionVariant]) -> TokenStream { pub fn lookup_sql_type(lookup: &mut dyn std::any::Any) -> MultiTypeMetadata where #(#lookup_sql_type_bounds,)* { + let mut ret = MultiTypeMetadata::default(); #(#lookup_impl)* - unreachable!() + ret } } @@ -1519,7 +1550,9 @@ fn generate_backend(connection_types: &[ConnectionVariant]) -> TokenStream { type BindCollector<'a> = super::bind_collector::MultiBindCollector<'a>; } - pub enum MultiTypeMetadata { + #[derive(Default)] + #[allow(non_snake_case)] + pub struct MultiTypeMetadata { #(#type_metadata_variants,)* } diff --git a/diesel_derives/src/parsers/belongs_to.rs b/diesel_derives/src/parsers/belongs_to.rs index 50e00957c2d2..653e30deecfa 100644 --- a/diesel_derives/src/parsers/belongs_to.rs +++ b/diesel_derives/src/parsers/belongs_to.rs @@ -6,7 +6,7 @@ use syn::{Ident, TypePath}; use crate::util::{parse_eq, unknown_attribute, BELONGS_TO_NOTE}; enum Attr { - ForeignKey(Ident, Ident), + ForeignKey(Ident), } impl Parse for Attr { @@ -15,7 +15,7 @@ impl Parse for Attr { let name_str = name.to_string(); match &*name_str { - "foreign_key" => Ok(Attr::ForeignKey(name, parse_eq(input, BELONGS_TO_NOTE)?)), + "foreign_key" => Ok(Attr::ForeignKey(parse_eq(input, BELONGS_TO_NOTE)?)), _ => Err(unknown_attribute(&name, &["foreign_key"])), } @@ -39,7 +39,7 @@ impl Parse for BelongsTo { for attr 
in Punctuated::::parse_terminated(input)? { match attr { - Attr::ForeignKey(_, value) => foreign_key = Some(value), + Attr::ForeignKey(value) => foreign_key = Some(value), } } diff --git a/diesel_derives/src/parsers/mysql_type.rs b/diesel_derives/src/parsers/mysql_type.rs index baa2dbe66e2f..0266d9598175 100644 --- a/diesel_derives/src/parsers/mysql_type.rs +++ b/diesel_derives/src/parsers/mysql_type.rs @@ -6,7 +6,7 @@ use syn::{Ident, LitStr}; use crate::util::{parse_eq, unknown_attribute, MYSQL_TYPE_NOTE}; enum Attr { - Name(Ident, LitStr), + Name(LitStr), } impl Parse for Attr { @@ -15,7 +15,7 @@ impl Parse for Attr { let name_str = name.to_string(); match &*name_str { - "name" => Ok(Attr::Name(name, parse_eq(input, MYSQL_TYPE_NOTE)?)), + "name" => Ok(Attr::Name(parse_eq(input, MYSQL_TYPE_NOTE)?)), _ => Err(unknown_attribute(&name, &["name"])), } @@ -32,7 +32,7 @@ impl Parse for MysqlType { for attr in Punctuated::::parse_terminated(input)? { match attr { - Attr::Name(_, value) => name = Some(value), + Attr::Name(value) => name = Some(value), } } diff --git a/diesel_derives/src/parsers/sqlite_type.rs b/diesel_derives/src/parsers/sqlite_type.rs index 063ed65f4734..a2b190c39874 100644 --- a/diesel_derives/src/parsers/sqlite_type.rs +++ b/diesel_derives/src/parsers/sqlite_type.rs @@ -6,7 +6,7 @@ use syn::{Ident, LitStr}; use crate::util::{parse_eq, unknown_attribute, SQLITE_TYPE_NOTE}; enum Attr { - Name(Ident, LitStr), + Name(LitStr), } impl Parse for Attr { @@ -15,7 +15,7 @@ impl Parse for Attr { let name_str = name.to_string(); match &*name_str { - "name" => Ok(Attr::Name(name, parse_eq(input, SQLITE_TYPE_NOTE)?)), + "name" => Ok(Attr::Name(parse_eq(input, SQLITE_TYPE_NOTE)?)), _ => Err(unknown_attribute(&name, &["name"])), } @@ -32,7 +32,7 @@ impl Parse for SqliteType { for attr in Punctuated::::parse_terminated(input)? 
{ match attr { - Attr::Name(_, value) => name = Some(value), + Attr::Name(value) => name = Some(value), } } diff --git a/diesel_derives/src/table.rs b/diesel_derives/src/table.rs index ca2cfd9d8705..53545cb597c0 100644 --- a/diesel_derives/src/table.rs +++ b/diesel_derives/src/table.rs @@ -12,7 +12,7 @@ pub(crate) fn expand(input: TableDecl) -> TokenStream { more than 128 columns. Consider using less columns." } else if input.column_defs.len() > 64 { "Table contains more than 64 columns. Consider enabling the \ - `32-column-tables` feature to enable diesels support for \ + `128-column-tables` feature to enable diesels support for \ tables with more than 64 columns." } else if input.column_defs.len() > 32 { "Table contains more than 32 columns. Consider enabling the \ @@ -172,6 +172,36 @@ pub(crate) fn expand(input: TableDecl) -> TokenStream { { type Count = diesel::query_source::Once; } + + impl diesel::JoinTo> for table + where + diesel::query_builder::Tablesample: diesel::JoinTo
, + TSM: diesel::internal::table_macro::TablesampleMethod + { + type FromClause = diesel::query_builder::Tablesample; + type OnClause = as diesel::JoinTo
>::OnClause; + + fn join_target(__diesel_internal_rhs: diesel::query_builder::Tablesample) -> (Self::FromClause, Self::OnClause) { + let (_, __diesel_internal_on_clause) = diesel::query_builder::Tablesample::::join_target(table); + (__diesel_internal_rhs, __diesel_internal_on_clause) + } + } + + impl diesel::query_source::AppearsInFromClause> + for table + where + TSM: diesel::internal::table_macro::TablesampleMethod + { + type Count = diesel::query_source::Once; + } + + impl diesel::query_source::AppearsInFromClause
+ for diesel::query_builder::Tablesample + where + TSM: diesel::internal::table_macro::TablesampleMethod + { + type Count = diesel::query_source::Once; + } }) } else { None @@ -667,6 +697,16 @@ fn expand_column_def(column_def: &ColumnDef) -> TokenStream { type Count = diesel::query_source::Once; } impl diesel::SelectableExpression> for #column_name {} + + impl diesel::query_source::AppearsInFromClause> + for #column_name + where + TSM: diesel::internal::table_macro::TablesampleMethod + { + type Count = diesel::query_source::Once; + } + impl diesel::SelectableExpression> + for #column_name where TSM: diesel::internal::table_macro::TablesampleMethod {} }) } else { None diff --git a/diesel_derives/tests/as_expression.rs b/diesel_derives/tests/as_expression.rs index cf8f739979cf..e7ff6b7fff9e 100644 --- a/diesel_derives/tests/as_expression.rs +++ b/diesel_derives/tests/as_expression.rs @@ -2,9 +2,9 @@ use diesel::backend::Backend; use diesel::deserialize::{FromSql, FromSqlRow}; use diesel::expression::AsExpression; use diesel::serialize::{Output, ToSql}; +use diesel::sql_types::Binary; use diesel::sql_types::Text; use diesel::*; -use std::convert::TryInto; use crate::helpers::connection; @@ -58,3 +58,9 @@ fn struct_with_sql_type() { .get_result(conn); assert!(data.is_err()); } + +// check that defaulted type parameters compile correctly +// This is a regression test for https://github.com/diesel-rs/diesel/issues/3902 +#[derive(AsExpression, FromSqlRow)] +#[diesel(sql_type = Binary)] +pub struct Ewkb = Vec>(pub B); diff --git a/diesel_migrations/src/errors.rs b/diesel_migrations/src/errors.rs index b7239c8e6357..1fa12fbe974b 100644 --- a/diesel_migrations/src/errors.rs +++ b/diesel_migrations/src/errors.rs @@ -2,7 +2,6 @@ //! These are split into multiple segments, depending on //! where in the migration process an error occurs. 
-use std::convert::From; use std::error::Error; use std::path::PathBuf; use std::{fmt, io}; diff --git a/diesel_tests/Cargo.toml b/diesel_tests/Cargo.toml index 5fdd4a5f35dd..3ce7cf50c300 100644 --- a/diesel_tests/Cargo.toml +++ b/diesel_tests/Cargo.toml @@ -21,7 +21,9 @@ ipnet = { version = "2.5.0" } ipnetwork = ">=0.12.2, <0.21.0" bigdecimal = ">= 0.0.13, < 0.5.0" rand = "0.8.4" -libsqlite3-sys = { version = "0.27", optional = true } +libsqlite3-sys = { version = "0.28", optional = true } +pq-sys = { version = "0.5", optional = true } +pq-src = { version = "0.1.1", optional = true } [features] default = [] diff --git a/diesel_tests/tests/alias.rs b/diesel_tests/tests/alias.rs index 2ad0792323c8..2f933e588cce 100644 --- a/diesel_tests/tests/alias.rs +++ b/diesel_tests/tests/alias.rs @@ -20,6 +20,7 @@ fn selecting_basic_data() { user_alias.field(users::name), user_alias.field(users::hair_color), )) + .order(user_alias.field(users::name)) .load(connection) .unwrap(); diff --git a/diesel_tests/tests/boxed_queries.rs b/diesel_tests/tests/boxed_queries.rs index 1caf399dc48e..bcdce09a493c 100644 --- a/diesel_tests/tests/boxed_queries.rs +++ b/diesel_tests/tests/boxed_queries.rs @@ -37,7 +37,7 @@ fn boxed_queries_can_differ_conditionally() { } let source = |query| match query { - Query::All => users::table.into_boxed(), + Query::All => users::table.order(users::name.desc()).into_boxed(), Query::Ordered => users::table.order(users::name.desc()).into_boxed(), Query::One => users::table .filter(users::name.ne("jim")) @@ -51,7 +51,7 @@ fn boxed_queries_can_differ_conditionally() { let jim = find_user_by_name("Jim", connection); let all = source(Query::All).load(connection); - let expected_data = vec![sean.clone(), tess.clone(), jim.clone()]; + let expected_data = vec![tess.clone(), sean.clone(), jim.clone()]; assert_eq!(Ok(expected_data), all); let ordered = source(Query::Ordered).load(connection); @@ -69,6 +69,7 @@ fn boxed_queries_implement_select_dsl() { let data = 
users::table .into_boxed() .select(users::name) + .order(users::name) .load::(connection); assert_eq!(Ok(vec!["Sean".into(), "Tess".into()]), data); } @@ -92,7 +93,11 @@ fn boxed_queries_implement_filter_dsl() { #[test] fn boxed_queries_implement_limit_dsl() { let connection = &mut connection_with_sean_and_tess_in_users_table(); - let data = users::table.into_boxed().limit(1).load(connection); + let data = users::table + .into_boxed() + .limit(1) + .order(users::id) + .load(connection); let expected_data = vec![find_user_by_name("Sean", connection)]; assert_eq!(Ok(expected_data), data); } @@ -104,6 +109,7 @@ fn boxed_queries_implement_offset_dsl() { .into_boxed() .limit(1) .offset(1) + .order(users::id) .load(connection); let expected_data = vec![find_user_by_name("Tess", connection)]; assert_eq!(Ok(expected_data), data); @@ -154,6 +160,7 @@ fn boxed_queries_implement_or_filter() { .into_boxed() .filter(users::name.eq("Sean")) .or_filter(users::name.eq("Tess")) + .order(users::name) .load(connection); let expected = vec![ find_user_by_name("Sean", connection), diff --git a/diesel_tests/tests/combination.rs b/diesel_tests/tests/combination.rs index ea6a05e68cdf..368d9810403d 100644 --- a/diesel_tests/tests/combination.rs +++ b/diesel_tests/tests/combination.rs @@ -13,7 +13,7 @@ fn union() { NewUser::new("Jim", None), ]; insert_into(users).values(&data).execute(conn).unwrap(); - let data = users.load::(conn).unwrap(); + let data = users.order(id).load::(conn).unwrap(); let sean = &data[0]; let tess = &data[1]; let jim = &data[2]; @@ -43,7 +43,7 @@ fn union_all() { NewUser::new("Jim", None), ]; insert_into(users).values(&data).execute(conn).unwrap(); - let data = users.load::(conn).unwrap(); + let data = users.order(id).load::(conn).unwrap(); let sean = &data[0]; let tess = &data[1]; let jim = &data[2]; @@ -75,10 +75,10 @@ fn intersect() { NewUser::new("Jim", None), ]; insert_into(users).values(&data).execute(conn).unwrap(); - let data = users.load::(conn).unwrap(); - 
let _sean = &data[0]; - let tess = &data[1]; - let _jim = &data[2]; + let data = users.order(name).load::(conn).unwrap(); + let _sean = &data[1]; + let tess = &data[2]; + let _jim = &data[0]; let expected_data = vec![User::new(tess.id, "Tess")]; let data: Vec<_> = users @@ -171,6 +171,7 @@ fn as_subquery_for_eq_in() { let out = posts::table .filter(posts::user_id.eq_any(subquery)) .select(posts::title) + .order_by(posts::title) .load::(conn) .unwrap(); diff --git a/diesel_tests/tests/deserialization.rs b/diesel_tests/tests/deserialization.rs index ed3af90cb939..53e7b8bb6c9b 100644 --- a/diesel_tests/tests/deserialization.rs +++ b/diesel_tests/tests/deserialization.rs @@ -22,11 +22,17 @@ fn generated_queryable_allows_lifetimes() { }; assert_eq!( Ok(expected_user), - users.select((id, name)).first(connection) + users.select((id, name)).order(id).first(connection) ); assert_eq!( - users.select((id, name)).first::>(connection), - users.select(CowUser::as_select()).first(connection) + users + .select((id, name)) + .order(id) + .first::>(connection), + users + .select(CowUser::as_select()) + .order(id) + .first(connection) ); } diff --git a/diesel_tests/tests/distinct.rs b/diesel_tests/tests/distinct.rs index 4690918a3702..abf240dab462 100644 --- a/diesel_tests/tests/distinct.rs +++ b/diesel_tests/tests/distinct.rs @@ -178,7 +178,7 @@ fn distinct_of_multiple_columns() { .execute(&mut connection) .unwrap(); let posts = posts::table - .order(posts::id) + .order(posts::title) .load::(&mut connection) .unwrap(); diff --git a/diesel_tests/tests/expressions/mod.rs b/diesel_tests/tests/expressions/mod.rs index 893bfeb77162..3f074c123e2d 100644 --- a/diesel_tests/tests/expressions/mod.rs +++ b/diesel_tests/tests/expressions/mod.rs @@ -238,6 +238,7 @@ fn function_with_multiple_arguments() { let expected_data = vec!["black".to_string(), "Tess".to_string()]; let data = users .select(coalesce(hair_color, name)) + .order(id) .load::(connection); assert_eq!(Ok(expected_data), data); 
diff --git a/diesel_tests/tests/expressions/ops.rs b/diesel_tests/tests/expressions/ops.rs index 3a82564c6826..0510aa233c43 100644 --- a/diesel_tests/tests/expressions/ops.rs +++ b/diesel_tests/tests/expressions/ops.rs @@ -8,11 +8,11 @@ fn adding_literal_to_column() { let connection = &mut connection_with_sean_and_tess_in_users_table(); let expected_data = vec![2, 3]; - let data = users.select(id + 1).load(connection); + let data = users.select(id + 1).order(id).load(connection); assert_eq!(Ok(expected_data), data); let expected_data = vec![3, 4]; - let data = users.select(id + 2).load(connection); + let data = users.select(id + 2).order(id).load(connection); assert_eq!(Ok(expected_data), data); } @@ -36,7 +36,7 @@ fn adding_column_to_column() { let connection = &mut connection_with_sean_and_tess_in_users_table(); let expected_data = vec![2, 4]; - let data = users.select(id + id).load(connection); + let data = users.select(id + id).order(id).load(connection); assert_eq!(Ok(expected_data), data); } @@ -47,7 +47,7 @@ fn adding_multiple_times() { let connection = &mut connection_with_sean_and_tess_in_users_table(); let expected_data = vec![4, 5]; - let data = users.select(id + 1 + 2).load(connection); + let data = users.select(id + 1 + 2).order(id).load(connection); assert_eq!(Ok(expected_data), data); } @@ -58,7 +58,7 @@ fn subtracting_literal_from_column() { let connection = &mut connection_with_sean_and_tess_in_users_table(); let expected_data = vec![0, 1]; - let data = users.select(id - 1).load(connection); + let data = users.select(id - 1).order(id).load(connection); assert_eq!(Ok(expected_data), data); } @@ -69,7 +69,7 @@ fn adding_then_subtracting() { let connection = &mut connection_with_sean_and_tess_in_users_table(); let expected_data = vec![2, 3]; - let data = users.select(id + 2 - 1).load(connection); + let data = users.select(id + 2 - 1).order(id).load(connection); assert_eq!(Ok(expected_data), data); } @@ -80,7 +80,7 @@ fn multiplying_column() { let 
connection = &mut connection_with_sean_and_tess_in_users_table(); let expected_data = vec![3, 6]; - let data = users.select(id * 3).load(connection); + let data = users.select(id * 3).order(id).load(connection); assert_eq!(Ok(expected_data), data); } @@ -91,7 +91,7 @@ fn dividing_column() { let connection = &mut connection_with_sean_and_tess_in_users_table(); let expected_data = vec![0, 1]; - let data = users.select(id / 2).load(connection); + let data = users.select(id / 2).order(id).load(connection); assert_eq!(Ok(expected_data), data); } @@ -192,7 +192,7 @@ fn mix_and_match_all_numeric_ops() { .unwrap(); let expected_data = vec![4, 6, 7, 9]; - let data = users.select(id * 3 / 2 + 4 - 1).load(connection); + let data = users.select(id * 3 / 2 + 4 - 1).order(id).load(connection); assert_eq!(Ok(expected_data), data); } diff --git a/diesel_tests/tests/insert_from_select.rs b/diesel_tests/tests/insert_from_select.rs index fe2e80aee9a8..edf14da6fdac 100644 --- a/diesel_tests/tests/insert_from_select.rs +++ b/diesel_tests/tests/insert_from_select.rs @@ -11,7 +11,10 @@ fn insert_from_table() { .execute(conn) .unwrap(); - let data = posts.select((user_id, title, body)).load(conn); + let data = posts + .select((user_id, title, body)) + .order(user_id) + .load(conn); let expected = vec![ (1, String::from("Sean"), None::), (2, String::from("Tess"), None), @@ -29,7 +32,10 @@ fn insert_from_table_reference() { .execute(conn) .unwrap(); - let data = posts.select((user_id, title, body)).load(conn); + let data = posts + .select((user_id, title, body)) + .order(user_id) + .load(conn); let expected = vec![ (1, String::from("Sean"), None::), (2, String::from("Tess"), None), @@ -50,7 +56,11 @@ fn insert_from_select() { .execute(conn) .unwrap(); - let data = posts.select(title).load::(conn).unwrap(); + let data = posts + .select(title) + .order(title) + .load::(conn) + .unwrap(); let expected = vec!["Sean says hi", "Tess says hi"]; assert_eq!(expected, data); } @@ -68,7 +78,11 @@ fn 
insert_from_select_reference() { .execute(conn) .unwrap(); - let data = posts.select(title).load::(conn).unwrap(); + let data = posts + .select(title) + .order(title) + .load::(conn) + .unwrap(); let expected = vec!["Sean says hi", "Tess says hi"]; assert_eq!(expected, data); } @@ -87,7 +101,11 @@ fn insert_from_boxed() { .execute(conn) .unwrap(); - let data = posts.select(title).load::(conn).unwrap(); + let data = posts + .select(title) + .order(title) + .load::(conn) + .unwrap(); let expected = vec!["Sean says hi", "Tess says hi"]; assert_eq!(expected, data); } @@ -105,7 +123,11 @@ fn insert_from_boxed_reference() { .execute(conn) .unwrap(); - let data = posts.select(title).load::(conn).unwrap(); + let data = posts + .select(title) + .order(title) + .load::(conn) + .unwrap(); let expected = vec!["Sean says hi", "Tess says hi"]; assert_eq!(expected, data); } @@ -255,7 +277,11 @@ fn on_conflict_do_nothing_with_select() { assert_eq!(0, inserted_rows); } - let data = posts.select(title).load::(conn).unwrap(); + let data = posts + .select(title) + .order(title) + .load::(conn) + .unwrap(); let expected = vec!["Sean says hi", "Tess says hi"]; assert_eq!(expected, data); } @@ -369,6 +395,7 @@ fn on_conflict_do_update_with_boxed_select() { users .select((id, name.concat(" says hi"))) + .order(id) .into_boxed() .insert_into(posts) .into_columns((user_id, title)) @@ -385,6 +412,7 @@ fn on_conflict_do_update_with_boxed_select() { users .select((id, name.concat(" says hi"))) + .order(id) .into_boxed() .insert_into(posts) .into_columns((user_id, title)) diff --git a/diesel_tests/tests/joins.rs b/diesel_tests/tests/joins.rs index 91e939c165a5..47bf6d38551c 100644 --- a/diesel_tests/tests/joins.rs +++ b/diesel_tests/tests/joins.rs @@ -20,7 +20,7 @@ fn belongs_to() { let tess_post = Post::new(2, 2, "World", None); let expected_data = vec![(seans_post, sean), (tess_post, tess)]; - let source = posts::table.inner_join(users::table); + let source = 
posts::table.inner_join(users::table).order(posts::id); let actual_data: Vec<_> = source.load(connection).unwrap(); assert_eq!(expected_data, actual_data); @@ -40,8 +40,8 @@ fn select_single_from_join() { .unwrap(); let source = posts::table.inner_join(users::table); - let select_name = source.select(users::name); - let select_title = source.select(posts::title); + let select_name = source.select(users::name).order(users::name); + let select_title = source.select(posts::title).order(posts::title); let expected_names = vec!["Sean".to_string(), "Tess".to_string()]; let actual_names: Vec = select_name.load(connection).unwrap(); @@ -75,7 +75,7 @@ fn select_multiple_from_join() { ("Sean".to_string(), "Hello".to_string()), ("Tess".to_string(), "World".to_string()), ]; - let actual_data: Vec<_> = source.load(connection).unwrap(); + let actual_data: Vec<_> = source.order(users::name).load(connection).unwrap(); assert_eq!(expected_data, actual_data); } @@ -102,7 +102,7 @@ fn join_boxed_query() { ("Sean".to_string(), "Hello".to_string()), ("Tess".to_string(), "World".to_string()), ]; - let actual_data: Vec<_> = source.load(connection).unwrap(); + let actual_data: Vec<_> = source.order(users::name).load(connection).unwrap(); assert_eq!(expected_data, actual_data); } diff --git a/diesel_tests/tests/order.rs b/diesel_tests/tests/order.rs index 9addd2641540..7c5c76531900 100644 --- a/diesel_tests/tests/order.rs +++ b/diesel_tests/tests/order.rs @@ -12,10 +12,10 @@ fn order_by_column() { NewUser::new("Jim", None), ]; insert_into(users).values(&data).execute(conn).unwrap(); - let data = users.load::(conn).unwrap(); - let sean = &data[0]; - let tess = &data[1]; - let jim = &data[2]; + let data = users.order(name).load::(conn).unwrap(); + let sean = &data[1]; + let tess = &data[2]; + let jim = &data[0]; let expected_data = vec![ User::new(jim.id, "Jim"), diff --git a/diesel_tests/tests/select.rs b/diesel_tests/tests/select.rs index 4a8ed3b44664..300bdd5af79c 100644 --- 
a/diesel_tests/tests/select.rs +++ b/diesel_tests/tests/select.rs @@ -29,7 +29,11 @@ fn selecting_a_struct() { .unwrap(); let expected_users = vec![NewUser::new("Sean", None), NewUser::new("Tess", None)]; - let actual_users: Vec<_> = users.select((name, hair_color)).load(connection).unwrap(); + let actual_users: Vec<_> = users + .select((name, hair_color)) + .order(name) + .load(connection) + .unwrap(); assert_eq!(expected_users, actual_users); } @@ -42,7 +46,7 @@ fn with_safe_select() { .execute(connection) .unwrap(); - let select_name = users.select(name); + let select_name = users.select(name).order(name); let names: Vec = select_name.load(connection).unwrap(); assert_eq!(vec!["Sean".to_string(), "Tess".to_string()], names); @@ -93,7 +97,7 @@ fn selecting_expression_with_bind_param() { .execute(connection) .unwrap(); - let source = users.select(name.eq("Sean".to_string())); + let source = users.select(name.eq("Sean".to_string())).order(id); let expected_data = vec![true, false]; let actual_data = source.load::(connection).unwrap(); @@ -165,7 +169,11 @@ fn selection_using_subselect() { use crate::schema::posts::dsl::*; let connection = &mut connection_with_sean_and_tess_in_users_table(); - let ids: Vec = users::table.select(users::id).load(connection).unwrap(); + let ids: Vec = users::table + .select(users::id) + .order(users::id) + .load(connection) + .unwrap(); let query = format!( "INSERT INTO posts (user_id, title) VALUES ({}, 'Hello'), ({}, 'World')", ids[0], ids[1] diff --git a/diesel_tests/tests/select_by.rs b/diesel_tests/tests/select_by.rs index be6e8d9f9c11..5b6d16f80620 100644 --- a/diesel_tests/tests/select_by.rs +++ b/diesel_tests/tests/select_by.rs @@ -19,7 +19,11 @@ fn selecting_basic_data() { hair_color: None, }, ]; - let actual_data: Vec<_> = users.select(User::as_select()).load(connection).unwrap(); + let actual_data: Vec<_> = users + .select(User::as_select()) + .order(id) + .load(connection) + .unwrap(); assert_eq!(expected_data, actual_data); 
} @@ -109,7 +113,11 @@ fn selecting_columns_with_different_definition_order() { #[test] fn selection_using_subselect() { let connection = &mut connection_with_sean_and_tess_in_users_table(); - let ids: Vec = users::table.select(users::id).load(connection).unwrap(); + let ids: Vec = users::table + .select(users::id) + .order(users::id) + .load(connection) + .unwrap(); let query = format!( "INSERT INTO posts (user_id, title) VALUES ({}, 'Hello'), ({}, 'World')", ids[0], ids[1] @@ -213,6 +221,7 @@ fn mixed_selectable_and_plain_select() { ]; let actual_data: Vec<_> = users .select((User::as_select(), name)) + .order(id) .load(connection) .unwrap(); assert_eq!(expected_data, actual_data); diff --git a/diesel_tests/tests/types_roundtrip.rs b/diesel_tests/tests/types_roundtrip.rs index 26c7e29791b5..895886eb6f6f 100644 --- a/diesel_tests/tests/types_roundtrip.rs +++ b/diesel_tests/tests/types_roundtrip.rs @@ -465,7 +465,7 @@ mod mysql_types { t.hour(), t.minute(), t.second(), - t.timestamp_subsec_micros() as _, + t.and_utc().timestamp_subsec_micros() as _, false, MysqlTimestampType::MYSQL_TIMESTAMP_DATETIME, 0, @@ -481,7 +481,7 @@ mod mysql_types { t.hour(), t.minute(), t.second(), - t.timestamp_subsec_micros() as _, + t.and_utc().timestamp_subsec_micros() as _, false, MysqlTimestampType::MYSQL_TIMESTAMP_DATETIME, 0, @@ -508,10 +508,12 @@ mod mysql_types { .unwrap(); if seconds != 0 { - seconds = earliest_mysql_date.timestamp() - + ((latest_mysql_date.timestamp() - earliest_mysql_date.timestamp()) % seconds); + seconds = earliest_mysql_date.and_utc().timestamp() + + ((latest_mysql_date.and_utc().timestamp() + - earliest_mysql_date.and_utc().timestamp()) + % seconds); } else { - seconds = earliest_mysql_date.timestamp(); + seconds = earliest_mysql_date.and_utc().timestamp(); } let r = mk_naive_datetime((seconds, nanos)); @@ -607,7 +609,7 @@ pub fn mk_naive_datetime((mut secs, mut nano): (i64, u32)) -> NaiveDateTime { break; } - NaiveDateTime::from_timestamp_opt(secs, 
nano).unwrap() + DateTime::from_timestamp(secs, nano).unwrap().naive_utc() } pub fn mk_naive_time((mut seconds, mut nano): (u32, u32)) -> NaiveTime { @@ -647,7 +649,7 @@ pub fn mk_naive_date(days: u32) -> NaiveDate { let num_days_representable = latest_chrono_date .signed_duration_since(earliest_pg_date) .num_days(); - earliest_pg_date + Duration::days(days as i64 % num_days_representable) + earliest_pg_date + Duration::try_days(days as i64 % num_days_representable).unwrap() } #[cfg(feature = "mysql")] @@ -657,7 +659,7 @@ pub fn mk_naive_date(days: u32) -> NaiveDate { let num_days_representable = latest_mysql_date .signed_duration_since(earliest_mysql_date) .num_days(); - earliest_mysql_date + Duration::days(days as i64 % num_days_representable) + earliest_mysql_date + Duration::try_days(days as i64 % num_days_representable).unwrap() } #[cfg(feature = "sqlite")] @@ -667,7 +669,7 @@ pub fn mk_naive_date(days: u32) -> NaiveDate { let num_days_representable = latest_sqlite_date .signed_duration_since(earliest_sqlite_date) .num_days(); - earliest_sqlite_date + Duration::days(days as i64 % num_days_representable) + earliest_sqlite_date + Duration::try_days(days as i64 % num_days_representable).unwrap() } #[derive(Debug, Clone, Copy)] diff --git a/diesel_tests/tests/update.rs b/diesel_tests/tests/update.rs index 238eb10f50a0..d4ede4cf8a60 100644 --- a/diesel_tests/tests/update.rs +++ b/diesel_tests/tests/update.rs @@ -1,6 +1,9 @@ use crate::schema::*; use diesel::*; +#[cfg(feature = "postgres")] +static USER_INDEX_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); + #[test] fn test_updating_single_column() { use crate::schema::users::dsl::*; @@ -328,6 +331,9 @@ fn upsert_with_sql_literal_for_target() { use diesel::sql_types::Text; use diesel::upsert::*; + // cannot run these tests in parallel due to index creation + let _guard = USER_INDEX_LOCK.lock(); + let connection = &mut connection(); // This index needs to happen before the insert or we'll get a deadlock // 
with any transactions that are trying to get the row lock from insert @@ -366,6 +372,9 @@ fn upsert_with_sql_literal_for_target_with_condition() { use diesel::sql_types::Text; use diesel::upsert::*; + // cannot run these tests in parallel due to index creation + let _guard = USER_INDEX_LOCK.lock(); + let connection = &mut connection(); // This index needs to happen before the insert or we'll get a deadlock // with any transactions that are trying to get the row lock from insert diff --git a/dsl_auto_type/src/auto_type/expression_type_inference.rs b/dsl_auto_type/src/auto_type/expression_type_inference.rs index 5e84406ea13d..840f09fe0282 100644 --- a/dsl_auto_type/src/auto_type/expression_type_inference.rs +++ b/dsl_auto_type/src/auto_type/expression_type_inference.rs @@ -82,6 +82,9 @@ impl TypeInferrer<'_> { expr, type_hint.filter(|h| !matches!(h, syn::Type::Infer(_))), ) { + (syn::Expr::Group(syn::ExprGroup { expr, .. }), type_hint) => { + return self.try_infer_expression_type(expr, type_hint) + } ( syn::Expr::Tuple(syn::ExprTuple { elems: expr_elems, .. 
diff --git a/examples/postgres/composite_types/Cargo.toml b/examples/postgres/composite_types/Cargo.toml index 67b167d52732..e667388582cd 100644 --- a/examples/postgres/composite_types/Cargo.toml +++ b/examples/postgres/composite_types/Cargo.toml @@ -6,5 +6,5 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -diesel = { version = "2.1", features = ["postgres" ] } +diesel = { version = "2.1", path = "../../../diesel", features = ["postgres" ] } dotenvy = "0.15" diff --git a/examples/sqlite/all_about_inserts/Cargo.toml b/examples/sqlite/all_about_inserts/Cargo.toml index 80f6e615d21a..00ff023eb31d 100644 --- a/examples/sqlite/all_about_inserts/Cargo.toml +++ b/examples/sqlite/all_about_inserts/Cargo.toml @@ -9,7 +9,7 @@ diesel = { version = "2.1.0", path = "../../../diesel", features = ["sqlite", "c serde = { version = "1.0.130", features = ["derive"] } serde_json = "1.0.68" chrono = { version = "0.4.20", default-features = false, features = ["clock", "std"] } -libsqlite3-sys = { version = "0.27.0", features = ["bundled"] } +libsqlite3-sys = { version = "0.28.0", features = ["bundled"] } [lib] doc = false diff --git a/examples/sqlite/getting_started_step_1/Cargo.toml b/examples/sqlite/getting_started_step_1/Cargo.toml index a53a790cfa1c..a72fddd5c3b2 100644 --- a/examples/sqlite/getting_started_step_1/Cargo.toml +++ b/examples/sqlite/getting_started_step_1/Cargo.toml @@ -9,7 +9,7 @@ publish = false [dependencies] diesel = { version = "2.1.0", path = "../../../diesel", features = ["sqlite"] } dotenvy = "0.15" -libsqlite3-sys = { version = "0.27.0", features = ["bundled"] } +libsqlite3-sys = { version = "0.28.0", features = ["bundled"] } [[bin]] name = "show_posts" diff --git a/examples/sqlite/getting_started_step_2/Cargo.toml b/examples/sqlite/getting_started_step_2/Cargo.toml index ad212b010c17..9d645210109a 100644 --- a/examples/sqlite/getting_started_step_2/Cargo.toml +++ 
b/examples/sqlite/getting_started_step_2/Cargo.toml @@ -8,7 +8,7 @@ publish = false [dependencies] diesel = { version = "2.1.0", path = "../../../diesel", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] } dotenvy = "0.15" -libsqlite3-sys = { version = "0.27.0", features = ["bundled"] } +libsqlite3-sys = { version = "0.28.0", features = ["bundled"] } [[bin]] name = "show_posts" diff --git a/examples/sqlite/getting_started_step_3/Cargo.toml b/examples/sqlite/getting_started_step_3/Cargo.toml index 1c975637442c..36a106ea72b8 100644 --- a/examples/sqlite/getting_started_step_3/Cargo.toml +++ b/examples/sqlite/getting_started_step_3/Cargo.toml @@ -8,7 +8,7 @@ publish = false [dependencies] diesel = { version = "2.1.0", path = "../../../diesel", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] } dotenvy = "0.15" -libsqlite3-sys = { version = "0.27.0", features = ["bundled"] } +libsqlite3-sys = { version = "0.28.0", features = ["bundled"] } [[bin]] name = "show_posts" diff --git a/rust-toolchain b/rust-toolchain index 7c7053aa2388..32a6ce3c719b 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -1.75.0 +1.76.0