Commit
Merge branch 'master' into std-except-parseDateTIme
liuneng1994 authored Apr 15, 2024
2 parents 1c1054b + e3c09e9 commit 3d87a88
Showing 187 changed files with 2,255 additions and 1,412 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/master.yml
@@ -23,10 +23,10 @@ jobs:
clear-repository: true # to ensure correct digests
fetch-depth: 0 # to get version
filter: tree:0
- name: Check sync PR
- name: Merge sync PR
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 sync_pr.py || :
python3 sync_pr.py --merge || :
- name: Python unit tests
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
15 changes: 12 additions & 3 deletions .github/workflows/pull_request.yml
@@ -83,7 +83,7 @@ jobs:
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
FastTest:
needs: [RunConfig, StyleCheck]
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Fast test') }}
uses: ./.github/workflows/reusable_test.yml
with:
@@ -157,16 +157,25 @@ jobs:
################################# Stage Final #################################
#
FinishCheck:
if: ${{ !failure() && !cancelled() && github.event_name != 'merge_group' }}
needs: [Tests_1, Tests_2]
if: ${{ !failure() && !cancelled() }}
needs: [Tests_1, Tests_2, Builds_1_Report, Builds_2_Report]
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
uses: ClickHouse/checkout@v1
- name: Check sync status
if: ${{ github.event_name == 'merge_group' }}
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 sync_pr.py --status
- name: Finish label
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
- name: Auto merge if approved
if: ${{ github.event_name != 'merge_group' }}
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 merge_pr.py --check-approved
6 changes: 3 additions & 3 deletions .gitmodules
@@ -22,9 +22,6 @@
[submodule "contrib/capnproto"]
path = contrib/capnproto
url = https://github.com/ClickHouse/capnproto
[submodule "contrib/double-conversion"]
path = contrib/double-conversion
url = https://github.com/google/double-conversion
[submodule "contrib/re2"]
path = contrib/re2
url = https://github.com/google/re2
@@ -369,6 +366,9 @@
[submodule "contrib/idna"]
path = contrib/idna
url = https://github.com/ada-url/idna.git
[submodule "contrib/double-conversion"]
path = contrib/double-conversion
url = https://github.com/ClickHouse/double-conversion.git
[submodule "contrib/expected"]
path = contrib/expected
url = https://github.com/TartanLlama/expected
2 changes: 1 addition & 1 deletion contrib/double-conversion
Submodule double-conversion updated 48 files
+71 −0 .github/workflows/ci.yml
+26 −0 .github/workflows/scons.yml
+22 −0 .gitignore
+2 −0 AUTHORS
+9 −3 BUILD
+30 −11 CMakeLists.txt
+116 −0 Changelog
+1 −1 Makefile
+4 −3 README.md
+3 −3 SConstruct
+1 −0 double-conversion/.gitignore
+2 −2 double-conversion/SConscript
+26 −26 double-conversion/bignum-dtoa.cc
+1 −1 double-conversion/bignum-dtoa.h
+308 −278 double-conversion/bignum.cc
+39 −31 double-conversion/bignum.h
+104 −104 double-conversion/cached-powers.cc
+14 −14 double-conversion/cached-powers.h
+0 −57 double-conversion/diy-fp.cc
+40 −21 double-conversion/diy-fp.h
+2 −514 double-conversion/double-conversion.h
+447 −0 double-conversion/double-to-string.cc
+470 −0 double-conversion/double-to-string.h
+33 −33 double-conversion/fast-dtoa.cc
+1 −1 double-conversion/fast-dtoa.h
+14 −14 double-conversion/fixed-dtoa.cc
+1 −1 double-conversion/fixed-dtoa.h
+65 −20 double-conversion/ieee.h
+251 −451 double-conversion/string-to-double.cc
+238 −0 double-conversion/string-to-double.h
+116 −63 double-conversion/strtod.cc
+20 −1 double-conversion/strtod.h
+142 −66 double-conversion/utils.h
+5 −4 msvc/double-conversion.vcxproj
+11 −8 msvc/double-conversion.vcxproj.filters
+1 −0 msvc/run_tests/run_tests.vcxproj
+3 −0 test/cctest/CMakeLists.txt
+24 −4 test/cctest/cctest.cc
+45 −33 test/cctest/cctest.h
+23 −23 test/cctest/checks.h
+8 −3 test/cctest/test-bignum-dtoa.cc
+25 −19 test/cctest/test-bignum.cc
+1,445 −171 test/cctest/test-conversions.cc
+31 −6 test/cctest/test-diy-fp.cc
+10 −10 test/cctest/test-dtoa.cc
+30 −5 test/cctest/test-fast-dtoa.cc
+77 −31 test/cctest/test-ieee.cc
+494 −1 test/cctest/test-strtod.cc
17 changes: 9 additions & 8 deletions contrib/double-conversion-cmake/CMakeLists.txt
@@ -1,14 +1,15 @@
SET(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/double-conversion")

add_library(_double-conversion
"${LIBRARY_DIR}/double-conversion/bignum.cc"
"${LIBRARY_DIR}/double-conversion/bignum-dtoa.cc"
"${LIBRARY_DIR}/double-conversion/cached-powers.cc"
"${LIBRARY_DIR}/double-conversion/diy-fp.cc"
"${LIBRARY_DIR}/double-conversion/double-conversion.cc"
"${LIBRARY_DIR}/double-conversion/fast-dtoa.cc"
"${LIBRARY_DIR}/double-conversion/fixed-dtoa.cc"
"${LIBRARY_DIR}/double-conversion/strtod.cc")
"${LIBRARY_DIR}/double-conversion/bignum-dtoa.cc"
"${LIBRARY_DIR}/double-conversion/bignum.cc"
"${LIBRARY_DIR}/double-conversion/cached-powers.cc"
"${LIBRARY_DIR}/double-conversion/double-to-string.cc"
"${LIBRARY_DIR}/double-conversion/fast-dtoa.cc"
"${LIBRARY_DIR}/double-conversion/fixed-dtoa.cc"
"${LIBRARY_DIR}/double-conversion/string-to-double.cc"
"${LIBRARY_DIR}/double-conversion/strtod.cc"
)

target_include_directories(_double-conversion SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}")

5 changes: 5 additions & 0 deletions docker/test/fuzzer/query-fuzzer-tweaks-users.xml
@@ -26,6 +26,11 @@
<table_function_remote_max_addresses>
<max>200</max>
</table_function_remote_max_addresses>

<!-- Don't waste cycles testing the old interpreter. Spend time in the new analyzer instead -->
<allow_experimental_analyzer>
<readonly/>
</allow_experimental_analyzer>
</constraints>
</default>
</profiles>
5 changes: 3 additions & 2 deletions docker/test/stateless/run.sh
@@ -16,8 +16,9 @@ ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime && echo "$TZ" > /etc/timezone

dpkg -i package_folder/clickhouse-common-static_*.deb
dpkg -i package_folder/clickhouse-common-static-dbg_*.deb
dpkg -i package_folder/clickhouse-odbc-bridge_*.deb
dpkg -i package_folder/clickhouse-library-bridge_*.deb
# Accept failure in the next two commands until 24.4 is released (for compatibility and Bugfix validation run)
dpkg -i package_folder/clickhouse-odbc-bridge_*.deb || true
dpkg -i package_folder/clickhouse-library-bridge_*.deb || true
dpkg -i package_folder/clickhouse-server_*.deb
dpkg -i package_folder/clickhouse-client_*.deb

1 change: 1 addition & 0 deletions docker/test/stress/run.sh
@@ -215,6 +215,7 @@ stop_server
export USE_S3_STORAGE_FOR_MERGE_TREE=1
export RANDOMIZE_OBJECT_KEY_TYPE=1
export ZOOKEEPER_FAULT_INJECTION=1
export THREAD_POOL_FAULT_INJECTION=1
configure

# But we still need default disk because some tables loaded only into it
12 changes: 9 additions & 3 deletions docs/en/development/developer-instruction.md
@@ -83,11 +83,17 @@ ClickHouse uses CMake and Ninja for building.

- Ninja - a smaller build system with a focus on speed, used to execute those CMake-generated tasks.

To install on Ubuntu, Debian or Mint run `sudo apt install cmake ninja-build`.
- ccache - a compiler cache. It speeds up recompilation by caching previous compilations and detecting when the same compilation is being done again.

On CentOS, RedHat run `sudo yum install cmake ninja-build`.
:::tip
As an alternative to ccache, the distributed [sccache](https://github.com/mozilla/sccache) can be used. To prefer it, pass the `-DCOMPILER_CACHE=sccache` flag to CMake.
:::

To install on Ubuntu, Debian or Mint run `sudo apt install cmake ninja-build ccache`.

On CentOS, RedHat run `sudo yum install cmake ninja-build ccache`.

If you use Arch or Gentoo, you probably know it yourself how to install CMake.
If you use Arch or Gentoo, you probably know yourself how to install CMake and the other tools.

## C++ Compiler {#c-compiler}

@@ -25,7 +25,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster]
[ORDER BY expr]
[PRIMARY KEY expr]
[SAMPLE BY expr]
[SETTINGS name=value, clean_deleted_rows=value, ...]
[SETTINGS name=value, ...]
```

For a description of request parameters, see [statement description](../../../sql-reference/statements/create/table.md).
@@ -97,7 +97,7 @@ SELECT * FROM mySecondReplacingMT FINAL;
:::note
`is_deleted` can only be enabled when `ver` is used.

The row is deleted when `OPTIMIZE ... FINAL CLEANUP` or `OPTIMIZE ... FINAL` is used, or if the engine setting `clean_deleted_rows` has been set to `Always`.
The row is deleted when `OPTIMIZE ... FINAL CLEANUP` or `OPTIMIZE ... FINAL` is used.

No matter the operation on the data, the version must be increased. If two inserted rows have the same version number, the last inserted row is the one kept.
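
For illustration, a minimal sketch of this behaviour with hypothetical table and column names (not part of this diff):

``` sql
CREATE TABLE demo_rmt
(
    key UInt64,
    value String,
    version UInt64,
    is_deleted UInt8
)
ENGINE = ReplacingMergeTree(version, is_deleted)
ORDER BY key;

INSERT INTO demo_rmt VALUES (1, 'a', 1, 0); -- original row
INSERT INTO demo_rmt VALUES (1, 'a', 2, 1); -- higher version, flagged as deleted

OPTIMIZE TABLE demo_rmt FINAL CLEANUP; -- physically removes is_deleted = 1 rows
SELECT * FROM demo_rmt;                -- key 1 is no longer returned
```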

155 changes: 155 additions & 0 deletions docs/en/operations/settings/composable-protocols.md
@@ -0,0 +1,155 @@
---
slug: /en/operations/settings/composable-protocols
sidebar_position: 64
sidebar_label: Composable Protocols
---

# Composable Protocols

Composable protocols allow more flexible configuration of TCP access to the ClickHouse server. This configuration can co-exist with or replace the conventional configuration.

## The composable protocols section is denoted as `protocols` in the configuration XML
**Example:**
``` xml
<protocols>

</protocols>
```

## Basic modules define protocol layers
**Example:**
``` xml
<protocols>

<!-- plain_http module -->
<plain_http>
<type>http</type>
</plain_http>

</protocols>
```
where:
- `plain_http` - a name that can be referred to by another layer
- `type` - denotes the protocol handler that will be instantiated to process the data; the set of protocol handlers is predefined:
* `tcp` - native ClickHouse protocol handler
* `http` - HTTP ClickHouse protocol handler
* `tls` - TLS encryption layer
* `proxy1` - PROXYv1 layer
* `mysql` - MySQL compatibility protocol handler
* `postgres` - PostgreSQL compatibility protocol handler
* `prometheus` - Prometheus protocol handler
* `interserver` - ClickHouse interserver handler

:::note
The `gRPC` protocol handler is not implemented for composable protocols.
:::

## An endpoint (i.e. a listening port) is denoted by the `<port>` and (optional) `<host>` tags
**Example:**
``` xml
<protocols>

<plain_http>

<type>http</type>
<!-- endpoint -->
<host>127.0.0.1</host>
<port>8123</port>

</plain_http>

</protocols>
```
If `<host>` is omitted, then `<listen_host>` from the root config is used.

## The layer sequence is defined by the `<impl>` tag, referencing another module
**Example:** definition for HTTPS protocol
``` xml
<protocols>

<!-- http module -->
<plain_http>
<type>http</type>
</plain_http>

<!-- https module configured as a tls layer on top of plain_http module -->
<https>
<type>tls</type>
<impl>plain_http</impl>
<host>127.0.0.1</host>
<port>8443</port>
</https>

</protocols>
```

## An endpoint can be attached to any layer
**Example:** definition for HTTP (port 8123) and HTTPS (port 8443) endpoints
``` xml
<protocols>

<plain_http>
<type>http</type>
<host>127.0.0.1</host>
<port>8123</port>
</plain_http>

<https>
<type>tls</type>
<impl>plain_http</impl>
<host>127.0.0.1</host>
<port>8443</port>
</https>

</protocols>
```

## Additional endpoints can be defined by referencing any module and omitting the `<type>` tag
**Example:** `another_http` endpoint is defined for `plain_http` module
``` xml
<protocols>

<plain_http>
<type>http</type>
<host>127.0.0.1</host>
<port>8123</port>
</plain_http>

<https>
<type>tls</type>
<impl>plain_http</impl>
<host>127.0.0.1</host>
<port>8443</port>
</https>

<another_http>
<impl>plain_http</impl>
<host>127.0.0.1</host>
<port>8223</port>
</another_http>

</protocols>
```

## Some modules can contain parameters specific to their layer
**Example:** for the TLS layer, a private key (`privateKeyFile`) and a certificate file (`certificateFile`) can be specified
``` xml
<protocols>

<plain_http>
<type>http</type>
<host>127.0.0.1</host>
<port>8123</port>
</plain_http>

<https>
<type>tls</type>
<impl>plain_http</impl>
<host>127.0.0.1</host>
<port>8443</port>
<privateKeyFile>another_server.key</privateKeyFile>
<certificateFile>another_server.crt</certificateFile>
</https>

</protocols>
```
10 changes: 0 additions & 10 deletions docs/en/operations/settings/merge-tree-settings.md
@@ -852,16 +852,6 @@ If the file name for column is too long (more than `max_file_name_length` bytes)

The maximal length of the file name to keep it as is without hashing. Takes effect only if setting `replace_long_file_name_to_hash` is enabled. The value of this setting does not include the length of file extension. So, it is recommended to set it below the maximum filename length (usually 255 bytes) with some gap to avoid filesystem errors. Default value: 127.

## clean_deleted_rows

Enable/disable automatic deletion of rows flagged as `is_deleted` when perform `OPTIMIZE ... FINAL` on a table using the ReplacingMergeTree engine. When disabled, the `CLEANUP` keyword has to be added to the `OPTIMIZE ... FINAL` to have the same behaviour.

Possible values:

- `Always` or `Never`.

Default value: `Never`

## allow_experimental_block_number_column

Persists virtual column `_block_number` on merges.
@@ -3,7 +3,7 @@ slug: /en/operations/system-tables/asynchronous_metric_log
---
# asynchronous_metric_log

Contains the historical values for `system.asynchronous_metrics`, which are saved once per minute. Enabled by default.
Contains the historical values for `system.asynchronous_metrics`, which are saved once per time interval (one second by default). Enabled by default.

Columns:

2 changes: 2 additions & 0 deletions docs/en/operations/system-tables/backup_log.md
@@ -9,6 +9,7 @@ Columns:

- `hostname` ([LowCardinality(String)](../../sql-reference/data-types/string.md)) — Hostname of the server executing the query.
- `event_date` ([Date](../../sql-reference/data-types/date.md)) — Date of the entry.
- `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — The date and time of the entry.
- `event_time_microseconds` ([DateTime64](../../sql-reference/data-types/datetime64.md)) — Time of the entry with microseconds precision.
- `id` ([String](../../sql-reference/data-types/string.md)) — Identifier of the backup or restore operation.
- `name` ([String](../../sql-reference/data-types/string.md)) — Name of the backup storage (the contents of the `FROM` or `TO` clause).
@@ -67,6 +68,7 @@ Row 2:
──────
hostname: clickhouse.eu-central1.internal
event_date: 2023-08-19
event_time: 2023-08-19 11:08:56
event_time_microseconds: 2023-08-19 11:08:56.916192
id: e5b74ecb-f6f1-426a-80be-872f90043885
name: Disk('backups_disk', '1.zip')
@@ -5,7 +5,7 @@ sidebar_position: 106

# argMax

Calculates the `arg` value for a maximum `val` value. If there are several different values of `arg` for maximum values of `val`, returns the first of these values encountered.
Calculates the `arg` value for a maximum `val` value. If there are multiple rows with equal `val` being the maximum, which of the associated `arg` is returned is not deterministic.
Both parts, the `arg` and the `max`, behave as [aggregate functions](/docs/en/sql-reference/aggregate-functions/index.md); they both [skip `Null`](/docs/en/sql-reference/aggregate-functions/index.md#null-processing) during processing and return non-`Null` values if non-`Null` values are available.
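
For illustration, a minimal sketch of the tie-breaking caveat (hypothetical table, not part of this diff):

``` sql
CREATE TABLE salaries (name String, salary UInt32) ENGINE = Memory;
INSERT INTO salaries VALUES ('alice', 100), ('bob', 100), ('carol', 50);

-- 'alice' and 'bob' tie for the maximum salary: either name may be returned.
SELECT argMax(name, salary) FROM salaries;
```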

**Syntax**
@@ -5,7 +5,7 @@ sidebar_position: 105

# argMin

Calculates the `arg` value for a minimum `val` value. If there are several different values of `arg` for minimum values of `val`, returns the first of these values encountered.
Calculates the `arg` value for a minimum `val` value. If there are multiple rows with equal `val` being the minimum, which of the associated `arg` is returned is not deterministic.
Both parts, the `arg` and the `min`, behave as [aggregate functions](/docs/en/sql-reference/aggregate-functions/index.md); they both [skip `Null`](/docs/en/sql-reference/aggregate-functions/index.md#null-processing) during processing and return non-`Null` values if non-`Null` values are available.
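
The same caveat, sketched against the hypothetical `salaries` table from the argMax example above:

``` sql
-- Deterministic here only because the minimum salary (50) is unique.
SELECT argMin(name, salary) FROM salaries;
```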

**Syntax**
2 changes: 1 addition & 1 deletion docs/en/sql-reference/functions/other-functions.md
@@ -675,7 +675,7 @@ There are two variations of this function:

Signature:

For `x` equal to one of the elements in `array_from`, the function returns the corresponding element in `array_to`, i.e. the one at the same array index. Otherwise, it returns `default`. If multiple matching elements exist in `array_from`, an arbitrary corresponding element from `array_to` is returned.
For `x` equal to one of the elements in `array_from`, the function returns the corresponding element in `array_to`, i.e. the one at the same array index. Otherwise, it returns `default`. If multiple matching elements exist in `array_from`, it returns the element corresponding to the first of them.

`transform(T, Array(T), Array(U), U) -> U`
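
For illustration, a minimal sketch of the first-match rule (arbitrary values, not part of this diff):

``` sql
-- 42 appears twice in array_from; the first match wins, so 'first' is returned.
SELECT transform(42, [42, 42, 1], ['first', 'second', 'one'], 'other');
```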

[Diff truncated: the remaining changed files are not shown.]
