Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/apache/iotdb into informa…
Browse files Browse the repository at this point in the history
…tion_schema
  • Loading branch information
Caideyipi committed Jan 9, 2025
2 parents 310716c + d9214b3 commit 2a2d636
Show file tree
Hide file tree
Showing 76 changed files with 3,543 additions and 923 deletions.
18 changes: 15 additions & 3 deletions .github/workflows/pipe-it-2cluster.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,8 @@ jobs:
-ntp >> ~/run-tests-$attempt.log && return 0
test_output=$(cat ~/run-tests-$attempt.log)
mv ~/run-tests-$attempt.log integration-test/target/cluster-logs/
echo "==================== BEGIN: ~/run-tests-$attempt.log ===================="
echo "$test_output"
echo "==================== END: ~/run-tests-$attempt.log ======================"
Expand Down Expand Up @@ -183,6 +185,8 @@ jobs:
-ntp >> ~/run-tests-$attempt.log && return 0
test_output=$(cat ~/run-tests-$attempt.log)
mv ~/run-tests-$attempt.log integration-test/target/cluster-logs/
echo "==================== BEGIN: ~/run-tests-$attempt.log ===================="
echo "$test_output"
echo "==================== END: ~/run-tests-$attempt.log ======================"
Expand Down Expand Up @@ -263,6 +267,8 @@ jobs:
-ntp >> ~/run-tests-$attempt.log && return 0
test_output=$(cat ~/run-tests-$attempt.log)
mv ~/run-tests-$attempt.log integration-test/target/cluster-logs/
echo "==================== BEGIN: ~/run-tests-$attempt.log ===================="
echo "$test_output"
echo "==================== END: ~/run-tests-$attempt.log ======================"
Expand Down Expand Up @@ -342,7 +348,9 @@ jobs:
-am -PMultiClusterIT2SubscriptionRegressionConsumer \
-ntp >> ~/run-tests-$attempt.log && return 0
test_output=$(cat ~/run-tests-$attempt.log)
mv ~/run-tests-$attempt.log integration-test/target/cluster-logs/
echo "==================== BEGIN: ~/run-tests-$attempt.log ===================="
echo "$test_output"
echo "==================== END: ~/run-tests-$attempt.log ======================"
Expand Down Expand Up @@ -422,7 +430,9 @@ jobs:
-am -PMultiClusterIT2SubscriptionRegressionMisc \
-ntp >> ~/run-tests-$attempt.log && return 0
test_output=$(cat ~/run-tests-$attempt.log)
mv ~/run-tests-$attempt.log integration-test/target/cluster-logs/
echo "==================== BEGIN: ~/run-tests-$attempt.log ===================="
echo "$test_output"
echo "==================== END: ~/run-tests-$attempt.log ======================"
Expand Down Expand Up @@ -501,7 +511,9 @@ jobs:
-am -PMultiClusterIT2TableModel \
-ntp >> ~/run-tests-$attempt.log && return 0
test_output=$(cat ~/run-tests-$attempt.log)
mv ~/run-tests-$attempt.log integration-test/target/cluster-logs/
echo "==================== BEGIN: ~/run-tests-$attempt.log ===================="
echo "$test_output"
echo "==================== END: ~/run-tests-$attempt.log ======================"
Expand Down
6 changes: 5 additions & 1 deletion example/udf/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,13 @@
<artifactId>udf-example</artifactId>
<name>IoTDB: Example: UDF</name>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.iotdb</groupId>
<artifactId>iotdb-server</artifactId>
<artifactId>udf-api</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,9 @@
package org.apache.iotdb.udf;

import org.apache.iotdb.udf.api.State;
import org.apache.iotdb.udf.api.customizer.config.AggregateFunctionConfig;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionParameters;
import org.apache.iotdb.udf.api.exception.UDFException;
import org.apache.iotdb.udf.api.exception.UDFParameterNotValidException;
import org.apache.iotdb.udf.api.customizer.analysis.AggregateFunctionAnalysis;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionArguments;
import org.apache.iotdb.udf.api.exception.UDFArgumentNotValidException;
import org.apache.iotdb.udf.api.relational.AggregateFunction;
import org.apache.iotdb.udf.api.relational.access.Record;
import org.apache.iotdb.udf.api.type.Type;
Expand All @@ -47,10 +46,10 @@
*
* <p>SHOW FUNCTIONS;
*
* <p>SELECT time, device_id, my_count(s1) as s1_count, my_count(s2) as s2_count FROM t1 group by
* <p>SELECT device_id, my_count(s1) as s1_count, my_count(s2) as s2_count FROM t1 group by
* device_id;
*
* <p>SELECT time, my_count(s1) as s1_count, my_count(s2) as s2_count FROM t1;
* <p>SELECT my_count(s1) as s1_count, my_count(s2) as s2_count FROM t1;
*/
public class AggregateFunctionExample implements AggregateFunction {

Expand Down Expand Up @@ -78,15 +77,15 @@ public void deserialize(byte[] bytes) {
}

@Override
public void validate(FunctionParameters parameters) throws UDFException {
if (parameters.getChildExpressionsSize() != 1) {
throw new UDFParameterNotValidException("Only one parameter is required.");
public AggregateFunctionAnalysis analyze(FunctionArguments arguments)
throws UDFArgumentNotValidException {
if (arguments.getArgumentsSize() != 1) {
throw new UDFArgumentNotValidException("Only one parameter is required.");
}
}

@Override
public void beforeStart(FunctionParameters parameters, AggregateFunctionConfig configurations) {
configurations.setOutputDataType(Type.INT64);
return new AggregateFunctionAnalysis.Builder()
.outputDataType(Type.INT64)
.removable(true)
.build();
}

@Override
Expand Down Expand Up @@ -114,4 +113,12 @@ public void outputFinal(State state, ResultValue resultValue) {
CountState countState = (CountState) state;
resultValue.setLong(countState.count);
}

@Override
public void remove(State state, Record input) {
  // Inverse of the accumulation step: when a previously-added row leaves the
  // aggregation window, decrement the running count. Null values were never
  // counted on the way in, so they require no adjustment on the way out.
  final CountState countState = (CountState) state;
  if (input.isNull(0)) {
    return;
  }
  countState.count--;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,43 +19,42 @@

package org.apache.iotdb.udf;

import org.apache.iotdb.udf.api.customizer.config.ScalarFunctionConfig;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionParameters;
import org.apache.iotdb.udf.api.customizer.analysis.ScalarFunctionAnalysis;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionArguments;
import org.apache.iotdb.udf.api.exception.UDFArgumentNotValidException;
import org.apache.iotdb.udf.api.exception.UDFException;
import org.apache.iotdb.udf.api.exception.UDFParameterNotValidException;
import org.apache.iotdb.udf.api.relational.ScalarFunction;
import org.apache.iotdb.udf.api.relational.access.Record;
import org.apache.iotdb.udf.api.type.Type;

/** This is an internal example of the ScalarFunction implementation. */
/**
* This is an internal example of the ScalarFunction implementation.
*
* <p>CREATE DATABASE test;
*
* <p>USE test;
*
* <p>CREATE TABLE t1(device_id STRING TAG, s1 TEXT FIELD, s2 INT32 FIELD);
*
* <p>INSERT INTO t1(time, device_id, s1, s2) VALUES (1, 'd1', 'a', 1), (2, 'd1', null, 2), (3,
* 'd1', 'c', null);
*
* <p>CREATE FUNCTION contain_null AS 'org.apache.iotdb.udf.ScalarFunctionExample';
*
* <p>SHOW FUNCTIONS;
*
* <p>SELECT time, device_id, s1, s2, contain_null(s1, s2) as contain_null, contain_null(s1) as
* s1_isnull, contain_null(s2) as s2_isnull FROM t1;
*/
public class ScalarFunctionExample implements ScalarFunction {
/**
* CREATE DATABASE test;
*
* <p>USE test;
*
* <p>CREATE TABLE t1(device_id STRING TAG, s1 TEXT FIELD, s2 INT32 FIELD);
*
* <p>INSERT INTO t1(time, device_id, s1, s2) VALUES (1, 'd1', 'a', 1), (2, 'd1', null, 2), (3,
* 'd1', 'c', null);
*
* <p>CREATE FUNCTION contain_null AS 'org.apache.iotdb.udf.ScalarFunctionExample';
*
* <p>SHOW FUNCTIONS;
*
* <p>SELECT time, device_id, s1, s2, contain_null(s1, s2) as contain_null, contain_null(s1) as
* s1_isnull, contain_null(s2) as s2_isnull FROM t1;
*/
@Override
public void validate(FunctionParameters parameters) throws UDFException {
if (parameters.getChildExpressionsSize() < 1) {
throw new UDFParameterNotValidException("At least one parameter is required.");
}
}

@Override
public void beforeStart(FunctionParameters parameters, ScalarFunctionConfig configurations) {
configurations.setOutputDataType(Type.BOOLEAN);
public ScalarFunctionAnalysis analyze(FunctionArguments arguments)
throws UDFArgumentNotValidException {
if (arguments.getArgumentsSize() < 1) {
throw new UDFArgumentNotValidException("At least one parameter is required.");
}
return new ScalarFunctionAnalysis.Builder().outputDataType(Type.BOOLEAN).build();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,54 +19,61 @@

package org.apache.iotdb.db.query.udf.example.relational;

import org.apache.iotdb.udf.api.customizer.config.ScalarFunctionConfig;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionParameters;
import org.apache.iotdb.udf.api.customizer.analysis.ScalarFunctionAnalysis;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionArguments;
import org.apache.iotdb.udf.api.exception.UDFArgumentNotValidException;
import org.apache.iotdb.udf.api.exception.UDFException;
import org.apache.iotdb.udf.api.exception.UDFParameterNotValidException;
import org.apache.iotdb.udf.api.relational.ScalarFunction;
import org.apache.iotdb.udf.api.relational.access.Record;
import org.apache.iotdb.udf.api.type.Type;

import java.util.HashSet;
import java.util.Set;

/** Calculate the sum of all parameters. Only support inputs of INT32,INT64,DOUBLE,FLOAT type. */
/** Calculate the sum of all arguments. Only support inputs of INT32,INT64,DOUBLE,FLOAT type. */
public class AllSum implements ScalarFunction {

private Type outputDataType;

@Override
public void validate(FunctionParameters parameters) throws UDFException {
if (parameters.getChildExpressionsSize() < 1) {
throw new UDFParameterNotValidException("At least one parameter is required.");
public ScalarFunctionAnalysis analyze(FunctionArguments arguments)
throws UDFArgumentNotValidException {
if (arguments.getArgumentsSize() < 1) {
throw new UDFArgumentNotValidException("At least one parameter is required.");
}
for (int i = 0; i < parameters.getChildExpressionsSize(); i++) {
if (parameters.getDataType(i) != Type.INT32
&& parameters.getDataType(i) != Type.INT64
&& parameters.getDataType(i) != Type.FLOAT
&& parameters.getDataType(i) != Type.DOUBLE) {
throw new UDFParameterNotValidException(
for (int i = 0; i < arguments.getArgumentsSize(); i++) {
if (arguments.getDataType(i) != Type.INT32
&& arguments.getDataType(i) != Type.INT64
&& arguments.getDataType(i) != Type.FLOAT
&& arguments.getDataType(i) != Type.DOUBLE) {
throw new UDFArgumentNotValidException(
"Only support inputs of INT32,INT64,DOUBLE,FLOAT type.");
}
}
return new ScalarFunctionAnalysis.Builder()
.outputDataType(inferOutputDataType(arguments))
.build();
}

@Override
public void beforeStart(FunctionParameters parameters, ScalarFunctionConfig configurations) {
public void beforeStart(FunctionArguments arguments) throws UDFException {
this.outputDataType = inferOutputDataType(arguments);
}

private Type inferOutputDataType(FunctionArguments arguments) {
Set<Type> inputTypeSet = new HashSet<>();
for (int i = 0; i < parameters.getChildExpressionsSize(); i++) {
inputTypeSet.add(parameters.getDataType(i));
for (int i = 0; i < arguments.getArgumentsSize(); i++) {
inputTypeSet.add(arguments.getDataType(i));
}
if (inputTypeSet.contains(Type.DOUBLE)) {
outputDataType = Type.DOUBLE;
return Type.DOUBLE;
} else if (inputTypeSet.contains(Type.FLOAT)) {
outputDataType = Type.FLOAT;
return Type.FLOAT;
} else if (inputTypeSet.contains(Type.INT64)) {
outputDataType = Type.INT64;
return Type.INT64;
} else {
outputDataType = Type.INT32;
return Type.INT32;
}
configurations.setOutputDataType(outputDataType);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,25 +19,22 @@

package org.apache.iotdb.db.query.udf.example.relational;

import org.apache.iotdb.udf.api.customizer.config.ScalarFunctionConfig;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionParameters;
import org.apache.iotdb.udf.api.exception.UDFException;
import org.apache.iotdb.udf.api.exception.UDFParameterNotValidException;
import org.apache.iotdb.udf.api.customizer.analysis.ScalarFunctionAnalysis;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionArguments;
import org.apache.iotdb.udf.api.exception.UDFArgumentNotValidException;
import org.apache.iotdb.udf.api.relational.ScalarFunction;
import org.apache.iotdb.udf.api.relational.access.Record;
import org.apache.iotdb.udf.api.type.Type;

public class ContainNull implements ScalarFunction {
@Override
public void validate(FunctionParameters parameters) throws UDFException {
if (parameters.getChildExpressionsSize() < 1) {
throw new UDFParameterNotValidException("At least one parameter is required.");
}
}

@Override
public void beforeStart(FunctionParameters parameters, ScalarFunctionConfig configurations) {
configurations.setOutputDataType(Type.BOOLEAN);
public ScalarFunctionAnalysis analyze(FunctionArguments arguments)
throws UDFArgumentNotValidException {
if (arguments.getArgumentsSize() < 1) {
throw new UDFArgumentNotValidException("At least one parameter is required.");
}
return new ScalarFunctionAnalysis.Builder().outputDataType(Type.BOOLEAN).build();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,33 +19,30 @@

package org.apache.iotdb.db.query.udf.example.relational;

import org.apache.iotdb.udf.api.customizer.config.ScalarFunctionConfig;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionParameters;
import org.apache.iotdb.udf.api.exception.UDFException;
import org.apache.iotdb.udf.api.exception.UDFParameterNotValidException;
import org.apache.iotdb.udf.api.customizer.analysis.ScalarFunctionAnalysis;
import org.apache.iotdb.udf.api.customizer.parameter.FunctionArguments;
import org.apache.iotdb.udf.api.exception.UDFArgumentNotValidException;
import org.apache.iotdb.udf.api.relational.ScalarFunction;
import org.apache.iotdb.udf.api.relational.access.Record;
import org.apache.iotdb.udf.api.type.Type;

import java.time.LocalDate;

public class DatePlusOne implements ScalarFunction {
public class DatePlus implements ScalarFunction {

@Override
public void validate(FunctionParameters parameters) throws UDFException {
if (parameters.getChildExpressionsSize() != 2) {
throw new UDFParameterNotValidException("Only two parameter is required.");
public ScalarFunctionAnalysis analyze(FunctionArguments arguments)
throws UDFArgumentNotValidException {
if (arguments.getArgumentsSize() != 2) {
throw new UDFArgumentNotValidException("Only two parameter is required.");
}
if (parameters.getDataType(0) != Type.DATE) {
throw new UDFParameterNotValidException("The first parameter should be DATE type.");
if (arguments.getDataType(0) != Type.DATE) {
throw new UDFArgumentNotValidException("The first parameter should be DATE type.");
}
if (parameters.getDataType(1) != Type.INT32 && parameters.getDataType(1) != Type.INT64) {
throw new UDFParameterNotValidException("The second parameter should be INT type.");
if (arguments.getDataType(1) != Type.INT32 && arguments.getDataType(1) != Type.INT64) {
throw new UDFArgumentNotValidException("The second parameter should be INT type.");
}
}

@Override
public void beforeStart(FunctionParameters parameters, ScalarFunctionConfig configurations) {
configurations.setOutputDataType(Type.DATE);
return new ScalarFunctionAnalysis.Builder().outputDataType(Type.DATE).build();
}

@Override
Expand Down
Loading

0 comments on commit 2a2d636

Please sign in to comment.