) allArguments[1];
- MockRedisClusterClient redisClusterClient = (MockRedisClusterClient) objInst;
- StringBuilder peer = new StringBuilder();
- for (RedisURI redisURI : redisURIs) {
- peer.append(redisURI.getHost()).append(":").append(redisURI.getPort()).append(";");
+ public void onConstruct(EnhancedInstance objInst, Object[] allArguments) throws Throwable {
+ Configuration configuration = ((ClusterConnection) allArguments[1]).getConfiguration();
+ Field field = configuration.getClass().getDeclaredField("overlay");
+ field.setAccessible(true);
+ Properties properties = (Properties) field.get(configuration);
+ String value = properties.getProperty("hbase.zookeeper.quorum");
+ if (StringUtil.isNotBlank(value)) {
+ objInst.setSkyWalkingDynamicField(value);
}
- EnhancedInstance optionsInst = redisClusterClient.getOptions();
- optionsInst.setSkyWalkingDynamicField(PeerFormat.shorten(peer.toString()));
}
}
diff --git a/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTable200Interceptor.java b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTable200Interceptor.java
new file mode 100644
index 000000000000..be23cba60029
--- /dev/null
+++ b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTable200Interceptor.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.skywalking.apm.plugin.hbase;
+
+import java.lang.reflect.Field;
+import java.util.Properties;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance;
+import org.apache.skywalking.apm.util.StringUtil;
+
+public class HTable200Interceptor extends HTableInterceptor {
+
+ @Override
+ public void onConstruct(EnhancedInstance objInst, Object[] allArguments) throws Throwable {
+ Configuration configuration = ((ClusterConnection) allArguments[0]).getConfiguration();
+ Field field = configuration.getClass().getDeclaredField("overlay");
+ field.setAccessible(true);
+ Properties properties = (Properties) field.get(configuration);
+ String value = properties.getProperty("hbase.zookeeper.quorum");
+ if (StringUtil.isNotBlank(value)) {
+ objInst.setSkyWalkingDynamicField(value);
+ }
+ }
+}
diff --git a/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTable220Interceptor.java b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTable220Interceptor.java
new file mode 100644
index 000000000000..4d8e224410a0
--- /dev/null
+++ b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTable220Interceptor.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.skywalking.apm.plugin.hbase;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.util.Properties;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance;
+import org.apache.skywalking.apm.util.StringUtil;
+
+public class HTable220Interceptor extends HTableInterceptor {
+
+ @Override
+ public void onConstruct(EnhancedInstance objInst, Object[] allArguments) throws Throwable {
+ Method getConfigurationMethod = Connection.class.getMethod("getConfiguration");
+ Configuration configuration = (Configuration) getConfigurationMethod.invoke(allArguments[0]);
+ Field field = configuration.getClass().getDeclaredField("overlay");
+ field.setAccessible(true);
+ Properties properties = (Properties) field.get(configuration);
+ String value = properties.getProperty("hbase.zookeeper.quorum");
+ if (StringUtil.isNotBlank(value)) {
+ objInst.setSkyWalkingDynamicField(value);
+ }
+ }
+}
diff --git a/apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTableInterceptor.java b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTableInterceptor.java
similarity index 100%
rename from apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTableInterceptor.java
rename to apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/HTableInterceptor.java
diff --git a/apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/define/HTableInstrumentation.java b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/define/HTableInstrumentation.java
similarity index 58%
rename from apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/define/HTableInstrumentation.java
rename to apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/define/HTableInstrumentation.java
index af73efc9ac6f..7ca5d20d8f5a 100644
--- a/apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/define/HTableInstrumentation.java
+++ b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/hbase/define/HTableInstrumentation.java
@@ -47,11 +47,22 @@
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.takesArgument;
import static net.bytebuddy.matcher.ElementMatchers.takesArguments;
+import static org.apache.skywalking.apm.agent.core.plugin.bytebuddy.ArgumentTypeNameMatch.takesArgumentWithType;
+/**
+ * There are several interceptors to adapt to different versions of the hbase client. We use the minimal
+ * compatible version to name the interceptor, e.g.
+ * HTable100Interceptor, 100 means version 1.0.0, compatible with version [1.0.0, 2.0.0)
+ * HTable200Interceptor, 200 means version 2.0.0, compatible with version [2.0.0, 2.2.0)
+ * HTable220Interceptor, 220 means version 2.2.0, compatible with version [2.2.0, )
+ */
public class HTableInstrumentation extends ClassInstanceMethodsEnhancePluginDefine {
private static final String ENHANCE_CLASS = "org.apache.hadoop.hbase.client.HTable";
private static final String INTERCEPT_CLASS = "org.apache.skywalking.apm.plugin.hbase.HTableInterceptor";
+ private static final String INTERCEPT_CLASS_100 = "org.apache.skywalking.apm.plugin.hbase.HTable100Interceptor";
+ private static final String INTERCEPT_CLASS_200 = "org.apache.skywalking.apm.plugin.hbase.HTable200Interceptor";
+ private static final String INTERCEPT_CLASS_220 = "org.apache.skywalking.apm.plugin.hbase.HTable220Interceptor";
@Override
protected ClassMatch enhanceClass() {
@@ -60,16 +71,44 @@ protected ClassMatch enhanceClass() {
@Override
public ConstructorInterceptPoint[] getConstructorsInterceptPoints() {
- return new ConstructorInterceptPoint[]{
+ return new ConstructorInterceptPoint[] {
+ // compatible with version [1.0.0, 2.0.0)
new ConstructorInterceptPoint() {
@Override
public ElementMatcher getConstructorMatcher() {
- return takesArguments(6);
+ return takesArguments(6)
+ .and(takesArgumentWithType(0, "org.apache.hadoop.hbase.TableName"));
}
@Override
public String getConstructorInterceptor() {
- return INTERCEPT_CLASS;
+ return INTERCEPT_CLASS_100;
+ }
+ },
+ // compatible with version [2.0.0, 2.2.0)
+ new ConstructorInterceptPoint() {
+ @Override
+ public ElementMatcher getConstructorMatcher() {
+ return takesArguments(5)
+ .and(takesArgumentWithType(0, "org.apache.hadoop.hbase.client.ClusterConnection"));
+ }
+
+ @Override
+ public String getConstructorInterceptor() {
+ return INTERCEPT_CLASS_200;
+ }
+ },
+ // compatible with version [2.2.0, )
+ new ConstructorInterceptPoint() {
+ @Override
+ public ElementMatcher getConstructorMatcher() {
+ return takesArguments(5)
+ .and(takesArgumentWithType(0, "org.apache.hadoop.hbase.client.ConnectionImplementation"));
+ }
+
+ @Override
+ public String getConstructorInterceptor() {
+ return INTERCEPT_CLASS_220;
}
}
};
@@ -77,13 +116,16 @@ public String getConstructorInterceptor() {
@Override
public InstanceMethodsInterceptPoint[] getInstanceMethodsInterceptPoints() {
- return new InstanceMethodsInterceptPoint[]{
+ return new InstanceMethodsInterceptPoint[] {
new InstanceMethodsInterceptPoint() {
@Override
public ElementMatcher getMethodsMatcher() {
- return named("delete").or(named("put")).or(isPublic().and(named("get")))
- .or(named("getScanner").and(takesArguments(1))
- .and(takesArgument(0, named("org.apache.hadoop.hbase.client.Scan"))));
+ return named("delete")
+ .or(named("put"))
+ .or(isPublic().and(named("get")))
+ .or(named("getScanner")
+ .and(takesArguments(1))
+ .and(takesArgument(0, named("org.apache.hadoop.hbase.client.Scan"))));
}
@Override()
diff --git a/apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/resources/skywalking-plugin.def b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/resources/skywalking-plugin.def
similarity index 90%
rename from apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/resources/skywalking-plugin.def
rename to apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/resources/skywalking-plugin.def
index aad5b0238c0a..7e1acdded3b4 100644
--- a/apm-sniffer/apm-sdk-plugin/hbase-1.x-plugin/src/main/resources/skywalking-plugin.def
+++ b/apm-sniffer/apm-sdk-plugin/hbase-1.x-2.x-plugin/src/main/resources/skywalking-plugin.def
@@ -14,4 +14,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-hbase-1.x=org.apache.skywalking.apm.plugin.hbase.define.HTableInstrumentation
\ No newline at end of file
+hbase-1.x/2.x=org.apache.skywalking.apm.plugin.hbase.define.HTableInstrumentation
\ No newline at end of file
diff --git a/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/RedisChannelWriterInterceptorTest.java b/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/RedisChannelWriterInterceptorTest.java
index 6cb512d2b70b..36132331c27d 100644
--- a/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/RedisChannelWriterInterceptorTest.java
+++ b/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/RedisChannelWriterInterceptorTest.java
@@ -33,8 +33,8 @@
import org.apache.skywalking.apm.agent.test.tools.SegmentStorage;
import org.apache.skywalking.apm.agent.test.tools.SegmentStoragePoint;
import org.apache.skywalking.apm.agent.test.tools.TracingSegmentRunner;
+import org.apache.skywalking.apm.plugin.lettuce.v5.mock.MockClientOptions;
import org.apache.skywalking.apm.plugin.lettuce.v5.mock.MockRedisClusterClient;
-import org.apache.skywalking.apm.plugin.lettuce.v5.mock.MockRedisClusterClientConstructorInterceptor;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.hamcrest.core.Is;
@@ -120,11 +120,13 @@ public void testOnHugeClusterConsumerConfig() {
redisURIs.add(RedisURI.create("localhost", i));
}
MockRedisClusterClient mockRedisClusterClient = new MockRedisClusterClient();
- MockRedisClusterClientConstructorInterceptor constructorInterceptor = new MockRedisClusterClientConstructorInterceptor();
+ MockClientOptions options = new MockClientOptions();
+ mockRedisClusterClient.setOptions(options);
+ RedisClusterClientConstructorInterceptor constructorInterceptor = new RedisClusterClientConstructorInterceptor();
constructorInterceptor.onConstruct(mockRedisClusterClient, new Object[] {
null,
redisURIs
});
- assertThat(mockRedisClusterClient.getOptions().getSkyWalkingDynamicField().toString().length(), Is.is(200));
+ assertThat(options.getSkyWalkingDynamicField().toString().length(), Is.is(200));
}
}
diff --git a/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/mock/MockClientOptions.java b/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/mock/MockClientOptions.java
new file mode 100644
index 000000000000..cb2a89e6f132
--- /dev/null
+++ b/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/mock/MockClientOptions.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.skywalking.apm.plugin.lettuce.v5.mock;
+
+import io.lettuce.core.cluster.ClusterClientOptions;
+import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance;
+
+public class MockClientOptions extends ClusterClientOptions implements EnhancedInstance {
+
+ private Object object;
+
+ public MockClientOptions() {
+ this(ClusterClientOptions.builder());
+ }
+
+ protected MockClientOptions(Builder builder) {
+ super(builder);
+ }
+
+ @Override
+ public Object getSkyWalkingDynamicField() {
+ return object;
+ }
+
+ @Override
+ public void setSkyWalkingDynamicField(Object value) {
+ this.object = value;
+ }
+}
diff --git a/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/mock/MockRedisClusterClient.java b/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/mock/MockRedisClusterClient.java
index b0f584e58b07..b97132b21d8d 100644
--- a/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/mock/MockRedisClusterClient.java
+++ b/apm-sniffer/apm-sdk-plugin/lettuce-5.x-plugin/src/test/java/org/apache/skywalking/apm/plugin/lettuce/v5/mock/MockRedisClusterClient.java
@@ -18,41 +18,20 @@
package org.apache.skywalking.apm.plugin.lettuce.v5.mock;
+import io.lettuce.core.cluster.RedisClusterClient;
import org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance;
-public class MockRedisClusterClient implements EnhancedInstance {
+public class MockRedisClusterClient extends RedisClusterClient implements EnhancedInstance {
- private Object ms;
-
- private EnhancedInstance options = new EnhancedInstance() {
- private Object os;
-
- @Override
- public Object getSkyWalkingDynamicField() {
- return os;
- }
-
- @Override
- public void setSkyWalkingDynamicField(Object value) {
- this.os = value;
- }
- };
-
- public EnhancedInstance getOptions() {
- return options;
- }
-
- public void setOptions(EnhancedInstance options) {
- this.options = options;
- }
+ private Object object;
@Override
public Object getSkyWalkingDynamicField() {
- return ms;
+ return object;
}
@Override
public void setSkyWalkingDynamicField(Object value) {
- this.ms = value;
+ this.object = value;
}
}
diff --git a/apm-sniffer/apm-sdk-plugin/pom.xml b/apm-sniffer/apm-sdk-plugin/pom.xml
index 202ddef85078..2676756b95d2 100644
--- a/apm-sniffer/apm-sdk-plugin/pom.xml
+++ b/apm-sniffer/apm-sdk-plugin/pom.xml
@@ -97,7 +97,7 @@
mariadb-2.x-plugin
influxdb-2.x-plugin
baidu-brpc-plugin
- hbase-1.x-plugin
+ hbase-1.x-2.x-plugin
graphql-plugin
xxl-job-2.x-plugin
thrift-plugin
diff --git a/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/ControllerForLowVersionInstrumentation.java b/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/ControllerForLowVersionInstrumentation.java
index 81b57780fe6a..c27cfc860ee1 100644
--- a/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/ControllerForLowVersionInstrumentation.java
+++ b/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/ControllerForLowVersionInstrumentation.java
@@ -25,7 +25,7 @@
import static net.bytebuddy.matcher.ElementMatchers.any;
public class ControllerForLowVersionInstrumentation extends AbstractControllerInstrumentation {
- public static final String WITNESS_CLASSES_LOW_VERSION = "org.springframework.web.context.support.ServletContextPropertyPlaceholderConfigurer";
+ public static final String WITNESS_CLASSES_LOW_VERSION = "org.springframework.web.method.HandlerMethodSelector";
public static final String ENHANCE_ANNOTATION = "org.springframework.stereotype.Controller";
diff --git a/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/RestControllerForLowVersionInstrumentation.java b/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/RestControllerForLowVersionInstrumentation.java
index 97863ec69b8d..55e62ec55f32 100644
--- a/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/RestControllerForLowVersionInstrumentation.java
+++ b/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-4.x-plugin/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/v4/define/RestControllerForLowVersionInstrumentation.java
@@ -25,7 +25,7 @@
import static net.bytebuddy.matcher.ElementMatchers.any;
public class RestControllerForLowVersionInstrumentation extends AbstractControllerInstrumentation {
- public static final String WITNESS_CLASSES_LOW_VERSION = "org.springframework.web.context.support.ServletContextPropertyPlaceholderConfigurer";
+ public static final String WITNESS_CLASSES_LOW_VERSION = "org.springframework.web.method.HandlerMethodSelector";
public static final String ENHANCE_ANNOTATION = "org.springframework.web.bind.annotation.RestController";
diff --git a/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-commons/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/commons/ReactiveRequestHolder.java b/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-commons/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/commons/ReactiveRequestHolder.java
index d7130775cb82..b5e1876ae0ff 100644
--- a/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-commons/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/commons/ReactiveRequestHolder.java
+++ b/apm-sniffer/apm-sdk-plugin/spring-plugins/mvc-annotation-commons/src/main/java/org/apache/skywalking/apm/plugin/spring/mvc/commons/ReactiveRequestHolder.java
@@ -20,6 +20,7 @@
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import org.springframework.http.server.reactive.ServerHttpRequest;
@@ -37,7 +38,11 @@ public String getHeader(final String headerName) {
@Override
public Enumeration getHeaders(final String headerName) {
- return Collections.enumeration(this.serverHttpRequest.getHeaders().get(headerName));
+ List values = this.serverHttpRequest.getHeaders().get(headerName);
+ if (values == null) {
+ return Collections.enumeration(Collections.EMPTY_LIST);
+ }
+ return Collections.enumeration(values);
}
@Override
diff --git a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/CustomizeConfiguration.java b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/CustomizeConfiguration.java
index 2c1864b8c5ff..ba48a9280e80 100644
--- a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/CustomizeConfiguration.java
+++ b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/CustomizeConfiguration.java
@@ -172,22 +172,6 @@ private Map resolverMethodNodeDesc(String className, Node method
Constants.XML_ELEMENT_OPERATION_NAME)
.getNodeValue());
}
- if (methodDesc.getAttributes().getNamedItem(Constants.XML_ELEMENT_CLOSE_BEFORE_METHOD) != null) {
- MethodConfiguration.setCloseBeforeMethod(configuration, Boolean.valueOf(methodDesc.getAttributes()
- .getNamedItem(
- Constants.XML_ELEMENT_CLOSE_BEFORE_METHOD)
- .getNodeValue()));
- } else {
- MethodConfiguration.setCloseBeforeMethod(configuration, false);
- }
- if (methodDesc.getAttributes().getNamedItem(Constants.XML_ELEMENT_CLOSE_AFTER_METHOD) != null) {
- MethodConfiguration.setCloseAfterMethod(configuration, Boolean.valueOf(methodDesc.getAttributes()
- .getNamedItem(
- Constants.XML_ELEMENT_CLOSE_AFTER_METHOD)
- .getNodeValue()));
- } else {
- MethodConfiguration.setCloseAfterMethod(configuration, false);
- }
if (methodDesc.getAttributes().getNamedItem(Constants.XML_ELEMENT_METHOD_IS_STATIC) != null) {
MethodConfiguration.setStatic(configuration, Boolean.valueOf(methodDesc.getAttributes()
.getNamedItem(
diff --git a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/MethodConfiguration.java b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/MethodConfiguration.java
index 89d544b88c95..375672ed90bc 100644
--- a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/MethodConfiguration.java
+++ b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/conf/MethodConfiguration.java
@@ -55,14 +55,6 @@ static void setOperationName(Map configuration, String operation
configuration.put(Constants.CONFIGURATION_ATTRIBUTE_OPERATION_NAME, operationName);
}
- static void setCloseBeforeMethod(Map configuration, Boolean closeBeforeMethod) {
- configuration.put(Constants.CONFIGURATION_ATTRIBUTE_CLOSE_BEFORE_METHOD, closeBeforeMethod);
- }
-
- static void setCloseAfterMethod(Map configuration, Boolean closeAfterMethod) {
- configuration.put(Constants.CONFIGURATION_ATTRIBUTE_CLOSE_AFTER_METHOD, closeAfterMethod);
- }
-
static void setStatic(Map configuration, Boolean isStatic) {
configuration.put(Constants.CONFIGURATION_ATTRIBUTE_IS_STATIC, isStatic);
}
@@ -123,14 +115,6 @@ public static String getOperationName(Map configuration) {
return (String) configuration.get(Constants.CONFIGURATION_ATTRIBUTE_OPERATION_NAME);
}
- public static boolean isCloseBeforeMethod(Map configuration) {
- return (Boolean) configuration.get(Constants.CONFIGURATION_ATTRIBUTE_CLOSE_BEFORE_METHOD);
- }
-
- public static boolean isCloseAfterMethod(Map configuration) {
- return (Boolean) configuration.get(Constants.CONFIGURATION_ATTRIBUTE_CLOSE_AFTER_METHOD);
- }
-
@SuppressWarnings("unchecked")
public static Map getTags(Map configuration) {
return (Map) configuration.get(Constants.CONFIGURATION_ATTRIBUTE_TAGS);
diff --git a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/constants/Constants.java b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/constants/Constants.java
index b9f18634d56d..87e5c8759eee 100644
--- a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/constants/Constants.java
+++ b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/constants/Constants.java
@@ -42,10 +42,6 @@ public class Constants {
public static final String XML_ELEMENT_OPERATION_NAME = "operation_name";
- public static final String XML_ELEMENT_CLOSE_BEFORE_METHOD = "close_before_method";
-
- public static final String XML_ELEMENT_CLOSE_AFTER_METHOD = "close_after_method";
-
public static final String XML_ELEMENT_OPERATION_NAME_SUFFIX = "operation_name_suffix";
public static final String XML_ELEMENT_TAG = "tag";
@@ -70,10 +66,6 @@ public class Constants {
public static final String CONFIGURATION_ATTRIBUTE_OPERATION_NAME = "CONFIGURATION_ATTRIBUTE_OPERATION_NAME";
- public static final String CONFIGURATION_ATTRIBUTE_CLOSE_BEFORE_METHOD = "CONFIGURATION_ATTRIBUTE_CLOSE_BEFORE_METHOD";
-
- public static final String CONFIGURATION_ATTRIBUTE_CLOSE_AFTER_METHOD = "CONFIGURATION_ATTRIBUTE_CLOSE_AFTER_METHOD";
-
public static final String CONFIGURATION_ATTRIBUTE_OPERATION_NAME_SUFFIXES = "CONFIGURATION_ATTRIBUTE_OPERATION_NAME_SUFFIXES";
public static final String CONFIGURATION_ATTRIBUTE_TAGS = "CONFIGURATION_ATTRIBUTE_TAGS";
diff --git a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/interceptor/BaseInterceptorMethods.java b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/interceptor/BaseInterceptorMethods.java
index 109766349fc5..dcef43ec71f6 100644
--- a/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/interceptor/BaseInterceptorMethods.java
+++ b/apm-sniffer/optional-plugins/customize-enhance-plugin/src/main/java/org/apache/skywalking/apm/plugin/customize/interceptor/BaseInterceptorMethods.java
@@ -35,56 +35,52 @@ class BaseInterceptorMethods {
void beforeMethod(Method method, Object[] allArguments) {
Map configuration = CustomizeConfiguration.INSTANCE.getConfiguration(method);
- if (!MethodConfiguration.isCloseBeforeMethod(configuration)) {
- String operationName = MethodConfiguration.getOperationName(configuration);
- Map context = CustomizeExpression.evaluationContext(allArguments);
- if (context == null || context.isEmpty()) {
- ContextManager.createLocalSpan(operationName);
- } else {
+ String operationName = MethodConfiguration.getOperationName(configuration);
+ Map context = CustomizeExpression.evaluationContext(allArguments);
+ if (context == null || context.isEmpty()) {
+ ContextManager.createLocalSpan(operationName);
+ } else {
- Map tags = MethodConfiguration.getTags(configuration);
- Map spanTags = new HashMap();
- Map logs = MethodConfiguration.getLogs(configuration);
- Map spanLogs = new HashMap();
+ Map tags = MethodConfiguration.getTags(configuration);
+ Map spanTags = new HashMap();
+ Map logs = MethodConfiguration.getLogs(configuration);
+ Map spanLogs = new HashMap();
- List operationNameSuffixes = MethodConfiguration.getOperationNameSuffixes(configuration);
- StringBuilder operationNameSuffix = new StringBuilder();
- if (operationNameSuffixes != null && !operationNameSuffixes.isEmpty()) {
- for (String expression : operationNameSuffixes) {
- operationNameSuffix.append(Constants.OPERATION_NAME_SEPARATOR);
- operationNameSuffix.append(CustomizeExpression.parseExpression(expression, context));
- }
+ List operationNameSuffixes = MethodConfiguration.getOperationNameSuffixes(configuration);
+ StringBuilder operationNameSuffix = new StringBuilder();
+ if (operationNameSuffixes != null && !operationNameSuffixes.isEmpty()) {
+ for (String expression : operationNameSuffixes) {
+ operationNameSuffix.append(Constants.OPERATION_NAME_SEPARATOR);
+ operationNameSuffix.append(CustomizeExpression.parseExpression(expression, context));
}
- if (tags != null && !tags.isEmpty()) {
- for (Map.Entry expression: tags.entrySet()) {
- spanTags.put(expression.getKey(), CustomizeExpression.parseExpression(expression.getValue(), context));
- }
+ }
+ if (tags != null && !tags.isEmpty()) {
+ for (Map.Entry expression : tags.entrySet()) {
+ spanTags.put(expression.getKey(), CustomizeExpression.parseExpression(expression.getValue(), context));
}
- if (logs != null && !logs.isEmpty()) {
- for (Map.Entry entries : logs.entrySet()) {
- String expression = logs.get(entries.getKey());
- spanLogs.put(entries.getKey(), CustomizeExpression.parseExpression(expression, context));
- }
+ }
+ if (logs != null && !logs.isEmpty()) {
+ for (Map.Entry entries : logs.entrySet()) {
+ String expression = logs.get(entries.getKey());
+ spanLogs.put(entries.getKey(), CustomizeExpression.parseExpression(expression, context));
}
- operationName = operationNameSuffix.insert(0, operationName).toString();
+ }
+ operationName = operationNameSuffix.insert(0, operationName).toString();
- AbstractSpan span = ContextManager.createLocalSpan(operationName);
- if (!spanTags.isEmpty()) {
- for (Map.Entry tag : spanTags.entrySet()) {
- span.tag(Tags.ofKey(tag.getKey()), tag.getValue());
- }
- }
- if (!spanLogs.isEmpty()) {
- span.log(System.currentTimeMillis(), spanLogs);
+ AbstractSpan span = ContextManager.createLocalSpan(operationName);
+ if (!spanTags.isEmpty()) {
+ for (Map.Entry tag : spanTags.entrySet()) {
+ span.tag(Tags.ofKey(tag.getKey()), tag.getValue());
}
}
+ if (!spanLogs.isEmpty()) {
+ span.log(System.currentTimeMillis(), spanLogs);
+ }
}
}
void afterMethod(Method method) {
- if (!MethodConfiguration.isCloseAfterMethod(CustomizeConfiguration.INSTANCE.getConfiguration(method))) {
- ContextManager.stopSpan();
- }
+ ContextManager.stopSpan();
}
void handleMethodException(Throwable t) {
diff --git a/docs/en/concepts-and-designs/backend-overview.md b/docs/en/concepts-and-designs/backend-overview.md
index a60b3aaed245..1889b70ef0f2 100644
--- a/docs/en/concepts-and-designs/backend-overview.md
+++ b/docs/en/concepts-and-designs/backend-overview.md
@@ -1,17 +1,16 @@
# Observability Analysis Platform
-We define SkyWalking as an Observability Analysis Platform, which provides a full observability to the services running
-either in brown zone, or green zone, even hybrid.
+SkyWalking is an Observability Analysis Platform that provides full observability to services running in both brown and green zones, as well as services using a hybrid model.
## Capabilities
-SkyWalking covers all 3 fields of observability, including, **Tracing**, **Metrics** and **logging**.
+SkyWalking covers all 3 areas of observability: **Tracing**, **Metrics**, and **Logging**.
-- **Tracing**. Including, SkyWalking native data formats. Zipkin v1,v2 data formats and Jaeger data formats.
-- **Metrics**. SkyWalking integrates with Service Mesh platforms, such as Istio, Envoy, Linkerd, to provide observability from data panel
-or control panel. Also, SkyWalking native agents can run in metrics mode, which highly improve the performance.
-- **Logging**. Including the logs collected from disk or through network. Native agent could bind the tracing context with logs automatically,
+- **Tracing**. SkyWalking native data formats, including Zipkin v1 and v2, as well as Jaeger.
+- **Metrics**. SkyWalking integrates with Service Mesh platforms, such as Istio, Envoy, and Linkerd, to build observability into the data panel
+or control panel. Also, SkyWalking native agents can run in the metrics mode, which greatly improves performance.
+- **Logging**. Includes logs collected from disk or through network. Native agents could bind the tracing context with logs automatically,
or use SkyWalking to bind the trace and log through the text content.
-There are 3 powerful and native language engines to focus on analyzing observability data from above fields.
-1. [Observability Analysis Language](oal.md) processes the native traces and service mesh data.
-1. [Meter Analysis Language](mal.md) does metrics calculation for native meter data, and adopts stable and widely used metrics system, such as Prometheus and OpenTelemetry.
-1. [Log Analysis Language](lal.md) focuses on log contents and collaborate with Meter Analysis Language.
\ No newline at end of file
+There are 3 powerful and native language engines designed to analyze observability data from the above areas.
+1. [Observability Analysis Language](oal.md) processes native traces and service mesh data.
+1. [Meter Analysis Language](mal.md) is responsible for metrics calculation for native meter data, and adopts a stable and widely used metrics system, such as Prometheus and OpenTelemetry.
+1. [Log Analysis Language](lal.md) focuses on log contents and collaborates with the Meter Analysis Language.
diff --git a/docs/en/concepts-and-designs/event.md b/docs/en/concepts-and-designs/event.md
index 87c9174b79cb..6ea5c93d658e 100644
--- a/docs/en/concepts-and-designs/event.md
+++ b/docs/en/concepts-and-designs/event.md
@@ -2,43 +2,43 @@
SkyWalking already supports the three pillars of observability, namely logs, metrics, and traces.
In reality, a production system experiences many other events that may affect the performance of the system, such as upgrading, rebooting, chaos testing, etc.
-Although some of these events are reflected in the logs, there are many other events that can not. Hence, SkyWalking provides a more native way to collect these events.
-This doc covers the design of how SkyWalking collects events and what events look like in SkyWalking.
+Although some of these events are reflected in the logs, many others are not. Hence, SkyWalking provides a more native way to collect these events.
+This doc details how SkyWalking collects events and what events look like in SkyWalking.
## How to Report Events
-SkyWalking backend supports three protocols to collect events, gRPC, HTTP, and Kafka. Any agent or CLI that implements one of these protocols can report events to SkyWalking.
+The SkyWalking backend supports three protocols to collect events: gRPC, HTTP, and Kafka. Any agent or CLI that implements one of these protocols can report events to SkyWalking.
Currently, the officially supported clients to report events are:
-- [ ] Java Agent Toolkit: Use the Java agent toolkit to report events from inside the applications.
-- [x] SkyWalking CLI: Use the CLI to report events from the command line interface.
-- [ ] Kubernetes Event Exporter: Deploy an event exporter to refine and report Kubernetes events.
+- [ ] Java Agent Toolkit: Using the Java agent toolkit to report events within the applications.
+- [x] SkyWalking CLI: Using the CLI to report events from the command line interface.
+- [ ] Kubernetes Event Exporter: Deploying an event exporter to refine and report Kubernetes events.
-## Event Definition
+## Event Definitions
-An event contains the following fields. The definitions of event can be found at the [protocol repo](https://github.com/apache/skywalking-data-collect-protocol/tree/master/event)
+An event contains the following fields. The definitions of events can be found at the [protocol repo](https://github.com/apache/skywalking-data-collect-protocol/tree/master/event).
### UUID
-Unique ID of the event. Because an event may span a long period of time, the UUID is necessary to associate the start time with the end time of the same event.
+Unique ID of the event. Since an event may span a long period of time, the UUID is necessary to associate the start time with the end time of the same event.
### Source
-The source object that the event occurs on. In the concepts of SkyWalking, the object is typically service, service instance, etc.
+The source object on which the event occurs. In SkyWalking, the object is typically a service, service instance, etc.
### Name
-The name of the event. For example, `Start`, `Stop`, `Crash`, `Reboot`, `Upgrade` etc.
+Name of the event. For example, `Start`, `Stop`, `Crash`, `Reboot`, `Upgrade`, etc.
### Type
-The type of the event. This field is friendly for UI visualization, where events of type `Normal` are considered as normal operations,
-while `Error` is considered as unexpected operations, such as `Crash` events, therefore we can mark them with different colors to be easier identified.
+Type of the event. This field is friendly for UI visualization, where events of type `Normal` are considered normal operations,
+while `Error` is considered unexpected operations, such as `Crash` events. Marking them with different colors allows us to more easily identify them.
### Message
The detail of the event that describes why this event happened. This should be a one-line message that briefly describes why the event is reported. Examples of an `Upgrade` event may be something like `Upgrade from ${from_version} to ${to_version}`.
-It's NOT encouraged to include the detailed logs of this event, such as the exception stack trace.
+It's NOT recommended to include the detailed logs of this event, such as the exception stack trace.
### Parameters
@@ -50,7 +50,7 @@ The start time of the event. This field is mandatory when an event occurs.
### End Time
-The end time of the event. This field may be empty if the event has not stopped yet, otherwise it should be a valid timestamp after `startTime`.
+The end time of the event. This field may be empty if the event has not ended yet, otherwise it should be a valid timestamp later than `startTime`.
-**NOTE:** When reporting an event, you typically call the report function twice, one for starting of the event and the other one for ending of the event, with the same UUID.
-There are also cases where you have both the start time and end time already, for example, when exporting events from a 3rd-party system, the start time and end time are already known so that you can call the report function only once.
+**NOTE:** When reporting an event, you typically call the report function twice, the first time for the start of the event and the second time for the end of the event, both with the same UUID.
+There are also cases where you would already have both the start time and end time. For example, when exporting events from a third-party system, the start time and end time are already known so you may simply call the report function once.
diff --git a/docs/en/concepts-and-designs/lal.md b/docs/en/concepts-and-designs/lal.md
index c3f1af4b96eb..ac6e1b764e1d 100644
--- a/docs/en/concepts-and-designs/lal.md
+++ b/docs/en/concepts-and-designs/lal.md
@@ -1,32 +1,31 @@
# Log Analysis Language
Log Analysis Language (LAL) in SkyWalking is essentially a Domain-Specific Language (DSL) to analyze logs. You can use
-LAL to parse, extract, and save the logs, as well as collaborate the logs with traces (by extracting the trace id,
-segment id and span id) and metrics (by generating metrics from the logs and send them to the meter system).
+LAL to parse, extract, and save the logs, as well as collaborate the logs with traces (by extracting the trace ID,
+segment ID and span ID) and metrics (by generating metrics from the logs and sending them to the meter system).
-The LAL config files are in YAML format, and are located under directory `lal`, you can
+The LAL config files are in YAML format, and are located under directory `lal`. You can
set `log-analyzer/default/lalFiles` in the `application.yml` file or set environment variable `SW_LOG_LAL_FILES` to
activate specific LAL config files.
## Filter
A filter is a group of [parser](#parser), [extractor](#extractor) and [sink](#sink). Users can use one or more filters
-to organize their processing logics. Every piece of log will be sent to all filters in an LAL rule. The piece of log
-sent into the filter is available as property `log` in the LAL, therefore you can access the log service name
-via `log.service`, for all available fields of `log`, please refer to [the protocol definition](https://github.com/apache/skywalking-data-collect-protocol/blob/master/logging/Logging.proto#L41).
+to organize their processing logic. Every piece of log will be sent to all filters in an LAL rule. A piece of log
+sent to the filter is available as property `log` in the LAL, therefore you can access the log service name
+via `log.service`. For all available fields of `log`, please refer to [the protocol definition](https://github.com/apache/skywalking-data-collect-protocol/blob/master/logging/Logging.proto#L41).
All components are executed sequentially in the orders they are declared.
### Global Functions
-There are functions globally available that you can use them in all components (i.e. parsers, extractors, and sinks)
-when needed.
+Globally available functions may be used in all components (i.e. parsers, extractors, and sinks) where necessary.
- `abort`
By default, all components declared are executed no matter what flags (`dropped`, `saved`, etc.) have been set. There
are cases where you may want the filter chain to stop earlier when specified conditions are met. `abort` function aborts
-the remaining filter chain from where it's declared, all the remaining components won't be executed at all.
+the remaining filter chain from where it's declared, and all the remaining components won't be executed at all.
`abort` function serves as a fast-fail mechanism in LAL.
```groovy
@@ -47,8 +46,8 @@ Parsers are responsible for parsing the raw logs into structured data in SkyWalk
types of parsers at the moment, namely `json`, `yaml`, and `text`.
When a piece of log is parsed, there is a corresponding property available, called `parsed`, injected by LAL.
-Property `parsed` is typically a map, containing all the fields parsed from the raw logs, for example, if the parser
-is `json` / `yaml`, `parsed` is a map containing all the key-values in the `json` / `yaml`, if the parser is `text`
+Property `parsed` is typically a map, containing all the fields parsed from the raw logs. For example, if the parser
+is `json` / `yaml`, `parsed` is a map containing all the key-values in the `json` / `yaml`; if the parser is `text`
, `parsed` is a map containing all the captured groups and their values (for `regexp` and `grok`).
All parsers share the following options:
@@ -108,7 +107,7 @@ filter {
- `grok` (TODO)
-Because grok Java library has performance issue, we need some investigations and benchmark on it. Contributions are
+We're aware of certain performance issues in the grok Java library, and so we're currently conducting investigations and benchmarking. Contributions are
welcome.
### Extractor
@@ -155,8 +154,7 @@ The unit of `timestamp` is millisecond.
- `tag`
-`tag` extracts the tags from the `parsed` result, and set them into the `LogData`. The form of this extractor is
-something like `tag key1: value, key2: value2`, you can use the properties of `parsed` as both keys and values.
+`tag` extracts the tags from the `parsed` result, and sets them into the `LogData`. The form of this extractor should look something like this: `tag key1: value, key2: value2`. You may use the properties of `parsed` as both keys and values.
```groovy
filter {
@@ -171,9 +169,9 @@ filter {
- `metrics`
-`metrics` extracts / generates metrics from the logs, and sends the generated metrics to the meter system, you can
+`metrics` extracts / generates metrics from the logs, and sends the generated metrics to the meter system. You may
configure [MAL](mal.md) for further analysis of these metrics. The dedicated MAL config files are under
-directory `log-mal-rules`, you can set `log-analyzer/default/malFiles` to enable configured files.
+directory `log-mal-rules`, and you can set `log-analyzer/default/malFiles` to enable configured files.
```yaml
# application.yml
@@ -209,7 +207,7 @@ filter {
}
```
-The extractor above generates a metrics named `log_count`, with tag key `level` and value `1`, after this, you can
+The extractor above generates a metrics named `log_count`, with tag key `level` and value `1`. After that, you can
configure MAL rules to calculate the log count grouping by logging level like this:
```yaml
@@ -223,7 +221,7 @@ metrics:
```
-The other metrics generated is `http_response_time`, so that you can configure MAL rules to generate more useful metrics
+The other metrics generated is `http_response_time`, so you can configure MAL rules to generate more useful metrics
like percentiles.
```yaml
@@ -237,17 +235,17 @@ metrics:
### Sink
Sinks are the persistent layer of the LAL. By default, all the logs of each filter are persisted into the storage.
-However, there are some mechanisms that allow you to selectively save some logs, or even drop all the logs after you've
+However, some mechanisms allow you to selectively save some logs, or even drop all the logs after you've
extracted useful information, such as metrics.
#### Sampler
-Sampler allows you to save the logs in a sampling manner. Currently, sampling strategy `rateLimit` is supported, welcome
-to contribute more sampling strategies. If multiple samplers are specified, the last one determines the final sampling
-result, see examples in [Enforcer](#enforcer).
+Sampler allows you to save the logs in a sampling manner. Currently, the sampling strategy `rateLimit` is supported. We welcome
+contributions on more sampling strategies. If multiple samplers are specified, the last one determines the final sampling
+result. See examples in [Enforcer](#enforcer).
-`rateLimit` samples `n` logs at most in 1 second. `rateLimit("SamplerID")` requires an ID for the sampler, sampler
-declarations with the same ID share the same sampler instance, and thus share the same `qps`, resetting logics.
+`rateLimit` samples at most `n` logs within 1 second. `rateLimit("SamplerID")` requires an ID for the sampler. Sampler
+declarations with the same ID share the same sampler instance, thus sharing the same `qps` and resetting logic.
Examples:
@@ -273,8 +271,8 @@ filter {
#### Dropper
-Dropper is a special sink, meaning that all the logs are dropped without any exception. This is useful when you want to
-drop debugging logs,
+Dropper is a special sink, meaning that all logs are dropped without any exception. This is useful when you want to
+drop debugging logs.
```groovy
filter {
@@ -292,7 +290,7 @@ filter {
}
```
-or you have multiple filters, some of which are for extracting metrics, only one of them needs to be persisted.
+Or if you have multiple filters, some of which are for extracting metrics, only one of them has to be persisted.
```groovy
filter { // filter A: this is for persistence
@@ -319,9 +317,9 @@ filter { // filter B:
#### Enforcer
-Enforcer is another special sink that forcibly samples the log, a typical use case of enforcer is when you have
-configured a sampler and want to save some logs forcibly, for example, to save error logs even if the sampling mechanism
-is configured.
+Enforcer is another special sink that forcibly samples the log. A typical use case of enforcer is when you have
+configured a sampler and want to save some logs forcibly, such as to save error logs even if the sampling mechanism
+has been configured.
```groovy
filter {
diff --git a/docs/en/concepts-and-designs/mal.md b/docs/en/concepts-and-designs/mal.md
index 7770b6203843..9a478d581b53 100644
--- a/docs/en/concepts-and-designs/mal.md
+++ b/docs/en/concepts-and-designs/mal.md
@@ -57,6 +57,31 @@ For example, this filters all instance_trace_count samples for values >= 33:
```
instance_trace_count.valueGreaterEqual(33)
```
+### Tag manipulator
+MAL provides tag manipulators to change (add/delete/update) tags and their values.
+
+#### K8s
+MAL supports using the metadata of k8s to manipulate the tags and their values.
+This feature requires the OAP Server to have the authority to access the K8s `API Server`.
+
+##### retagByK8sMeta
+`retagByK8sMeta(newLabelName, K8sRetagType, existingLabelName, namespaceLabelName)`. Adds a new tag to the sample family based on an existing label's value. Several internal converting types are provided, including
+- K8sRetagType.Pod2Service
+
+Adds a tag to the sample, using `service` as the key and `$serviceName.$namespace` as the value, resolved from the value of the given tag key, which represents the name of a pod.
+
+For example:
+```
+container_cpu_usage_seconds_total{namespace=default, container=my-nginx, cpu=total, pod=my-nginx-5dc4865748-mbczh} 2
+```
+Expression:
+```
+container_cpu_usage_seconds_total.retagByK8sMeta('service' , K8sRetagType.Pod2Service , 'pod' , 'namespace')
+```
+Output:
+```
+container_cpu_usage_seconds_total{namespace=default, container=my-nginx, cpu=total, pod=my-nginx-5dc4865748-mbczh, service='nginx-service.default'} 2
+```
### Binary operators
@@ -186,6 +211,7 @@ Examples:
#### time
`time()`. returns the number of seconds since January 1, 1970 UTC.
+
## Down Sampling Operation
MAL should instruct meter-system how to do downsampling for metrics. It doesn't only refer to aggregate raw samples to
`minute` level, but also hints data from `minute` to higher levels, for instance, `hour` and `day`.
diff --git a/docs/en/guides/Plugin-test.md b/docs/en/guides/Plugin-test.md
index e690eb50bcde..fd9def0348de 100644
--- a/docs/en/guides/Plugin-test.md
+++ b/docs/en/guides/Plugin-test.md
@@ -105,7 +105,6 @@ File Name | Descriptions
| entryService | The entrance endpoint(URL) for test case access. Required. (HTTP Method: GET)
| healthCheck | The health check endpoint(URL) for test case access. Required. (HTTP Method: HEAD)
| startScript | Path of start up script. Required in `type: jvm` only.
-| framework | Case name.
| runningMode | Running mode whether with the optional plugin, options, `default`(default), `with_optional`, `with_bootstrap`
| withPlugins | Plugin selector rule. eg:`apm-spring-annotation-plugin-*.jar`. Required when `runningMode=with_optional` or `runningMode=with_bootstrap`.
| environment | Same as `docker-compose#environment`.
@@ -131,7 +130,6 @@ type:
entryService:
healthCheck:
startScript:
-framework:
runningMode:
withPlugins:
environment:
diff --git a/docs/en/setup/backend/grafana-cluster.json b/docs/en/setup/backend/grafana-cluster.json
index 3b688bef1f7b..ba99a904cd3d 100644
--- a/docs/en/setup/backend/grafana-cluster.json
+++ b/docs/en/setup/backend/grafana-cluster.json
@@ -1643,7 +1643,7 @@
"targets": [
{
"expr": "increase(mesh_analysis_error_count{job=\"$job\"}[1m]) ",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -1924,7 +1924,7 @@
"targets": [
{
"expr": "increase(meter_analysis_error_count{job=\"$job\"}[1m]) ",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -2205,7 +2205,7 @@
"targets": [
{
"expr": "increase(log_analysis_error_count{job=\"$job\"}[1m]) ",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -2486,7 +2486,7 @@
"targets": [
{
"expr": "increase(event_error_count{job=\"$job\"}[1m]) ",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -2767,7 +2767,7 @@
"targets": [
{
"expr": "increase(browser_perf_data_analysis_error_count{job=\"$job\"}[1m]) ",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -3048,7 +3048,7 @@
"targets": [
{
"expr": "increase(browser_error_log_analysis_error_count{job=\"$job\"}[1m]) ",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -3329,7 +3329,7 @@
"targets": [
{
"expr": "increase(metrics_fetcher_error_count{job=\"$job\"}[1m]) ",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
diff --git a/docs/en/setup/backend/grafana-instance.json b/docs/en/setup/backend/grafana-instance.json
index e085d4951ed5..874595a59367 100644
--- a/docs/en/setup/backend/grafana-instance.json
+++ b/docs/en/setup/backend/grafana-instance.json
@@ -1880,7 +1880,7 @@
"targets": [
{
"expr": "increase(trace_in_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -2157,7 +2157,7 @@
"targets": [
{
"expr": "increase(mesh_analysis_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -2434,7 +2434,7 @@
"targets": [
{
"expr": "increase(meter_in_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -2711,7 +2711,7 @@
"targets": [
{
"expr": "increase(log_in_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -2988,7 +2988,7 @@
"targets": [
{
"expr": "increase(event_in_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -3265,7 +3265,7 @@
"targets": [
{
"expr": "increase(browser_perf_data_in_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -3542,7 +3542,7 @@
"targets": [
{
"expr": "increase(browser_error_log_in_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
@@ -3819,7 +3819,7 @@
"targets": [
{
"expr": "increase(metrics_fetcher_latency_sum{instance=\"$instance\",job=\"$job\"}[1m])",
- "format": "heatmap",
+ "format": "time_series",
"hide": false,
"instant": false,
"interval": "1m",
diff --git a/docs/en/setup/backend/metrics-exporter.md b/docs/en/setup/backend/metrics-exporter.md
index b190517864b8..04ca4929bf76 100644
--- a/docs/en/setup/backend/metrics-exporter.md
+++ b/docs/en/setup/backend/metrics-exporter.md
@@ -31,7 +31,12 @@ message ExportMetricValue {
}
message SubscriptionsResp {
- repeated string metricNames = 1;
+ repeated SubscriptionMetric metrics = 1;
+}
+
+message SubscriptionMetric {
+ string metricName = 1;
+ EventType eventType = 2;
}
enum ValueType {
@@ -40,6 +45,13 @@ enum ValueType {
MULTI_LONG = 2;
}
+enum EventType {
+ // The metrics aggregated in this bulk, not include the existing persistent data.
+ INCREMENT = 0;
+ // Final result of the metrics at this moment.
+ TOTAL = 1;
+}
+
message SubscriptionReq {
}
@@ -61,8 +73,8 @@ exporter:
## For target exporter service
### subscription implementation
-Return the expected metrics name list, all the names must match the OAL script definition. Return empty list, if you want
-to export all metrics.
+Return the expected metrics name list with the event type (increment or total); all the names must match the OAL/MAL script definition.
+Return an empty list if you want to export all metrics with the increment event type.
### export implementation
Stream service, all subscribed metrics will be sent to here, based on OAP core schedule. Also, if the OAP deployed as cluster,
diff --git a/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-1.x.md b/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-1.x.md
index 1a0f3ef228f1..a3abcfa55869 100644
--- a/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-1.x.md
+++ b/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-1.x.md
@@ -30,6 +30,8 @@ The gRPC report could forward the collected logs to SkyWalking OAP server, or [S
```properties
log4j.rootLogger=INFO,CustomAppender
log4j.appender.CustomAppender=org.apache.skywalking.apm.toolkit.log.log4j.v1.x.log.GRPCLogClientAppender
+log4j.appender.CustomAppender.layout=org.apache.log4j.PatternLayout
+log4j.appender.CustomAppender.layout.ConversionPattern=[%t] %-5p %c %x - %m%n
```
* Add config of the plugin or use default
diff --git a/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-2.x.md b/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-2.x.md
index 30f16b17f40e..5dcc4d326ff0 100644
--- a/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-2.x.md
+++ b/docs/en/setup/service-agent/java-agent/Application-toolkit-log4j-2.x.md
@@ -111,7 +111,9 @@ The gRPC report could forward the collected logs to SkyWalking OAP server, or [S
* Add `GRPCLogClientAppender` in log4j2.xml
```xml
-
+
+
+
```
* Add config of the plugin or use default
diff --git a/docs/en/setup/service-agent/java-agent/Application-toolkit-logback-1.x.md b/docs/en/setup/service-agent/java-agent/Application-toolkit-logback-1.x.md
index 3e35113bfcb0..c5d4e14ba786 100644
--- a/docs/en/setup/service-agent/java-agent/Application-toolkit-logback-1.x.md
+++ b/docs/en/setup/service-agent/java-agent/Application-toolkit-logback-1.x.md
@@ -118,7 +118,13 @@ The gRPC reporter could forward the collected logs to SkyWalking OAP server, or
* Add `GRPCLogClientAppender` in logback.xml
```xml
-
+
+
+
+ %d{yyyy-MM-dd HH:mm:ss.SSS} [%X{tid}] [%thread] %-5level %logger{36} -%msg%n
+
+
+
```
* Add config of the plugin or use default
diff --git a/docs/en/setup/service-agent/java-agent/Plugin-list.md b/docs/en/setup/service-agent/java-agent/Plugin-list.md
index b8e7c2bd6dd6..41ba14d95ef0 100644
--- a/docs/en/setup/service-agent/java-agent/Plugin-list.md
+++ b/docs/en/setup/service-agent/java-agent/Plugin-list.md
@@ -23,7 +23,7 @@
- grpc-1.x
- gson-2.8.x
- h2-1.x
-- hbase-1.x
+- hbase-1.x/2.x
- httpasyncclient-4.x
- httpclient-3.x
- httpclient-4.x
diff --git a/docs/en/setup/service-agent/java-agent/Supported-list.md b/docs/en/setup/service-agent/java-agent/Supported-list.md
index dfc01aa05a36..6b9ee71862b8 100644
--- a/docs/en/setup/service-agent/java-agent/Supported-list.md
+++ b/docs/en/setup/service-agent/java-agent/Supported-list.md
@@ -83,7 +83,7 @@ metrics based on the tracing data.
* [Cassandra](https://github.com/apache/cassandra) 3.x
* [cassandra-java-driver](https://github.com/datastax/java-driver) 3.7.0-3.7.2
* HBase
- * [hbase-client](https://github.com/apache/hbase) HTable 1.x
+ * [hbase-client](https://github.com/apache/hbase) HTable 1.0.0-2.4.2
* Service Discovery
* [Netflix Eureka](https://github.com/Netflix/eureka)
* Distributed Coordination
diff --git a/oap-server/analyzer/meter-analyzer/pom.xml b/oap-server/analyzer/meter-analyzer/pom.xml
index 5b990368ad6f..d4723b79fc0d 100644
--- a/oap-server/analyzer/meter-analyzer/pom.xml
+++ b/oap-server/analyzer/meter-analyzer/pom.xml
@@ -33,7 +33,6 @@
server-core
${project.version}
-
org.codehaus.groovy
groovy
@@ -42,6 +41,11 @@
io.vavr
vavr
+
+ io.kubernetes
+ client-java
+ ${kubernetes.version}
+