Merge branch 'develop' into feat_hippo4j_ui_new

pull/1235/head
yikai 2 years ago
commit 8bc5415193

.gitignore

@@ -6,6 +6,10 @@ target/
!**/node_modules/
!**/dist/
### Agent ###
/hippo4j-agent/hippo4j-agent/
**/dependency-reduced-pom.xml
### STS ###
.apt_generated
.classpath

@@ -32,7 +32,7 @@ sidebar_position: 3
spring:
dynamic:
thread-pool:
tomcat:
web:
enable: true
executors:
- thread-pool-id: message-consume
@@ -50,7 +50,7 @@ spring:
spring:
dynamic:
thread-pool:
tomcat:
web:
nodes: 192.168.1.5:*,192.168.1.6:8080
executors:
- thread-pool-id: message-consume
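The two hunks above rename the `tomcat` key to `web` under `spring.dynamic.thread-pool`. A minimal sketch of the resulting configuration, with indentation reconstructed and illustrative values only; the placement of `executors` as a sibling of `web` is an assumption based on the context lines, not something this diff confirms:

spring:
  dynamic:
    thread-pool:
      web:
        # formerly `tomcat:` (the key this diff renames)
        enable: true
        nodes: 192.168.1.5:*,192.168.1.6:8080
      executors:
        - thread-pool-id: message-consume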

@@ -6,215 +6,215 @@ const darkCodeTheme = require('prism-react-renderer/themes/dracula');
/** @type {import('@docusaurus/types').Config} */
const config = {
title: 'Hippo4j',
tagline: '动态可观测线程池框架,为业务系统提高线上运行保障能力',
url: 'https://hippo4j.cn',
baseUrl: '/',
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'warn',
favicon: 'img/hippo4j_favicon.ico',
// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.
organizationName: 'hippo4j', // Usually your GitHub org/user name.
projectName: 'hippo4j.github.io', // Usually your repo name.
deploymentBranch: "main",
title: 'Hippo4j',
tagline: '动态可观测线程池框架,为业务系统提高线上运行保障能力',
url: 'https://hippo4j.cn',
baseUrl: '/',
onBrokenLinks: 'throw',
onBrokenMarkdownLinks: 'warn',
favicon: 'img/hippo4j_favicon.ico',
// GitHub pages deployment config.
// If you aren't using GitHub pages, you don't need these.
organizationName: 'hippo4j', // Usually your GitHub org/user name.
projectName: 'hippo4j.github.io', // Usually your repo name.
deploymentBranch: 'main',
// Even if you don't use internationalization, you can use this field to set useful
// metadata like html lang. For example, if your site is Chinese, you may want
// to replace "en" with "zh-Hans".
// Even if you don't use internationalization, you can use this field to set useful
// metadata like html lang. For example, if your site is Chinese, you may want
// to replace "en" with "zh-Hans".
i18n: {
defaultLocale: "en",
locales: ["en", "zh"],
defaultLocale: 'en',
locales: ['en', 'zh'],
localeConfigs: {
en: {
label: "English",
direction: "ltr",
label: 'English',
direction: 'ltr',
},
zh: {
label: "简体中文",
direction: "ltr",
label: '简体中文',
direction: 'ltr',
},
},
},
presets: [
[
'classic',
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
sidebarPath: require.resolve('./sidebars.js'),
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
sidebarCollapsed: false,
/*editUrl: 'https://github.com/longtai-cn',*/
},
blog: {
showReadingTime: true,
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
editUrl:
'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',
},
theme: {
customCss: require.resolve('./src/css/custom.css'),
},
}),
],
presets: [
[
'classic',
/** @type {import('@docusaurus/preset-classic').Options} */
({
docs: {
sidebarPath: require.resolve('./sidebars.js'),
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
sidebarCollapsed: false,
/*editUrl: 'https://github.com/longtai-cn',*/
},
blog: {
showReadingTime: true,
// Please change this to your repo.
// Remove this to remove the "edit this page" links.
editUrl:
'https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/',
},
theme: {
customCss: require.resolve('./src/css/custom.css'),
},
}),
],
plugins: [
[
"@docusaurus/plugin-content-docs",
{
id: "community",
path: "community",
routeBasePath: "community",
sidebarPath: require.resolve("./sidebarsCommunity.js"),
},
],
],
plugins: [
[
'@docusaurus/plugin-content-docs',
{
id: 'community',
path: 'community',
routeBasePath: 'community',
sidebarPath: require.resolve('./sidebarsCommunity.js'),
},
],
],
themeConfig:
themeConfig:
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
({
autoCollapseCategories: true,
announcementBar: {
id: 'announcementBar-1', // Increment on change
// content: `⭐️ If you like hippo4j, give it a star on <a target="_blank" rel="noopener noreferrer" href="https://gitee.com/mabaiwancn/hippo4j">Gitee</a>, thanks.`,
// content: `⭐️ 如果您喜欢 hippo4j,请在 <a target="_blank" rel="noopener noreferrer" href="https://gitee.com/mabaiwancn/hippo4j">Gitee</a> 和 <a target="_blank" rel="noopener noreferrer" href="https://github.com/opengoofy/hippo4j">GitHub</a> 上给它一个 star,谢谢`,
content: `⭐️ 开源不易,hippo4j 如果对您工作有帮助,请在 <a target="_blank" rel="noopener noreferrer" href="https://github.com/opengoofy/hippo4j">GitHub</a> 上给它一个 🌟`,
// content: `<a target="_blank" rel="noopener noreferrer" href="https://xiaomage.info/knowledge-planet/">👉 《小马哥的代码实战课》官方知识星球来啦!!!</a>`,
},
navbar: {
title: '',
logo: {
alt: 'Hippo4j 动态可观测线程池框架',
src: 'img/hippo4j.png',
},
items: [
{
type: 'docSidebar',
docId: 'intro',
position: 'left',
sidebarId: 'user_docs',
label: '文档',
},
{
to: "/community/contributor-guide",
label: "社区",
position: "left",
activeBaseRegex: `/community/`,
},
/*{ to: "/team", label: "团队", position: "left" },*/
{ to: "/users", label: "采用公司", position: "left" },
{ to: "/group", label: "加群沟通", position: "left" },
/*{to: '/blog', label: '博客', position: 'left'},*/
{
href: 'http://console.hippo4j.cn/index.html',
label: '控制台样例',
position: 'left',
},
{
href: 'https://gitee.com/opengoofy/congomall',
label: '👉 刚果商城',
position: 'left',
},
{
type: 'docsVersionDropdown',
position: 'right',
dropdownActiveClassDisabled: true,
},
({
autoCollapseCategories: true,
announcementBar: {
id: 'announcementBar-1', // Increment on change
// content: `⭐️ If you like hippo4j, give it a star on <a target="_blank" rel="noopener noreferrer" href="https://gitee.com/mabaiwancn/hippo4j">Gitee</a>, thanks.`,
// content: `⭐️ 如果您喜欢 hippo4j,请在 <a target="_blank" rel="noopener noreferrer" href="https://gitee.com/mabaiwancn/hippo4j">Gitee</a> 和 <a target="_blank" rel="noopener noreferrer" href="https://github.com/opengoofy/hippo4j">GitHub</a> 上给它一个 star,谢谢`,
content: `⭐️ 开源不易,如果 Hippo4j 对您有帮助,请在 <a target="_blank" rel="noopener noreferrer" href="https://github.com/opengoofy/hippo4j">GitHub</a> 上给它一个 Star 🌟`,
// content: `<a target="_blank" rel="noopener noreferrer" href="https://xiaomage.info/knowledge-planet/">👉 《小马哥的代码实战课》官方知识星球来啦!!!</a>`,
},
navbar: {
title: '',
logo: {
alt: 'Hippo4j 动态可观测线程池框架',
src: 'img/hippo4j.png',
},
items: [
{
type: 'docSidebar',
docId: 'intro',
position: 'left',
sidebarId: 'user_docs',
label: '文档',
},
{
to: '/community/contributor-guide',
label: '社区',
position: 'left',
activeBaseRegex: `/community/`,
},
/*{ to: "/team", label: "团队", position: "left" },*/
{ to: '/users', label: '采用公司', position: 'left' },
{ to: '/group', label: '加群沟通', position: 'left' },
/*{to: '/blog', label: '博客', position: 'left'},*/
{
href: 'http://console.hippo4j.cn/index.html',
label: '控制台样例',
position: 'left',
},
{
href: 'https://gitee.com/opengoofy/congomall',
label: '👉 刚果商城',
position: 'left',
},
{
type: 'docsVersionDropdown',
position: 'right',
dropdownActiveClassDisabled: true,
},
{type: 'localeDropdown', position: 'right'},
/*{
{ type: 'localeDropdown', position: 'right' },
/*{
href: 'https://gitee.com/mabaiwancn/hippo4j',
label: 'Gitee',
position: 'right',
},*/
{
href: 'https://github.com/opengoofy/hippo4j',
className: 'header-github-link',
'aria-label': 'GitHub repository',
position: 'right',
},
{
href: 'https://github.com/opengoofy/hippo4j',
className: 'header-github-link',
'aria-label': 'GitHub repository',
position: 'right',
},
/*{
/*{
href: 'https://github.com/opengoofy/hippo4j',
label: 'GitHub',
position: 'right',
},*/
],
},
footer: {
style: 'dark',
links: [
{
title: 'Docs',
items: [
{
label: 'Intro',
to: '/docs/user_docs/intro',
},
{
label: 'Config Mode',
to: '/docs/user_docs/getting_started/config/hippo4j-config-start',
},
{
label: 'Server Mode',
to: '/docs/user_docs/getting_started/server/hippo4j-server-start',
},
],
},
{
title: 'Community',
items: [
{
label: 'Group',
href: 'https://hippo4j.cn/group',
},
{
label: 'WeChat',
href: 'https://mp.weixin.qq.com/s/diVHYvwiuYH9aWpZDPc27g',
},
],
},
{
title: 'More',
items: [
{
label: 'Gitee',
href: 'https://gitee.com/opengoofy/hippo4j',
},
{
label: 'GitHub',
href: 'https://github.com/opengoofy/hippo4j',
},
],
},
{
title: 'Links',
items: [
{
label: '书源',
href: 'https://bookyuan.cn/',
},
{
label: '推广合作',
href: 'https://hippo4j.cn/docs/user_docs/other/operation',
},
],
},
],
copyright: `Copyright © 2021-2022 马丁版权所有 <a href="https://beian.miit.gov.cn">京ICP备2021038095号
],
},
footer: {
style: 'dark',
links: [
{
title: 'Docs',
items: [
{
label: 'Intro',
to: '/docs/user_docs/intro',
},
{
label: 'Config Mode',
to: '/docs/user_docs/getting_started/config/hippo4j-config-start',
},
{
label: 'Server Mode',
to: '/docs/user_docs/getting_started/server/hippo4j-server-start',
},
],
},
{
title: 'Community',
items: [
{
label: 'Group',
href: 'https://hippo4j.cn/group',
},
{
label: 'WeChat',
href: 'https://mp.weixin.qq.com/s/diVHYvwiuYH9aWpZDPc27g',
},
],
},
{
title: 'More',
items: [
{
label: 'Gitee',
href: 'https://gitee.com/opengoofy/hippo4j',
},
{
label: 'GitHub',
href: 'https://github.com/opengoofy/hippo4j',
},
],
},
{
title: 'Links',
items: [
{
label: '书源',
href: 'https://bookyuan.cn/',
},
{
label: '推广合作',
href: 'https://hippo4j.cn/docs/user_docs/other/operation',
},
],
},
],
copyright: `Copyright © 2021-2022 马丁版权所有 <a href="https://beian.miit.gov.cn">京ICP备2021038095号
</a>`,
},
prism: {
theme: lightCodeTheme,
darkTheme: darkCodeTheme,
additionalLanguages: ['java'],
},
}),
},
prism: {
theme: lightCodeTheme,
darkTheme: darkCodeTheme,
additionalLanguages: ['java'],
},
}),
};
module.exports = config;

@@ -32,7 +32,7 @@ sidebar_position: 3
spring:
dynamic:
thread-pool:
tomcat:
web:
enable: true
executors:
- thread-pool-id: message-consume
@@ -50,7 +50,7 @@ spring:
spring:
dynamic:
thread-pool:
tomcat:
web:
nodes: 192.168.1.5:*,192.168.1.6:8080
executors:
- thread-pool-id: message-consume

@@ -3,59 +3,60 @@ import clsx from 'clsx';
import styles from './styles.module.css';
const FeatureList = [
{
title: '动态变更',
Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default,
description: (
<>
应用运行时动态变更线程池参数,包括不限于核心、最大线程、阻塞队列大小和拒绝策略等,支持应用集群下不同节点线程池配置差异化
</>
),
},
{
title: '自定义报警',
Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default,
description: (
<>
应用线程池运行时埋点,提供四种报警维度,线程池过载、阻塞队列容量、运行超长以及拒绝策略报警,并支持自定义时间内不重复报警
</>
),
},
{
title: '运行监控',
Svg: require('@site/static/img/undraw_docusaurus_react.svg').default,
description: (
<>
支持自定义时长线程池运行数据采集存储,同时也支持 Prometheus、InfluxDB 等采集监控,通过 Grafana 或内置监控页面提供可视化大屏监控运行指标
</>
),
},
{
title: '动态变更',
Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default,
description: (
<>
应用运行时动态变更线程池参数,包括不限于核心、最大线程、阻塞队列大小和拒绝策略等,支持应用集群下不同节点线程池配置差异化
</>
),
},
{
title: '自定义报警',
Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default,
description: (
<>
应用线程池运行时埋点,提供四种报警维度,线程池过载、阻塞队列容量、运行超长以及拒绝策略报警,并支持自定义时间内不重复报警
</>
),
},
{
title: '运行监控',
Svg: require('@site/static/img/undraw_docusaurus_react.svg').default,
description: (
<>
支持自定义时长线程池运行数据采集存储,同时也支持 Prometheus、InfluxDB 等采集监控,通过
Grafana 或内置监控页面提供可视化大屏监控运行指标
</>
),
},
];
function Feature({Svg, title, description}) {
return (
<div className={clsx('col col--4')}>
<div className="text--center">
<Svg className={styles.featureSvg} role="img"/>
</div>
<div className="text--center padding-horiz--md">
<h3>{title}</h3>
<p>{description}</p>
</div>
</div>
);
function Feature({ Svg, title, description }) {
return (
<div className={clsx('col col--4')}>
<div className="text--center">
<Svg className={styles.featureSvg} role="img" />
</div>
<div className="text--center padding-horiz--md">
<h3>{title}</h3>
<p>{description}</p>
</div>
</div>
);
}
export default function HomepageFeatures() {
return (
<section className={styles.features}>
<div className="container">
<div className="row">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
return (
<section className={styles.features}>
<div className="container">
<div className="row">
{FeatureList.map((props, idx) => (
<Feature key={idx} {...props} />
))}
</div>
</div>
</section>
);
}

@@ -5,38 +5,48 @@ import Translate from '@docusaurus/Translate';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import Layout from '@theme/Layout';
import HomepageFeatures from '@site/src/components/HomepageFeatures';
import ExecutionEnvironment from '@docusaurus/ExecutionEnvironment';
import styles from './index.module.css';
if (ExecutionEnvironment.canUseDOM) {
var _hmt = _hmt || [];
(function () {
var hm = document.createElement('script');
hm.src = 'https://hm.baidu.com/hm.js?473eaadc06f3d63771f303df1fc29b58';
var s = document.getElementsByTagName('script')[0];
s.parentNode.insertBefore(hm, s);
})();
}
function HomepageHeader() {
const {siteConfig} = useDocusaurusContext();
return (
<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container">
<h1 className="hero__title">{siteConfig.title}</h1>
<p className="hero__subtitle">{siteConfig.tagline}</p>
<div className={styles.buttons}>
<Link
className="button button--secondary button--lg"
to="/docs/user_docs/intro">
快速开始 - 5min
</Link>
</div>
</div>
</header>
);
const { siteConfig } = useDocusaurusContext();
return (
<header className={clsx('hero hero--primary', styles.heroBanner)}>
<div className="container">
<h1 className="hero__title">{siteConfig.title}</h1>
<p className="hero__subtitle">{siteConfig.tagline}</p>
<div className={styles.buttons}>
<Link className="button button--secondary button--lg" to="/docs/user_docs/intro">
快速开始 - 5min
</Link>
</div>
</div>
</header>
);
}
export default function Home() {
const {siteConfig} = useDocusaurusContext();
return (
<Layout
title={`${siteConfig.title}`}
description="Description will go into a meta tag in <head />">
<HomepageHeader/>
<main>
<HomepageFeatures/>
</main>
</Layout>
);
const { siteConfig } = useDocusaurusContext();
return (
<Layout
title={`${siteConfig.title}`}
description="Description will go into a meta tag in <head />"
>
<HomepageHeader />
<main>
<HomepageFeatures />
</main>
</Layout>
);
}

@@ -19,29 +19,20 @@ package cn.hippo4j.adapter.hystrix;
import cn.hippo4j.adapter.base.ThreadPoolAdapter;
import cn.hippo4j.adapter.base.ThreadPoolAdapterCacheConfig;
import cn.hippo4j.adapter.base.ThreadPoolAdapterParameter;
import cn.hippo4j.adapter.base.ThreadPoolAdapterRegisterAction;
import cn.hippo4j.adapter.base.ThreadPoolAdapterState;
import cn.hippo4j.common.config.ApplicationContextHolder;
import cn.hippo4j.common.toolkit.CollectionUtil;
import com.netflix.hystrix.HystrixThreadPool;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.event.ApplicationStartedEvent;
import org.springframework.context.ApplicationListener;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static cn.hippo4j.common.constant.ChangeThreadPoolConstants.CHANGE_DELIMITER;
/**
* hystrix thread-pool adapter for hippo4j server.
*/

@@ -151,8 +151,8 @@ public class KafkaThreadPoolAdapter implements ThreadPoolAdapter, ApplicationLis
ContainerProperties containerProperties = concurrentContainer.getContainerProperties();
TopicPartitionOffset[] topicPartitions = containerProperties.getTopicPartitions();
if (topicPartitions != null && concurrency > topicPartitions.length) {
log.warn("[{}] Kafka consuming thread pool not support modify. " +
"When specific partitions are provided, the concurrency must be less than or "
log.warn("[{}] Kafka consuming thread pool not support modify. "
+ "When specific partitions are provided, the concurrency must be less than or "
+ "equal to the number of partitions;", threadPoolKey);
return false;
}

@@ -22,9 +22,7 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.web.context.WebServerApplicationContext;
import org.springframework.boot.web.server.WebServer;
import org.springframework.context.ApplicationContext;
import org.springframework.util.ReflectionUtils;
import java.lang.reflect.Field;
import java.util.concurrent.Executor;
/**

@@ -17,7 +17,6 @@
package cn.hippo4j.adapter.web;
import cn.hippo4j.adapter.web.WebThreadPoolService;
import cn.hippo4j.common.config.ApplicationContextHolder;
import cn.hippo4j.common.web.exception.ServiceException;
import lombok.extern.slf4j.Slf4j;

@@ -18,12 +18,6 @@
package cn.hippo4j.adapter.web.tomcat;
import cn.hippo4j.adapter.web.DefaultAbstractWebThreadPoolService;
import cn.hippo4j.adapter.web.IWebThreadPoolHandlerSupport;
import cn.hippo4j.common.enums.WebContainerEnum;
import cn.hippo4j.common.model.ThreadPoolBaseInfo;
import cn.hippo4j.common.model.ThreadPoolParameter;
import cn.hippo4j.common.model.ThreadPoolParameterInfo;
import cn.hippo4j.common.model.ThreadPoolRunStateInfo;
import cn.hippo4j.core.executor.state.AbstractThreadPoolRuntime;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.web.embedded.tomcat.TomcatWebServer;

@@ -0,0 +1,274 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The service name in UI
# ${service name} = [${group name}::]${logic name}
# The group name is optional only.
agent.service_name=${SW_AGENT_NAME:Your_ApplicationName}
# The agent namespace
agent.namespace=${SW_AGENT_NAMESPACE:}
# The agent cluster
agent.cluster=${SW_AGENT_CLUSTER:}
# The number of sampled traces per 3 seconds
# Negative or zero means off, by default
agent.sample_n_per_3_secs=${SW_AGENT_SAMPLE:-1}
# Authentication active is based on backend setting, see application.yml for more details.
agent.authentication=${SW_AGENT_AUTHENTICATION:}
# The max number of TraceSegmentRef in a single span to keep memory cost estimatable.
agent.trace_segment_ref_limit_per_span=${SW_TRACE_SEGMENT_LIMIT:500}
# The max amount of spans in a single segment.
# Through this config item, SkyWalking keep your application memory cost estimated.
agent.span_limit_per_segment=${SW_AGENT_SPAN_LIMIT:300}
# If the operation name of the first span is included in this set, this segment should be ignored. Multiple values should be separated by `,`.
agent.ignore_suffix=${SW_AGENT_IGNORE_SUFFIX:.jpg,.jpeg,.js,.css,.png,.bmp,.gif,.ico,.mp3,.mp4,.html,.svg}
# If true, SkyWalking agent will save all instrumented classes files in `/debugging` folder.
# SkyWalking team may ask for these files in order to resolve compatible problem.
agent.is_open_debugging_class=${SW_AGENT_OPEN_DEBUG:false}
# If true, SkyWalking agent will cache all instrumented classes files to memory or disk files (decided by class cache mode),
# allow other javaagent to enhance those classes that enhanced by SkyWalking agent.
agent.is_cache_enhanced_class=${SW_AGENT_CACHE_CLASS:false}
# The instrumented classes cache mode: MEMORY or FILE
# MEMORY: cache class bytes to memory, if instrumented classes is too many or too large, it may take up more memory
# FILE: cache class bytes in `/class-cache` folder, automatically clean up cached class files when the application exits
agent.class_cache_mode=${SW_AGENT_CLASS_CACHE_MODE:MEMORY}
# Instance name is the identity of an instance, should be unique in the service. If empty, SkyWalking agent will
# generate an 32-bit uuid. BY Default, SkyWalking uses UUID@hostname as the instance name. Max length is 50(UTF-8 char)
agent.instance_name=${SW_AGENT_INSTANCE_NAME:}
# service instance properties in json format. e.g. agent.instance_properties_json = {"org": "apache-skywalking"}
agent.instance_properties_json=${SW_INSTANCE_PROPERTIES_JSON:}
# How depth the agent goes, when log all cause exceptions.
agent.cause_exception_depth=${SW_AGENT_CAUSE_EXCEPTION_DEPTH:5}
# Force reconnection period of grpc, based on grpc_channel_check_interval.
agent.force_reconnection_period=${SW_AGENT_FORCE_RECONNECTION_PERIOD:1}
# The operationName max length
# Notice, in the current practice, we don't recommend the length over 190.
agent.operation_name_threshold=${SW_AGENT_OPERATION_NAME_THRESHOLD:150}
# Keep tracing even the backend is not available if this value is true.
agent.keep_tracing=${SW_AGENT_KEEP_TRACING:false}
# The agent use gRPC plain text in default.
# If true, SkyWalking agent uses TLS even no CA file detected.
agent.force_tls=${SW_AGENT_FORCE_TLS:false}
# gRPC SSL trusted ca file.
agent.ssl_trusted_ca_path=${SW_AGENT_SSL_TRUSTED_CA_PATH:/ca/ca.crt}
# enable mTLS when ssl_key_path and ssl_cert_chain_path exist.
agent.ssl_key_path=${SW_AGENT_SSL_KEY_PATH:}
agent.ssl_cert_chain_path=${SW_AGENT_SSL_CERT_CHAIN_PATH:}
# Limit the length of the ipv4 list size.
osinfo.ipv4_list_size=${SW_AGENT_OSINFO_IPV4_LIST_SIZE:10}
# grpc channel status check interval.
collector.grpc_channel_check_interval=${SW_AGENT_COLLECTOR_GRPC_CHANNEL_CHECK_INTERVAL:30}
# Agent heartbeat report period. Unit, second.
collector.heartbeat_period=${SW_AGENT_COLLECTOR_HEARTBEAT_PERIOD:30}
# The agent sends the instance properties to the backend every
# collector.heartbeat_period * collector.properties_report_period_factor seconds
collector.properties_report_period_factor=${SW_AGENT_COLLECTOR_PROPERTIES_REPORT_PERIOD_FACTOR:10}
# Backend service addresses.
collector.backend_service=${SW_AGENT_COLLECTOR_BACKEND_SERVICES:127.0.0.1:11800}
# How long grpc client will timeout in sending data to upstream. Unit is second.
collector.grpc_upstream_timeout=${SW_AGENT_COLLECTOR_GRPC_UPSTREAM_TIMEOUT:30}
# Sniffer get profile task list interval.
collector.get_profile_task_interval=${SW_AGENT_COLLECTOR_GET_PROFILE_TASK_INTERVAL:20}
# Sniffer get agent dynamic config interval.
collector.get_agent_dynamic_config_interval=${SW_AGENT_COLLECTOR_GET_AGENT_DYNAMIC_CONFIG_INTERVAL:20}
# If true, skywalking agent will enable periodically resolving DNS to update receiver service addresses.
collector.is_resolve_dns_periodically=${SW_AGENT_COLLECTOR_IS_RESOLVE_DNS_PERIODICALLY:false}
# Logging level
logging.level=${SW_LOGGING_LEVEL:INFO}
# Logging file_name
logging.file_name=${SW_LOGGING_FILE_NAME:skywalking-api.log}
# Log output. Default is FILE. Use CONSOLE means output to stdout.
logging.output=${SW_LOGGING_OUTPUT:FILE}
# Log files directory. Default is blank string, meaning use "{theSkywalkingAgentJarDir}/logs " to output logs.
# {theSkywalkingAgentJarDir} is the directory where the skywalking agent jar file is located
logging.dir=${SW_LOGGING_DIR:}
# Logger resolver: PATTERN or JSON. The default is PATTERN, which uses logging.pattern to print traditional text logs.
# JSON resolver prints logs in JSON format.
logging.resolver=${SW_LOGGING_RESOLVER:PATTERN}
# Logging format. There are all conversion specifiers:
# * %level means log level.
# * %timestamp means now of time with format yyyy-MM-dd HH:mm:ss:SSS.
# * %thread means name of current thread.
# * %msg means some message which user logged.
# * %class means SimpleName of TargetClass.
# * %throwable means a throwable which user called.
# * %agent_name means agent.service_name. Only apply to the PatternLogger.
logging.pattern=${SW_LOGGING_PATTERN:%level %timestamp %thread %class : %msg %throwable}
# Logging max_file_size, default: 300 * 1024 * 1024 = 314572800
logging.max_file_size=${SW_LOGGING_MAX_FILE_SIZE:314572800}
# The max history log files. When rollover happened, if log files exceed this number,
# then the oldest file will be delete. Negative or zero means off, by default.
logging.max_history_files=${SW_LOGGING_MAX_HISTORY_FILES:-1}
# Listed exceptions would not be treated as an error. Because in some codes, the exception is being used as a way of controlling business flow.
# Besides, the annotation named IgnoredException in the trace toolkit is another way to configure ignored exceptions.
statuscheck.ignored_exceptions=${SW_STATUSCHECK_IGNORED_EXCEPTIONS:}
# The max recursive depth when checking the exception traced by the agent. Typically, we don't recommend setting this more than 10, which could cause a performance issue. Negative value and 0 would be ignored, which means all exceptions would make the span tagged in error status.
statuscheck.max_recursive_depth=${SW_STATUSCHECK_MAX_RECURSIVE_DEPTH:1}
# Max element count in the correlation context
correlation.element_max_number=${SW_CORRELATION_ELEMENT_MAX_NUMBER:3}
# Max value length of each element.
correlation.value_max_length=${SW_CORRELATION_VALUE_MAX_LENGTH:128}
# Tag the span by the key/value in the correlation context, when the keys listed here exist.
correlation.auto_tag_keys=${SW_CORRELATION_AUTO_TAG_KEYS:}
# The buffer size of collected JVM info.
jvm.buffer_size=${SW_JVM_BUFFER_SIZE:600}
# The buffer channel size.
buffer.channel_size=${SW_BUFFER_CHANNEL_SIZE:5}
# The buffer size.
buffer.buffer_size=${SW_BUFFER_BUFFER_SIZE:300}
# If true, skywalking agent will enable profile when user create a new profile task. Otherwise disable profile.
profile.active=${SW_AGENT_PROFILE_ACTIVE:true}
# Parallel monitor segment count
profile.max_parallel=${SW_AGENT_PROFILE_MAX_PARALLEL:5}
# Max monitor segment time(minutes), if current segment monitor time out of limit, then stop it.
profile.duration=${SW_AGENT_PROFILE_DURATION:10}
# Max dump thread stack depth
profile.dump_max_stack_depth=${SW_AGENT_PROFILE_DUMP_MAX_STACK_DEPTH:500}
# Snapshot transport to backend buffer size
profile.snapshot_transport_buffer_size=${SW_AGENT_PROFILE_SNAPSHOT_TRANSPORT_BUFFER_SIZE:4500}
# If true, the agent collects and reports metrics to the backend.
meter.active=${SW_METER_ACTIVE:true}
# Report meters interval. The unit is second
meter.report_interval=${SW_METER_REPORT_INTERVAL:20}
# Max size of the meter pool
meter.max_meter_size=${SW_METER_MAX_METER_SIZE:500}
# The max size of message to send to server.Default is 10 MB
log.max_message_size=${SW_GRPC_LOG_MAX_MESSAGE_SIZE:10485760}
# Mount the specific folders of the plugins. Plugins in mounted folders would work.
plugin.mount=${SW_MOUNT_FOLDERS:plugins,activations}
# Peer maximum description limit.
plugin.peer_max_length=${SW_PLUGIN_PEER_MAX_LENGTH:200}
# Exclude some plugins define in plugins dir.Plugin names is defined in [Agent plugin list](Plugin-list.md)
plugin.exclude_plugins=${SW_EXCLUDE_PLUGINS:}
# If true, trace all the parameters in MongoDB access, default is false. Only trace the operation, not include parameters.
plugin.mongodb.trace_param=${SW_PLUGIN_MONGODB_TRACE_PARAM:false}
# If set to positive number, the `WriteRequest.params` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.mongodb.filter_length_limit=${SW_PLUGIN_MONGODB_FILTER_LENGTH_LIMIT:256}
# If true, trace all the DSL(Domain Specific Language) in ElasticSearch access, default is false.
plugin.elasticsearch.trace_dsl=${SW_PLUGIN_ELASTICSEARCH_TRACE_DSL:false}
# If true, the fully qualified method name will be used as the endpoint name instead of the request URL, default is false.
plugin.springmvc.use_qualified_name_as_endpoint_name=${SW_PLUGIN_SPRINGMVC_USE_QUALIFIED_NAME_AS_ENDPOINT_NAME:false}
# If true, the fully qualified method name will be used as the operation name instead of the given operation name, default is false.
plugin.toolit.use_qualified_name_as_operation_name=${SW_PLUGIN_TOOLIT_USE_QUALIFIED_NAME_AS_OPERATION_NAME:false}
# If set to true, the parameters of the sql (typically `java.sql.PreparedStatement`) would be collected.
plugin.jdbc.trace_sql_parameters=${SW_JDBC_TRACE_SQL_PARAMETERS:false}
# If set to positive number, the `db.sql.parameters` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.jdbc.sql_parameters_max_length=${SW_PLUGIN_JDBC_SQL_PARAMETERS_MAX_LENGTH:512}
# If set to positive number, the `db.statement` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.jdbc.sql_body_max_length=${SW_PLUGIN_JDBC_SQL_BODY_MAX_LENGTH:2048}
# If true, trace all the query parameters(include deleteByIds and deleteByQuery) in Solr query request, default is false.
plugin.solrj.trace_statement=${SW_PLUGIN_SOLRJ_TRACE_STATEMENT:false}
# If true, trace all the operation parameters in Solr request, default is false.
plugin.solrj.trace_ops_params=${SW_PLUGIN_SOLRJ_TRACE_OPS_PARAMS:false}
# If true, trace all middleware/business handlers that are part of the Light4J handler chain for a request.
plugin.light4j.trace_handler_chain=${SW_PLUGIN_LIGHT4J_TRACE_HANDLER_CHAIN:false}
# If true, the transaction definition name will be simplified.
plugin.springtransaction.simplify_transaction_definition_name=${SW_PLUGIN_SPRINGTRANSACTION_SIMPLIFY_TRANSACTION_DEFINITION_NAME:false}
# Threading classes (`java.lang.Runnable` and `java.util.concurrent.Callable`) and their subclasses, including anonymous inner classes whose name match any one of the `THREADING_CLASS_PREFIXES` (splitted by `,`) will be instrumented, make sure to only specify as narrow prefixes as what you're expecting to instrument, (`java.` and `javax.` will be ignored due to safety issues)
plugin.jdkthreading.threading_class_prefixes=${SW_PLUGIN_JDKTHREADING_THREADING_CLASS_PREFIXES:}
# This config item controls that whether the Tomcat plugin should collect the parameters of the request. Also, activate implicitly in the profiled trace.
plugin.tomcat.collect_http_params=${SW_PLUGIN_TOMCAT_COLLECT_HTTP_PARAMS:false}
# This config item controls that whether the SpringMVC plugin should collect the parameters of the request, when your Spring application is based on Tomcat, consider only setting either `plugin.tomcat.collect_http_params` or `plugin.springmvc.collect_http_params`. Also, activate implicitly in the profiled trace.
plugin.springmvc.collect_http_params=${SW_PLUGIN_SPRINGMVC_COLLECT_HTTP_PARAMS:false}
# This config item controls that whether the HttpClient plugin should collect the parameters of the request
plugin.httpclient.collect_http_params=${SW_PLUGIN_HTTPCLIENT_COLLECT_HTTP_PARAMS:false}
# When `COLLECT_HTTP_PARAMS` is enabled, how many characters to keep and send to the OAP backend, use negative values to keep and send the complete parameters, NB. this config item is added for the sake of performance.
plugin.http.http_params_length_threshold=${SW_PLUGIN_HTTP_HTTP_PARAMS_LENGTH_THRESHOLD:1024}
# When `include_http_headers` declares header names, this threshold controls the length limitation of all header values. use negative values to keep and send the complete headers. Note. this config item is added for the sake of performance.
plugin.http.http_headers_length_threshold=${SW_PLUGIN_HTTP_HTTP_HEADERS_LENGTH_THRESHOLD:2048}
# Set the header names, which should be collected by the plugin. Header name must follow `javax.servlet.http` definition. Multiple names should be split by comma.
plugin.http.include_http_headers=${SW_PLUGIN_HTTP_INCLUDE_HTTP_HEADERS:}
# This config item controls that whether the Feign plugin should collect the http body of the request.
plugin.feign.collect_request_body=${SW_PLUGIN_FEIGN_COLLECT_REQUEST_BODY:false}
# When `COLLECT_REQUEST_BODY` is enabled, how many characters to keep and send to the OAP backend, use negative values to keep and send the complete body.
plugin.feign.filter_length_limit=${SW_PLUGIN_FEIGN_FILTER_LENGTH_LIMIT:1024}
# When `COLLECT_REQUEST_BODY` is enabled and content-type start with SUPPORTED_CONTENT_TYPES_PREFIX, collect the body of the request , multiple paths should be separated by `,`
plugin.feign.supported_content_types_prefix=${SW_PLUGIN_FEIGN_SUPPORTED_CONTENT_TYPES_PREFIX:application/json,text/}
# If true, trace all the influxql(query and write) in InfluxDB access, default is true.
plugin.influxdb.trace_influxql=${SW_PLUGIN_INFLUXDB_TRACE_INFLUXQL:true}
# Apache Dubbo consumer collect `arguments` in RPC call, use `Object#toString` to collect `arguments`.
plugin.dubbo.collect_consumer_arguments=${SW_PLUGIN_DUBBO_COLLECT_CONSUMER_ARGUMENTS:false}
# When `plugin.dubbo.collect_consumer_arguments` is `true`, Arguments of length from the front will to the OAP backend
plugin.dubbo.consumer_arguments_length_threshold=${SW_PLUGIN_DUBBO_CONSUMER_ARGUMENTS_LENGTH_THRESHOLD:256}
# Apache Dubbo provider collect `arguments` in RPC call, use `Object#toString` to collect `arguments`.
plugin.dubbo.collect_provider_arguments=${SW_PLUGIN_DUBBO_COLLECT_PROVIDER_ARGUMENTS:false}
# When `plugin.dubbo.collect_provider_arguments` is `true`, Arguments of length from the front will to the OAP backend
plugin.dubbo.provider_arguments_length_threshold=${SW_PLUGIN_DUBBO_PROVIDER_ARGUMENTS_LENGTH_THRESHOLD:256}
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
plugin.kafka.bootstrap_servers=${SW_KAFKA_BOOTSTRAP_SERVERS:localhost:9092}
# Timeout period of reading topics from the Kafka server, the unit is second.
plugin.kafka.get_topic_timeout=${SW_GET_TOPIC_TIMEOUT:10}
# Kafka producer configuration. Read [producer configure](http://kafka.apache.org/24/documentation.html#producerconfigs)
# to get more details. Check document for more details and examples.
plugin.kafka.producer_config=${sw_plugin_kafka_producer_config:}
# Configure Kafka Producer configuration in JSON format. Notice it will be overridden by plugin.kafka.producer_config[key], if the key duplication.
plugin.kafka.producer_config_json=${SW_PLUGIN_KAFKA_PRODUCER_CONFIG_JSON:}
# Specify which Kafka topic name for Meter System data to report to.
plugin.kafka.topic_meter=${SW_PLUGIN_KAFKA_TOPIC_METER:skywalking-meters}
# Specify which Kafka topic name for JVM metrics data to report to.
plugin.kafka.topic_metrics=${SW_PLUGIN_KAFKA_TOPIC_METRICS:skywalking-metrics}
# Specify which Kafka topic name for traces data to report to.
plugin.kafka.topic_segment=${SW_PLUGIN_KAFKA_TOPIC_SEGMENT:skywalking-segments}
# Specify which Kafka topic name for Thread Profiling snapshot to report to.
plugin.kafka.topic_profiling=${SW_PLUGIN_KAFKA_TOPIC_PROFILINGS:skywalking-profilings}
# Specify which Kafka topic name for the register or heartbeat data of Service Instance to report to.
plugin.kafka.topic_management=${SW_PLUGIN_KAFKA_TOPIC_MANAGEMENT:skywalking-managements}
# Specify which Kafka topic name for the logging data to report to.
plugin.kafka.topic_logging=${SW_PLUGIN_KAFKA_TOPIC_LOGGING:skywalking-logs}
# isolate multi OAP server when using same Kafka cluster (final topic name will append namespace before Kafka topics with `-` ).
plugin.kafka.namespace=${SW_KAFKA_NAMESPACE:}
# Match spring beans with regular expression for the class name. Multiple expressions could be separated by a comma. This only works when `Spring annotation plugin` has been activated.
plugin.springannotation.classname_match_regex=${SW_SPRINGANNOTATION_CLASSNAME_MATCH_REGEX:}
# Whether or not to transmit logged data as formatted or un-formatted.
plugin.toolkit.log.transmit_formatted=${SW_PLUGIN_TOOLKIT_LOG_TRANSMIT_FORMATTED:true}
# If set to true, the parameters of Redis commands would be collected by Lettuce agent.
plugin.lettuce.trace_redis_parameters=${SW_PLUGIN_LETTUCE_TRACE_REDIS_PARAMETERS:false}
# If set to positive number and `plugin.lettuce.trace_redis_parameters` is set to `true`, Redis command parameters would be collected and truncated to this length.
plugin.lettuce.redis_parameter_max_length=${SW_PLUGIN_LETTUCE_REDIS_PARAMETER_MAX_LENGTH:128}
# If set to true, the parameters of the cypher would be collected.
plugin.neo4j.trace_cypher_parameters=${SW_PLUGIN_NEO4J_TRACE_CYPHER_PARAMETERS:false}
# If set to positive number, the `db.cypher.parameters` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.neo4j.cypher_parameters_max_length=${SW_PLUGIN_NEO4J_CYPHER_PARAMETERS_MAX_LENGTH:512}
# If set to positive number, the `db.statement` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.neo4j.cypher_body_max_length=${SW_PLUGIN_NEO4J_CYPHER_BODY_MAX_LENGTH:2048}
# If set to a positive number and activate `trace sampler CPU policy plugin`, the trace would not be collected when agent process CPU usage percent is greater than `plugin.cpupolicy.sample_cpu_usage_percent_limit`.
plugin.cpupolicy.sample_cpu_usage_percent_limit=${SW_SAMPLE_CPU_USAGE_PERCENT_LIMIT:-1}

@@ -0,0 +1,232 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================
Apache SkyWalking Subcomponents:
The Apache SkyWalking project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for the these
subcomponents is subject to the terms and conditions of the following
licenses.
========================================================================
Apache 2.0 licenses
========================================================================
The following components are provided under the Apache License. See project link for details.
The text of each license is the standard Apache 2.0 license.
raphw (byte-buddy) 1.12.13: http://bytebuddy.net/ , Apache 2.0
Google: grpc-java 1.44.0: https://github.com/grpc/grpc-java, Apache 2.0
Google: gson 2.8.9: https://github.com/google/gson , Apache 2.0
Google: proto-google-common-protos 2.0.1: https://github.com/googleapis/googleapis , Apache 2.0
Google: jsr305 3.0.2: http://central.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.pom , Apache 2.0
netty 4.1.79: https://github.com/netty/netty/blob/4.1/LICENSE.txt, Apache 2.0
========================================================================
BSD licenses
========================================================================
The following components are provided under a BSD license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
asm 9.2:https://gitlab.ow2.org , BSD-3-Clause

@@ -0,0 +1,299 @@
Opengoofy Hippo4j
Copyright 2017-2022 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
========================================================================
grpc-java NOTICE
========================================================================
Copyright 2014, gRPC Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------------------------
This product contains a modified portion of 'OkHttp', an open source
HTTP & SPDY client for Android and Java applications, which can be obtained
at:
* LICENSE:
* okhttp/third_party/okhttp/LICENSE (Apache License 2.0)
* HOMEPAGE:
* https://github.com/square/okhttp
* LOCATION_IN_GRPC:
* okhttp/third_party/okhttp
This product contains a modified portion of 'Netty', an open source
networking library, which can be obtained at:
* LICENSE:
* netty/third_party/netty/LICENSE.txt (Apache License 2.0)
* HOMEPAGE:
* https://netty.io
* LOCATION_IN_GRPC:
* netty/third_party/netty
========================================================================
grpc NOTICE
========================================================================
Copyright 2014 gRPC authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
========================================================================
netty NOTICE
========================================================================
The Netty Project
=================
Please visit the Netty web site for more information:
* http://netty.io/
Copyright 2014 The Netty Project
The Netty Project licenses this file to you under the Apache License,
version 2.0 (the "License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
Also, please refer to each LICENSE.<component>.txt file, which is located in
the 'license' directory of the distribution file, for the license terms of the
components that this product depends on.
-------------------------------------------------------------------------------
This product contains the extensions to Java Collections Framework which has
been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene:
* LICENSE:
* license/LICENSE.jsr166y.txt (Public Domain)
* HOMEPAGE:
* http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/
* http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/
This product contains a modified version of Robert Harder's Public Domain
Base64 Encoder and Decoder, which can be obtained at:
* LICENSE:
* license/LICENSE.base64.txt (Public Domain)
* HOMEPAGE:
* http://iharder.sourceforge.net/current/java/base64/
This product contains a modified portion of 'Webbit', an event based
WebSocket and HTTP server, which can be obtained at:
* LICENSE:
* license/LICENSE.webbit.txt (BSD License)
* HOMEPAGE:
* https://github.com/joewalnes/webbit
This product contains a modified portion of 'SLF4J', a simple logging
facade for Java, which can be obtained at:
* LICENSE:
* license/LICENSE.slf4j.txt (MIT License)
* HOMEPAGE:
* http://www.slf4j.org/
This product contains a modified portion of 'Apache Harmony', an open source
Java SE, which can be obtained at:
* NOTICE:
* license/NOTICE.harmony.txt
* LICENSE:
* license/LICENSE.harmony.txt (Apache License 2.0)
* HOMEPAGE:
* http://archive.apache.org/dist/harmony/
This product contains a modified portion of 'jbzip2', a Java bzip2 compression
and decompression library written by Matthew J. Francis. It can be obtained at:
* LICENSE:
* license/LICENSE.jbzip2.txt (MIT License)
* HOMEPAGE:
* https://code.google.com/p/jbzip2/
This product contains a modified portion of 'libdivsufsort', a C API library to construct
the suffix array and the Burrows-Wheeler transformed string for any input string of
a constant-size alphabet written by Yuta Mori. It can be obtained at:
* LICENSE:
* license/LICENSE.libdivsufsort.txt (MIT License)
* HOMEPAGE:
* https://github.com/y-256/libdivsufsort
This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM,
which can be obtained at:
* LICENSE:
* license/LICENSE.jctools.txt (ASL2 License)
* HOMEPAGE:
* https://github.com/JCTools/JCTools
This product optionally depends on 'JZlib', a re-implementation of zlib in
pure Java, which can be obtained at:
* LICENSE:
* license/LICENSE.jzlib.txt (BSD style License)
* HOMEPAGE:
* http://www.jcraft.com/jzlib/
This product optionally depends on 'Compress-LZF', a Java library for encoding and
decoding data in LZF format, written by Tatu Saloranta. It can be obtained at:
* LICENSE:
* license/LICENSE.compress-lzf.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/ning/compress
This product optionally depends on 'lz4', a LZ4 Java compression
and decompression library written by Adrien Grand. It can be obtained at:
* LICENSE:
* license/LICENSE.lz4.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/jpountz/lz4-java
This product optionally depends on 'lzma-java', a LZMA Java compression
and decompression library, which can be obtained at:
* LICENSE:
* license/LICENSE.lzma-java.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/jponge/lzma-java
This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression
and decompression library written by William Kinney. It can be obtained at:
* LICENSE:
* license/LICENSE.jfastlz.txt (MIT License)
* HOMEPAGE:
* https://code.google.com/p/jfastlz/
This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data
interchange format, which can be obtained at:
* LICENSE:
* license/LICENSE.protobuf.txt (New BSD License)
* HOMEPAGE:
* https://github.com/google/protobuf
This product optionally depends on 'Bouncy Castle Crypto APIs' to generate
a temporary self-signed X.509 certificate when the JVM does not provide the
equivalent functionality. It can be obtained at:
* LICENSE:
* license/LICENSE.bouncycastle.txt (MIT License)
* HOMEPAGE:
* http://www.bouncycastle.org/
This product optionally depends on 'Snappy', a compression library produced
by Google Inc, which can be obtained at:
* LICENSE:
* license/LICENSE.snappy.txt (New BSD License)
* HOMEPAGE:
* https://github.com/google/snappy
This product optionally depends on 'JBoss Marshalling', an alternative Java
serialization API, which can be obtained at:
* LICENSE:
* license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1)
* HOMEPAGE:
* http://www.jboss.org/jbossmarshalling
This product optionally depends on 'Caliper', Google's micro-
benchmarking framework, which can be obtained at:
* LICENSE:
* license/LICENSE.caliper.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/google/caliper
This product optionally depends on 'Apache Commons Logging', a logging
framework, which can be obtained at:
* LICENSE:
* license/LICENSE.commons-logging.txt (Apache License 2.0)
* HOMEPAGE:
* http://commons.apache.org/logging/
This product optionally depends on 'Apache Log4J', a logging framework, which
can be obtained at:
* LICENSE:
* license/LICENSE.log4j.txt (Apache License 2.0)
* HOMEPAGE:
* http://logging.apache.org/log4j/
This product optionally depends on 'Aalto XML', an ultra-high performance
non-blocking XML processor, which can be obtained at:
* LICENSE:
* license/LICENSE.aalto-xml.txt (Apache License 2.0)
* HOMEPAGE:
* http://wiki.fasterxml.com/AaltoHome
This product contains a modified version of 'HPACK', a Java implementation of
the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at:
* LICENSE:
* license/LICENSE.hpack.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/twitter/hpack
This product contains a modified portion of 'Apache Commons Lang', a Java library
that provides utilities for the java.lang API, which can be obtained at:
* LICENSE:
* license/LICENSE.commons-lang.txt (Apache License 2.0)
* HOMEPAGE:
* https://commons.apache.org/proper/commons-lang/
This product contains the Maven wrapper scripts from 'Maven Wrapper', which provides an easy way to ensure a user has everything necessary to run the Maven build.
* LICENSE:
* license/LICENSE.mvn-wrapper.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/takari/maven-wrapper

@ -0,0 +1,125 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent</artifactId>
<version>${revision}</version>
</parent>
<artifactId>hippo4j-agent-bootstrap</artifactId>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<premain.class>cn.hippo4j.agent.bootstrap.Hippo4jAgent</premain.class>
<can.redefine.classes>true</can.redefine.classes>
<can.retransform.classes>true</can.retransform.classes>
<shade.net.bytebuddy.source>net.bytebuddy</shade.net.bytebuddy.source>
<shade.net.bytebuddy.target>${shade.package}.${shade.net.bytebuddy.source}</shade.net.bytebuddy.target>
</properties>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent-core</artifactId>
</dependency>
</dependencies>
<build>
<finalName>hippo4j-agent</finalName>
<plugins>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>false</shadedArtifactAttached>
<createDependencyReducedPom>true</createDependencyReducedPom>
<createSourcesJar>true</createSourcesJar>
<shadeSourcesContent>true</shadeSourcesContent>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<manifestEntries>
<Premain-Class>${premain.class}</Premain-Class>
<Can-Redefine-Classes>${can.redefine.classes}</Can-Redefine-Classes>
<Can-Retransform-Classes>${can.retransform.classes}</Can-Retransform-Classes>
</manifestEntries>
</transformer>
</transformers>
<artifactSet>
<excludes>
<exclude>*:gson</exclude>
<exclude>io.grpc:*</exclude>
<exclude>io.netty:*</exclude>
<exclude>io.opencensus:*</exclude>
<exclude>com.google.*:*</exclude>
<exclude>com.google.guava:guava</exclude>
<exclude>org.checkerframework:checker-compat-qual</exclude>
<exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
<exclude>io.perfmark:*</exclude>
<exclude>org.slf4j:*</exclude>
</excludes>
</artifactSet>
<relocations>
<relocation>
<pattern>${shade.net.bytebuddy.source}</pattern>
<shadedPattern>${shade.net.bytebuddy.target}</shadedPattern>
</relocation>
</relocations>
<filters>
<filter>
<artifact>net.bytebuddy:byte-buddy</artifact>
<excludes>
<exclude>META-INF/versions/9/module-info.class</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>clean</id>
<phase>clean</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<delete dir="${project.basedir}/../skywalking-agent" />
</target>
</configuration>
</execution>
<execution>
<id>package</id>
<phase>package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir dir="${project.basedir}/../hippo4j-agent" />
<copy file="${project.build.directory}/hippo4j-agent.jar" tofile="${project.basedir}/../hippo4j-agent/hippo4j-agent.jar" overwrite="true" />
<mkdir dir="${project.basedir}/../hippo4j-agent/config" />
<mkdir dir="${project.basedir}/../hippo4j-agent/logs" />
<copydir src="${project.basedir}/../config" dest="${project.basedir}/../hippo4j-agent/config" forceoverwrite="true" />
<copydir src="${project.basedir}/../dist-material" dest="${project.basedir}/../hippo4j-agent" />
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
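The shade configuration above is what turns the shaded jar into a usable javaagent: the ManifestResourceTransformer stamps Premain-Class (cn.hippo4j.agent.bootstrap.Hippo4jAgent) and the redefine/retransform capability flags into MANIFEST.MF. A small sanity-check sketch, assuming the maven-antrun-plugin copy step above has already placed the artifact under ../hippo4j-agent/ (the relative path below is illustrative):

import java.util.jar.Attributes;
import java.util.jar.JarFile;

public class AgentManifestCheck {

    public static void main(String[] args) throws Exception {
        // Location produced by the antrun "package" execution above; adjust the path as needed.
        try (JarFile jar = new JarFile("hippo4j-agent/hippo4j-agent.jar")) {
            Attributes attributes = jar.getManifest().getMainAttributes();
            // These should echo the <premain.class> / <can.*.classes> properties from the POM.
            System.out.println("Premain-Class: " + attributes.getValue("Premain-Class"));
            System.out.println("Can-Redefine-Classes: " + attributes.getValue("Can-Redefine-Classes"));
            System.out.println("Can-Retransform-Classes: " + attributes.getValue("Can-Retransform-Classes"));
        }
    }
}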

@ -0,0 +1,256 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.bootstrap;
import cn.hippo4j.agent.core.boot.AgentPackageNotFoundException;
import cn.hippo4j.agent.core.boot.DefaultNamedThreadFactory;
import cn.hippo4j.agent.core.boot.ServiceManager;
import cn.hippo4j.agent.core.conf.Config;
import cn.hippo4j.agent.core.conf.SnifferConfigInitializer;
import cn.hippo4j.agent.core.jvm.LoadedLibraryCollector;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.AbstractClassEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.EnhanceContext;
import cn.hippo4j.agent.core.plugin.InstrumentDebuggingClass;
import cn.hippo4j.agent.core.plugin.PluginBootstrap;
import cn.hippo4j.agent.core.plugin.PluginException;
import cn.hippo4j.agent.core.plugin.PluginFinder;
import cn.hippo4j.agent.core.plugin.bootstrap.BootstrapInstrumentBoost;
import cn.hippo4j.agent.core.plugin.bytebuddy.CacheableTransformerDecorator;
import cn.hippo4j.agent.core.plugin.jdk9module.JDK9ModuleExporter;
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.description.NamedElement;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.dynamic.DynamicType;
import net.bytebuddy.dynamic.scaffold.TypeValidation;
import net.bytebuddy.matcher.ElementMatcher;
import net.bytebuddy.matcher.ElementMatchers;
import net.bytebuddy.utility.JavaModule;
import java.lang.instrument.Instrumentation;
import java.security.ProtectionDomain;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static net.bytebuddy.matcher.ElementMatchers.nameContains;
import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith;
import static net.bytebuddy.matcher.ElementMatchers.not;
public class Hippo4jAgent {
private static ILog LOGGER = LogManager.getLogger(Hippo4jAgent.class);
/**
* Main entrance. Uses Byte Buddy transformations to enhance all classes defined in plugins.
*/
public static void premain(String agentArgs, Instrumentation instrumentation) throws PluginException {
final PluginFinder pluginFinder;
try {
SnifferConfigInitializer.initializeCoreConfig(agentArgs);
} catch (Exception e) {
// try to resolve a new logger, and use the new logger to write the error log here
LogManager.getLogger(Hippo4jAgent.class)
.error(e, "Hippo4j agent initialization failure. Shutting down.");
return;
} finally {
// refresh logger again after initialization finishes
LOGGER = LogManager.getLogger(Hippo4jAgent.class);
}
try {
pluginFinder = new PluginFinder(new PluginBootstrap().loadPlugins());
} catch (AgentPackageNotFoundException ape) {
LOGGER.error(ape, "Locate agent.jar failure. Shutting down.");
return;
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent initialized failure. Shutting down.");
return;
}
final ByteBuddy byteBuddy = new ByteBuddy().with(TypeValidation.of(Config.Agent.IS_OPEN_DEBUGGING_CLASS));
AgentBuilder agentBuilder = new AgentBuilder.Default(byteBuddy).ignore(
nameStartsWith("net.bytebuddy.")
.or(nameStartsWith("org.slf4j."))
.or(nameStartsWith("org.groovy."))
.or(nameContains("javassist"))
.or(nameContains(".asm."))
.or(nameContains(".reflectasm."))
.or(nameStartsWith("sun.reflect"))
.or(allHippo4jAgentExcludeToolkit())
.or(ElementMatchers.isSynthetic()));
JDK9ModuleExporter.EdgeClasses edgeClasses = new JDK9ModuleExporter.EdgeClasses();
try {
agentBuilder = BootstrapInstrumentBoost.inject(pluginFinder, instrumentation, agentBuilder, edgeClasses);
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent inject bootstrap instrumentation failure. Shutting down.");
return;
}
try {
agentBuilder = JDK9ModuleExporter.openReadEdge(instrumentation, agentBuilder, edgeClasses);
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent open read edge in JDK 9+ failure. Shutting down.");
return;
}
if (Config.Agent.IS_CACHE_ENHANCED_CLASS) {
try {
agentBuilder = agentBuilder.with(new CacheableTransformerDecorator(Config.Agent.CLASS_CACHE_MODE));
LOGGER.info("Hippo4j agent class cache [{}] activated.", Config.Agent.CLASS_CACHE_MODE);
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent can't active class cache.");
}
}
agentBuilder.type(pluginFinder.buildMatch())
.transform(new Transformer(pluginFinder))
.with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION)
.with(new RedefinitionListener())
.with(new Listener())
.installOn(instrumentation);
PluginFinder.pluginInitCompleted();
try {
ServiceManager.INSTANCE.boot();
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent boot failure.");
}
try {
Class.forName("java.util.concurrent.ThreadPoolExecutor");
} catch (ClassNotFoundException e) {
LOGGER.error(e, "Hippo4j agent boot failure.");
}
Runtime.getRuntime()
.addShutdownHook(new Thread(ServiceManager.INSTANCE::shutdown, "hippo4j service shutdown thread"));
}
private static class Transformer implements AgentBuilder.Transformer {
private PluginFinder pluginFinder;
Transformer(PluginFinder pluginFinder) {
this.pluginFinder = pluginFinder;
}
@Override
public DynamicType.Builder<?> transform(final DynamicType.Builder<?> builder,
final TypeDescription typeDescription,
final ClassLoader classLoader,
final JavaModule javaModule,
final ProtectionDomain protectionDomain) {
LoadedLibraryCollector.registerURLClassLoader(classLoader);
List<AbstractClassEnhancePluginDefine> pluginDefines = pluginFinder.find(typeDescription);
if (pluginDefines.size() > 0) {
DynamicType.Builder<?> newBuilder = builder;
EnhanceContext context = new EnhanceContext();
for (AbstractClassEnhancePluginDefine define : pluginDefines) {
DynamicType.Builder<?> possibleNewBuilder = define.define(
typeDescription, newBuilder, classLoader, context);
if (possibleNewBuilder != null) {
newBuilder = possibleNewBuilder;
}
}
if (context.isEnhanced()) {
LOGGER.debug("Finish the prepare stage for {}.", typeDescription.getName());
}
return newBuilder;
}
LOGGER.debug("Matched class {}, but ignore by finding mechanism.", typeDescription.getTypeName());
return builder;
}
}
private static ElementMatcher.Junction<NamedElement> allHippo4jAgentExcludeToolkit() {
return nameStartsWith("cn.hippo4j").and(not(nameStartsWith("cn.hippo4j.agent.toolkit.")));
}
private static class Listener implements AgentBuilder.Listener {
@Override
public void onDiscovery(String typeName, ClassLoader classLoader, JavaModule module, boolean loaded) {
}
@Override
public void onTransformation(final TypeDescription typeDescription,
final ClassLoader classLoader,
final JavaModule module,
final boolean loaded,
final DynamicType dynamicType) {
if (LOGGER.isDebugEnable()) {
LOGGER.debug("On Transformation class {}.", typeDescription.getName());
}
InstrumentDebuggingClass.INSTANCE.log(dynamicType);
}
@Override
public void onIgnored(final TypeDescription typeDescription,
final ClassLoader classLoader,
final JavaModule module,
final boolean loaded) {
}
@Override
public void onError(final String typeName,
final ClassLoader classLoader,
final JavaModule module,
final boolean loaded,
final Throwable throwable) {
LOGGER.error("Enhance class " + typeName + " error.", throwable);
}
@Override
public void onComplete(String typeName, ClassLoader classLoader, JavaModule module, boolean loaded) {
}
}
private static class RedefinitionListener implements AgentBuilder.RedefinitionStrategy.Listener {
@Override
public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) {
/* do nothing */
}
@Override
public Iterable<? extends List<Class<?>>> onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) {
LOGGER.error(throwable, "index={}, batch={}, types={}", index, batch, types);
return Collections.emptyList();
}
@Override
public void onComplete(int amount, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) {
/* do nothing */
}
}
}
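premain above is the -javaagent entry point: it loads the core config, builds a PluginFinder, installs the Byte Buddy transformer, and boots the services. For local experiments it can be driven without a -javaagent launch; a hedged sketch using byte-buddy-agent (a test-scoped dependency of the core module) to obtain an Instrumentation handle — the class name is hypothetical, and the call may still abort early if no agent package or config can be located:

import cn.hippo4j.agent.bootstrap.Hippo4jAgent;
import java.lang.instrument.Instrumentation;
import net.bytebuddy.agent.ByteBuddyAgent;

public class PremainSmokeTest {

    public static void main(String[] args) throws Exception {
        // Installs an agent on the current JVM and returns its Instrumentation handle.
        Instrumentation instrumentation = ByteBuddyAgent.install();
        // Same entry point the JVM would invoke for -javaagent:hippo4j-agent.jar=<agentArgs>.
        Hippo4jAgent.premain("", instrumentation);
    }
}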

@ -1,311 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<artifactId>hippo4j-agent</artifactId>
<groupId>cn.hippo4j</groupId>
<version>${revision}</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>hippo4j-agent-core</artifactId>
<build>
<plugins>
<plugin>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
<version>${os-maven-plugin.version}</version>
<executions>
<execution>
<phase>initialize</phase>
<goals>
<goal>detect</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes>
<exclude>net.bytebuddy:byte-buddy:jar:</exclude>
<exclude>com.google.errorprone:error_prone_annotations:jar:</exclude>
<exclude>com.google.code.findbugs:jsr305:jar:</exclude>
<exclude>com.google.android:annotations:jar:</exclude>
<exclude>com.google.api.grpc:proto-google-common-protos:jar:</exclude>
<exclude>org.checkerframework:checker-compat-qual:jar:</exclude>
<exclude>org.codehaus.mojo:animal-sniffer-annotations:jar:</exclude>
</excludes>
</artifactSet>
<relocations>
<relocation>
<pattern>${shade.com.google.source}</pattern>
<shadedPattern>${shade.com.google.target}</shadedPattern>
</relocation>
<relocation>
<pattern>${shade.io.grpc.source}</pattern>
<shadedPattern>${shade.io.grpc.target}</shadedPattern>
</relocation>
<relocation>
<pattern>${shade.io.netty.source}</pattern>
<shadedPattern>${shade.io.netty.target}</shadedPattern>
</relocation>
<relocation>
<pattern>${shade.io.opencensus.source}</pattern>
<shadedPattern>${shade.io.opencensus.target}</shadedPattern>
</relocation>
<relocation>
<pattern>${shade.io.perfmark.source}</pattern>
<shadedPattern>${shade.io.perfmark.target}</shadedPattern>
</relocation>
<relocation>
<pattern>${shade.org.slf4j.source}</pattern>
<shadedPattern>${shade.org.slf4j.target}</shadedPattern>
</relocation>
</relocations>
<filters>
<filter>
<artifact>com.google.protobuf:protobuf-java</artifact>
<excludes>
<exclude>google/protobuf/*.proto</exclude>
<exclude>google/protobuf/compiler/*.proto</exclude>
</excludes>
</filter>
</filters>
<transformers>
<transformer />
</transformers>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>1.12.13</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId>
<version>1.12.13</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock</artifactId>
<version>2.6.0</version>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>jackson-annotations</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-core</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
</exclusion>
<exclusion>
<artifactId>jackson-databind</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-server</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-servlet</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-servlets</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-webapp</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>httpclient</artifactId>
<groupId>org.apache.httpcomponents</groupId>
</exclusion>
<exclusion>
<artifactId>xmlunit-core</artifactId>
<groupId>org.xmlunit</groupId>
</exclusion>
<exclusion>
<artifactId>xmlunit-legacy</artifactId>
<groupId>org.xmlunit</groupId>
</exclusion>
<exclusion>
<artifactId>json-path</artifactId>
<groupId>com.jayway.jsonpath</groupId>
</exclusion>
<exclusion>
<artifactId>jopt-simple</artifactId>
<groupId>net.sf.jopt-simple</groupId>
</exclusion>
<exclusion>
<artifactId>junit</artifactId>
<groupId>junit</groupId>
</exclusion>
<exclusion>
<artifactId>commons-lang3</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>zjsonpatch</artifactId>
<groupId>com.flipkart.zjsonpatch</groupId>
</exclusion>
<exclusion>
<artifactId>handlebars</artifactId>
<groupId>com.github.jknack</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.github.stefanbirkner</groupId>
<artifactId>system-rules</artifactId>
<version>1.18.0</version>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>junit</artifactId>
<groupId>junit</groupId>
</exclusion>
<exclusion>
<artifactId>junit-dep</artifactId>
<groupId>junit</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
<version>1.33</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>2.0.7</version>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>powermock-api-support</artifactId>
<groupId>org.powermock</groupId>
</exclusion>
<exclusion>
<artifactId>mockito-core</artifactId>
<groupId>org.mockito</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
<version>3.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId>
<version>1.33</version>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>commons-math3</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>jopt-simple</artifactId>
<groupId>net.sf.jopt-simple</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.20</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>1.3.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>2.0.7</version>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>powermock-module-junit4-common</artifactId>
<groupId>org.powermock</groupId>
</exclusion>
<exclusion>
<artifactId>hamcrest-core</artifactId>
<groupId>org.hamcrest</groupId>
</exclusion>
<exclusion>
<artifactId>junit</artifactId>
<groupId>junit</groupId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>${bytebuddy.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<properties>
<shade.io.perfmark.target>${shade.package}.${shade.io.perfmark.source}</shade.io.perfmark.target>
<shade.io.opencensus.source>io.opencensus</shade.io.opencensus.source>
<shade.org.slf4j.target>${shade.package}.${shade.org.slf4j.source}</shade.org.slf4j.target>
<shade.io.grpc.target>${shade.package}.${shade.io.grpc.source}</shade.io.grpc.target>
<netty-tcnative-boringssl-static.version>2.0.7.Final</netty-tcnative-boringssl-static.version>
<os-maven-plugin.version>1.4.1.Final</os-maven-plugin.version>
<shade.io.netty.source>io.netty</shade.io.netty.source>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<wiremock.version>2.6.0</wiremock.version>
<ststem-rules.version>1.18.0</ststem-rules.version>
<shade.io.opencensus.target>${shade.package}.${shade.io.opencensus.source}</shade.io.opencensus.target>
<slf4j.version>1.7.25</slf4j.version>
<shade.io.netty.target>${shade.package}.${shade.io.netty.source}</shade.io.netty.target>
<shade.com.google.source>com.google</shade.com.google.source>
<guava.version>30.1.1-jre</guava.version>
<shade.io.perfmark.source>io.perfmark</shade.io.perfmark.source>
<shade.org.slf4j.source>org.slf4j</shade.org.slf4j.source>
<shade.io.grpc.source>io.grpc</shade.io.grpc.source>
<shade.com.google.target>${shade.package}.${shade.com.google.source}</shade.com.google.target>
</properties>
</project>

@ -14,8 +14,6 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<guava.version>30.1.1-jre</guava.version>
<wiremock.version>2.6.0</wiremock.version>
<netty-tcnative-boringssl-static.version>2.0.7.Final</netty-tcnative-boringssl-static.version>
<os-maven-plugin.version>1.4.1.Final</os-maven-plugin.version>
<shade.com.google.source>com.google</shade.com.google.source>
@ -31,7 +29,6 @@
<shade.org.slf4j.source>org.slf4j</shade.org.slf4j.source>
<shade.org.slf4j.target>${shade.package}.${shade.org.slf4j.source}</shade.org.slf4j.target>
<ststem-rules.version>1.18.0</ststem-rules.version>
<slf4j.version>1.7.25</slf4j.version>
</properties>
<dependencies>
@ -42,19 +39,19 @@
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${gson.version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId>
<version>${bytebuddy.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock</artifactId>
<version>${wiremock.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
@ -71,41 +68,19 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.github.stefanbirkner</groupId>
<artifactId>system-rules</artifactId>
<version>${ststem-rules.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-common</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>${bytebuddy.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<plugins>
<plugin>

@ -26,7 +26,7 @@ import java.net.URISyntaxException;
import java.net.URL;
/**
* AgentPackagePath is a flag and finder to locate the SkyWalking agent.jar. It gets the absolute path of the agent jar.
* AgentPackagePath is a flag and finder to locate the Hippo4j agent.jar. It gets the absolute path of the agent jar.
* The path is the required metadata for agent core looking up the plugins and toolkit activations. If the lookup
* mechanism fails, the agent will exit directly.
*/

@ -27,7 +27,7 @@ public class DefaultNamedThreadFactory implements ThreadFactory {
private final String namePrefix;
public DefaultNamedThreadFactory(String name) {
namePrefix = "SkywalkingAgent-" + BOOT_SERVICE_SEQ.incrementAndGet() + "-" + name + "-";
namePrefix = "Hippo4jAgent-" + BOOT_SERVICE_SEQ.incrementAndGet() + "-" + name + "-";
}
@Override

@ -24,7 +24,7 @@ import java.lang.annotation.Target;
/**
* ConfigInitializationService provides the config class which should host all parameters originally from agent setup.
* {@link org.apache.skywalking.apm.agent.core.conf.Config} provides the core level config, all plugins could implement
* {@link cn.hippo4j.agent.core.conf.Config} provides the core level config, all plugins could implement
* this interface to have the same capability about initializing config from agent.config, system properties and system
* environment variables.
*/

@ -21,7 +21,13 @@ import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.loader.AgentClassLoader;
import java.util.*;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
/**
* The <code>ServiceManager</code> is based on {@link ServiceLoader}, and loads all {@link BootService} implementations.

@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.boot;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface SpringBootConfig {
/**
* @return Class as the root to do config initialization.
*/
Class<?> root();
}
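A minimal usage sketch, assuming the intended convention is that root() points at a holder class whose public static fields are later populated from the cached Spring properties by SpringBootConfigInitializer/ConfigInitializer; the holder class and field below are hypothetical:

import cn.hippo4j.agent.core.boot.SpringBootConfig;

@SpringBootConfig(root = ThreadPoolSpringProperties.class)
public class ThreadPoolSpringProperties {

    // Filled in from the Spring environment once setSpringProperties(...) has been called.
    public static Boolean ENABLE = Boolean.FALSE;
}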

@ -0,0 +1,87 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.boot;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.util.ConfigInitializer;
import java.util.Collections;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
public class SpringBootConfigInitializer {
private static final ILog LOG = LogManager.getLogger(SpringBootConfigInitializer.class);
private static final Set<Class<?>> SPRING_BOOT_CONFIG_LIST = Collections.synchronizedSet(new HashSet<>());
private static final long MAX_CACHE_TIME = 30L * 60L * 1000L; // half an hour.
private static final int MAX_CACHE_SIZE = 1000;
private static long PROPERTIES_LOAD_TIME;
public static Properties SPRING_PROPERTIES = null;
private SpringBootConfigInitializer() {
}
public static boolean isSpringPropertiesEmpty() {
return SPRING_PROPERTIES == null || SPRING_PROPERTIES.isEmpty();
}
public static synchronized void initializeConfig(SpringBootConfig springBootConfig) {
if (SPRING_PROPERTIES != null) {
try {
LOG.info("initialize Spring Config Class {}.", springBootConfig.root());
ConfigInitializer.initialize(SPRING_PROPERTIES, springBootConfig.root(), true);
} catch (Throwable e) {
LOG.error(e, "Failed to set the agent settings {} to Config={} ", SPRING_PROPERTIES, springBootConfig.root());
}
}
boolean isStarting = PROPERTIES_LOAD_TIME == 0L;
boolean overtime = System.currentTimeMillis() - PROPERTIES_LOAD_TIME > MAX_CACHE_TIME;
boolean oversize = SPRING_BOOT_CONFIG_LIST.size() > MAX_CACHE_SIZE;
// avoid memory leak.
if (isStarting || (!oversize && !overtime)) {
SPRING_BOOT_CONFIG_LIST.add(springBootConfig.root());
} else {
LOG.warn("spirng Config Class is skipped {}.", springBootConfig.root());
}
}
public static synchronized void setSpringProperties(Properties properties) {
if (properties != null && (SPRING_PROPERTIES == null || properties.size() > SPRING_PROPERTIES.size())) {
LOG.info("set Spring Config Properties before : {}.", SPRING_PROPERTIES);
SPRING_PROPERTIES = properties;
LOG.info("set Spring Config Properties after : {}.", SPRING_PROPERTIES);
PROPERTIES_LOAD_TIME = System.currentTimeMillis();
}
for (Class<?> clazz : SPRING_BOOT_CONFIG_LIST) {
try {
LOG.info("initialize Spring Config Class in loop {}.", clazz);
ConfigInitializer.initialize(SPRING_PROPERTIES, clazz);
} catch (Throwable e) {
LOG.error(e, "Failed to set the agent Config={} from settings {}", clazz, properties);
}
}
}
}
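SpringBootConfigInitializer only consumes a java.util.Properties snapshot; producing that snapshot is left to whoever can see the application's resolved configuration. A hedged sketch of the expected hand-off (the bridge class below is hypothetical):

import cn.hippo4j.agent.core.boot.SpringBootConfigInitializer;
import java.util.Map;
import java.util.Properties;

public class SpringEnvironmentBridge {

    // Invoked once the application's resolved configuration is available, e.g. from a plugin interceptor.
    public static void publish(Map<String, Object> resolvedConfig) {
        Properties snapshot = new Properties();
        resolvedConfig.forEach((key, value) -> snapshot.setProperty(key, String.valueOf(value)));
        // Stores the snapshot and (re)initializes every @SpringBootConfig root registered so far.
        SpringBootConfigInitializer.setSpringProperties(snapshot);
    }
}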

@ -82,14 +82,14 @@ public class Config {
public static String AUTHENTICATION = "";
/**
* If true, SkyWalking agent will save all instrumented classes files in `/debugging` folder. SkyWalking team
* If true, Hippo4j agent will save all instrumented class files in the `/debugging` folder. The Hippo4j team
* may ask for these files in order to resolve compatibility problems.
*/
public static boolean IS_OPEN_DEBUGGING_CLASS = false;
/**
* If true, SkyWalking agent will cache all instrumented classes to memory or disk files (decided by class cache
* mode), allow other javaagent to enhance those classes that enhanced by SkyWalking agent.
* If true, Hippo4j agent will cache all instrumented classes in memory or disk files (decided by the class cache
* mode), allowing other javaagents to enhance classes already enhanced by the Hippo4j agent.
*/
public static boolean IS_CACHE_ENHANCED_CLASS = false;
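To make the two flags above concrete, a short sketch follows; in practice they are normally driven by agent.config or system properties rather than assigned in code, so the direct assignments here are purely illustrative:

import cn.hippo4j.agent.core.conf.Config;

public class DebugFlagsSketch {

    public static void enableClassDumpAndCache() {
        // Dump every instrumented class under the agent's /debugging folder.
        Config.Agent.IS_OPEN_DEBUGGING_CLASS = true;
        // Cache enhanced bytecode so another javaagent can re-enhance the same classes.
        Config.Agent.IS_CACHE_ENHANCED_CLASS = true;
    }
}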
@ -108,7 +108,7 @@ public class Config {
/**
* service instance properties in json format. e.g. agent.instance_properties_json = {"org":
* "apache-skywalking"}
* "cn.hippo4j"}
*/
public static String INSTANCE_PROPERTIES_JSON = "";
@ -181,7 +181,7 @@ public class Config {
*/
public static int PROPERTIES_REPORT_PERIOD_FACTOR = 10;
/**
* Collector skywalking trace receiver service addresses.
* Collector hippo4j trace receiver service addresses.
*/
public static String BACKEND_SERVICE = "";
/**
@ -197,7 +197,7 @@ public class Config {
*/
public static int GET_AGENT_DYNAMIC_CONFIG_INTERVAL = 20;
/**
* If true, skywalking agent will enable periodically resolving DNS to update receiver service addresses.
* If true, hippo4j agent will enable periodically resolving DNS to update receiver service addresses.
*/
public static boolean IS_RESOLVE_DNS_PERIODICALLY = false;
}
@ -205,7 +205,7 @@ public class Config {
public static class Profile {
/**
* If true, skywalking agent will enable profile when user create a new profile task. Otherwise disable
* If true, hippo4j agent will enable profile when a user creates a new profile task. Otherwise disable
* profile.
*/
public static boolean ACTIVE = true;
@ -234,7 +234,7 @@ public class Config {
public static class Meter {
/**
* If true, skywalking agent will enable sending meters. Otherwise disable meter report.
* If true, Hippo4j agent will enable sending meters. Otherwise disable meter report.
*/
public static boolean ACTIVE = true;
@ -277,11 +277,11 @@ public class Config {
/**
* Log file name.
*/
public static String FILE_NAME = "skywalking-api.log";
public static String FILE_NAME = "hippo4j-api.log";
/**
* Log files directory. Default is blank string, means, use "{theSkywalkingAgentJarDir}/logs " to output logs.
* {theSkywalkingAgentJarDir} is the directory where the skywalking agent jar file is located.
* Log files directory. Default is blank string, means, use "{theHippo4jAgentJarDir}/logs " to output logs.
* {theHippo4jAgentJarDir} is the directory where the hippo4j agent jar file is located.
* <p>
* Ref to {@link WriterFactory#getLogWriter()}
*/
@ -358,6 +358,32 @@ public class Config {
* Mount the folders of the plugins. The folder path is relative to agent.jar.
*/
public static List<String> MOUNT = Arrays.asList("plugins", "activations");
public static class ThreadPool {
public static List<String> EXCLUDE_PACKAGE_PREFIX = Arrays.asList(
"java", "sun", "okhttp3", "retrofit2", "reactor",
"org.apache", "io.netty", "org.springframework", "com.ctrip", "com.google",
"io.undertow", "org.xnio", "org.jboss", "com.zaxxer", "org.redisson", "com.alibaba",
"com.netflix", "com.mysql", "rx.internal", "io.shardingjdbc", "org.drools", "org.elasticsearch",
"ch.qos.logback", "net.sf.ehcache");
}
public static class Apollo {
public static class App {
public static String ID;
}
public static String META;
public static class BootStrap {
public static boolean ENABLED = false;
public static List<String> NAMESPACES;
}
}
}
public static class Correlation {

@ -33,4 +33,7 @@ public class Constants {
public static String EVENT_LAYER_NAME = "GENERAL";
public static int NULL_VALUE = 0;
public static String SPRING_BOOT_CONFIG_PREFIX = "spring.dynamic.thread-pool";
}

@ -27,7 +27,10 @@ import cn.hippo4j.agent.core.util.ConfigInitializer;
import cn.hippo4j.agent.core.util.PropertyPlaceholderHelper;
import cn.hippo4j.agent.core.util.StringUtil;
import java.io.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@ -42,9 +45,9 @@ import static cn.hippo4j.agent.core.conf.Constants.SERVICE_NAME_PART_CONNECTOR;
public class SnifferConfigInitializer {
private static ILog LOGGER = LogManager.getLogger(SnifferConfigInitializer.class);
private static final String SPECIFIED_CONFIG_PATH = "skywalking_config";
private static final String SPECIFIED_CONFIG_PATH = "hippo4j_config";
private static final String DEFAULT_CONFIG_FILE_NAME = "/config/agent.config";
private static final String ENV_KEY_PREFIX = "skywalking.";
private static final String ENV_KEY_PREFIX = "hippo4j.";
private static Properties AGENT_SETTINGS;
private static boolean IS_INIT_COMPLETED = false;
@ -54,7 +57,7 @@ public class SnifferConfigInitializer {
* /config directory of agent package.
* <p>
* Also try to override the config by system.properties. All the keys in this place should start with {@link
* #ENV_KEY_PREFIX}. e.g. in env `skywalking.agent.service_name=yourAppName` to override `agent.service_name` in
* #ENV_KEY_PREFIX}. e.g. in env `hippo4j.agent.service_name=yourAppName` to override `agent.service_name` in
* config file.
* <p>
* At the end, `agent.service_name` and `collector.servers` must not be blank.
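A concrete illustration of the override rule described above; the service name is hypothetical, and outside a real -javaagent launch the call may still abort because the agent package cannot be located:

import cn.hippo4j.agent.core.conf.SnifferConfigInitializer;

public class SystemPropertyOverrideExample {

    public static void main(String[] args) throws Exception {
        // Equivalent to passing -Dhippo4j.agent.service_name=demo-app on the command line.
        System.setProperty("hippo4j.agent.service_name", "demo-app");
        // Keys carrying the "hippo4j." ENV_KEY_PREFIX override the matching agent.config entry,
        // here agent.service_name, when the core config is initialized.
        SnifferConfigInitializer.initializeCoreConfig(null);
    }
}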
@ -69,7 +72,7 @@ public class SnifferConfigInitializer {
}
} catch (Exception e) {
LOGGER.error(e, "Failed to read the config file, skywalking is going to run in default config.");
LOGGER.error(e, "Failed to read the config file, hippo4j is going to run in default config.");
}
try {
@ -106,9 +109,9 @@ public class SnifferConfigInitializer {
Config.Agent.CLUSTER);
}
}
if (StringUtil.isEmpty(Config.Collector.BACKEND_SERVICE)) {
throw new ExceptionInInitializerError("`collector.backend_service` is missing.");
}
// if (StringUtil.isEmpty(Config.Collector.BACKEND_SERVICE)) {
// throw new ExceptionInInitializerError("`collector.backend_service` is missing.");
// }
if (Config.Plugin.PEER_MAX_LENGTH <= 3) {
LOGGER.warn(
"PEER_MAX_LENGTH configuration:{} error, the default value of 200 will be used.",
@ -180,10 +183,10 @@ public class SnifferConfigInitializer {
}
/**
* Override the config by system properties. The property key must start with `skywalking`, the result should be as
* Override the config by system properties. The property key must start with `hippo4j`; the result should be the
* same as in `agent.config`
* <p>
* such as: Property key of `agent.service_name` should be `skywalking.agent.service_name`
* such as: Property key of `agent.service_name` should be `hippo4j.agent.service_name`
*/
private static void overrideConfigBySystemProp() {
Properties systemProperties = System.getProperties();

@ -44,9 +44,9 @@ public abstract class AgentConfigChangeWatcher {
@Override
public String toString() {
return "AgentConfigChangeWatcher{" +
"propertyKey='" + propertyKey + '\'' +
'}';
return "AgentConfigChangeWatcher{"
+ "propertyKey='" + propertyKey + '\''
+ '}';
}
@Getter

@ -0,0 +1,132 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.jvm;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.util.CollectionUtil;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.File;
import java.lang.management.ManagementFactory;
import java.net.URL;
import java.net.URLClassLoader;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class LoadedLibraryCollector {
private static final ILog LOGGER = LogManager.getLogger(LoadedLibraryCollector.class);
private static final String JAR_SEPARATOR = "!";
private static Set<ClassLoader> CURRENT_URL_CLASSLOADER_SET = new HashSet<>();
private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
/**
* Prevent OOM in special scenarios
*/
private static int CURRENT_URL_CLASSLOADER_SET_MAX_SIZE = 50;
public static void registerURLClassLoader(ClassLoader classLoader) {
if (CURRENT_URL_CLASSLOADER_SET.size() < CURRENT_URL_CLASSLOADER_SET_MAX_SIZE && classLoader instanceof URLClassLoader) {
CURRENT_URL_CLASSLOADER_SET.add(classLoader);
}
}
private static String getVmStartTime() {
long startTime = ManagementFactory.getRuntimeMXBean().getStartTime();
return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date(startTime));
}
private static List<String> getVmArgs() {
List<String> vmArgs = ManagementFactory.getRuntimeMXBean().getInputArguments();
List<String> sortedVmArgs = new ArrayList<>(vmArgs);
Collections.sort(sortedVmArgs);
return sortedVmArgs;
}
private static List<String> getLibJarNames() {
List<URL> classLoaderUrls = loadClassLoaderUrls();
return extractLibJarNamesFromURLs(classLoaderUrls);
}
private static List<URL> loadClassLoaderUrls() {
List<URL> classLoaderUrls = new ArrayList<>();
for (ClassLoader classLoader : CURRENT_URL_CLASSLOADER_SET) {
try {
URLClassLoader webappClassLoader = (URLClassLoader) classLoader;
URL[] urls = webappClassLoader.getURLs();
classLoaderUrls.addAll(Arrays.asList(urls));
} catch (Exception e) {
LOGGER.warn("Load classloader urls exception: {}", e.getMessage());
}
}
return classLoaderUrls;
}
private static List<String> extractLibJarNamesFromURLs(List<URL> urls) {
Set<String> libJarNames = new HashSet<>();
for (URL url : urls) {
try {
String libJarName = extractLibJarName(url);
if (libJarName.endsWith(".jar")) {
libJarNames.add(libJarName);
}
} catch (Exception e) {
LOGGER.warn("Extracting library name exception: {}", e.getMessage());
}
}
List<String> sortedLibJarNames = new ArrayList<>(libJarNames.size());
if (!CollectionUtil.isEmpty(libJarNames)) {
sortedLibJarNames.addAll(libJarNames);
Collections.sort(sortedLibJarNames);
}
return sortedLibJarNames;
}
private static String extractLibJarName(URL url) {
String protocol = url.getProtocol();
if (protocol.equals("file")) {
return extractNameFromFile(url.toString());
} else if (protocol.equals("jar")) {
return extractNameFromJar(url.toString());
} else {
return "";
}
}
private static String extractNameFromFile(String fileUri) {
int lastIndexOfSeparator = fileUri.lastIndexOf(File.separator);
if (lastIndexOfSeparator < 0) {
return fileUri;
} else {
return fileUri.substring(lastIndexOfSeparator + 1);
}
}
private static String extractNameFromJar(String jarUri) {
String uri = jarUri.substring(0, jarUri.lastIndexOf(JAR_SEPARATOR));
return extractNameFromFile(uri);
}
}

@ -19,7 +19,13 @@ package cn.hippo4j.agent.core.logging.core;
import cn.hippo4j.agent.core.conf.Config;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.core.converters.*;
import cn.hippo4j.agent.core.logging.core.converters.AgentNameConverter;
import cn.hippo4j.agent.core.logging.core.converters.ClassConverter;
import cn.hippo4j.agent.core.logging.core.converters.DateConverter;
import cn.hippo4j.agent.core.logging.core.converters.LevelConverter;
import cn.hippo4j.agent.core.logging.core.converters.MessageConverter;
import cn.hippo4j.agent.core.logging.core.converters.ThreadConverter;
import cn.hippo4j.agent.core.logging.core.converters.ThrowableConverter;
import java.util.ArrayList;
import java.util.HashMap;

@ -22,7 +22,11 @@ import cn.hippo4j.agent.core.conf.Config;
import cn.hippo4j.agent.core.conf.Constants;
import cn.hippo4j.agent.core.util.RunnableWithExceptionProtection;
import java.io.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;

@ -24,7 +24,7 @@ import java.util.HashMap;
import java.util.Map;
/**
* An alternative logger for the SkyWalking agent. The default layout is
* An alternative logger for the Hippo4j agent. The default layout is
* {
* "@timestamp": "", // timestamp
* "logger": "", // name of the Logger

@ -24,8 +24,6 @@ import cn.hippo4j.agent.core.conf.SnifferConfigInitializer;
import cn.hippo4j.agent.core.plugin.PluginFinder;
import cn.hippo4j.agent.core.util.StringUtil;
import static cn.hippo4j.agent.core.logging.core.LogOutput.FILE;
public class WriterFactory {
private static IWriter WRITER;

@ -18,8 +18,11 @@
package cn.hippo4j.agent.core.os;
import java.lang.management.ManagementFactory;
import java.net.*;
import java.util.ArrayList;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;

@ -22,7 +22,7 @@ package cn.hippo4j.agent.core.plugin;
*/
public class ByteBuddyCoreClasses {
private static final String SHADE_PACKAGE = "org.apache.skywalking.apm.dependencies.";
private static final String SHADE_PACKAGE = "cn.hippo4j.agent.dependencies.";
public static final String[] CLASSES = {
SHADE_PACKAGE + "net.bytebuddy.implementation.bind.annotation.RuntimeType",

@ -46,7 +46,7 @@ public class PluginBootstrap {
List<URL> resources = resolver.getResources();
if (resources == null || resources.size() == 0) {
LOGGER.info("no plugin files (skywalking-plugin.def) found, continue to start application.");
LOGGER.info("no plugin files (hippo4j-plugin.def) found, continue to start application.");
return new ArrayList<AbstractClassEnhancePluginDefine>();
}

@ -26,7 +26,11 @@ import net.bytebuddy.description.NamedElement;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import static net.bytebuddy.matcher.ElementMatchers.isInterface;
import static net.bytebuddy.matcher.ElementMatchers.not;

@ -28,7 +28,7 @@ import java.util.Enumeration;
import java.util.List;
/**
* Use the current classloader to read all plugin define file. The file must be named 'skywalking-plugin.def'
* Use the current classloader to read all plugin definition files. The file must be named 'hippo4j-plugin.def'
*/
public class PluginResourcesResolver {
@ -38,12 +38,12 @@ public class PluginResourcesResolver {
List<URL> cfgUrlPaths = new ArrayList<URL>();
Enumeration<URL> urls;
try {
urls = AgentClassLoader.getDefault().getResources("skywalking-plugin.def");
urls = AgentClassLoader.getDefault().getResources("hippo4j-plugin.def");
while (urls.hasMoreElements()) {
URL pluginUrl = urls.nextElement();
cfgUrlPaths.add(pluginUrl);
LOGGER.info("find skywalking plugin define in {}", pluginUrl);
LOGGER.info("find hippo4j plugin define in {}", pluginUrl);
}
return cfgUrlPaths;

@ -19,7 +19,11 @@ package cn.hippo4j.agent.core.plugin.bootstrap;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.*;
import cn.hippo4j.agent.core.plugin.AbstractClassEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.ByteBuddyCoreClasses;
import cn.hippo4j.agent.core.plugin.InstrumentDebuggingClass;
import cn.hippo4j.agent.core.plugin.PluginException;
import cn.hippo4j.agent.core.plugin.PluginFinder;
import cn.hippo4j.agent.core.plugin.interceptor.ConstructorInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.InstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.StaticMethodsInterceptPoint;
@ -55,31 +59,31 @@ public class BootstrapInstrumentBoost {
private static final ILog LOGGER = LogManager.getLogger(BootstrapInstrumentBoost.class);
private static final String[] HIGH_PRIORITY_CLASSES = {
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.BootstrapInterRuntimeAssist",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.StaticMethodsAroundInterceptor",
"org.apache.skywalking.apm.agent.core.plugin.bootstrap.IBootstrapLog",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.OverrideCallable",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.MethodInterceptResult",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.BootstrapInterRuntimeAssist",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.StaticMethodsAroundInterceptor",
"cn.hippo4j.agent.core.plugin.bootstrap.IBootstrapLog",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.OverrideCallable",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult",
// interceptor v2
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.v2.InstanceMethodsAroundInterceptorV2",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.v2.StaticMethodsAroundInterceptorV2",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.v2.MethodInvocationContext",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.InstanceMethodsAroundInterceptorV2",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.StaticMethodsAroundInterceptorV2",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.MethodInvocationContext",
};
private static String INSTANCE_METHOD_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.InstanceMethodInterTemplate";
private static String INSTANCE_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.InstanceMethodInterWithOverrideArgsTemplate";
private static String CONSTRUCTOR_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.ConstructorInterTemplate";
private static String STATIC_METHOD_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.StaticMethodInterTemplate";
private static String STATIC_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.StaticMethodInterWithOverrideArgsTemplate";
private static String INSTANCE_METHOD_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.InstanceMethodInterTemplate";
private static String INSTANCE_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.InstanceMethodInterWithOverrideArgsTemplate";
private static String CONSTRUCTOR_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.ConstructorInterTemplate";
private static String STATIC_METHOD_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.StaticMethodInterTemplate";
private static String STATIC_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.StaticMethodInterWithOverrideArgsTemplate";
private static String INSTANCE_METHOD_V2_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2Template";
private static String INSTANCE_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2WithOverrideArgsTemplate";
private static String STATIC_METHOD_V2_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2Template";
private static String STATIC_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2WithOverrideArgsTemplate";
private static String INSTANCE_METHOD_V2_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2Template";
private static String INSTANCE_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2WithOverrideArgsTemplate";
private static String STATIC_METHOD_V2_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2Template";
private static String STATIC_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2WithOverrideArgsTemplate";
public static AgentBuilder inject(PluginFinder pluginFinder, Instrumentation instrumentation,
AgentBuilder agentBuilder, JDK9ModuleExporter.EdgeClasses edgeClasses) throws PluginException {
@ -235,7 +239,7 @@ public class BootstrapInstrumentBoost {
* @param classesTypeMap hosts injected binary of generated class
* @param typePool to generate new class
* @param templateClassName represents the class as template in this generation process. The templates are
* pre-defined in SkyWalking agent core.
* pre-defined in Hippo4j agent core.
*/
private static void generateDelegator(Map<String, byte[]> classesTypeMap, TypePool typePool,
String templateClassName, String methodsInterceptor) {

@ -22,7 +22,11 @@ import cn.hippo4j.agent.core.plugin.interceptor.enhance.BootstrapInterRuntimeAss
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
import net.bytebuddy.implementation.bind.annotation.*;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.SuperCall;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;
import java.util.concurrent.Callable;

@ -18,8 +18,16 @@
package cn.hippo4j.agent.core.plugin.bootstrap.template;
import cn.hippo4j.agent.core.plugin.bootstrap.IBootstrapLog;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.*;
import net.bytebuddy.implementation.bind.annotation.*;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.BootstrapInterRuntimeAssist;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.OverrideCallable;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Morph;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;

@ -22,7 +22,12 @@ import cn.hippo4j.agent.core.plugin.interceptor.enhance.BootstrapInterRuntimeAss
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.InstanceMethodsAroundInterceptorV2;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.MethodInvocationContext;
import net.bytebuddy.implementation.bind.annotation.*;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.SuperCall;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;
import java.util.concurrent.Callable;

@ -23,7 +23,11 @@ import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.OverrideCallable;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.InstanceMethodsAroundInterceptorV2;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.MethodInvocationContext;
import net.bytebuddy.implementation.bind.annotation.*;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Morph;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;

@ -27,7 +27,11 @@ import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.agent.builder.ResettableClassFileTransformer;
import net.bytebuddy.utility.RandomString;
import java.io.*;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.instrument.IllegalClassFormatException;
import java.security.ProtectionDomain;
import java.util.Map;
@ -37,7 +41,7 @@ import java.util.concurrent.ConcurrentHashMap;
* Wrapper classFileTransformer of ByteBuddy, save the enhanced bytecode to memory cache or file cache,
* and automatically load the previously generated bytecode during the second retransform,
* to solve the problem that ByteBuddy generates auxiliary classes with different random names every time.
* Allow other javaagent to enhance those classes that enhanced by SkyWalking agent.
* Allow other javaagent to enhance those classes that enhanced by Hippo4j agent.
*/
public class CacheableTransformerDecorator implements AgentBuilder.TransformerDecorator {

@ -18,7 +18,7 @@
package cn.hippo4j.agent.core.plugin.exception;
/**
* Thrown to indicate that a illegal format plugin definition has been defined in skywalking-plugin.define.
* Thrown to indicate that an illegally formatted plugin definition has been defined in hippo4j-plugin.define.
*/
public class IllegalPluginDefineException extends Exception {

@ -38,7 +38,7 @@ public interface ConstructorInterceptPoint {
/**
* @return represents a class name, the class instance must be an instance of {@link
* org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor}
* cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor}
*/
String getConstructorInterceptor();
}

@ -28,9 +28,9 @@ import java.lang.reflect.Method;
*/
public class BootstrapInterRuntimeAssist {
private static final String AGENT_CLASSLOADER_DEFAULT = "org.apache.skywalking.apm.agent.core.plugin.loader.AgentClassLoader";
private static final String AGENT_CLASSLOADER_DEFAULT = "cn.hippo4j.agent.core.plugin.loader.AgentClassLoader";
private static final String DEFAULT_AGENT_CLASSLOADER_INSTANCE = "DEFAULT_LOADER";
private static final String LOG_MANAGER_CLASS = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.BootstrapPluginLogBridge";
private static final String LOG_MANAGER_CLASS = "cn.hippo4j.agent.core.plugin.bootstrap.BootstrapPluginLogBridge";
private static final String LOG_MANAGER_GET_LOGGER_METHOD = "getLogger";
private static final PrintStream OUT = System.out;

@ -23,7 +23,11 @@ import cn.hippo4j.agent.core.plugin.AbstractClassEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.EnhanceContext;
import cn.hippo4j.agent.core.plugin.PluginException;
import cn.hippo4j.agent.core.plugin.bootstrap.BootstrapInstrumentBoost;
import cn.hippo4j.agent.core.plugin.interceptor.*;
import cn.hippo4j.agent.core.plugin.interceptor.ConstructorInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.DeclaredInstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.EnhanceException;
import cn.hippo4j.agent.core.plugin.interceptor.InstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.StaticMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.v2.InstanceMethodsInterceptV2Point;
import cn.hippo4j.agent.core.plugin.interceptor.v2.StaticMethodsInterceptV2Point;
import cn.hippo4j.agent.core.util.StringUtil;

@ -19,7 +19,7 @@ package cn.hippo4j.agent.core.plugin.interceptor.enhance;
public interface EnhancedInstance {
Object getSkyWalkingDynamicField();
Object getHippo4jDynamicField();
void setSkyWalkingDynamicField(Object value);
void setHippo4jDynamicField(Object value);
}

@ -21,7 +21,11 @@ import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.PluginException;
import cn.hippo4j.agent.core.plugin.loader.InterceptorInstanceLoader;
import net.bytebuddy.implementation.bind.annotation.*;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.SuperCall;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;
import java.util.concurrent.Callable;

@ -21,7 +21,11 @@ import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.PluginException;
import cn.hippo4j.agent.core.plugin.loader.InterceptorInstanceLoader;
import net.bytebuddy.implementation.bind.annotation.*;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Morph;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;

@ -22,7 +22,11 @@ import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.PluginException;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.loader.InterceptorInstanceLoader;
import net.bytebuddy.implementation.bind.annotation.*;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.SuperCall;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;
import java.util.concurrent.Callable;

@ -23,7 +23,11 @@ import cn.hippo4j.agent.core.plugin.PluginException;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.OverrideCallable;
import cn.hippo4j.agent.core.plugin.loader.InterceptorInstanceLoader;
import net.bytebuddy.implementation.bind.annotation.*;
import net.bytebuddy.implementation.bind.annotation.AllArguments;
import net.bytebuddy.implementation.bind.annotation.Morph;
import net.bytebuddy.implementation.bind.annotation.Origin;
import net.bytebuddy.implementation.bind.annotation.RuntimeType;
import net.bytebuddy.implementation.bind.annotation.This;
import java.lang.reflect.Method;

@ -31,7 +31,7 @@ public interface ConstructorInterceptV2Point {
/**
* @return represents a class name, the class instance must be an instance of {@link
* org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor}
* cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor}
*/
String getConstructorInterceptorV2();

@ -34,14 +34,14 @@ public class JDK9ModuleExporter {
private static final ILog LOGGER = LogManager.getLogger(JDK9ModuleExporter.class);
private static final String[] HIGH_PRIORITY_CLASSES = {
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.MethodInterceptResult",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.OverrideCallable",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.ConstructorInter",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstMethodsInter",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstMethodsInterWithOverrideArgs",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.StaticMethodsInter",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.StaticMethodsInterWithOverrideArgs",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.OverrideCallable",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.ConstructorInter",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.InstMethodsInter",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.InstMethodsInterWithOverrideArgs",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.StaticMethodsInter",
"cn.hippo4j.agent.core.plugin.interceptor.enhance.StaticMethodsInterWithOverrideArgs",
};
/**

@ -20,6 +20,8 @@ package cn.hippo4j.agent.core.plugin.loader;
import cn.hippo4j.agent.core.boot.AgentPackageNotFoundException;
import cn.hippo4j.agent.core.boot.AgentPackagePath;
import cn.hippo4j.agent.core.boot.PluginConfig;
import cn.hippo4j.agent.core.boot.SpringBootConfig;
import cn.hippo4j.agent.core.boot.SpringBootConfigInitializer;
import cn.hippo4j.agent.core.conf.Config;
import cn.hippo4j.agent.core.conf.SnifferConfigInitializer;
import cn.hippo4j.agent.core.logging.api.ILog;
@ -169,6 +171,13 @@ public class AgentClassLoader extends ClassLoader {
SnifferConfigInitializer.initializeConfig(pluginConfig.root());
}
final SpringBootConfig springBootConfig = loadedClass.getAnnotation(SpringBootConfig.class);
if (springBootConfig != null) {
// Register the Spring Boot plugin config here so it is initialized once the Spring environment is prepared; just scan it here.
// The agent class loader only loads a limited set of classes from the plugin jar(s), so the cost of this
// scan is also very limited.
SpringBootConfigInitializer.initializeConfig(springBootConfig);
}
return loadedClass;
}

@ -26,7 +26,10 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static net.bytebuddy.matcher.ElementMatchers.*;
import static net.bytebuddy.matcher.ElementMatchers.isAnnotatedWith;
import static net.bytebuddy.matcher.ElementMatchers.isInterface;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.not;
/**
* Match the class by the given annotations in class.

@ -25,7 +25,10 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static net.bytebuddy.matcher.ElementMatchers.*;
import static net.bytebuddy.matcher.ElementMatchers.hasSuperType;
import static net.bytebuddy.matcher.ElementMatchers.isInterface;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.not;
/**
* Match the class by the given super class or interfaces.

@ -28,7 +28,10 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static net.bytebuddy.matcher.ElementMatchers.*;
import static net.bytebuddy.matcher.ElementMatchers.declaresMethod;
import static net.bytebuddy.matcher.ElementMatchers.isAnnotatedWith;
import static net.bytebuddy.matcher.ElementMatchers.isInterface;
import static net.bytebuddy.matcher.ElementMatchers.named;
/**
* Match the class, which has methods with the certain annotations. This is a very complex match.

@ -26,7 +26,7 @@ import net.bytebuddy.matcher.ElementMatcher;
* them have compatible issues with byte-buddy core, which trigger "Can't resolve type description" exception.
* <p>
* So I build this protective shield by a nested matcher. When the origin matcher(s) can't resolve the type, the
* SkyWalking agent ignores this types.
* Hippo4j agent ignores these types.
* <p>
* Notice: this ignore mechanism may miss some instrumentations, but in most cases the result is the same. If that happens,
* please pay attention to the WARNING logs.

@ -23,7 +23,7 @@ import net.bytebuddy.matcher.ElementMatcher;
import net.bytebuddy.matcher.NegatingMatcher;
/**
* Util class to help to construct logical operations on {@link org.apache.skywalking.apm.agent.core.plugin.match.ClassMatch}s
* Util class to help to construct logical operations on {@link cn.hippo4j.agent.core.plugin.match.ClassMatch}s
*/
public class LogicalMatchOperation {

@ -0,0 +1,53 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.registry;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.common.config.ExecutorProperties;
import lombok.Data;
import java.util.Properties;
import java.util.concurrent.ThreadPoolExecutor;
@Data
public class AgentThreadPoolExecutorHolder {
private static final ILog LOGGER = LogManager.getLogger(AgentThreadPoolExecutorHolder.class);
public static final AgentThreadPoolExecutorHolder EMPTY = new AgentThreadPoolExecutorHolder();
private String executorName;
private ThreadPoolExecutor executor;
private Properties properties;
public AgentThreadPoolExecutorHolder() {
}
public AgentThreadPoolExecutorHolder(String executorName, ThreadPoolExecutor executor, Properties properties) {
this.executorName = executorName;
this.executor = executor;
this.properties = properties;
}
public boolean isEmpty() {
return this == EMPTY;
}
}

@ -0,0 +1,66 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.registry;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.common.config.ExecutorProperties;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadPoolExecutor;
public class AgentThreadPoolInstanceRegistry {
private static final ILog LOGGER = LogManager.getLogger(AgentThreadPoolInstanceRegistry.class);
private final Map<String, AgentThreadPoolExecutorHolder> holderMap = new ConcurrentHashMap<>();
public final Map<ThreadPoolExecutor, Class<?>> earlyConstructMap = new ConcurrentHashMap<>();
private volatile static AgentThreadPoolInstanceRegistry INSTANCE;
private AgentThreadPoolInstanceRegistry() {
}
public static AgentThreadPoolInstanceRegistry getInstance() {
if (INSTANCE == null) {
synchronized (AgentThreadPoolInstanceRegistry.class) {
if (INSTANCE == null) {
INSTANCE = new AgentThreadPoolInstanceRegistry();
}
}
}
return INSTANCE;
}
public Map<String, AgentThreadPoolExecutorHolder> getHolderMap() {
return holderMap;
}
public void putHolder(String executorName, ThreadPoolExecutor executor, Properties properties) {
AgentThreadPoolExecutorHolder holder = new AgentThreadPoolExecutorHolder(executorName, executor, properties);
holderMap.put(executorName, holder);
}
public AgentThreadPoolExecutorHolder getHolder(String executorName) {
return Optional.ofNullable(holderMap.get(executorName)).orElse(AgentThreadPoolExecutorHolder.EMPTY);
}
}
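A minimal usage sketch of the holder and registry above. The RegistryUsageSketch class, pool id, and pool parameters are illustrative only and are not part of this change; the calls used (getInstance, putHolder, getHolder, isEmpty) are the ones defined in the two classes shown here.

import cn.hippo4j.agent.core.registry.AgentThreadPoolExecutorHolder;
import cn.hippo4j.agent.core.registry.AgentThreadPoolInstanceRegistry;
import cn.hippo4j.agent.core.util.ThreadPoolPropertyKey;
import java.util.Properties;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class RegistryUsageSketch {

    public static void main(String[] args) {
        // Register a pool under an id together with the properties it was created from (illustrative values).
        ThreadPoolExecutor executor = new ThreadPoolExecutor(2, 4, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(100));
        Properties properties = new Properties();
        properties.put(ThreadPoolPropertyKey.THREAD_POOL_ID, "message-consume");
        properties.put(ThreadPoolPropertyKey.CORE_POOL_SIZE, 2);
        AgentThreadPoolInstanceRegistry.getInstance().putHolder("message-consume", executor, properties);

        // A refresher later looks the pool up by id; the EMPTY holder is returned when the id is unknown.
        AgentThreadPoolExecutorHolder holder = AgentThreadPoolInstanceRegistry.getInstance().getHolder("message-consume");
        if (!holder.isEmpty()) {
            holder.getExecutor().setMaximumPoolSize(8);
        }
    }
}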

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.util;
public interface AgentThreadPoolConstants {
String TOMCAT_NAME_PREFIX = "namePrefix";
String DUBBO_NAME_PREFIX = "mPrefix";
String DUBBO_THREAD_NAME = "DubboServerHandler";
String THREAD_POOL_NAME_DUBBO = "dubbo";
}

@ -41,4 +41,24 @@ public final class CollectionUtil {
public static boolean isEmpty(Collection collection) {
return collection == null || collection.isEmpty();
}
/**
 * Is empty.
 *
 * @param map the map to check
 * @return true if the map is null or empty
 */
public static boolean isEmpty(Map<?, ?> map) {
return map == null || map.isEmpty();
}
/**
 * Is not empty.
 *
 * @param map the map to check
 * @return true if the map is neither null nor empty
 */
public static boolean isNotEmpty(Map<?, ?> map) {
return !isEmpty(map);
}
}

@ -21,7 +21,12 @@ import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.*;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
/**
* Init a class's static fields by a {@link Properties}, including static fields and static inner classes.
@ -30,14 +35,18 @@ import java.util.*;
public class ConfigInitializer {
public static void initialize(Properties properties, Class<?> rootConfigType) throws IllegalAccessException {
initNextLevel(properties, rootConfigType, new ConfigDesc());
initNextLevel(properties, rootConfigType, new ConfigDesc(), false);
}
public static void initialize(Properties properties, Class<?> rootConfigType, boolean isSpringProperties) throws IllegalAccessException {
initNextLevel(properties, rootConfigType, new ConfigDesc(), isSpringProperties);
}
private static void initNextLevel(Properties properties, Class<?> recentConfigType,
ConfigDesc parentDesc) throws IllegalArgumentException, IllegalAccessException {
ConfigDesc parentDesc, boolean isSpringProperties) throws IllegalArgumentException, IllegalAccessException {
for (Field field : recentConfigType.getFields()) {
if (Modifier.isPublic(field.getModifiers()) && Modifier.isStatic(field.getModifiers())) {
String configKey = (parentDesc + "." + field.getName()).toLowerCase();
String configKey = (parentDesc + "." + (isSpringProperties ? field.getName().replace("_", "-") : field.getName())).toLowerCase();
Class<?> type = field.getType();
if (type.equals(Map.class)) {
@ -78,8 +87,10 @@ public class ConfigInitializer {
}
}
for (Class<?> innerConfiguration : recentConfigType.getClasses()) {
parentDesc.append(innerConfiguration.getSimpleName());
initNextLevel(properties, innerConfiguration, parentDesc);
String simpleName = innerConfiguration.getSimpleName();
String description = isSpringProperties ? simpleName.replace("_", "-") : simpleName;
parentDesc.append(description);
initNextLevel(properties, innerConfiguration, parentDesc, isSpringProperties);
parentDesc.removeLastDesc();
}
}
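To make the new isSpringProperties mode concrete, a hedged sketch: when the flag is true, underscores in static inner class and field names are rewritten to dashes before the lowercased lookup key is built, so Thread_Pool.CONFIG_FILE_TYPE is looked up as thread-pool.config-file-type instead of thread_pool.config_file_type. The ConfigKeyMappingSketch class below is hypothetical, and it assumes ConfigInitializer lives in cn.hippo4j.agent.core.util and converts plain String property values, mirroring the upstream implementation this utility derives from.

import cn.hippo4j.agent.core.util.ConfigInitializer;
import java.util.Properties;

public class ConfigKeyMappingSketch {

    // Hypothetical config holder used only for this illustration.
    public static class Thread_Pool {

        public static String CONFIG_FILE_TYPE;
    }

    public static void main(String[] args) throws IllegalAccessException {
        Properties properties = new Properties();
        // Spring-properties mode derives the key "thread-pool.config-file-type";
        // the original mode (false) would derive "thread_pool.config_file_type".
        properties.put("thread-pool.config-file-type", "yaml");
        ConfigInitializer.initialize(properties, ConfigKeyMappingSketch.class, true);
        System.out.println(Thread_Pool.CONFIG_FILE_TYPE); // expected: yaml
    }
}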

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.util;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import java.lang.reflect.Field;
public class ExecutorNameUtil {
private static final ILog LOGGER = LogManager.getLogger(ExecutorNameUtil.class);
public static boolean isTomcatExecutor(Object threadFactory) {
try {
if ("org.apache.tomcat.util.threads.TaskThreadFactory".equals(threadFactory.getClass().getName())) {
Field namePrefixField = threadFactory.getClass().getDeclaredField(AgentThreadPoolConstants.TOMCAT_NAME_PREFIX);
namePrefixField.setAccessible(true);
String namePrefix = (String) namePrefixField.get(threadFactory);
if (RegexUtil.isTomcatNameMatch(namePrefix)) {
return true;
}
}
} catch (Throwable t) {
LOGGER.error("Failed to check whether the thread factory belongs to a Tomcat executor", t);
}
return false;
}
}

@ -17,7 +17,11 @@
package cn.hippo4j.agent.core.util;
import java.io.*;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/**
* Copied from commons-io-2.2 (org.apache.commons.io.IOUtils)

@ -17,7 +17,11 @@
package cn.hippo4j.agent.core.util;
import java.util.*;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* Utility class for working with Strings that have placeholder values in them. A placeholder takes the form {@code

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.util;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
public class ReflectUtil {
private static final ILog LOGGER = LogManager.getLogger(ReflectUtil.class);
public static List<Field> getStaticFieldsFromType(Class<?> clazz, Class<?> declaredType) {
Field[] fields = clazz.getDeclaredFields();
List<Field> result = new ArrayList<>();
for (Field field : fields) {
if (field.getType().isAssignableFrom(declaredType)
&& Modifier.isStatic(field.getModifiers())) {
field.setAccessible(true);
result.add(field);
}
}
return result;
}
}

@ -0,0 +1,30 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.util;
import java.util.regex.Pattern;
public class RegexUtil {
private static final String TOMCAT_NAME_PATTERN_STRING = "http\\S+nio\\S+-exec-";
private static final Pattern TOMCAT_NAME_PATTERN = Pattern.compile(TOMCAT_NAME_PATTERN_STRING);
public static boolean isTomcatNameMatch(String executorName) {
return TOMCAT_NAME_PATTERN.matcher(executorName).find();
}
}
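A small sketch of what the pattern above accepts: the default Tomcat NIO connector names its worker threads with a prefix of the form http-nio-&lt;port&gt;-exec-, which matches, while an ordinary business pool prefix does not. The sample strings are illustrative.

import cn.hippo4j.agent.core.util.RegexUtil;

public class TomcatNameMatchSketch {

    public static void main(String[] args) {
        // Typical Tomcat NIO connector thread-name prefix: matched by "http\\S+nio\\S+-exec-".
        System.out.println(RegexUtil.isTomcatNameMatch("http-nio-8080-exec-")); // true
        // An arbitrary business thread-pool prefix: not matched.
        System.out.println(RegexUtil.isTomcatNameMatch("message-consume-pool-")); // false
    }
}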

@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.util;
public interface ThreadPoolPropertyKey {
String THREAD_POOL_ID = "threadPoolId";
String CORE_POOL_SIZE = "corePoolSize";
String MAXIMUM_POOL_SIZE = "maximumPoolSize";
String ALLOW_CORE_THREAD_TIME_OUT = "allowCoreThreadTimeOut";
String KEEP_ALIVE_TIME = "keepAliveTime";
String BLOCKING_QUEUE = "blockingQueue";
String QUEUE_CAPACITY = "queueCapacity";
String THREAD_NAME_PREFIX = "threadNamePrefix";
String REJECTED_HANDLER = "rejectedHandler";
String EXECUTE_TIME_OUT = "executeTimeOut";
}
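These keys are the contract between the code that registers a pool (see the registry above) and the refresher that later reads the holder's Properties back into an ExecutorProperties instance. A hedged sketch of seeding such a Properties object follows; the values are illustrative, and the boxed types (Integer, Long, Boolean, String) are chosen to match the casts performed by the Spring Boot 1.x interceptor later in this change.

import cn.hippo4j.agent.core.util.ThreadPoolPropertyKey;
import java.util.Properties;

public class ThreadPoolPropertiesSketch {

    // Value types matter: the refresher casts them back to Integer, Long, Boolean and String.
    public static Properties sample() {
        Properties properties = new Properties();
        properties.put(ThreadPoolPropertyKey.THREAD_POOL_ID, "message-consume");
        properties.put(ThreadPoolPropertyKey.CORE_POOL_SIZE, 4);
        properties.put(ThreadPoolPropertyKey.MAXIMUM_POOL_SIZE, 8);
        properties.put(ThreadPoolPropertyKey.ALLOW_CORE_THREAD_TIME_OUT, Boolean.TRUE);
        properties.put(ThreadPoolPropertyKey.KEEP_ALIVE_TIME, 60L);
        properties.put(ThreadPoolPropertyKey.BLOCKING_QUEUE, "ResizableCapacityLinkedBlockingQueue");
        properties.put(ThreadPoolPropertyKey.QUEUE_CAPACITY, 1024);
        properties.put(ThreadPoolPropertyKey.THREAD_NAME_PREFIX, "message-consume");
        properties.put(ThreadPoolPropertyKey.REJECTED_HANDLER, "AbortPolicy");
        properties.put(ThreadPoolPropertyKey.EXECUTE_TIME_OUT, 800L);
        return properties;
    }
}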

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent-plugin</artifactId>
<version>${revision}</version>
</parent>
<artifactId>apollo-plugin</artifactId>
<properties>
<apollo.version>1.9.1</apollo.version>
</properties>
<dependencies>
<dependency>
<groupId>com.ctrip.framework.apollo</groupId>
<artifactId>apollo-client</artifactId>
<version>${apollo.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

@ -0,0 +1,28 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.apollo;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor;
public class DefaultConfigConstructorInterceptor implements InstanceConstructorInterceptor {
@Override
public void onConstruct(EnhancedInstance objInst, Object[] allArguments) throws Throwable {
}
}

@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.apollo.boot;
import cn.hippo4j.agent.core.boot.BootService;
import cn.hippo4j.agent.core.boot.DefaultImplementor;
@DefaultImplementor
public class ApolloPluginBootService implements BootService {
@Override
public void prepare() throws Throwable {
}
@Override
public void boot() throws Throwable {
}
@Override
public void onComplete() throws Throwable {
}
@Override
public void shutdown() throws Throwable {
}
}

@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.apollo.define;
import cn.hippo4j.agent.core.plugin.interceptor.ConstructorInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.InstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.ClassInstanceMethodsEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.match.ClassMatch;
import cn.hippo4j.agent.core.plugin.match.NameMatch;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.matcher.ElementMatcher;
import static net.bytebuddy.matcher.ElementMatchers.any;
public class ApolloInstrumentation extends ClassInstanceMethodsEnhancePluginDefine {
private static final String ENHANCE_CLASS = "com.ctrip.framework.apollo.internals.DefaultConfig";
private static final String CONSTRUCTOR_INTERCEPT_CLASS = "cn.hippo4j.agent.plugin.apollo.DefaultConfigConstructorInterceptor";
@Override
protected ClassMatch enhanceClass() {
return NameMatch.byName(ENHANCE_CLASS);
}
@Override
public ConstructorInterceptPoint[] getConstructorsInterceptPoints() {
return new ConstructorInterceptPoint[]{
new ConstructorInterceptPoint() {
@Override
public ElementMatcher<MethodDescription> getConstructorMatcher() {
return any();
}
@Override
public String getConstructorInterceptor() {
return CONSTRUCTOR_INTERCEPT_CLASS;
}
}
};
}
@Override
public InstanceMethodsInterceptPoint[] getInstanceMethodsInterceptPoints() {
return new InstanceMethodsInterceptPoint[0];
}
}

@ -0,0 +1,17 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
cn.hippo4j.agent.plugin.apollo.boot.ApolloPluginBootService

@ -0,0 +1,17 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
apollo-plugin=cn.hippo4j.agent.plugin.apollo.define.ApolloInstrumentation

@ -0,0 +1,118 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent</artifactId>
<version>${revision}</version>
</parent>
<artifactId>hippo4j-agent-plugin</artifactId>
<packaging>pom</packaging>
<modules>
<module>spring-plugins</module>
<module>thread-pool-plugin</module>
<module>apollo-plugin</module>
</modules>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<sdk.plugin.related.dir />
<shade.net.bytebuddy.source>net.bytebuddy</shade.net.bytebuddy.source>
<shade.net.bytebuddy.target>${shade.package}.${shade.net.bytebuddy.source}</shade.net.bytebuddy.target>
<agent.package.dest.dir>${project.build.directory}${sdk.plugin.related.dir}/../../../hippo4j-agent
</agent.package.dest.dir>
<plugin.dest.dir>${agent.package.dest.dir}/plugins</plugin.dest.dir>
<ant-contrib.version>1.0b3</ant-contrib.version>
<ant-nodeps.version>1.8.1</ant-nodeps.version>
</properties>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent-core</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>${bytebuddy.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>false</shadedArtifactAttached>
<createDependencyReducedPom>true</createDependencyReducedPom>
<createSourcesJar>true</createSourcesJar>
<shadeSourcesContent>true</shadeSourcesContent>
<relocations>
<relocation>
<pattern>${shade.net.bytebuddy.source}</pattern>
<shadedPattern>${shade.net.bytebuddy.target}</shadedPattern>
</relocation>
</relocations>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<taskdef resource="net/sf/antcontrib/antcontrib.properties" classpathref="maven.runtime.classpath" />
<if>
<equals arg1="${project.packaging}" arg2="jar" />
<then>
<mkdir dir="${plugin.dest.dir}" />
<copy file="${project.build.directory}/${project.artifactId}-${project.version}.jar" tofile="${plugin.dest.dir}/${project.artifactId}-${project.version}.jar" overwrite="true" />
</then>
</if>
</target>
</configuration>
</execution>
</executions>
<dependencies>
<dependency>
<groupId>ant-contrib</groupId>
<artifactId>ant-contrib</artifactId>
<version>${ant-contrib.version}</version>
<exclusions>
<exclusion>
<groupId>ant</groupId>
<artifactId>ant</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant-nodeps</artifactId>
<version>${ant-nodeps.version}</version>
</dependency>
</dependencies>
</plugin>
</plugins>
</build>
</project>

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent-plugin</artifactId>
<version>${revision}</version>
</parent>
<artifactId>spring-plugins</artifactId>
<packaging>pom</packaging>
<modules>
<module>spring-boot-1.x-plugin</module>
<module>spring-boot-2.x-plugin</module>
<module>spring-plugin-common</module>
</modules>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<sdk.plugin.related.dir>/..</sdk.plugin.related.dir>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>spring-plugin-common</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>spring-plugins</artifactId>
<version>${revision}</version>
</parent>
<artifactId>spring-boot-1.x-plugin</artifactId>
<properties>
<spring.boot.version>1.5.22.RELEASE</spring.boot.version>
<apollo.version>1.9.1</apollo.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>spring-plugin-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
<version>${spring.boot.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.ctrip.framework.apollo</groupId>
<artifactId>apollo-client</artifactId>
<version>${apollo.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-config-spring-boot-1x-starter</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v1;
import cn.hippo4j.agent.core.boot.SpringBootConfig;
import java.util.Arrays;
import java.util.List;
public class ApolloSpringBootProperties {
public static class Spring {
public static class Dynamic {
@SpringBootConfig(root = ApolloSpringBootProperties.class)
public static class Thread_Pool {
@SpringBootConfig(root = ApolloSpringBootProperties.class)
public static class Apollo {
public static List<String> NAMESPACE = Arrays.asList("application");
}
public static String CONFIG_FILE_TYPE;
}
}
}
}
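Under the key-derivation rules shown for ConfigInitializer above (nested class and field names joined with dots, underscores rewritten to dashes, lowercased), the static fields of ApolloSpringBootProperties should correspond to keys of roughly the following shape. This is a hedged sketch: the values are placeholders, and the exact prefix handling of SpringBootConfigInitializer is not shown in this change.

import java.util.Properties;

public class ApolloAgentConfigSketch {

    // Assumed key shapes; treat them as an illustration rather than authoritative configuration.
    public static Properties sample() {
        Properties properties = new Properties();
        properties.put("spring.dynamic.thread-pool.apollo.namespace", "application");
        properties.put("spring.dynamic.thread-pool.config-file-type", "yaml");
        return properties;
    }
}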

@ -0,0 +1,326 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v1;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
import cn.hippo4j.agent.core.registry.AgentThreadPoolExecutorHolder;
import cn.hippo4j.agent.core.registry.AgentThreadPoolInstanceRegistry;
import cn.hippo4j.agent.core.util.ThreadPoolPropertyKey;
import cn.hippo4j.agent.plugin.spring.common.SpringPropertiesLoader;
import cn.hippo4j.common.executor.support.BlockingQueueTypeEnum;
import cn.hippo4j.common.executor.support.RejectedPolicyTypeEnum;
import cn.hippo4j.common.executor.support.ResizableCapacityLinkedBlockingQueue;
import cn.hippo4j.common.toolkit.CollectionUtil;
import cn.hippo4j.common.toolkit.MapUtil;
import cn.hippo4j.common.toolkit.ThreadPoolExecutorUtil;
import cn.hippo4j.config.springboot.starter.config.BootstrapConfigProperties;
import cn.hippo4j.common.config.ExecutorProperties;
import cn.hippo4j.config.springboot.starter.parser.ConfigFileTypeEnum;
import cn.hippo4j.config.springboot.starter.parser.ConfigParserHandler;
import cn.hippo4j.core.executor.DynamicThreadPoolExecutor;
import cn.hippo4j.message.request.ChangeParameterNotifyRequest;
import com.ctrip.framework.apollo.ConfigChangeListener;
import com.ctrip.framework.apollo.ConfigFile;
import com.ctrip.framework.apollo.ConfigService;
import com.ctrip.framework.apollo.core.enums.ConfigFileFormat;
import com.ctrip.framework.apollo.model.ConfigChange;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.PropertyValues;
import org.springframework.beans.support.ResourceEditorRegistrar;
import org.springframework.boot.bind.CustomPropertyNamePatternsMatcher;
import org.springframework.boot.bind.RelaxedDataBinder;
import org.springframework.boot.bind.RelaxedNames;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static cn.hippo4j.agent.core.conf.Constants.SPRING_BOOT_CONFIG_PREFIX;
import static cn.hippo4j.common.constant.ChangeThreadPoolConstants.CHANGE_DELIMITER;
import static cn.hippo4j.common.constant.ChangeThreadPoolConstants.CHANGE_THREAD_POOL_TEXT;
import static cn.hippo4j.config.springboot1x.starter.refresher.SpringBoot1xBootstrapConfigPropertiesBinderAdapt.getNames;
public class EventPublishingFinishedInterceptor implements InstanceMethodsAroundInterceptor {
private static final ILog FILE_LOGGER = LogManager.getLogger(EventPublishingFinishedInterceptor.class);
private static final Logger LOGGER = LoggerFactory.getLogger(EventPublishingFinishedInterceptor.class);
@Override
public void beforeMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, MethodInterceptResult result) throws Throwable {
}
@Override
public Object afterMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Object ret) throws Throwable {
ConfigurableApplicationContext context = (ConfigurableApplicationContext) allArguments[0];
SpringPropertiesLoader.loadSpringProperties(context.getEnvironment());
List<String> apolloNamespaces = ApolloSpringBootProperties.Spring.Dynamic.Thread_Pool.Apollo.NAMESPACE;
String namespace = apolloNamespaces.get(0);
String configFileType = ApolloSpringBootProperties.Spring.Dynamic.Thread_Pool.CONFIG_FILE_TYPE;
com.ctrip.framework.apollo.Config config = ConfigService.getConfig(String.format("%s.%s", namespace, configFileType));
ConfigChangeListener configChangeListener = configChangeEvent -> {
String replacedNamespace = namespace.replace("." + configFileType, "");
ConfigFileFormat configFileFormat = ConfigFileFormat.fromString(configFileType);
ConfigFile configFile = ConfigService.getConfigFile(replacedNamespace, configFileFormat);
Map<String, Object> newChangeValueMap = new HashMap<>();
configChangeEvent.changedKeys().stream().filter(each -> each.contains(SPRING_BOOT_CONFIG_PREFIX)).forEach(each -> {
ConfigChange change = configChangeEvent.getChange(each);
String newValue = change.getNewValue();
newChangeValueMap.put(each, newValue);
});
dynamicRefresh(configFile.getContent(), newChangeValueMap, context);
};
config.addChangeListener(configChangeListener);
LOGGER.info("Dynamic thread pool refresher, add apollo listener success. namespace: {}", namespace);
return ret;
}
public BootstrapConfigProperties bindProperties(Map<Object, Object> configInfo, ApplicationContext applicationContext) {
BootstrapConfigProperties bindableCoreProperties = new BootstrapConfigProperties();
if (MapUtil.isEmpty(configInfo)) {
return bindableCoreProperties;
}
RelaxedNames relaxedNames = new RelaxedNames(BootstrapConfigProperties.PREFIX);
Set<String> names = getNames(bindableCoreProperties, relaxedNames);
Map<String, Object> stringConfigInfo = new HashMap<>(configInfo.size());
configInfo.forEach((key, value) -> stringConfigInfo.put(key.toString(), value));
MapPropertySource mapPropertySource = new MapPropertySource("Hippo4j", stringConfigInfo);
MutablePropertySources propertySources = new MutablePropertySources();
propertySources.addFirst(mapPropertySource);
PropertyValues propertyValues = CustomPropertyNamePatternsMatcher.getPropertySourcesPropertyValues(names, propertySources);
RelaxedDataBinder dataBinder = new RelaxedDataBinder(bindableCoreProperties, BootstrapConfigProperties.PREFIX);
dataBinder.setAutoGrowCollectionLimit(Integer.MAX_VALUE);
dataBinder.setIgnoreNestedProperties(false);
dataBinder.setIgnoreInvalidFields(false);
dataBinder.setIgnoreUnknownFields(true);
ResourceEditorRegistrar resourceEditorRegistrar = new ResourceEditorRegistrar(applicationContext, applicationContext.getEnvironment());
resourceEditorRegistrar.registerCustomEditors(dataBinder);
dataBinder.bind(propertyValues);
return bindableCoreProperties;
}
public void dynamicRefresh(String configContent, Map<String, Object> newValueChangeMap, ApplicationContext context) {
try {
String configFileType = ApolloSpringBootProperties.Spring.Dynamic.Thread_Pool.CONFIG_FILE_TYPE;
Map<Object, Object> afterConfigMap = ConfigParserHandler.getInstance().parseConfig(configContent,
ConfigFileTypeEnum.of(configFileType));
if (CollectionUtil.isNotEmpty(newValueChangeMap)) {
Optional.ofNullable(afterConfigMap).ifPresent(each -> each.putAll(newValueChangeMap));
}
BootstrapConfigProperties afterConfigProperties = bindProperties(afterConfigMap, context);
List<ExecutorProperties> executors = afterConfigProperties.getExecutors();
for (ExecutorProperties afterProperties : executors) {
String threadPoolId = afterProperties.getThreadPoolId();
AgentThreadPoolExecutorHolder holder = AgentThreadPoolInstanceRegistry.getInstance().getHolder(threadPoolId);
if (holder.isEmpty() || holder.getExecutor() == null) {
continue;
}
ExecutorProperties beforeProperties = convert(holder.getProperties());
if (!checkConsistency(threadPoolId, beforeProperties, afterProperties)) {
continue;
}
dynamicRefreshPool(beforeProperties, afterProperties);
holder.setProperties(failDefaultExecutorProperties(beforeProperties, afterProperties)); // do refresh.
ChangeParameterNotifyRequest changeRequest = buildChangeRequest(beforeProperties, afterProperties);
LOGGER.info(CHANGE_THREAD_POOL_TEXT,
threadPoolId,
String.format(CHANGE_DELIMITER, beforeProperties.getCorePoolSize(), changeRequest.getNowCorePoolSize()),
String.format(CHANGE_DELIMITER, beforeProperties.getMaximumPoolSize(), changeRequest.getNowMaximumPoolSize()),
String.format(CHANGE_DELIMITER, beforeProperties.getQueueCapacity(), changeRequest.getNowQueueCapacity()),
String.format(CHANGE_DELIMITER, beforeProperties.getKeepAliveTime(), changeRequest.getNowKeepAliveTime()),
String.format(CHANGE_DELIMITER, beforeProperties.getExecuteTimeOut(), changeRequest.getNowExecuteTimeOut()),
String.format(CHANGE_DELIMITER, beforeProperties.getRejectedHandler(), changeRequest.getNowRejectedName()),
String.format(CHANGE_DELIMITER, beforeProperties.getAllowCoreThreadTimeOut(), changeRequest.getNowAllowsCoreThreadTimeOut()));
}
} catch (Exception ex) {
LOGGER.error("[Hippo4j-Agent] config mode dynamic refresh failed.", ex);
}
}
/**
* Dynamic refresh pool.
*/
private void dynamicRefreshPool(ExecutorProperties beforeProperties, ExecutorProperties afterProperties) {
AgentThreadPoolExecutorHolder holder = AgentThreadPoolInstanceRegistry.getInstance().getHolder(afterProperties.getThreadPoolId());
ThreadPoolExecutor executor = holder.getExecutor();
if (afterProperties.getMaximumPoolSize() != null && afterProperties.getCorePoolSize() != null) {
ThreadPoolExecutorUtil.safeSetPoolSize(executor, afterProperties.getCorePoolSize(), afterProperties.getMaximumPoolSize());
} else {
if (afterProperties.getMaximumPoolSize() != null) {
executor.setMaximumPoolSize(afterProperties.getMaximumPoolSize());
}
if (afterProperties.getCorePoolSize() != null) {
executor.setCorePoolSize(afterProperties.getCorePoolSize());
}
}
if (afterProperties.getAllowCoreThreadTimeOut() != null && !Objects.equals(beforeProperties.getAllowCoreThreadTimeOut(), afterProperties.getAllowCoreThreadTimeOut())) {
executor.allowCoreThreadTimeOut(afterProperties.getAllowCoreThreadTimeOut());
}
if (afterProperties.getExecuteTimeOut() != null && !Objects.equals(beforeProperties.getExecuteTimeOut(), afterProperties.getExecuteTimeOut())) {
if (executor instanceof DynamicThreadPoolExecutor) {
((DynamicThreadPoolExecutor) executor).setExecuteTimeOut(afterProperties.getExecuteTimeOut());
}
}
if (afterProperties.getRejectedHandler() != null && !Objects.equals(beforeProperties.getRejectedHandler(), afterProperties.getRejectedHandler())) {
RejectedExecutionHandler rejectedExecutionHandler = RejectedPolicyTypeEnum.createPolicy(afterProperties.getRejectedHandler());
executor.setRejectedExecutionHandler(rejectedExecutionHandler);
}
if (afterProperties.getKeepAliveTime() != null && !Objects.equals(beforeProperties.getKeepAliveTime(), afterProperties.getKeepAliveTime())) {
executor.setKeepAliveTime(afterProperties.getKeepAliveTime(), TimeUnit.SECONDS);
}
if (afterProperties.getQueueCapacity() != null && !Objects.equals(beforeProperties.getQueueCapacity(), afterProperties.getQueueCapacity())
&& Objects.equals(BlockingQueueTypeEnum.RESIZABLE_LINKED_BLOCKING_QUEUE.getName(), executor.getQueue().getClass().getSimpleName())) {
if (executor.getQueue() instanceof ResizableCapacityLinkedBlockingQueue) {
ResizableCapacityLinkedBlockingQueue<?> queue = (ResizableCapacityLinkedBlockingQueue<?>) executor.getQueue();
queue.setCapacity(afterProperties.getQueueCapacity());
} else {
LOGGER.warn("The queue length cannot be modified. Queue type mismatch. Current queue type: {}", executor.getQueue().getClass().getSimpleName());
}
}
}
/**
 * Build the refreshed executor properties, falling back to the previous value for any
 * property the new configuration does not specify.
 *
 * @param beforeProperties old properties
 * @param afterProperties  new properties
 * @return merged executor properties
 */
private Properties failDefaultExecutorProperties(ExecutorProperties beforeProperties, ExecutorProperties afterProperties) {
return convert(ExecutorProperties.builder()
.corePoolSize(Optional.ofNullable(afterProperties.getCorePoolSize()).orElse(beforeProperties.getCorePoolSize()))
.maximumPoolSize(Optional.ofNullable(afterProperties.getMaximumPoolSize()).orElse(beforeProperties.getMaximumPoolSize()))
.blockingQueue(afterProperties.getBlockingQueue())
.queueCapacity(Optional.ofNullable(afterProperties.getQueueCapacity()).orElse(beforeProperties.getQueueCapacity()))
.keepAliveTime(Optional.ofNullable(afterProperties.getKeepAliveTime()).orElse(beforeProperties.getKeepAliveTime()))
.executeTimeOut(Optional.ofNullable(afterProperties.getExecuteTimeOut()).orElse(beforeProperties.getExecuteTimeOut()))
.rejectedHandler(Optional.ofNullable(afterProperties.getRejectedHandler()).orElse(beforeProperties.getRejectedHandler()))
.allowCoreThreadTimeOut(Optional.ofNullable(afterProperties.getAllowCoreThreadTimeOut()).orElse(beforeProperties.getAllowCoreThreadTimeOut()))
.threadPoolId(beforeProperties.getThreadPoolId())
.build());
}
private ExecutorProperties convert(Properties properties) {
return ExecutorProperties.builder()
.threadPoolId((String) properties.get(ThreadPoolPropertyKey.THREAD_POOL_ID))
.corePoolSize((Integer) properties.get(ThreadPoolPropertyKey.CORE_POOL_SIZE))
.maximumPoolSize((Integer) properties.get(ThreadPoolPropertyKey.MAXIMUM_POOL_SIZE))
.allowCoreThreadTimeOut((Boolean) properties.get(ThreadPoolPropertyKey.ALLOW_CORE_THREAD_TIME_OUT))
.keepAliveTime((Long) properties.get(ThreadPoolPropertyKey.KEEP_ALIVE_TIME))
.blockingQueue((String) properties.get(ThreadPoolPropertyKey.BLOCKING_QUEUE))
.queueCapacity((Integer) properties.get(ThreadPoolPropertyKey.QUEUE_CAPACITY))
.threadNamePrefix((String) properties.get(ThreadPoolPropertyKey.THREAD_NAME_PREFIX))
.rejectedHandler((String) properties.get(ThreadPoolPropertyKey.REJECTED_HANDLER))
.executeTimeOut((Long) properties.get(ThreadPoolPropertyKey.EXECUTE_TIME_OUT))
.build();
}
private Properties convert(ExecutorProperties executorProperties) {
Properties properties = new Properties();
Optional.ofNullable(executorProperties.getCorePoolSize()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.CORE_POOL_SIZE, v));
Optional.ofNullable(executorProperties.getMaximumPoolSize()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.MAXIMUM_POOL_SIZE, v));
Optional.ofNullable(executorProperties.getBlockingQueue()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.BLOCKING_QUEUE, v));
Optional.ofNullable(executorProperties.getQueueCapacity()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.QUEUE_CAPACITY, v));
Optional.ofNullable(executorProperties.getKeepAliveTime()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.KEEP_ALIVE_TIME, v));
Optional.ofNullable(executorProperties.getExecuteTimeOut()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.EXECUTE_TIME_OUT, v));
Optional.ofNullable(executorProperties.getRejectedHandler()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.REJECTED_HANDLER, v));
Optional.ofNullable(executorProperties.getAllowCoreThreadTimeOut()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.ALLOW_CORE_THREAD_TIME_OUT, v));
Optional.ofNullable(executorProperties.getThreadPoolId()).ifPresent(v -> properties.put(ThreadPoolPropertyKey.THREAD_POOL_ID, v));
return properties;
}
/**
* Construct change parameter notify request instance.
*
* @param beforeProperties old properties
* @param afterProperties new properties
* @return instance
*/
private ChangeParameterNotifyRequest buildChangeRequest(ExecutorProperties beforeProperties, ExecutorProperties afterProperties) {
ChangeParameterNotifyRequest changeParameterNotifyRequest = ChangeParameterNotifyRequest.builder()
.beforeCorePoolSize(beforeProperties.getCorePoolSize())
.beforeMaximumPoolSize(beforeProperties.getMaximumPoolSize())
.beforeAllowsCoreThreadTimeOut(beforeProperties.getAllowCoreThreadTimeOut())
.beforeKeepAliveTime(beforeProperties.getKeepAliveTime())
.beforeQueueCapacity(beforeProperties.getQueueCapacity())
.beforeRejectedName(beforeProperties.getRejectedHandler())
.beforeExecuteTimeOut(beforeProperties.getExecuteTimeOut())
.blockingQueueName(afterProperties.getBlockingQueue())
.nowCorePoolSize(Optional.ofNullable(afterProperties.getCorePoolSize()).orElse(beforeProperties.getCorePoolSize()))
.nowMaximumPoolSize(Optional.ofNullable(afterProperties.getMaximumPoolSize()).orElse(beforeProperties.getMaximumPoolSize()))
.nowAllowsCoreThreadTimeOut(Optional.ofNullable(afterProperties.getAllowCoreThreadTimeOut()).orElse(beforeProperties.getAllowCoreThreadTimeOut()))
.nowKeepAliveTime(Optional.ofNullable(afterProperties.getKeepAliveTime()).orElse(beforeProperties.getKeepAliveTime()))
.nowQueueCapacity(Optional.ofNullable(afterProperties.getQueueCapacity()).orElse(beforeProperties.getQueueCapacity()))
.nowRejectedName(Optional.ofNullable(afterProperties.getRejectedHandler()).orElse(beforeProperties.getRejectedHandler()))
.nowExecuteTimeOut(Optional.ofNullable(afterProperties.getExecuteTimeOut()).orElse(beforeProperties.getExecuteTimeOut()))
.build();
changeParameterNotifyRequest.setThreadPoolId(beforeProperties.getThreadPoolId());
return changeParameterNotifyRequest;
}
/**
* Check whether the dynamic thread pool parameters have changed and need to be refreshed.
*
* @param threadPoolId     thread pool id
* @param beforeProperties old properties
* @param afterProperties  new properties
* @return true if at least one non-null new property differs from the current configuration
*/
private boolean checkConsistency(String threadPoolId, ExecutorProperties beforeProperties, ExecutorProperties afterProperties) {
AgentThreadPoolExecutorHolder holder = AgentThreadPoolInstanceRegistry.getInstance().getHolder(threadPoolId);
if (holder.isEmpty() || holder.getExecutor() == null) {
return false;
}
ThreadPoolExecutor executor = holder.getExecutor();
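// A refresh is needed only when at least one non-null "after" property differs from the current one; a queue-capacity change counts only for a resizable blocking queue, since other queue types cannot be resized at runtime.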
return (afterProperties.getCorePoolSize() != null && !Objects.equals(beforeProperties.getCorePoolSize(), afterProperties.getCorePoolSize()))
|| (afterProperties.getMaximumPoolSize() != null && !Objects.equals(beforeProperties.getMaximumPoolSize(), afterProperties.getMaximumPoolSize()))
|| (afterProperties.getAllowCoreThreadTimeOut() != null && !Objects.equals(beforeProperties.getAllowCoreThreadTimeOut(), afterProperties.getAllowCoreThreadTimeOut()))
|| (afterProperties.getExecuteTimeOut() != null && !Objects.equals(beforeProperties.getExecuteTimeOut(), afterProperties.getExecuteTimeOut()))
|| (afterProperties.getKeepAliveTime() != null && !Objects.equals(beforeProperties.getKeepAliveTime(), afterProperties.getKeepAliveTime()))
|| (afterProperties.getRejectedHandler() != null && !Objects.equals(beforeProperties.getRejectedHandler(), afterProperties.getRejectedHandler()))
|| (afterProperties.getQueueCapacity() != null && !Objects.equals(beforeProperties.getQueueCapacity(), afterProperties.getQueueCapacity())
&& Objects.equals(BlockingQueueTypeEnum.RESIZABLE_LINKED_BLOCKING_QUEUE.getName(), executor.getQueue().getClass().getSimpleName()));
}
@Override
public void handleMethodException(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Throwable t) {
}
}

@ -0,0 +1,51 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v1;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
import cn.hippo4j.agent.plugin.spring.common.SpringEnvironmentSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.ConfigurableEnvironment;
import java.lang.reflect.Method;
public class EventPublishingRunListenerEnvironmentPreparedInterceptor implements InstanceMethodsAroundInterceptor {
private static final Logger LOGGER = LoggerFactory.getLogger(EventPublishingRunListenerEnvironmentPreparedInterceptor.class);
@Override
public void beforeMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, MethodInterceptResult result) throws Throwable {
}
@Override
public Object afterMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Object ret) throws Throwable {
ConfigurableEnvironment environment = (ConfigurableEnvironment) allArguments[0];
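// With the agent attached, disable the dependency-based dynamic thread-pool switch so the two enhancement mechanisms do not run at the same time.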
SpringEnvironmentSupport.disableNonAgentSwitch(environment);
LOGGER.info("[Hippo4j-Agent] Switch off in non-Agent mode.");
return ret;
}
@Override
public void handleMethodException(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Throwable t) {
}
}

@ -0,0 +1,100 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v1;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
import cn.hippo4j.agent.core.registry.AgentThreadPoolInstanceRegistry;
import cn.hippo4j.agent.core.util.ReflectUtil;
import cn.hippo4j.agent.core.util.ThreadPoolPropertyKey;
import cn.hippo4j.common.executor.support.BlockingQueueTypeEnum;
import cn.hippo4j.common.executor.support.RejectedPolicyTypeEnum;
import cn.hippo4j.common.toolkit.BooleanUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
public class SpringApplicationRunInterceptor implements InstanceMethodsAroundInterceptor {
private static final Logger LOGGER = LoggerFactory.getLogger(SpringApplicationRunInterceptor.class);
@Override
public void beforeMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, MethodInterceptResult result) throws Throwable {
}
@Override
public Object afterMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Object ret) throws Throwable {
registerThreadPoolInstances();
LOGGER.info("[Hippo4j-Agent] Registered thread pool instances successfully.");
return ret;
}
private void registerThreadPoolInstances() {
Map<ThreadPoolExecutor, Class<?>> earlyConstructMap = AgentThreadPoolInstanceRegistry.getInstance().earlyConstructMap;
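// Executors captured by the constructor interceptor are matched back to the static field that holds them; declaring class plus field name (e.g. com.example.Foo#EXECUTOR) becomes the thread pool id.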
for (Map.Entry<ThreadPoolExecutor, Class<?>> entry : earlyConstructMap.entrySet()) {
ThreadPoolExecutor enhancedInstance = entry.getKey();
Class<?> declaredClass = entry.getValue();
List<Field> declaredFields = ReflectUtil.getStaticFieldsFromType(declaredClass, ThreadPoolExecutor.class);
for (Field field : declaredFields) {
try {
Object value = field.get(null);
if (value == enhancedInstance) {
String threadPoolId = declaredClass.getName() + "#" + field.getName();
register(threadPoolId, enhancedInstance);
break;
}
} catch (IllegalAccessException e) {
LOGGER.error("Get static field error.", e);
}
}
}
}
private void register(String threadPoolId, ThreadPoolExecutor executor) {
// build parameter properties.
Properties properties = new Properties();
properties.put(ThreadPoolPropertyKey.THREAD_POOL_ID, threadPoolId);
properties.put(ThreadPoolPropertyKey.CORE_POOL_SIZE, executor.getCorePoolSize());
properties.put(ThreadPoolPropertyKey.MAXIMUM_POOL_SIZE, executor.getMaximumPoolSize());
properties.put(ThreadPoolPropertyKey.ALLOW_CORE_THREAD_TIME_OUT, BooleanUtil.toBoolean(String.valueOf(executor.allowsCoreThreadTimeOut())));
properties.put(ThreadPoolPropertyKey.KEEP_ALIVE_TIME, executor.getKeepAliveTime(TimeUnit.MILLISECONDS));
properties.put(ThreadPoolPropertyKey.BLOCKING_QUEUE, BlockingQueueTypeEnum.getBlockingQueueTypeEnumByName(executor.getQueue().getClass().getSimpleName()).getName());
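// Note: remainingCapacity() is the configured capacity minus the current queue size, so it matches the configured capacity only while the queue is empty (as it normally is at startup).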
properties.put(ThreadPoolPropertyKey.QUEUE_CAPACITY, executor.getQueue().remainingCapacity());
properties.put(ThreadPoolPropertyKey.THREAD_NAME_PREFIX, threadPoolId);
properties.put(ThreadPoolPropertyKey.REJECTED_HANDLER, RejectedPolicyTypeEnum.getRejectedPolicyTypeEnumByName(executor.getRejectedExecutionHandler().getClass().getSimpleName()).getName());
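// Default task execute-timeout of 10000 ms (10 seconds) for agent-registered pools.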
properties.put(ThreadPoolPropertyKey.EXECUTE_TIME_OUT, 10000L);
// register executor.
AgentThreadPoolInstanceRegistry.getInstance().putHolder(threadPoolId, executor, properties);
}
@Override
public void handleMethodException(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Throwable t) {
}
}

@ -0,0 +1,86 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v1.define;
import cn.hippo4j.agent.core.plugin.interceptor.ConstructorInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.InstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.ClassInstanceMethodsEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.match.ClassMatch;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.matcher.ElementMatcher;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static cn.hippo4j.agent.core.plugin.match.NameMatch.byName;
public class EventPublishingRunListenerInstrumentation extends ClassInstanceMethodsEnhancePluginDefine {
private static final String ENHANCE_CLASS = "org.springframework.boot.context.event.EventPublishingRunListener";
private static final String EVENT_PUBLISHING_FINISHED_INTERCEPTOR = "cn.hippo4j.agent.plugin.spring.boot.v1.EventPublishingFinishedInterceptor";
private static final String EVENT_PUBLISHING_ENVIRONMENT_PREPARED_INTERCEPTOR = "cn.hippo4j.agent.plugin.spring.boot.v1.EventPublishingRunListenerEnvironmentPreparedInterceptor";
@Override
protected ClassMatch enhanceClass() {
return byName(ENHANCE_CLASS);
}
@Override
public ConstructorInterceptPoint[] getConstructorsInterceptPoints() {
return new ConstructorInterceptPoint[0];
}
@Override
public InstanceMethodsInterceptPoint[] getInstanceMethodsInterceptPoints() {
return new InstanceMethodsInterceptPoint[]{
new InstanceMethodsInterceptPoint() {
@Override
public ElementMatcher<MethodDescription> getMethodsMatcher() {
return named("finished");
}
@Override
public String getMethodsInterceptor() {
return EVENT_PUBLISHING_FINISHED_INTERCEPTOR;
}
@Override
public boolean isOverrideArgs() {
return false;
}
},
new InstanceMethodsInterceptPoint() {
@Override
public ElementMatcher<MethodDescription> getMethodsMatcher() {
return named("environmentPrepared");
}
@Override
public String getMethodsInterceptor() {
return EVENT_PUBLISHING_ENVIRONMENT_PREPARED_INTERCEPTOR;
}
@Override
public boolean isOverrideArgs() {
return false;
}
}
};
}
}

@ -0,0 +1,68 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v1.define;
import cn.hippo4j.agent.core.plugin.interceptor.ConstructorInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.InstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.ClassInstanceMethodsEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.match.ClassMatch;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.matcher.ElementMatcher;
import static cn.hippo4j.agent.core.plugin.match.NameMatch.byName;
import static net.bytebuddy.matcher.ElementMatchers.named;
public class SpringApplicationRunInstrumentation extends ClassInstanceMethodsEnhancePluginDefine {
private static final String ENHANCE_CLASS = "org.springframework.boot.SpringApplication";
private static final String SPRING_APPLICATION_RUN_INTERCEPTOR = "cn.hippo4j.agent.plugin.spring.boot.v1.SpringApplicationRunInterceptor";
@Override
protected ClassMatch enhanceClass() {
return byName(ENHANCE_CLASS);
}
@Override
public ConstructorInterceptPoint[] getConstructorsInterceptPoints() {
return new ConstructorInterceptPoint[0];
}
@Override
public InstanceMethodsInterceptPoint[] getInstanceMethodsInterceptPoints() {
return new InstanceMethodsInterceptPoint[]{
new InstanceMethodsInterceptPoint() {
@Override
public ElementMatcher<MethodDescription> getMethodsMatcher() {
return named("run");
}
@Override
public String getMethodsInterceptor() {
return SPRING_APPLICATION_RUN_INTERCEPTOR;
}
@Override
public boolean isOverrideArgs() {
return false;
}
}
};
}
}

@ -0,0 +1,18 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
spring-boot-1.x=cn.hippo4j.agent.plugin.spring.boot.v1.define.EventPublishingRunListenerInstrumentation
spring-boot-1.x=cn.hippo4j.agent.plugin.spring.boot.v1.define.SpringApplicationRunInstrumentation

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>spring-plugins</artifactId>
<version>${revision}</version>
</parent>
<artifactId>spring-boot-2.x-plugin</artifactId>
<packaging>jar</packaging>
<properties>
<spring.boot.version>2.3.2.RELEASE</spring.boot.version>
</properties>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>spring-plugin-common</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure</artifactId>
<version>${spring.boot.version}</version>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v2;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult;
import cn.hippo4j.agent.plugin.spring.common.SpringPropertiesLoader;
import org.springframework.context.ConfigurableApplicationContext;
import java.lang.reflect.Method;
public class EventPublishingStartedInterceptor implements InstanceMethodsAroundInterceptor {
private static final ILog LOGGER = LogManager.getLogger(EventPublishingStartedInterceptor.class);
@Override
public void beforeMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, MethodInterceptResult result) throws Throwable {
}
@Override
public Object afterMethod(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Object ret) throws Throwable {
ConfigurableApplicationContext context = (ConfigurableApplicationContext) allArguments[0];
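// Once the application context has started, snapshot the fully prepared Spring Environment into the agent's own property holder.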
SpringPropertiesLoader.loadSpringProperties(context.getEnvironment());
return ret;
}
@Override
public void handleMethodException(EnhancedInstance objInst, Method method, Object[] allArguments, Class<?>[] argumentsTypes, Throwable t) {
}
}

@ -0,0 +1,68 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.boot.v2.define;
import cn.hippo4j.agent.core.plugin.interceptor.ConstructorInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.InstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.ClassInstanceMethodsEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.match.ClassMatch;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.matcher.ElementMatcher;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static cn.hippo4j.agent.core.plugin.match.NameMatch.byName;
public class EventPublishingRunListenerInstrumentation extends ClassInstanceMethodsEnhancePluginDefine {
private static final String ENHANCE_CLASS = "org.springframework.boot.context.event.EventPublishingRunListener";
private static final String EVENT_PUBLISHING_FINISHED_INTERCEPTOR = "cn.hippo4j.agent.plugin.spring.boot.v2.EventPublishingStartedInterceptor";
@Override
protected ClassMatch enhanceClass() {
return byName(ENHANCE_CLASS);
}
@Override
public ConstructorInterceptPoint[] getConstructorsInterceptPoints() {
return new ConstructorInterceptPoint[0];
}
@Override
public InstanceMethodsInterceptPoint[] getInstanceMethodsInterceptPoints() {
return new InstanceMethodsInterceptPoint[]{
new InstanceMethodsInterceptPoint() {
@Override
public ElementMatcher<MethodDescription> getMethodsMatcher() {
return named("started");
}
@Override
public String getMethodsInterceptor() {
return EVENT_PUBLISHING_FINISHED_INTERCEPTOR;
}
@Override
public boolean isOverrideArgs() {
return false;
}
}
};
}
}

@ -0,0 +1,17 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
spring-boot-2.x=cn.hippo4j.agent.plugin.spring.boot.v2.define.EventPublishingRunListenerInstrumentation

@ -5,16 +5,18 @@
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent</artifactId>
<artifactId>spring-plugins</artifactId>
<version>${revision}</version>
</parent>
<artifactId>hippo4j-sdk-plugin</artifactId>
<artifactId>spring-plugin-common</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.common;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import java.util.HashMap;
import java.util.Map;
public class SpringEnvironmentSupport {
public static void disableNonAgentSwitch(ConfigurableEnvironment environment) {
Map<String, Object> map = new HashMap<>();
map.put("spring.dynamic.thread-pool.enable", false); // Switch off in non-Agent mode
MapPropertySource propertySource = new MapPropertySource("Hippo4j-Agent-Properties", map);
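// addFirst gives this property source the highest precedence, so it overrides any user-supplied value for the switch.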
environment.getPropertySources().addFirst(propertySource);
}
}

@ -0,0 +1,66 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.spring.common;
import cn.hippo4j.agent.core.boot.SpringBootConfigInitializer;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.core.env.PropertySource;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
public class SpringPropertiesLoader {
private static final ILog LOGGER = LogManager.getLogger(SpringPropertiesLoader.class);
public static void loadSpringProperties(ConfigurableEnvironment environment) {
Iterator<PropertySource<?>> iterator = environment.getPropertySources().iterator();
Properties properties = new Properties();
List<PropertySource<?>> propertySourceList = new ArrayList<>();
while (iterator.hasNext()) {
propertySourceList.add(iterator.next());
}
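// PropertySources are ordered highest-priority first; iterating the copy in reverse lets higher-priority values overwrite lower-priority ones when keys collide.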
for (int i = propertySourceList.size() - 1; i >= 0; i--) {
PropertySource<?> propertySource = propertySourceList.get(i);
if (!(propertySource instanceof EnumerablePropertySource)) {
LOGGER.warn("Skip propertySource[{}] because {} not enumerable.", propertySource.getName(), propertySource.getClass());
continue;
}
LOGGER.info("Load propertySource[{}] into SpringProperties.", propertySource.getName());
EnumerablePropertySource<?> enumerablePropertySource = (EnumerablePropertySource<?>) propertySource;
String[] keys = enumerablePropertySource.getPropertyNames();
for (String key : keys) {
Object value = null;
try {
value = enumerablePropertySource.getProperty(key);
if (value != null) {
properties.put(key.toLowerCase(), value.toString());
}
} catch (Throwable e) {
LOGGER.warn("Put property to spring properties failed, key=[{}], value=[{}]", key, value);
}
}
}
SpringBootConfigInitializer.setSpringProperties(properties);
}
}

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent-plugin</artifactId>
<version>${revision}</version>
</parent>
<artifactId>thread-pool-plugin</artifactId>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-core</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-common</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>

@ -0,0 +1,100 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.thread.pool;
import cn.hippo4j.agent.core.conf.Config;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor;
import cn.hippo4j.agent.core.registry.AgentThreadPoolInstanceRegistry;
import cn.hippo4j.agent.core.util.CollectionUtil;
import cn.hippo4j.agent.core.util.StringUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
public class ThreadPoolExecutorConstructorMethodInterceptor implements InstanceConstructorInterceptor {
private static final ILog LOGGER = LogManager.getLogger(ThreadPoolExecutorConstructorMethodInterceptor.class);
private static final List<String> EXCLUDE_STACK_TRACE_ELEMENT_CLASS_PREFIX = Arrays.asList("java", "cn.hippo4j.agent");
@Override
public void onConstruct(EnhancedInstance objInst, Object[] allArguments) throws Throwable {
List<StackTraceElement> stackTraceElements = getStackTraceElements();
if (CollectionUtil.isEmpty(stackTraceElements)) {
return;
}
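// The first business frame on the stack identifies the class that constructed this executor; record it so the pool can later be registered under className#fieldName.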
StackTraceElement declaredClassStackTraceElement = stackTraceElements.get(0);
String declaredClassName = declaredClassStackTraceElement.getClassName();
Class<?> declaredClass = Thread.currentThread().getContextClassLoader().loadClass(declaredClassName);
AgentThreadPoolInstanceRegistry.getInstance().earlyConstructMap.put((ThreadPoolExecutor) objInst, declaredClass);
}
private List<StackTraceElement> getStackTraceElements() {
StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
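// Skip JDK and agent frames until the first business frame; give up entirely if that frame belongs to an excluded thread-pool package.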
int i;
for (i = 0; i < stackTraceElements.length; i++) {
String fullClassName = stackTraceElements[i].getClassName();
if (isBusinessStackTraceClassName(fullClassName)) {
if (isExcludeThreadPoolClass(fullClassName)) {
return Collections.emptyList();
} else {
break;
}
}
}
List<StackTraceElement> result = new ArrayList<>(3); // Find up to three layers
for (int j = 0; i < stackTraceElements.length && j < 3; i++, j++) {
String fullClassName = stackTraceElements[i].getClassName();
if (isExcludeThreadPoolClass(fullClassName)) {
break;
} else {
result.add(stackTraceElements[i]);
}
}
return result;
}
private boolean isBusinessStackTraceClassName(String className) {
for (String prefix : EXCLUDE_STACK_TRACE_ELEMENT_CLASS_PREFIX) {
if (className.startsWith(prefix)) {
return false;
}
}
return true;
}
private boolean isExcludeThreadPoolClass(String className) {
if (StringUtil.isBlank(className)) {
return true;
}
List<String> excludePackagePrefix = Config.Plugin.ThreadPool.EXCLUDE_PACKAGE_PREFIX;
for (String excludePrefix : excludePackagePrefix) {
if (className.startsWith(excludePrefix)) {
return true;
}
}
return false;
}
}

@ -0,0 +1,73 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.plugin.thread.pool.define;
import cn.hippo4j.agent.core.plugin.interceptor.ConstructorInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.InstanceMethodsInterceptPoint;
import cn.hippo4j.agent.core.plugin.interceptor.enhance.ClassInstanceMethodsEnhancePluginDefine;
import cn.hippo4j.agent.core.plugin.match.ClassMatch;
import cn.hippo4j.agent.core.plugin.match.NameMatch;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.matcher.ElementMatcher;
import static net.bytebuddy.matcher.ElementMatchers.takesArguments;
public class ThreadPoolExecutorInstrumentation extends ClassInstanceMethodsEnhancePluginDefine {
private static final String ENHANCE_CLASS = "java.util.concurrent.ThreadPoolExecutor";
private static final String CONSTRUCTOR_INTERCEPT_CLASS = "cn.hippo4j.agent.plugin.thread.pool.ThreadPoolExecutorConstructorMethodInterceptor";
private static final int CONSTRUCTOR_INTERCEPT_PARAMETER_LENGTH = 7;
@Override
protected ClassMatch enhanceClass() {
return NameMatch.byName(ENHANCE_CLASS);
}
@Override
public boolean isBootstrapInstrumentation() {
return true;
}
/**
* Intercept only the constructor that takes the full parameter list; every other
* ThreadPoolExecutor constructor delegates to it, so this prevents repeated interception.
*/
@Override
public ConstructorInterceptPoint[] getConstructorsInterceptPoints() {
return new ConstructorInterceptPoint[]{
new ConstructorInterceptPoint() {
@Override
public ElementMatcher<MethodDescription> getConstructorMatcher() {
return takesArguments(CONSTRUCTOR_INTERCEPT_PARAMETER_LENGTH);
}
@Override
public String getConstructorInterceptor() {
return CONSTRUCTOR_INTERCEPT_CLASS;
}
}
};
}
@Override
public InstanceMethodsInterceptPoint[] getInstanceMethodsInterceptPoints() {
return new InstanceMethodsInterceptPoint[0];
}
}

@ -0,0 +1,17 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
thread-pool-plugin=cn.hippo4j.agent.plugin.thread.pool.define.ThreadPoolExecutorInstrumentation

@ -13,7 +13,8 @@
<packaging>pom</packaging>
<modules>
<module>hippo4j-agent-core</module>
<module>hippo4j-sdk-plugin</module>
<module>hippo4j-agent-plugin</module>
<module>hippo4j-agent-bootstrap</module>
</modules>
<properties>
@ -56,53 +57,56 @@
<jmh.version>1.33</jmh.version>
<gmaven-plugin.version>1.5</gmaven-plugin.version>
<checkstyle.fails.on.error>true</checkstyle.fails.on.error>
<slf4j.version>1.7.25</slf4j.version>
<guava.version>30.1.1-jre</guava.version>
<wiremock.version>2.16.0</wiremock.version>
</properties>
<dependencies>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId>
<version>${jmh.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>${javax.annotation-api.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${gson.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>${bytebuddy.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId>
<version>${jmh.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>${lombok.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>${javax.annotation-api.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@ -125,7 +129,6 @@
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId>
<version>${bytebuddy.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.objenesis</groupId>
@ -133,6 +136,12 @@
<version>${objenesis.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock</artifactId>
<version>${wiremock.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId>
@ -212,9 +221,6 @@
<!-- Build has not yet been updated for Java 9+ -->
<version>1.8</version>
</requireJavaVersion>
<requireMavenVersion>
<version>3.6</version>
</requireMavenVersion>
</rules>
</configuration>
</execution>
