Complete the integration of the premain mechanism. (#1203)

* Complete the integration of the premain mechanism.
Modify the .gitignore file.

* Modify hippo4j-agent submodule naming.
yanrongzhen 2 years ago committed by GitHub
parent 35cf3a2791
commit 7b30ad81c7

.gitignore

@ -6,6 +6,10 @@ target/
!**/node_modules/
!**/dist/
### Agent ###
/hippo4j-agent/hippo4j-agent/
**/dependency-reduced-pom.xml
### STS ###
.apt_generated
.classpath

@ -0,0 +1,274 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The service name in UI
# ${service name} = [${group name}::]${logic name}
# The group name is optional.
agent.service_name=${SW_AGENT_NAME:Your_ApplicationName}
# The agent namespace
agent.namespace=${SW_AGENT_NAMESPACE:}
# The agent cluster
agent.cluster=${SW_AGENT_CLUSTER:}
# The number of sampled traces per 3 seconds
# Negative or zero means off, by default
agent.sample_n_per_3_secs=${SW_AGENT_SAMPLE:-1}
# Authentication active is based on backend setting, see application.yml for more details.
agent.authentication=${SW_AGENT_AUTHENTICATION:}
# The max number of TraceSegmentRef in a single span, to keep the memory cost predictable.
agent.trace_segment_ref_limit_per_span=${SW_TRACE_SEGMENT_LIMIT:500}
# The max number of spans in a single segment.
# Through this config item, SkyWalking keeps your application's memory cost predictable.
agent.span_limit_per_segment=${SW_AGENT_SPAN_LIMIT:300}
# If the operation name of the first span ends with one of these suffixes, the segment is ignored. Multiple values should be separated by `,`.
agent.ignore_suffix=${SW_AGENT_IGNORE_SUFFIX:.jpg,.jpeg,.js,.css,.png,.bmp,.gif,.ico,.mp3,.mp4,.html,.svg}
# If true, the SkyWalking agent saves all instrumented class files in the `/debugging` folder.
# The SkyWalking team may ask for these files in order to resolve compatibility problems.
agent.is_open_debugging_class=${SW_AGENT_OPEN_DEBUG:false}
# If true, the SkyWalking agent caches all instrumented class files in memory or on disk (decided by the class cache mode),
# allowing other javaagents to enhance classes that were already enhanced by the SkyWalking agent.
agent.is_cache_enhanced_class=${SW_AGENT_CACHE_CLASS:false}
# The instrumented classes cache mode: MEMORY or FILE
# MEMORY: cache class bytes in memory; if there are many or very large instrumented classes, this may take up more memory
# FILE: cache class bytes in `/class-cache` folder, automatically clean up cached class files when the application exits
agent.class_cache_mode=${SW_AGENT_CLASS_CACHE_MODE:MEMORY}
# The instance name is the identity of an instance and should be unique within the service. If empty, the SkyWalking agent will
# generate a 32-character UUID. By default, SkyWalking uses UUID@hostname as the instance name. Max length is 50 (UTF-8 chars).
agent.instance_name=${SW_AGENT_INSTANCE_NAME:}
# service instance properties in json format. e.g. agent.instance_properties_json = {"org": "apache-skywalking"}
agent.instance_properties_json=${SW_INSTANCE_PROPERTIES_JSON:}
# How deep the agent goes when logging cause exceptions.
agent.cause_exception_depth=${SW_AGENT_CAUSE_EXCEPTION_DEPTH:5}
# Force reconnection period of grpc, based on grpc_channel_check_interval.
agent.force_reconnection_period=${SW_AGENT_FORCE_RECONNECTION_PERIOD:1}
# The operationName max length
# Note: in current practice, we don't recommend a length over 190.
agent.operation_name_threshold=${SW_AGENT_OPERATION_NAME_THRESHOLD:150}
# If true, keep tracing even when the backend is not available.
agent.keep_tracing=${SW_AGENT_KEEP_TRACING:false}
# The agent uses gRPC plain text by default.
# If true, the SkyWalking agent uses TLS even if no CA file is detected.
agent.force_tls=${SW_AGENT_FORCE_TLS:false}
# gRPC SSL trusted ca file.
agent.ssl_trusted_ca_path=${SW_AGENT_SSL_TRUSTED_CA_PATH:/ca/ca.crt}
# enable mTLS when ssl_key_path and ssl_cert_chain_path exist.
agent.ssl_key_path=${SW_AGENT_SSL_KEY_PATH:}
agent.ssl_cert_chain_path=${SW_AGENT_SSL_CERT_CHAIN_PATH:}
# Limit the size of the IPv4 list.
osinfo.ipv4_list_size=${SW_AGENT_OSINFO_IPV4_LIST_SIZE:10}
# grpc channel status check interval.
collector.grpc_channel_check_interval=${SW_AGENT_COLLECTOR_GRPC_CHANNEL_CHECK_INTERVAL:30}
# Agent heartbeat report period. Unit, second.
collector.heartbeat_period=${SW_AGENT_COLLECTOR_HEARTBEAT_PERIOD:30}
# The agent sends the instance properties to the backend every
# collector.heartbeat_period * collector.properties_report_period_factor seconds
collector.properties_report_period_factor=${SW_AGENT_COLLECTOR_PROPERTIES_REPORT_PERIOD_FACTOR:10}
# Backend service addresses.
collector.backend_service=${SW_AGENT_COLLECTOR_BACKEND_SERVICES:127.0.0.1:11800}
# How long the gRPC client waits before timing out when sending data upstream. The unit is seconds.
collector.grpc_upstream_timeout=${SW_AGENT_COLLECTOR_GRPC_UPSTREAM_TIMEOUT:30}
# Sniffer get profile task list interval.
collector.get_profile_task_interval=${SW_AGENT_COLLECTOR_GET_PROFILE_TASK_INTERVAL:20}
# Sniffer get agent dynamic config interval.
collector.get_agent_dynamic_config_interval=${SW_AGENT_COLLECTOR_GET_AGENT_DYNAMIC_CONFIG_INTERVAL:20}
# If true, the SkyWalking agent periodically resolves DNS to update the receiver service addresses.
collector.is_resolve_dns_periodically=${SW_AGENT_COLLECTOR_IS_RESOLVE_DNS_PERIODICALLY:false}
# Logging level
logging.level=${SW_LOGGING_LEVEL:INFO}
# Logging file_name
logging.file_name=${SW_LOGGING_FILE_NAME:skywalking-api.log}
# Log output. Default is FILE. Use CONSOLE means output to stdout.
logging.output=${SW_LOGGING_OUTPUT:FILE}
# Log files directory. Default is a blank string, which means logs are written to "{theSkywalkingAgentJarDir}/logs".
# {theSkywalkingAgentJarDir} is the directory where the skywalking agent jar file is located
logging.dir=${SW_LOGGING_DIR:}
# Logger resolver: PATTERN or JSON. The default is PATTERN, which uses logging.pattern to print traditional text logs.
# JSON resolver prints logs in JSON format.
logging.resolver=${SW_LOGGING_RESOLVER:PATTERN}
# Logging format. The available conversion specifiers are:
# * %level means the log level.
# * %timestamp means the current time in the format yyyy-MM-dd HH:mm:ss:SSS.
# * %thread means the name of the current thread.
# * %msg means the message logged by the user.
# * %class means the simple name of the target class.
# * %throwable means the throwable logged by the user.
# * %agent_name means agent.service_name. Only applies to the PatternLogger.
logging.pattern=${SW_LOGGING_PATTERN:%level %timestamp %thread %class : %msg %throwable}
# Logging max_file_size, default: 300 * 1024 * 1024 = 314572800
logging.max_file_size=${SW_LOGGING_MAX_FILE_SIZE:314572800}
# The max number of history log files. When a rollover happens, if the number of log files exceeds this limit,
# the oldest file will be deleted. Negative or zero means off, which is the default.
logging.max_history_files=${SW_LOGGING_MAX_HISTORY_FILES:-1}
# Listed exceptions will not be treated as errors, because in some code the exception is used as a way of controlling business flow.
# Besides, the annotation named IgnoredException in the trace toolkit is another way to configure ignored exceptions.
statuscheck.ignored_exceptions=${SW_STATUSCHECK_IGNORED_EXCEPTIONS:}
# The max recursive depth when checking the exception traced by the agent. Typically, we don't recommend setting this more than 10, which could cause a performance issue. Negative value and 0 would be ignored, which means all exceptions would make the span tagged in error status.
statuscheck.max_recursive_depth=${SW_STATUSCHECK_MAX_RECURSIVE_DEPTH:1}
# Max element count in the correlation context
correlation.element_max_number=${SW_CORRELATION_ELEMENT_MAX_NUMBER:3}
# Max value length of each element.
correlation.value_max_length=${SW_CORRELATION_VALUE_MAX_LENGTH:128}
# Tag the span by the key/value in the correlation context, when the keys listed here exist.
correlation.auto_tag_keys=${SW_CORRELATION_AUTO_TAG_KEYS:}
# The buffer size of collected JVM info.
jvm.buffer_size=${SW_JVM_BUFFER_SIZE:600}
# The buffer channel size.
buffer.channel_size=${SW_BUFFER_CHANNEL_SIZE:5}
# The buffer size.
buffer.buffer_size=${SW_BUFFER_BUFFER_SIZE:300}
# If true, the SkyWalking agent enables profiling when the user creates a new profile task. Otherwise profiling is disabled.
profile.active=${SW_AGENT_PROFILE_ACTIVE:true}
# Parallel monitor segment count
profile.max_parallel=${SW_AGENT_PROFILE_MAX_PARALLEL:5}
# Max segment monitoring time (minutes); if the current segment has been monitored longer than this limit, monitoring stops.
profile.duration=${SW_AGENT_PROFILE_DURATION:10}
# Max dump thread stack depth
profile.dump_max_stack_depth=${SW_AGENT_PROFILE_DUMP_MAX_STACK_DEPTH:500}
# Snapshot transport to backend buffer size
profile.snapshot_transport_buffer_size=${SW_AGENT_PROFILE_SNAPSHOT_TRANSPORT_BUFFER_SIZE:4500}
# If true, the agent collects and reports metrics to the backend.
meter.active=${SW_METER_ACTIVE:true}
# Report meters interval. The unit is second
meter.report_interval=${SW_METER_REPORT_INTERVAL:20}
# Max size of the meter pool
meter.max_meter_size=${SW_METER_MAX_METER_SIZE:500}
# The max size of a message to send to the server. Default is 10 MB.
log.max_message_size=${SW_GRPC_LOG_MAX_MESSAGE_SIZE:10485760}
# Mount the specific folders of the plugins. Plugins in mounted folders would work.
plugin.mount=${SW_MOUNT_FOLDERS:plugins,activations}
# Peer maximum description limit.
plugin.peer_max_length=${SW_PLUGIN_PEER_MAX_LENGTH:200}
# Exclude some plugins defined in the plugins dir. Plugin names are defined in [Agent plugin list](Plugin-list.md).
plugin.exclude_plugins=${SW_EXCLUDE_PLUGINS:}
# If true, trace all the parameters in MongoDB access; default is false, which traces only the operation and not the parameters.
plugin.mongodb.trace_param=${SW_PLUGIN_MONGODB_TRACE_PARAM:false}
# If set to positive number, the `WriteRequest.params` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.mongodb.filter_length_limit=${SW_PLUGIN_MONGODB_FILTER_LENGTH_LIMIT:256}
# If true, trace all the DSL(Domain Specific Language) in ElasticSearch access, default is false.
plugin.elasticsearch.trace_dsl=${SW_PLUGIN_ELASTICSEARCH_TRACE_DSL:false}
# If true, the fully qualified method name will be used as the endpoint name instead of the request URL, default is false.
plugin.springmvc.use_qualified_name_as_endpoint_name=${SW_PLUGIN_SPRINGMVC_USE_QUALIFIED_NAME_AS_ENDPOINT_NAME:false}
# If true, the fully qualified method name will be used as the operation name instead of the given operation name, default is false.
plugin.toolit.use_qualified_name_as_operation_name=${SW_PLUGIN_TOOLIT_USE_QUALIFIED_NAME_AS_OPERATION_NAME:false}
# If set to true, the parameters of the sql (typically `java.sql.PreparedStatement`) would be collected.
plugin.jdbc.trace_sql_parameters=${SW_JDBC_TRACE_SQL_PARAMETERS:false}
# If set to positive number, the `db.sql.parameters` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.jdbc.sql_parameters_max_length=${SW_PLUGIN_JDBC_SQL_PARAMETERS_MAX_LENGTH:512}
# If set to positive number, the `db.statement` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.jdbc.sql_body_max_length=${SW_PLUGIN_JDBC_SQL_BODY_MAX_LENGTH:2048}
# If true, trace all the query parameters(include deleteByIds and deleteByQuery) in Solr query request, default is false.
plugin.solrj.trace_statement=${SW_PLUGIN_SOLRJ_TRACE_STATEMENT:false}
# If true, trace all the operation parameters in Solr request, default is false.
plugin.solrj.trace_ops_params=${SW_PLUGIN_SOLRJ_TRACE_OPS_PARAMS:false}
# If true, trace all middleware/business handlers that are part of the Light4J handler chain for a request.
plugin.light4j.trace_handler_chain=${SW_PLUGIN_LIGHT4J_TRACE_HANDLER_CHAIN:false}
# If true, the transaction definition name will be simplified.
plugin.springtransaction.simplify_transaction_definition_name=${SW_PLUGIN_SPRINGTRANSACTION_SIMPLIFY_TRANSACTION_DEFINITION_NAME:false}
# Threading classes (`java.lang.Runnable` and `java.util.concurrent.Callable`) and their subclasses, including anonymous inner classes, whose names match any of the `THREADING_CLASS_PREFIXES` (split by `,`) will be instrumented. Make sure to specify only prefixes as narrow as what you expect to instrument (`java.` and `javax.` are ignored for safety reasons).
plugin.jdkthreading.threading_class_prefixes=${SW_PLUGIN_JDKTHREADING_THREADING_CLASS_PREFIXES:}
# This config item controls whether the Tomcat plugin should collect the parameters of the request. It is also activated implicitly in a profiled trace.
plugin.tomcat.collect_http_params=${SW_PLUGIN_TOMCAT_COLLECT_HTTP_PARAMS:false}
# This config item controls whether the SpringMVC plugin should collect the parameters of the request. When your Spring application is based on Tomcat, consider setting only one of `plugin.tomcat.collect_http_params` or `plugin.springmvc.collect_http_params`. It is also activated implicitly in a profiled trace.
plugin.springmvc.collect_http_params=${SW_PLUGIN_SPRINGMVC_COLLECT_HTTP_PARAMS:false}
# This config item controls whether the HttpClient plugin should collect the parameters of the request.
plugin.httpclient.collect_http_params=${SW_PLUGIN_HTTPCLIENT_COLLECT_HTTP_PARAMS:false}
# When `COLLECT_HTTP_PARAMS` is enabled, how many characters to keep and send to the OAP backend; use negative values to keep and send the complete parameters. Note: this config item exists for the sake of performance.
plugin.http.http_params_length_threshold=${SW_PLUGIN_HTTP_HTTP_PARAMS_LENGTH_THRESHOLD:1024}
# When `include_http_headers` declares header names, this threshold controls the length limit of all header values; use negative values to keep and send the complete headers. Note: this config item exists for the sake of performance.
plugin.http.http_headers_length_threshold=${SW_PLUGIN_HTTP_HTTP_HEADERS_LENGTH_THRESHOLD:2048}
# Set the header names that should be collected by the plugin. Header names must follow the `javax.servlet.http` definition. Multiple names should be separated by commas.
plugin.http.include_http_headers=${SW_PLUGIN_HTTP_INCLUDE_HTTP_HEADERS:}
# This config item controls whether the Feign plugin should collect the HTTP body of the request.
plugin.feign.collect_request_body=${SW_PLUGIN_FEIGN_COLLECT_REQUEST_BODY:false}
# When `COLLECT_REQUEST_BODY` is enabled, how many characters to keep and send to the OAP backend, use negative values to keep and send the complete body.
plugin.feign.filter_length_limit=${SW_PLUGIN_FEIGN_FILTER_LENGTH_LIMIT:1024}
# When `COLLECT_REQUEST_BODY` is enabled and the content-type starts with one of the SUPPORTED_CONTENT_TYPES_PREFIX values, collect the body of the request. Multiple prefixes should be separated by `,`.
plugin.feign.supported_content_types_prefix=${SW_PLUGIN_FEIGN_SUPPORTED_CONTENT_TYPES_PREFIX:application/json,text/}
# If true, trace all the influxql(query and write) in InfluxDB access, default is true.
plugin.influxdb.trace_influxql=${SW_PLUGIN_INFLUXDB_TRACE_INFLUXQL:true}
# If true, the Apache Dubbo consumer collects `arguments` in the RPC call, using `Object#toString` to collect the `arguments`.
plugin.dubbo.collect_consumer_arguments=${SW_PLUGIN_DUBBO_COLLECT_CONSUMER_ARGUMENTS:false}
# When `plugin.dubbo.collect_consumer_arguments` is `true`, only this many leading characters of the arguments are sent to the OAP backend.
plugin.dubbo.consumer_arguments_length_threshold=${SW_PLUGIN_DUBBO_CONSUMER_ARGUMENTS_LENGTH_THRESHOLD:256}
# If true, the Apache Dubbo provider collects `arguments` in the RPC call, using `Object#toString` to collect the `arguments`.
plugin.dubbo.collect_provider_arguments=${SW_PLUGIN_DUBBO_COLLECT_PROVIDER_ARGUMENTS:false}
# When `plugin.dubbo.collect_provider_arguments` is `true`, only this many leading characters of the arguments are sent to the OAP backend.
plugin.dubbo.provider_arguments_length_threshold=${SW_PLUGIN_DUBBO_PROVIDER_ARGUMENTS_LENGTH_THRESHOLD:256}
# A list of host/port pairs to use for establishing the initial connection to the Kafka cluster.
plugin.kafka.bootstrap_servers=${SW_KAFKA_BOOTSTRAP_SERVERS:localhost:9092}
# Timeout period of reading topics from the Kafka server, the unit is second.
plugin.kafka.get_topic_timeout=${SW_GET_TOPIC_TIMEOUT:10}
# Kafka producer configuration. Read [producer configure](http://kafka.apache.org/24/documentation.html#producerconfigs)
# for more details and examples.
plugin.kafka.producer_config=${sw_plugin_kafka_producer_config:}
# Configure the Kafka producer configuration in JSON format. Note that it will be overridden by plugin.kafka.producer_config[key] if the key is duplicated.
plugin.kafka.producer_config_json=${SW_PLUGIN_KAFKA_PRODUCER_CONFIG_JSON:}
# Specify which Kafka topic name for Meter System data to report to.
plugin.kafka.topic_meter=${SW_PLUGIN_KAFKA_TOPIC_METER:skywalking-meters}
# Specify which Kafka topic name for JVM metrics data to report to.
plugin.kafka.topic_metrics=${SW_PLUGIN_KAFKA_TOPIC_METRICS:skywalking-metrics}
# Specify which Kafka topic name for traces data to report to.
plugin.kafka.topic_segment=${SW_PLUGIN_KAFKA_TOPIC_SEGMENT:skywalking-segments}
# Specify which Kafka topic name for Thread Profiling snapshot to report to.
plugin.kafka.topic_profiling=${SW_PLUGIN_KAFKA_TOPIC_PROFILINGS:skywalking-profilings}
# Specify which Kafka topic name for the register or heartbeat data of Service Instance to report to.
plugin.kafka.topic_management=${SW_PLUGIN_KAFKA_TOPIC_MANAGEMENT:skywalking-managements}
# Specify which Kafka topic name for the logging data to report to.
plugin.kafka.topic_logging=${SW_PLUGIN_KAFKA_TOPIC_LOGGING:skywalking-logs}
# Isolate multiple OAP servers when using the same Kafka cluster (the final topic name is the namespace prepended to the Kafka topic with `-`).
plugin.kafka.namespace=${SW_KAFKA_NAMESPACE:}
# Match spring beans with regular expression for the class name. Multiple expressions could be separated by a comma. This only works when `Spring annotation plugin` has been activated.
plugin.springannotation.classname_match_regex=${SW_SPRINGANNOTATION_CLASSNAME_MATCH_REGEX:}
# Whether or not to transmit logged data as formatted or un-formatted.
plugin.toolkit.log.transmit_formatted=${SW_PLUGIN_TOOLKIT_LOG_TRANSMIT_FORMATTED:true}
# If set to true, the parameters of Redis commands would be collected by Lettuce agent.
plugin.lettuce.trace_redis_parameters=${SW_PLUGIN_LETTUCE_TRACE_REDIS_PARAMETERS:false}
# If set to positive number and `plugin.lettuce.trace_redis_parameters` is set to `true`, Redis command parameters would be collected and truncated to this length.
plugin.lettuce.redis_parameter_max_length=${SW_PLUGIN_LETTUCE_REDIS_PARAMETER_MAX_LENGTH:128}
# If set to true, the parameters of the cypher would be collected.
plugin.neo4j.trace_cypher_parameters=${SW_PLUGIN_NEO4J_TRACE_CYPHER_PARAMETERS:false}
# If set to positive number, the `db.cypher.parameters` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.neo4j.cypher_parameters_max_length=${SW_PLUGIN_NEO4J_CYPHER_PARAMETERS_MAX_LENGTH:512}
# If set to positive number, the `db.statement` would be truncated to this length, otherwise it would be completely saved, which may cause performance problem.
plugin.neo4j.cypher_body_max_length=${SW_PLUGIN_NEO4J_CYPHER_BODY_MAX_LENGTH:2048}
# If set to a positive number and activate `trace sampler CPU policy plugin`, the trace would not be collected when agent process CPU usage percent is greater than `plugin.cpupolicy.sample_cpu_usage_percent_limit`.
plugin.cpupolicy.sample_cpu_usage_percent_limit=${SW_SAMPLE_CPU_USAGE_PERCENT_LIMIT:-1}

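The values above follow the `${ENV_VAR:default}` placeholder convention inherited from SkyWalking: each property reads an override (environment variable or system property) and falls back to the default after the colon. Below is a minimal, hypothetical Java sketch of that convention, not the actual hippo4j-agent resolution code; the class and method names are invented for illustration.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical helper illustrating the "${NAME:default}" placeholder style used in agent.config above.
// Not the actual hippo4j-agent implementation.
public final class PlaceholderResolverSketch {

    private static final Pattern PLACEHOLDER = Pattern.compile("\\$\\{([^:}]+):?([^}]*)}");

    public static String resolve(String value) {
        Matcher m = PLACEHOLDER.matcher(value);
        if (!m.matches()) {
            return value; // plain literal, no placeholder to resolve
        }
        String name = m.group(1);
        String fallback = m.group(2);
        // In this sketch a system property wins over an environment variable of the same name.
        String resolved = System.getProperty(name, System.getenv(name));
        return resolved != null ? resolved : fallback;
    }

    public static void main(String[] args) {
        // Prints "Your_ApplicationName" unless SW_AGENT_NAME is set in the environment.
        System.out.println(resolve("${SW_AGENT_NAME:Your_ApplicationName}"));
    }
}
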
@ -0,0 +1,232 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================
Apache SkyWalking Subcomponents:
The Apache SkyWalking project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for these
subcomponents is subject to the terms and conditions of the following
licenses.
========================================================================
Apache 2.0 licenses
========================================================================
The following components are provided under the Apache License. See project link for details.
The text of each license is the standard Apache 2.0 license.
raphw (byte-buddy) 1.12.13: http://bytebuddy.net/ , Apache 2.0
Google: grpc-java 1.44.0: https://github.com/grpc/grpc-java, Apache 2.0
Google: gson 2.8.9: https://github.com/google/gson , Apache 2.0
Google: proto-google-common-protos 2.0.1: https://github.com/googleapis/googleapis , Apache 2.0
Google: jsr305 3.0.2: http://central.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.pom , Apache 2.0
netty 4.1.79: https://github.com/netty/netty/blob/4.1/LICENSE.txt, Apache 2.0
========================================================================
BSD licenses
========================================================================
The following components are provided under a BSD license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
asm 9.2:https://gitlab.ow2.org , BSD-3-Clause

@ -0,0 +1,299 @@
Opengoofy Hippo4j
Copyright 2017-2022 The Apache Software Foundation
This product includes software developed at
The Apache Software Foundation (http://www.apache.org/).
========================================================================
grpc-java NOTICE
========================================================================
Copyright 2014, gRPC Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------------------------
This product contains a modified portion of 'OkHttp', an open source
HTTP & SPDY client for Android and Java applications, which can be obtained
at:
* LICENSE:
* okhttp/third_party/okhttp/LICENSE (Apache License 2.0)
* HOMEPAGE:
* https://github.com/square/okhttp
* LOCATION_IN_GRPC:
* okhttp/third_party/okhttp
This product contains a modified portion of 'Netty', an open source
networking library, which can be obtained at:
* LICENSE:
* netty/third_party/netty/LICENSE.txt (Apache License 2.0)
* HOMEPAGE:
* https://netty.io
* LOCATION_IN_GRPC:
* netty/third_party/netty
========================================================================
grpc NOTICE
========================================================================
Copyright 2014 gRPC authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
========================================================================
netty NOTICE
========================================================================
The Netty Project
=================
Please visit the Netty web site for more information:
* http://netty.io/
Copyright 2014 The Netty Project
The Netty Project licenses this file to you under the Apache License,
version 2.0 (the "License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
Also, please refer to each LICENSE.<component>.txt file, which is located in
the 'license' directory of the distribution file, for the license terms of the
components that this product depends on.
-------------------------------------------------------------------------------
This product contains the extensions to Java Collections Framework which has
been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene:
* LICENSE:
* license/LICENSE.jsr166y.txt (Public Domain)
* HOMEPAGE:
* http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/
* http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/
This product contains a modified version of Robert Harder's Public Domain
Base64 Encoder and Decoder, which can be obtained at:
* LICENSE:
* license/LICENSE.base64.txt (Public Domain)
* HOMEPAGE:
* http://iharder.sourceforge.net/current/java/base64/
This product contains a modified portion of 'Webbit', an event based
WebSocket and HTTP server, which can be obtained at:
* LICENSE:
* license/LICENSE.webbit.txt (BSD License)
* HOMEPAGE:
* https://github.com/joewalnes/webbit
This product contains a modified portion of 'SLF4J', a simple logging
facade for Java, which can be obtained at:
* LICENSE:
* license/LICENSE.slf4j.txt (MIT License)
* HOMEPAGE:
* http://www.slf4j.org/
This product contains a modified portion of 'Apache Harmony', an open source
Java SE, which can be obtained at:
* NOTICE:
* license/NOTICE.harmony.txt
* LICENSE:
* license/LICENSE.harmony.txt (Apache License 2.0)
* HOMEPAGE:
* http://archive.apache.org/dist/harmony/
This product contains a modified portion of 'jbzip2', a Java bzip2 compression
and decompression library written by Matthew J. Francis. It can be obtained at:
* LICENSE:
* license/LICENSE.jbzip2.txt (MIT License)
* HOMEPAGE:
* https://code.google.com/p/jbzip2/
This product contains a modified portion of 'libdivsufsort', a C API library to construct
the suffix array and the Burrows-Wheeler transformed string for any input string of
a constant-size alphabet written by Yuta Mori. It can be obtained at:
* LICENSE:
* license/LICENSE.libdivsufsort.txt (MIT License)
* HOMEPAGE:
* https://github.com/y-256/libdivsufsort
This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM,
which can be obtained at:
* LICENSE:
* license/LICENSE.jctools.txt (ASL2 License)
* HOMEPAGE:
* https://github.com/JCTools/JCTools
This product optionally depends on 'JZlib', a re-implementation of zlib in
pure Java, which can be obtained at:
* LICENSE:
* license/LICENSE.jzlib.txt (BSD style License)
* HOMEPAGE:
* http://www.jcraft.com/jzlib/
This product optionally depends on 'Compress-LZF', a Java library for encoding and
decoding data in LZF format, written by Tatu Saloranta. It can be obtained at:
* LICENSE:
* license/LICENSE.compress-lzf.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/ning/compress
This product optionally depends on 'lz4', a LZ4 Java compression
and decompression library written by Adrien Grand. It can be obtained at:
* LICENSE:
* license/LICENSE.lz4.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/jpountz/lz4-java
This product optionally depends on 'lzma-java', a LZMA Java compression
and decompression library, which can be obtained at:
* LICENSE:
* license/LICENSE.lzma-java.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/jponge/lzma-java
This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression
and decompression library written by William Kinney. It can be obtained at:
* LICENSE:
* license/LICENSE.jfastlz.txt (MIT License)
* HOMEPAGE:
* https://code.google.com/p/jfastlz/
This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data
interchange format, which can be obtained at:
* LICENSE:
* license/LICENSE.protobuf.txt (New BSD License)
* HOMEPAGE:
* https://github.com/google/protobuf
This product optionally depends on 'Bouncy Castle Crypto APIs' to generate
a temporary self-signed X.509 certificate when the JVM does not provide the
equivalent functionality. It can be obtained at:
* LICENSE:
* license/LICENSE.bouncycastle.txt (MIT License)
* HOMEPAGE:
* http://www.bouncycastle.org/
This product optionally depends on 'Snappy', a compression library produced
by Google Inc, which can be obtained at:
* LICENSE:
* license/LICENSE.snappy.txt (New BSD License)
* HOMEPAGE:
* https://github.com/google/snappy
This product optionally depends on 'JBoss Marshalling', an alternative Java
serialization API, which can be obtained at:
* LICENSE:
* license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1)
* HOMEPAGE:
* http://www.jboss.org/jbossmarshalling
This product optionally depends on 'Caliper', Google's micro-
benchmarking framework, which can be obtained at:
* LICENSE:
* license/LICENSE.caliper.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/google/caliper
This product optionally depends on 'Apache Commons Logging', a logging
framework, which can be obtained at:
* LICENSE:
* license/LICENSE.commons-logging.txt (Apache License 2.0)
* HOMEPAGE:
* http://commons.apache.org/logging/
This product optionally depends on 'Apache Log4J', a logging framework, which
can be obtained at:
* LICENSE:
* license/LICENSE.log4j.txt (Apache License 2.0)
* HOMEPAGE:
* http://logging.apache.org/log4j/
This product optionally depends on 'Aalto XML', an ultra-high performance
non-blocking XML processor, which can be obtained at:
* LICENSE:
* license/LICENSE.aalto-xml.txt (Apache License 2.0)
* HOMEPAGE:
* http://wiki.fasterxml.com/AaltoHome
This product contains a modified version of 'HPACK', a Java implementation of
the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at:
* LICENSE:
* license/LICENSE.hpack.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/twitter/hpack
This product contains a modified portion of 'Apache Commons Lang', a Java library
provides utilities for the java.lang API, which can be obtained at:
* LICENSE:
* license/LICENSE.commons-lang.txt (Apache License 2.0)
* HOMEPAGE:
* https://commons.apache.org/proper/commons-lang/
This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build.
* LICENSE:
* license/LICENSE.mvn-wrapper.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/takari/maven-wrapper

@ -0,0 +1,26 @@
Copyright (c) 2012 France Télécom
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holders nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,125 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent</artifactId>
<version>${revision}</version>
</parent>
<artifactId>hippo4j-agent-bootstrap</artifactId>
<packaging>jar</packaging>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<premain.class>cn.hippo4j.agent.bootstrap.Hippo4jAgent</premain.class>
<can.redefine.classes>true</can.redefine.classes>
<can.retransform.classes>true</can.retransform.classes>
<shade.net.bytebuddy.source>net.bytebuddy</shade.net.bytebuddy.source>
<shade.net.bytebuddy.target>${shade.package}.${shade.net.bytebuddy.source}</shade.net.bytebuddy.target>
</properties>
<dependencies>
<dependency>
<groupId>cn.hippo4j</groupId>
<artifactId>hippo4j-agent-core</artifactId>
</dependency>
</dependencies>
<build>
<finalName>hippo4j-agent</finalName>
<plugins>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>false</shadedArtifactAttached>
<createDependencyReducedPom>true</createDependencyReducedPom>
<createSourcesJar>true</createSourcesJar>
<shadeSourcesContent>true</shadeSourcesContent>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<manifestEntries>
<Premain-Class>${premain.class}</Premain-Class>
<Can-Redefine-Classes>${can.redefine.classes}</Can-Redefine-Classes>
<Can-Retransform-Classes>${can.retransform.classes}</Can-Retransform-Classes>
</manifestEntries>
</transformer>
</transformers>
<artifactSet>
<excludes>
<exclude>*:gson</exclude>
<exclude>io.grpc:*</exclude>
<exclude>io.netty:*</exclude>
<exclude>io.opencensus:*</exclude>
<exclude>com.google.*:*</exclude>
<exclude>com.google.guava:guava</exclude>
<exclude>org.checkerframework:checker-compat-qual</exclude>
<exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
<exclude>io.perfmark:*</exclude>
<exclude>org.slf4j:*</exclude>
</excludes>
</artifactSet>
<relocations>
<relocation>
<pattern>${shade.net.bytebuddy.source}</pattern>
<shadedPattern>${shade.net.bytebuddy.target}</shadedPattern>
</relocation>
</relocations>
<filters>
<filter>
<artifact>net.bytebuddy:byte-buddy</artifact>
<excludes>
<exclude>META-INF/versions/9/module-info.class</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>clean</id>
<phase>clean</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<delete dir="${project.basedir}/../skywalking-agent" />
</target>
</configuration>
</execution>
<execution>
<id>package</id>
<phase>package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir dir="${project.basedir}/../hippo4j-agent" />
<copy file="${project.build.directory}/hippo4j-agent.jar" tofile="${project.basedir}/../hippo4j-agent/hippo4j-agent.jar" overwrite="true" />
<mkdir dir="${project.basedir}/../hippo4j-agent/config" />
<mkdir dir="${project.basedir}/../hippo4j-agent/logs" />
<copydir src="${project.basedir}/../config" dest="${project.basedir}/../hippo4j-agent/config" forceoverwrite="true" />
<copydir src="${project.basedir}/../dist-material" dest="${project.basedir}/../hippo4j-agent" />
</target>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

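The `Premain-Class`, `Can-Redefine-Classes`, and `Can-Retransform-Classes` manifest entries written by the shade plugin above are what allow the shaded `hippo4j-agent.jar` to be attached with `-javaagent`: before the application's `main` runs, the JVM loads the declared class and calls its `premain(String, Instrumentation)` method, the standard `java.lang.instrument` contract. A stripped-down sketch of that contract (independent of the hippo4j-specific bootstrap class that follows):

import java.lang.instrument.Instrumentation;

// Minimal java.lang.instrument premain skeleton. The JVM calls premain before the application's
// main method when the jar is attached via -javaagent and its manifest names this class as Premain-Class.
public final class MinimalPremain {

    public static void premain(String agentArgs, Instrumentation instrumentation) {
        // agentArgs carries everything after the '=' in -javaagent:agent.jar=<agentArgs>.
        System.out.println("premain called with args: " + agentArgs);
        // The Instrumentation handle is what an agent uses to register ClassFileTransformers
        // and to retransform already-loaded classes.
        System.out.println("retransformation supported: " + instrumentation.isRetransformClassesSupported());
    }
}
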
@ -0,0 +1,238 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.bootstrap;
import cn.hippo4j.agent.core.boot.AgentPackageNotFoundException;
import cn.hippo4j.agent.core.boot.ServiceManager;
import cn.hippo4j.agent.core.conf.Config;
import cn.hippo4j.agent.core.conf.SnifferConfigInitializer;
import cn.hippo4j.agent.core.jvm.LoadedLibraryCollector;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.plugin.*;
import cn.hippo4j.agent.core.plugin.bootstrap.BootstrapInstrumentBoost;
import cn.hippo4j.agent.core.plugin.bytebuddy.CacheableTransformerDecorator;
import cn.hippo4j.agent.core.plugin.jdk9module.JDK9ModuleExporter;
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.description.NamedElement;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.dynamic.DynamicType;
import net.bytebuddy.dynamic.scaffold.TypeValidation;
import net.bytebuddy.matcher.ElementMatcher;
import net.bytebuddy.matcher.ElementMatchers;
import net.bytebuddy.utility.JavaModule;
import java.lang.instrument.Instrumentation;
import java.security.ProtectionDomain;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static net.bytebuddy.matcher.ElementMatchers.*;
public class Hippo4jAgent {
private static ILog LOGGER = LogManager.getLogger(Hippo4jAgent.class);
/**
* Main entrance. Uses byte-buddy transformations to enhance all classes defined in plugins.
*/
public static void premain(String agentArgs, Instrumentation instrumentation) throws PluginException {
final PluginFinder pluginFinder;
try {
SnifferConfigInitializer.initializeCoreConfig(agentArgs);
} catch (Exception e) {
// try to resolve a new logger, and use the new logger to write the error log here
LogManager.getLogger(Hippo4jAgent.class)
.error(e, "Hippo4j agent initialized failure. Shutting down.");
return;
} finally {
// refresh logger again after initialization finishes
LOGGER = LogManager.getLogger(Hippo4jAgent.class);
}
try {
pluginFinder = new PluginFinder(new PluginBootstrap().loadPlugins());
} catch (AgentPackageNotFoundException ape) {
LOGGER.error(ape, "Locate agent.jar failure. Shutting down.");
return;
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent initialized failure. Shutting down.");
return;
}
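// Build ByteBuddy with type validation enabled only when instrumented-class debugging is switched on.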
final ByteBuddy byteBuddy = new ByteBuddy().with(TypeValidation.of(Config.Agent.IS_OPEN_DEBUGGING_CLASS));
AgentBuilder agentBuilder = new AgentBuilder.Default(byteBuddy).ignore(
nameStartsWith("net.bytebuddy.")
.or(nameStartsWith("org.slf4j."))
.or(nameStartsWith("org.groovy."))
.or(nameContains("javassist"))
.or(nameContains(".asm."))
.or(nameContains(".reflectasm."))
.or(nameStartsWith("sun.reflect"))
.or(allHippo4jAgentExcludeToolkit())
.or(ElementMatchers.isSynthetic()));
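// Classes collected into edgeClasses during bootstrap injection are used below to open JDK 9+ module read edges.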
JDK9ModuleExporter.EdgeClasses edgeClasses = new JDK9ModuleExporter.EdgeClasses();
try {
agentBuilder = BootstrapInstrumentBoost.inject(pluginFinder, instrumentation, agentBuilder, edgeClasses);
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent inject bootstrap instrumentation failure. Shutting down.");
return;
}
try {
agentBuilder = JDK9ModuleExporter.openReadEdge(instrumentation, agentBuilder, edgeClasses);
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent open read edge in JDK 9+ failure. Shutting down.");
return;
}
if (Config.Agent.IS_CACHE_ENHANCED_CLASS) {
try {
agentBuilder = agentBuilder.with(new CacheableTransformerDecorator(Config.Agent.CLASS_CACHE_MODE));
LOGGER.info("Hippo4j agent class cache [{}] activated.", Config.Agent.CLASS_CACHE_MODE);
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent can't active class cache.");
}
}
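// Match types via the loaded plugins, register the transformer and listeners, and install on the instrumentation.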
agentBuilder.type(pluginFinder.buildMatch())
.transform(new Transformer(pluginFinder))
.with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION)
.with(new RedefinitionListener())
.with(new Listener())
.installOn(instrumentation);
PluginFinder.pluginInitCompleted();
try {
ServiceManager.INSTANCE.boot();
} catch (Exception e) {
LOGGER.error(e, "Hippo4j agent boot failure.");
}
Runtime.getRuntime()
.addShutdownHook(new Thread(ServiceManager.INSTANCE::shutdown, "hippo4j service shutdown thread"));
}
private static class Transformer implements AgentBuilder.Transformer {
private PluginFinder pluginFinder;
Transformer(PluginFinder pluginFinder) {
this.pluginFinder = pluginFinder;
}
@Override
public DynamicType.Builder<?> transform(final DynamicType.Builder<?> builder,
final TypeDescription typeDescription,
final ClassLoader classLoader,
final JavaModule javaModule,
final ProtectionDomain protectionDomain) {
LoadedLibraryCollector.registerURLClassLoader(classLoader);
List<AbstractClassEnhancePluginDefine> pluginDefines = pluginFinder.find(typeDescription);
if (pluginDefines.size() > 0) {
DynamicType.Builder<?> newBuilder = builder;
EnhanceContext context = new EnhanceContext();
for (AbstractClassEnhancePluginDefine define : pluginDefines) {
DynamicType.Builder<?> possibleNewBuilder = define.define(
typeDescription, newBuilder, classLoader, context);
if (possibleNewBuilder != null) {
newBuilder = possibleNewBuilder;
}
}
if (context.isEnhanced()) {
LOGGER.debug("Finish the prepare stage for {}.", typeDescription.getName());
}
return newBuilder;
}
LOGGER.debug("Matched class {}, but ignore by finding mechanism.", typeDescription.getTypeName());
return builder;
}
}
private static ElementMatcher.Junction<NamedElement> allHippo4jAgentExcludeToolkit() {
return nameStartsWith("cn.hippo4j.agent").and(not(nameStartsWith("cn.hippo4j.agent.toolkit.")));
}
private static class Listener implements AgentBuilder.Listener {
@Override
public void onDiscovery(String typeName, ClassLoader classLoader, JavaModule module, boolean loaded) {
}
@Override
public void onTransformation(final TypeDescription typeDescription,
final ClassLoader classLoader,
final JavaModule module,
final boolean loaded,
final DynamicType dynamicType) {
if (LOGGER.isDebugEnable()) {
LOGGER.debug("On Transformation class {}.", typeDescription.getName());
}
InstrumentDebuggingClass.INSTANCE.log(dynamicType);
}
@Override
public void onIgnored(final TypeDescription typeDescription,
final ClassLoader classLoader,
final JavaModule module,
final boolean loaded) {
}
@Override
public void onError(final String typeName,
final ClassLoader classLoader,
final JavaModule module,
final boolean loaded,
final Throwable throwable) {
LOGGER.error("Enhance class " + typeName + " error.", throwable);
}
@Override
public void onComplete(String typeName, ClassLoader classLoader, JavaModule module, boolean loaded) {
}
}
private static class RedefinitionListener implements AgentBuilder.RedefinitionStrategy.Listener {
@Override
public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) {
/* do nothing */
}
@Override
public Iterable<? extends List<Class<?>>> onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) {
LOGGER.error(throwable, "index={}, batch={}, types={}", index, batch, types);
return Collections.emptyList();
}
@Override
public void onComplete(int amount, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) {
/* do nothing */
}
}
}

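To exercise the premain path end to end, the shaded jar that the antrun step copies to `hippo4j-agent/hippo4j-agent.jar` can be attached to any application at startup. Below is a hedged sketch of a throwaway target class; the launch command in its comment assumes the jar location produced by the build above, and it assumes (following the SkyWalking layout this agent mirrors) that `config/agent.config` and `logs/` are read relative to the jar.

// Throwaway target used only to observe agent startup, e.g.:
//   java -javaagent:/path/to/hippo4j-agent/hippo4j-agent.jar -cp . AgentSmokeTarget
// The config/ and logs/ directories sit next to the jar per the antrun copy step above; reading
// agent.config from there is assumed from the SkyWalking convention, not confirmed by this commit.
public final class AgentSmokeTarget {

    public static void main(String[] args) throws InterruptedException {
        // With the agent attached, Hippo4jAgent.premain has already run by the time main is reached.
        System.out.println("application main reached; check the agent log output for enhancement details");
        // Keep the JVM alive briefly so background agent services booted by ServiceManager can run.
        Thread.sleep(5_000L);
    }
}
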
@ -93,6 +93,12 @@
<version>1.12.13</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.20</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId>
@ -102,7 +108,7 @@
<dependency>
<groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock</artifactId>
<version>2.6.0</version> <version>2.16.0</version>
<scope>test</scope>
<exclusions>
<exclusion>
@ -153,6 +159,10 @@
<artifactId>json-path</artifactId>
<groupId>com.jayway.jsonpath</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>jopt-simple</artifactId>
<groupId>net.sf.jopt-simple</groupId>
@ -196,116 +206,30 @@
<artifactId>jmh-generator-annprocess</artifactId>
<version>1.33</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>2.0.7</version>
<scope>test</scope>
<exclusions> <exclusions>
<exclusion> <exclusion>
<artifactId>powermock-api-support</artifactId>
<groupId>org.powermock</groupId>
</exclusion>
<exclusion>
<artifactId>mockito-core</artifactId>
<groupId>org.mockito</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.objenesis</groupId>
<artifactId>objenesis</artifactId>
<version>3.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-core</artifactId> <artifactId>jmh-core</artifactId>
<version>1.33</version> <groupId>org.openjdk.jmh</groupId>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>commons-math3</artifactId>
<groupId>org.apache.commons</groupId>
</exclusion>
<exclusion>
<artifactId>jopt-simple</artifactId>
<groupId>net.sf.jopt-simple</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.20</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
<version>1.3.2</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>2.0.7</version>
<scope>test</scope>
<exclusions>
<exclusion>
<artifactId>powermock-module-junit4-common</artifactId>
<groupId>org.powermock</groupId>
</exclusion>
<exclusion>
<artifactId>hamcrest-core</artifactId>
<groupId>org.hamcrest</groupId>
</exclusion>
<exclusion>
<artifactId>junit</artifactId>
<groupId>junit</groupId>
</exclusion> </exclusion>
</exclusions> </exclusions>
</dependency> </dependency>
</dependencies> </dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>${bytebuddy.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<properties> <properties>
<shade.io.grpc.target>${shade.package}.${shade.io.grpc.source}</shade.io.grpc.target>
<shade.org.slf4j.target>${shade.package}.${shade.org.slf4j.source}</shade.org.slf4j.target>
<shade.io.opencensus.target>${shade.package}.${shade.io.opencensus.source}</shade.io.opencensus.target>
<shade.io.netty.source>io.netty</shade.io.netty.source>
<shade.io.perfmark.target>${shade.package}.${shade.io.perfmark.source}</shade.io.perfmark.target> <shade.io.perfmark.target>${shade.package}.${shade.io.perfmark.source}</shade.io.perfmark.target>
<shade.com.google.source>com.google</shade.com.google.source>
<ststem-rules.version>1.18.0</ststem-rules.version>
<shade.io.grpc.source>io.grpc</shade.io.grpc.source>
<shade.org.slf4j.source>org.slf4j</shade.org.slf4j.source>
<shade.io.opencensus.source>io.opencensus</shade.io.opencensus.source> <shade.io.opencensus.source>io.opencensus</shade.io.opencensus.source>
<shade.org.slf4j.target>${shade.package}.${shade.org.slf4j.source}</shade.org.slf4j.target> <shade.io.perfmark.source>io.perfmark</shade.io.perfmark.source>
<shade.io.grpc.target>${shade.package}.${shade.io.grpc.source}</shade.io.grpc.target>
<netty-tcnative-boringssl-static.version>2.0.7.Final</netty-tcnative-boringssl-static.version>
<os-maven-plugin.version>1.4.1.Final</os-maven-plugin.version> <os-maven-plugin.version>1.4.1.Final</os-maven-plugin.version>
<shade.io.netty.source>io.netty</shade.io.netty.source>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<wiremock.version>2.6.0</wiremock.version>
<ststem-rules.version>1.18.0</ststem-rules.version>
<shade.io.opencensus.target>${shade.package}.${shade.io.opencensus.source}</shade.io.opencensus.target>
<slf4j.version>1.7.25</slf4j.version>
<shade.io.netty.target>${shade.package}.${shade.io.netty.source}</shade.io.netty.target> <shade.io.netty.target>${shade.package}.${shade.io.netty.source}</shade.io.netty.target>
<shade.com.google.source>com.google</shade.com.google.source> <netty-tcnative-boringssl-static.version>2.0.7.Final</netty-tcnative-boringssl-static.version>
<guava.version>30.1.1-jre</guava.version>
<shade.io.perfmark.source>io.perfmark</shade.io.perfmark.source>
<shade.org.slf4j.source>org.slf4j</shade.org.slf4j.source>
<shade.io.grpc.source>io.grpc</shade.io.grpc.source>
<shade.com.google.target>${shade.package}.${shade.com.google.source}</shade.com.google.target> <shade.com.google.target>${shade.package}.${shade.com.google.source}</shade.com.google.target>
</properties> </properties>
</project> </project>

@ -14,8 +14,6 @@
<properties> <properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<guava.version>30.1.1-jre</guava.version>
<wiremock.version>2.6.0</wiremock.version>
<netty-tcnative-boringssl-static.version>2.0.7.Final</netty-tcnative-boringssl-static.version> <netty-tcnative-boringssl-static.version>2.0.7.Final</netty-tcnative-boringssl-static.version>
<os-maven-plugin.version>1.4.1.Final</os-maven-plugin.version> <os-maven-plugin.version>1.4.1.Final</os-maven-plugin.version>
<shade.com.google.source>com.google</shade.com.google.source> <shade.com.google.source>com.google</shade.com.google.source>
@ -31,7 +29,6 @@
<shade.org.slf4j.source>org.slf4j</shade.org.slf4j.source> <shade.org.slf4j.source>org.slf4j</shade.org.slf4j.source>
<shade.org.slf4j.target>${shade.package}.${shade.org.slf4j.source}</shade.org.slf4j.target> <shade.org.slf4j.target>${shade.package}.${shade.org.slf4j.source}</shade.org.slf4j.target>
<ststem-rules.version>1.18.0</ststem-rules.version> <ststem-rules.version>1.18.0</ststem-rules.version>
<slf4j.version>1.7.25</slf4j.version>
</properties> </properties>
<dependencies> <dependencies>
@ -42,19 +39,19 @@
<dependency> <dependency>
<groupId>com.google.code.gson</groupId> <groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId> <artifactId>gson</artifactId>
<version>${gson.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency> <dependency>
<groupId>net.bytebuddy</groupId> <groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId> <artifactId>byte-buddy-agent</artifactId>
<version>${bytebuddy.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>com.github.tomakehurst</groupId> <groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock</artifactId> <artifactId>wiremock</artifactId>
<version>${wiremock.version}</version>
<scope>test</scope> <scope>test</scope>
<exclusions> <exclusions>
<exclusion> <exclusion>
@ -82,30 +79,8 @@
<artifactId>jmh-generator-annprocess</artifactId> <artifactId>jmh-generator-annprocess</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
<version>${bytebuddy.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.version}</version>
</dependency>
</dependencies> </dependencies>
</dependencyManagement>
<build> <build>
<plugins> <plugins>
<plugin> <plugin>

@ -26,7 +26,7 @@ import java.net.URISyntaxException;
import java.net.URL; import java.net.URL;
/** /**
* AgentPackagePath is a flag and finder to locate the SkyWalking agent.jar. It gets the absolute path of the agent jar. * AgentPackagePath is a flag and finder to locate the Hippo4j agent.jar. It gets the absolute path of the agent jar.
* The path is the required metadata for agent core looking up the plugins and toolkit activations. If the lookup * The path is the required metadata for agent core looking up the plugins and toolkit activations. If the lookup
* mechanism fails, the agent will exit directly. * mechanism fails, the agent will exit directly.
*/ */

@ -27,7 +27,7 @@ public class DefaultNamedThreadFactory implements ThreadFactory {
private final String namePrefix; private final String namePrefix;
public DefaultNamedThreadFactory(String name) { public DefaultNamedThreadFactory(String name) {
namePrefix = "SkywalkingAgent-" + BOOT_SERVICE_SEQ.incrementAndGet() + "-" + name + "-"; namePrefix = "Hippo4jAgent-" + BOOT_SERVICE_SEQ.incrementAndGet() + "-" + name + "-";
} }
@Override @Override
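With the renamed prefix, agent threads now show up in thread dumps as `Hippo4jAgent-<factorySeq>-<name>-<threadSeq>`. Below is a standalone re-statement of that naming convention for illustration only; the class name and the per-factory thread counter are assumptions, not the project's `DefaultNamedThreadFactory` itself.

```java
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

// Illustrative sketch of the naming convention only, not the agent's own factory.
public class NamedThreadFactorySketch implements ThreadFactory {

    private static final AtomicInteger FACTORY_SEQ = new AtomicInteger();
    private final AtomicInteger threadSeq = new AtomicInteger();
    private final String namePrefix;

    public NamedThreadFactorySketch(String name) {
        this.namePrefix = "Hippo4jAgent-" + FACTORY_SEQ.incrementAndGet() + "-" + name + "-";
    }

    @Override
    public Thread newThread(Runnable r) {
        // Produces names such as "Hippo4jAgent-1-ServiceScheduler-1".
        return new Thread(r, namePrefix + threadSeq.incrementAndGet());
    }

    public static void main(String[] args) {
        ExecutorService pool = Executors.newSingleThreadExecutor(new NamedThreadFactorySketch("ServiceScheduler"));
        pool.submit(() -> System.out.println(Thread.currentThread().getName()));
        pool.shutdown();
    }
}
```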

@ -24,7 +24,7 @@ import java.lang.annotation.Target;
/** /**
* ConfigInitializationService provides the config class which should host all parameters originally from agent setup. * ConfigInitializationService provides the config class which should host all parameters originally from agent setup.
* {@link org.apache.skywalking.apm.agent.core.conf.Config} provides the core level config, all plugins could implement * {@link cn.hippo4j.agent.core.conf.Config} provides the core level config, all plugins could implement
* this interface to have the same capability about initializing config from agent.config, system properties and system * this interface to have the same capability about initializing config from agent.config, system properties and system
* environment variables. * environment variables.
*/ */

@ -82,14 +82,14 @@ public class Config {
public static String AUTHENTICATION = ""; public static String AUTHENTICATION = "";
/** /**
* If true, SkyWalking agent will save all instrumented classes files in `/debugging` folder. SkyWalking team * If true, Hippo4j agent will save all instrumented classes files in `/debugging` folder. Hippo4j team
* may ask for these files in order to resolve compatible problem. * may ask for these files in order to resolve compatible problem.
*/ */
public static boolean IS_OPEN_DEBUGGING_CLASS = false; public static boolean IS_OPEN_DEBUGGING_CLASS = false;
/** /**
* If true, SkyWalking agent will cache all instrumented classes to memory or disk files (decided by class cache * If true, Hippo4j agent will cache all instrumented classes to memory or disk files (decided by class cache
* mode), allow other javaagent to enhance those classes that enhanced by SkyWalking agent. * mode), allow other javaagent to enhance those classes that enhanced by Hippo4j agent.
*/ */
public static boolean IS_CACHE_ENHANCED_CLASS = false; public static boolean IS_CACHE_ENHANCED_CLASS = false;
@ -108,7 +108,7 @@ public class Config {
/** /**
* service instance properties in json format. e.g. agent.instance_properties_json = {"org": * service instance properties in json format. e.g. agent.instance_properties_json = {"org":
* "apache-skywalking"} * "cn.hippo4j"}
*/ */
public static String INSTANCE_PROPERTIES_JSON = ""; public static String INSTANCE_PROPERTIES_JSON = "";
@ -181,7 +181,7 @@ public class Config {
*/ */
public static int PROPERTIES_REPORT_PERIOD_FACTOR = 10; public static int PROPERTIES_REPORT_PERIOD_FACTOR = 10;
/** /**
* Collector skywalking trace receiver service addresses. * Collector hippo4j trace receiver service addresses.
*/ */
public static String BACKEND_SERVICE = ""; public static String BACKEND_SERVICE = "";
/** /**
@ -197,7 +197,7 @@ public class Config {
*/ */
public static int GET_AGENT_DYNAMIC_CONFIG_INTERVAL = 20; public static int GET_AGENT_DYNAMIC_CONFIG_INTERVAL = 20;
/** /**
* If true, skywalking agent will enable periodically resolving DNS to update receiver service addresses. * If true, hippo4j agent will enable periodically resolving DNS to update receiver service addresses.
*/ */
public static boolean IS_RESOLVE_DNS_PERIODICALLY = false; public static boolean IS_RESOLVE_DNS_PERIODICALLY = false;
} }
@ -205,7 +205,7 @@ public class Config {
public static class Profile { public static class Profile {
/** /**
* If true, skywalking agent will enable profile when user create a new profile task. Otherwise disable * If true, hippo4j agent will enable profile when user create a new profile task. Otherwise disable
* profile. * profile.
*/ */
public static boolean ACTIVE = true; public static boolean ACTIVE = true;
@ -234,7 +234,7 @@ public class Config {
public static class Meter { public static class Meter {
/** /**
* If true, skywalking agent will enable sending meters. Otherwise disable meter report. * If true, Hippo4j agent will enable sending meters. Otherwise disable meter report.
*/ */
public static boolean ACTIVE = true; public static boolean ACTIVE = true;
@ -277,11 +277,11 @@ public class Config {
/** /**
* Log file name. * Log file name.
*/ */
public static String FILE_NAME = "skywalking-api.log"; public static String FILE_NAME = "hippo4j-api.log";
/** /**
* Log files directory. Default is blank string, means, use "{theSkywalkingAgentJarDir}/logs " to output logs. * Log files directory. Default is blank string, means, use "{theHippo4jAgentJarDir}/logs " to output logs.
* {theSkywalkingAgentJarDir} is the directory where the skywalking agent jar file is located. * {theHippo4jAgentJarDir} is the directory where the hippo4j agent jar file is located.
* <p> * <p>
* Ref to {@link WriterFactory#getLogWriter()} * Ref to {@link WriterFactory#getLogWriter()}
*/ */

@ -42,9 +42,9 @@ import static cn.hippo4j.agent.core.conf.Constants.SERVICE_NAME_PART_CONNECTOR;
public class SnifferConfigInitializer { public class SnifferConfigInitializer {
private static ILog LOGGER = LogManager.getLogger(SnifferConfigInitializer.class); private static ILog LOGGER = LogManager.getLogger(SnifferConfigInitializer.class);
private static final String SPECIFIED_CONFIG_PATH = "skywalking_config"; private static final String SPECIFIED_CONFIG_PATH = "hippo4j_config";
private static final String DEFAULT_CONFIG_FILE_NAME = "/config/agent.config"; private static final String DEFAULT_CONFIG_FILE_NAME = "/config/agent.config";
private static final String ENV_KEY_PREFIX = "skywalking."; private static final String ENV_KEY_PREFIX = "hippo4j.";
private static Properties AGENT_SETTINGS; private static Properties AGENT_SETTINGS;
private static boolean IS_INIT_COMPLETED = false; private static boolean IS_INIT_COMPLETED = false;
@ -54,7 +54,7 @@ public class SnifferConfigInitializer {
* /config directory of agent package. * /config directory of agent package.
* <p> * <p>
* Also try to override the config by system.properties. All the keys in this place should start with {@link * Also try to override the config by system.properties. All the keys in this place should start with {@link
* #ENV_KEY_PREFIX}. e.g. in env `skywalking.agent.service_name=yourAppName` to override `agent.service_name` in * #ENV_KEY_PREFIX}. e.g. in env `hippo4j.agent.service_name=yourAppName` to override `agent.service_name` in
* config file. * config file.
* <p> * <p>
* At the end, `agent.service_name` and `collector.servers` must not be blank. * At the end, `agent.service_name` and `collector.servers` must not be blank.
@ -69,7 +69,7 @@ public class SnifferConfigInitializer {
} }
} catch (Exception e) { } catch (Exception e) {
LOGGER.error(e, "Failed to read the config file, skywalking is going to run in default config."); LOGGER.error(e, "Failed to read the config file, hippo4j is going to run in default config.");
} }
try { try {
@ -180,10 +180,10 @@ public class SnifferConfigInitializer {
} }
/** /**
* Override the config by system properties. The property key must start with `skywalking`, the result should be as * Override the config by system properties. The property key must start with `hippo4j`, the result should be as
* same as in `agent.config` * same as in `agent.config`
* <p> * <p>
* such as: Property key of `agent.service_name` should be `skywalking.agent.service_name` * such as: Property key of `agent.service_name` should be `hippo4j.agent.service_name`
*/ */
private static void overrideConfigBySystemProp() { private static void overrideConfigBySystemProp() {
Properties systemProperties = System.getProperties(); Properties systemProperties = System.getProperties();
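To make the renamed override path concrete, the sketch below shows how a `hippo4j.`-prefixed system property maps back onto an `agent.config` key, as the javadoc above describes. The helper name `collectOverrides` is invented for the example; it is not the project's `overrideConfigBySystemProp` implementation.

```java
import java.util.Properties;

public class SystemPropOverrideSketch {

    private static final String ENV_KEY_PREFIX = "hippo4j.";

    // Strip the "hippo4j." prefix so "hippo4j.agent.service_name" overrides "agent.service_name".
    static Properties collectOverrides(Properties systemProperties) {
        Properties overrides = new Properties();
        for (String key : systemProperties.stringPropertyNames()) {
            if (key.startsWith(ENV_KEY_PREFIX)) {
                overrides.setProperty(key.substring(ENV_KEY_PREFIX.length()), systemProperties.getProperty(key));
            }
        }
        return overrides;
    }

    public static void main(String[] args) {
        System.setProperty("hippo4j.agent.service_name", "yourAppName");
        // Prints "yourAppName": the prefixed JVM property wins over the value in agent.config.
        System.out.println(collectOverrides(System.getProperties()).getProperty("agent.service_name"));
    }
}
```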

@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.hippo4j.agent.core.jvm;
import cn.hippo4j.agent.core.logging.api.ILog;
import cn.hippo4j.agent.core.logging.api.LogManager;
import cn.hippo4j.agent.core.util.CollectionUtil;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.File;
import java.lang.management.ManagementFactory;
import java.net.URL;
import java.net.URLClassLoader;
import java.text.SimpleDateFormat;
import java.util.*;
public class LoadedLibraryCollector {
private static final ILog LOGGER = LogManager.getLogger(LoadedLibraryCollector.class);
private static final String JAR_SEPARATOR = "!";
private static Set<ClassLoader> CURRENT_URL_CLASSLOADER_SET = new HashSet<>();
private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
/**
* Prevent OOM in special scenes
*/
private static int CURRENT_URL_CLASSLOADER_SET_MAX_SIZE = 50;
public static void registerURLClassLoader(ClassLoader classLoader) {
if (CURRENT_URL_CLASSLOADER_SET.size() < CURRENT_URL_CLASSLOADER_SET_MAX_SIZE && classLoader instanceof URLClassLoader) {
CURRENT_URL_CLASSLOADER_SET.add(classLoader);
}
}
private static String getVmStartTime() {
long startTime = ManagementFactory.getRuntimeMXBean().getStartTime();
return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date(startTime));
}
private static List<String> getVmArgs() {
List<String> vmArgs = ManagementFactory.getRuntimeMXBean().getInputArguments();
List<String> sortedVmArgs = new ArrayList<>(vmArgs);
Collections.sort(sortedVmArgs);
return sortedVmArgs;
}
private static List<String> getLibJarNames() {
List<URL> classLoaderUrls = loadClassLoaderUrls();
return extractLibJarNamesFromURLs(classLoaderUrls);
}
private static List<URL> loadClassLoaderUrls() {
List<URL> classLoaderUrls = new ArrayList<>();
for (ClassLoader classLoader : CURRENT_URL_CLASSLOADER_SET) {
try {
URLClassLoader webappClassLoader = (URLClassLoader) classLoader;
URL[] urls = webappClassLoader.getURLs();
classLoaderUrls.addAll(Arrays.asList(urls));
} catch (Exception e) {
LOGGER.warn("Load classloader urls exception: {}", e.getMessage());
}
}
return classLoaderUrls;
}
private static List<String> extractLibJarNamesFromURLs(List<URL> urls) {
Set<String> libJarNames = new HashSet<>();
for (URL url : urls) {
try {
String libJarName = extractLibJarName(url);
if (libJarName.endsWith(".jar")) {
libJarNames.add(libJarName);
}
} catch (Exception e) {
LOGGER.warn("Extracting library name exception: {}", e.getMessage());
}
}
List<String> sortedLibJarNames = new ArrayList<>(libJarNames.size());
if (!CollectionUtil.isEmpty(libJarNames)) {
sortedLibJarNames.addAll(libJarNames);
Collections.sort(sortedLibJarNames);
}
return sortedLibJarNames;
}
private static String extractLibJarName(URL url) {
String protocol = url.getProtocol();
if (protocol.equals("file")) {
return extractNameFromFile(url.toString());
} else if (protocol.equals("jar")) {
return extractNameFromJar(url.toString());
} else {
return "";
}
}
private static String extractNameFromFile(String fileUri) {
int lastIndexOfSeparator = fileUri.lastIndexOf(File.separator);
if (lastIndexOfSeparator < 0) {
return fileUri;
} else {
return fileUri.substring(lastIndexOfSeparator + 1);
}
}
private static String extractNameFromJar(String jarUri) {
String uri = jarUri.substring(0, jarUri.lastIndexOf(JAR_SEPARATOR));
return extractNameFromFile(uri);
}
}
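A brief, hypothetical usage sketch for the new `LoadedLibraryCollector`: an agent component registers each `URLClassLoader` it observes so the collector can later report the sorted jar names on those classpaths (the reporting entry point itself is outside this hunk). The sample jar URL is invented, and running this assumes `hippo4j-agent-core` is on the classpath.

```java
import java.net.URL;
import java.net.URLClassLoader;

import cn.hippo4j.agent.core.jvm.LoadedLibraryCollector;

public class LoadedLibraryCollectorUsageSketch {

    public static void main(String[] args) throws Exception {
        // Register a URLClassLoader so its jar URLs are included in later reports.
        // Non-URLClassLoader instances, and registrations past the cap of 50, are silently ignored.
        URLClassLoader loader = new URLClassLoader(new URL[]{new URL("file:/opt/app/lib/gson-2.8.6.jar")});
        LoadedLibraryCollector.registerURLClassLoader(loader);
    }
}
```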

@ -24,7 +24,7 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
/** /**
* An alternative logger for the SkyWalking agent. The default layout is * An alternative logger for the Hippo4j agent. The default layout is
* { * {
* "@timestamp": "", // timestamp * "@timestamp": "", // timestamp
* "logger": "", // name of the Logger * "logger": "", // name of the Logger

@ -22,7 +22,7 @@ package cn.hippo4j.agent.core.plugin;
*/ */
public class ByteBuddyCoreClasses { public class ByteBuddyCoreClasses {
private static final String SHADE_PACKAGE = "org.apache.skywalking.apm.dependencies."; private static final String SHADE_PACKAGE = "cn.hippo4j.agent.dependencies.";
public static final String[] CLASSES = { public static final String[] CLASSES = {
SHADE_PACKAGE + "net.bytebuddy.implementation.bind.annotation.RuntimeType", SHADE_PACKAGE + "net.bytebuddy.implementation.bind.annotation.RuntimeType",

@ -46,7 +46,7 @@ public class PluginBootstrap {
List<URL> resources = resolver.getResources(); List<URL> resources = resolver.getResources();
if (resources == null || resources.size() == 0) { if (resources == null || resources.size() == 0) {
LOGGER.info("no plugin files (skywalking-plugin.def) found, continue to start application."); LOGGER.info("no plugin files (hippo4j-plugin.def) found, continue to start application.");
return new ArrayList<AbstractClassEnhancePluginDefine>(); return new ArrayList<AbstractClassEnhancePluginDefine>();
} }

@ -28,7 +28,7 @@ import java.util.Enumeration;
import java.util.List; import java.util.List;
/** /**
* Use the current classloader to read all plugin define file. The file must be named 'skywalking-plugin.def' * Use the current classloader to read all plugin define file. The file must be named 'hippo4j-plugin.def'
*/ */
public class PluginResourcesResolver { public class PluginResourcesResolver {
@ -38,12 +38,12 @@ public class PluginResourcesResolver {
List<URL> cfgUrlPaths = new ArrayList<URL>(); List<URL> cfgUrlPaths = new ArrayList<URL>();
Enumeration<URL> urls; Enumeration<URL> urls;
try { try {
urls = AgentClassLoader.getDefault().getResources("skywalking-plugin.def"); urls = AgentClassLoader.getDefault().getResources("hippo4j-plugin.def");
while (urls.hasMoreElements()) { while (urls.hasMoreElements()) {
URL pluginUrl = urls.nextElement(); URL pluginUrl = urls.nextElement();
cfgUrlPaths.add(pluginUrl); cfgUrlPaths.add(pluginUrl);
LOGGER.info("find skywalking plugin define in {}", pluginUrl); LOGGER.info("find hippo4j plugin define in {}", pluginUrl);
} }
return cfgUrlPaths; return cfgUrlPaths;

@ -55,31 +55,31 @@ public class BootstrapInstrumentBoost {
private static final ILog LOGGER = LogManager.getLogger(BootstrapInstrumentBoost.class); private static final ILog LOGGER = LogManager.getLogger(BootstrapInstrumentBoost.class);
private static final String[] HIGH_PRIORITY_CLASSES = { private static final String[] HIGH_PRIORITY_CLASSES = {
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.BootstrapInterRuntimeAssist", "cn.hippo4j.agent.core.plugin.interceptor.enhance.BootstrapInterRuntimeAssist",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor", "cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceMethodsAroundInterceptor",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor", "cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.StaticMethodsAroundInterceptor", "cn.hippo4j.agent.core.plugin.interceptor.enhance.StaticMethodsAroundInterceptor",
"org.apache.skywalking.apm.agent.core.plugin.bootstrap.IBootstrapLog", "cn.hippo4j.agent.core.plugin.bootstrap.IBootstrapLog",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance", "cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.OverrideCallable", "cn.hippo4j.agent.core.plugin.interceptor.enhance.OverrideCallable",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.MethodInterceptResult", "cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult",
// interceptor v2 // interceptor v2
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.v2.InstanceMethodsAroundInterceptorV2", "cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.InstanceMethodsAroundInterceptorV2",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.v2.StaticMethodsAroundInterceptorV2", "cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.StaticMethodsAroundInterceptorV2",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.v2.MethodInvocationContext", "cn.hippo4j.agent.core.plugin.interceptor.enhance.v2.MethodInvocationContext",
}; };
private static String INSTANCE_METHOD_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.InstanceMethodInterTemplate"; private static String INSTANCE_METHOD_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.InstanceMethodInterTemplate";
private static String INSTANCE_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.InstanceMethodInterWithOverrideArgsTemplate"; private static String INSTANCE_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.InstanceMethodInterWithOverrideArgsTemplate";
private static String CONSTRUCTOR_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.ConstructorInterTemplate"; private static String CONSTRUCTOR_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.ConstructorInterTemplate";
private static String STATIC_METHOD_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.StaticMethodInterTemplate"; private static String STATIC_METHOD_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.StaticMethodInterTemplate";
private static String STATIC_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.StaticMethodInterWithOverrideArgsTemplate"; private static String STATIC_METHOD_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.StaticMethodInterWithOverrideArgsTemplate";
private static String INSTANCE_METHOD_V2_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2Template"; private static String INSTANCE_METHOD_V2_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2Template";
private static String INSTANCE_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2WithOverrideArgsTemplate"; private static String INSTANCE_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.InstanceMethodInterV2WithOverrideArgsTemplate";
private static String STATIC_METHOD_V2_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2Template"; private static String STATIC_METHOD_V2_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2Template";
private static String STATIC_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2WithOverrideArgsTemplate"; private static String STATIC_METHOD_V2_WITH_OVERRIDE_ARGS_DELEGATE_TEMPLATE = "cn.hippo4j.agent.core.plugin.bootstrap.template.v2.StaticMethodInterV2WithOverrideArgsTemplate";
public static AgentBuilder inject(PluginFinder pluginFinder, Instrumentation instrumentation, public static AgentBuilder inject(PluginFinder pluginFinder, Instrumentation instrumentation,
AgentBuilder agentBuilder, JDK9ModuleExporter.EdgeClasses edgeClasses) throws PluginException { AgentBuilder agentBuilder, JDK9ModuleExporter.EdgeClasses edgeClasses) throws PluginException {
@ -235,7 +235,7 @@ public class BootstrapInstrumentBoost {
* @param classesTypeMap hosts injected binary of generated class * @param classesTypeMap hosts injected binary of generated class
* @param typePool to generate new class * @param typePool to generate new class
* @param templateClassName represents the class as template in this generation process. The templates are * @param templateClassName represents the class as template in this generation process. The templates are
* pre-defined in SkyWalking agent core. * pre-defined in Hippo4j agent core.
*/ */
private static void generateDelegator(Map<String, byte[]> classesTypeMap, TypePool typePool, private static void generateDelegator(Map<String, byte[]> classesTypeMap, TypePool typePool,
String templateClassName, String methodsInterceptor) { String templateClassName, String methodsInterceptor) {

@ -37,7 +37,7 @@ import java.util.concurrent.ConcurrentHashMap;
* Wrapper classFileTransformer of ByteBuddy, save the enhanced bytecode to memory cache or file cache, * Wrapper classFileTransformer of ByteBuddy, save the enhanced bytecode to memory cache or file cache,
* and automatically load the previously generated bytecode during the second retransform, * and automatically load the previously generated bytecode during the second retransform,
* to solve the problem that ByteBuddy generates auxiliary classes with different random names every time. * to solve the problem that ByteBuddy generates auxiliary classes with different random names every time.
* Allow other javaagent to enhance those classes that enhanced by SkyWalking agent. * Allow other javaagent to enhance those classes that enhanced by Hippo4j agent.
*/ */
public class CacheableTransformerDecorator implements AgentBuilder.TransformerDecorator { public class CacheableTransformerDecorator implements AgentBuilder.TransformerDecorator {

@ -18,7 +18,7 @@
package cn.hippo4j.agent.core.plugin.exception; package cn.hippo4j.agent.core.plugin.exception;
/** /**
* Thrown to indicate that a illegal format plugin definition has been defined in skywalking-plugin.define. * Thrown to indicate that a illegal format plugin definition has been defined in hippo4j-plugin.define.
*/ */
public class IllegalPluginDefineException extends Exception { public class IllegalPluginDefineException extends Exception {

@ -38,7 +38,7 @@ public interface ConstructorInterceptPoint {
/** /**
* @return represents a class name, the class instance must be a instance of {@link * @return represents a class name, the class instance must be a instance of {@link
* org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor} * cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor}
*/ */
String getConstructorInterceptor(); String getConstructorInterceptor();
} }

@ -28,9 +28,9 @@ import java.lang.reflect.Method;
*/ */
public class BootstrapInterRuntimeAssist { public class BootstrapInterRuntimeAssist {
private static final String AGENT_CLASSLOADER_DEFAULT = "org.apache.skywalking.apm.agent.core.plugin.loader.AgentClassLoader"; private static final String AGENT_CLASSLOADER_DEFAULT = "cn.hippo4j.agent.core.plugin.loader.AgentClassLoader";
private static final String DEFAULT_AGENT_CLASSLOADER_INSTANCE = "DEFAULT_LOADER"; private static final String DEFAULT_AGENT_CLASSLOADER_INSTANCE = "DEFAULT_LOADER";
private static final String LOG_MANAGER_CLASS = "org.apache.skywalking.apm.agent.core.plugin.bootstrap.BootstrapPluginLogBridge"; private static final String LOG_MANAGER_CLASS = "cn.hippo4j.agent.core.plugin.bootstrap.BootstrapPluginLogBridge";
private static final String LOG_MANAGER_GET_LOGGER_METHOD = "getLogger"; private static final String LOG_MANAGER_GET_LOGGER_METHOD = "getLogger";
private static final PrintStream OUT = System.out; private static final PrintStream OUT = System.out;

@ -19,7 +19,7 @@ package cn.hippo4j.agent.core.plugin.interceptor.enhance;
public interface EnhancedInstance { public interface EnhancedInstance {
Object getSkyWalkingDynamicField(); Object getHippo4jDynamicField();
void setSkyWalkingDynamicField(Object value); void setHippo4jDynamicField(Object value);
} }
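The rename above changes the accessor pair that every enhanced class exposes. As a rough illustration (the class and field below are invented, and in practice the agent weaves this shape in with Byte Buddy rather than anyone writing it by hand), an instrumented type ends up carrying per-instance state like this:

```java
import cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance;

// Hypothetical shape of a class after enhancement: a synthetic state field plus the renamed accessors.
public class EnhancedTargetSketch implements EnhancedInstance {

    private Object dynamicField;

    @Override
    public Object getHippo4jDynamicField() {
        return dynamicField;
    }

    @Override
    public void setHippo4jDynamicField(Object value) {
        this.dynamicField = value;
    }
}
```

Interceptors read and write this field to carry context across intercepted calls without touching the original class's own fields.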

@ -31,7 +31,7 @@ public interface ConstructorInterceptV2Point {
/** /**
* @return represents a class name, the class instance must be a instance of {@link * @return represents a class name, the class instance must be a instance of {@link
* org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor} * cn.hippo4j.agent.core.plugin.interceptor.enhance.InstanceConstructorInterceptor}
*/ */
String getConstructorInterceptorV2(); String getConstructorInterceptorV2();

@ -34,14 +34,14 @@ public class JDK9ModuleExporter {
private static final ILog LOGGER = LogManager.getLogger(JDK9ModuleExporter.class); private static final ILog LOGGER = LogManager.getLogger(JDK9ModuleExporter.class);
private static final String[] HIGH_PRIORITY_CLASSES = { private static final String[] HIGH_PRIORITY_CLASSES = {
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.EnhancedInstance", "cn.hippo4j.agent.core.plugin.interceptor.enhance.EnhancedInstance",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.MethodInterceptResult", "cn.hippo4j.agent.core.plugin.interceptor.enhance.MethodInterceptResult",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.OverrideCallable", "cn.hippo4j.agent.core.plugin.interceptor.enhance.OverrideCallable",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.ConstructorInter", "cn.hippo4j.agent.core.plugin.interceptor.enhance.ConstructorInter",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstMethodsInter", "cn.hippo4j.agent.core.plugin.interceptor.enhance.InstMethodsInter",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.InstMethodsInterWithOverrideArgs", "cn.hippo4j.agent.core.plugin.interceptor.enhance.InstMethodsInterWithOverrideArgs",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.StaticMethodsInter", "cn.hippo4j.agent.core.plugin.interceptor.enhance.StaticMethodsInter",
"org.apache.skywalking.apm.agent.core.plugin.interceptor.enhance.StaticMethodsInterWithOverrideArgs", "cn.hippo4j.agent.core.plugin.interceptor.enhance.StaticMethodsInterWithOverrideArgs",
}; };
/** /**

@ -26,7 +26,7 @@ import net.bytebuddy.matcher.ElementMatcher;
* them have compatible issues with byte-buddy core, which trigger "Can't resolve type description" exception. * them have compatible issues with byte-buddy core, which trigger "Can't resolve type description" exception.
* <p> * <p>
* So I build this protective shield by a nested matcher. When the origin matcher(s) can't resolve the type, the * So I build this protective shield by a nested matcher. When the origin matcher(s) can't resolve the type, the
* SkyWalking agent ignores this types. * Hippo4j agent ignores this types.
* <p> * <p>
* Notice: this ignore mechanism may miss some instrumentations, but at most cases, it's same. If missing happens, * Notice: this ignore mechanism may miss some instrumentations, but at most cases, it's same. If missing happens,
* please pay attention to the WARNING logs. * please pay attention to the WARNING logs.

@ -23,7 +23,7 @@ import net.bytebuddy.matcher.ElementMatcher;
import net.bytebuddy.matcher.NegatingMatcher; import net.bytebuddy.matcher.NegatingMatcher;
/** /**
* Util class to help to construct logical operations on {@link org.apache.skywalking.apm.agent.core.plugin.match.ClassMatch}s * Util class to help to construct logical operations on {@link cn.hippo4j.agent.core.plugin.match.ClassMatch}s
*/ */
public class LogicalMatchOperation { public class LogicalMatchOperation {

@ -9,7 +9,7 @@
<version>${revision}</version> <version>${revision}</version>
</parent> </parent>
<artifactId>hippo4j-sdk-plugin</artifactId> <artifactId>hippo4j-agent-plugin</artifactId>
<properties> <properties>
<maven.compiler.source>8</maven.compiler.source> <maven.compiler.source>8</maven.compiler.source>

@ -13,7 +13,8 @@
<packaging>pom</packaging> <packaging>pom</packaging>
<modules> <modules>
<module>hippo4j-agent-core</module> <module>hippo4j-agent-core</module>
<module>hippo4j-sdk-plugin</module> <module>hippo4j-agent-plugin</module>
<module>hippo4j-agent-bootstrap</module>
</modules> </modules>
<properties> <properties>
@ -56,25 +57,37 @@
<jmh.version>1.33</jmh.version> <jmh.version>1.33</jmh.version>
<gmaven-plugin.version>1.5</gmaven-plugin.version> <gmaven-plugin.version>1.5</gmaven-plugin.version>
<checkstyle.fails.on.error>true</checkstyle.fails.on.error> <checkstyle.fails.on.error>true</checkstyle.fails.on.error>
<slf4j.version>1.7.25</slf4j.version>
<guava.version>30.1.1-jre</guava.version>
<wiremock.version>2.16.0</wiremock.version>
</properties> </properties>
<dependencyManagement>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.powermock</groupId> <groupId>cn.hippo4j</groupId>
<artifactId>powermock-api-mockito2</artifactId> <artifactId>hippo4j-agent-core</artifactId>
<scope>test</scope> <version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${gson.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>net.bytebuddy</groupId> <groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId> <artifactId>byte-buddy</artifactId>
<scope>test</scope> <version>${bytebuddy.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.objenesis</groupId> <groupId>org.slf4j</groupId>
<artifactId>objenesis</artifactId> <artifactId>slf4j-api</artifactId>
<scope>test</scope> <version>${slf4j.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.openjdk.jmh</groupId> <groupId>org.openjdk.jmh</groupId>
@ -94,15 +107,6 @@
<version>${javax.annotation-api.version}</version> <version>${javax.annotation-api.version}</version>
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency> <dependency>
<groupId>junit</groupId> <groupId>junit</groupId>
<artifactId>junit</artifactId> <artifactId>junit</artifactId>
@ -125,7 +129,6 @@
<groupId>net.bytebuddy</groupId> <groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy-agent</artifactId> <artifactId>byte-buddy-agent</artifactId>
<version>${bytebuddy.version}</version> <version>${bytebuddy.version}</version>
<scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.objenesis</groupId> <groupId>org.objenesis</groupId>
@ -133,6 +136,12 @@
<version>${objenesis.version}</version> <version>${objenesis.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>com.github.tomakehurst</groupId>
<artifactId>wiremock</artifactId>
<version>${wiremock.version}</version>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.openjdk.jmh</groupId> <groupId>org.openjdk.jmh</groupId>
<artifactId>jmh-generator-annprocess</artifactId> <artifactId>jmh-generator-annprocess</artifactId>
