Compare commits: master ... branch-2.1 (14 commits)

Commits: 315934ba22, e6cb601c1d, 0ddcf21a4c, 29b3c6ee62, 114691ded6, 30e3709669, 930e719863, 02df8c16d7, a74d82bd79, 270718d79f, 364303bcef, 04b5b25327, 9d8dda35bf, 66d95f8500
.github/CODEOWNERS (vendored; file removed, 1 line)
-* @penberg
.gitignore (vendored; 9 lines removed)
-/target/
-/bin/
-dependency-reduced-pom.xml
-scylla-apiclient/target/
-.classpath
-.project
-.settings
-build/
-/.idea/
README.md
@@ -7,7 +7,7 @@ Scylla JMX server implements the Apache Cassandra JMX interface for compatibility
 To compile JMX server, run:
 
 ```console
-$ mvn --file scylla-jmx-parent/pom.xml package
+$ mvn package
 ```
 
 ## Running
 
SCYLLA-VERSION-GEN (49 lines on master, 19 on branch-2.1)
@@ -1,49 +1,19 @@
 #!/bin/sh
 
-PRODUCT=scylla
-VERSION=666.development
+VERSION=2.1.6
 
 if test -f version
 then
   SCYLLA_VERSION=$(cat version | awk -F'-' '{print $1}')
   SCYLLA_RELEASE=$(cat version | awk -F'-' '{print $2}')
 else
-  DATE=$(date --utc +%Y%m%d)
+  DATE=$(date +%Y%m%d)
   GIT_COMMIT=$(git log --pretty=format:'%h' -n 1)
   SCYLLA_VERSION=$VERSION
   SCYLLA_RELEASE=$DATE.$GIT_COMMIT
 fi
 
-usage() {
-  echo "usage: $0"
-  echo " [--version product-version-release] # override p-v-r"
-  exit 1
-}
-
-OVERRIDE=
-while [[ $# > 0 ]]; do
-  case "$1" in
-    --version)
-      OVERRIDE="$2"
-      shift 2
-      ;;
-    *)
-      usage
-      ;;
-  esac
-done
-
-if [[ -n "$OVERRIDE" ]]; then
-  # regular expression for p-v-r: alphabetic+dashes for product, trailing non-dashes
-  # for release, everything else for version
-  RE='^([-a-z]+)-(.+)-([^-]+)$'
-  PRODUCT="$(sed -E "s/$RE/\\1/" <<<"$OVERRIDE")"
-  SCYLLA_VERSION="$(sed -E "s/$RE/\\2/" <<<"$OVERRIDE")"
-  SCYLLA_RELEASE="$(sed -E "s/$RE/\\3/" <<<"$OVERRIDE")"
-fi
-
 echo "$SCYLLA_VERSION-$SCYLLA_RELEASE"
 mkdir -p build
 echo "$SCYLLA_VERSION" > build/SCYLLA-VERSION-FILE
 echo "$SCYLLA_RELEASE" > build/SCYLLA-RELEASE-FILE
-echo "$PRODUCT" > build/SCYLLA-PRODUCT-FILE
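For orientation, a sketch of how the master-side `--version` override is split by the RE regular expression above; the product-version-release string is hypothetical and chosen only to show the three capture groups.

```console
$ ./SCYLLA-VERSION-GEN --version scylla-enterprise-2021.1.0-0.20210301.abcdef
# RE='^([-a-z]+)-(.+)-([^-]+)$' splits the override into:
#   PRODUCT=scylla-enterprise          (leading run of letters and dashes)
#   SCYLLA_VERSION=2021.1.0            (middle part)
#   SCYLLA_RELEASE=0.20210301.abcdef   (trailing part without dashes)
```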
dist/common/sysconfig/scylla-jmx (vendored; 5 changed lines)
@@ -20,13 +20,10 @@ SCYLLA_CONF=/etc/scylla
 #SCYLLA_JMX_FILE="-cf /etc/scylla.d/scylla-user.cfg"
 
 # The location of the jmx proxy jar file
-SCYLLA_JMX_LOCAL="-l /opt/scylladb/jmx"
+SCYLLA_JMX_LOCAL="-l /usr/lib/scylla/jmx"
 
 # allow to run remotely
 #SCYLLA_JMX_REMOTE="-r"
 
 # allow debug
 #SCYLLA_JMX_DEBUG="-d"
 
-# specify JVM options
-JAVA_TOOL_OPTIONS=""
dist/common/systemd/scylla-jmx.service (vendored; file removed, 18 lines)
[Unit]
Description=Scylla JMX
Requires=scylla-server.service
After=scylla-server.service

[Service]
Type=simple
EnvironmentFile=/etc/sysconfig/scylla-jmx
User=scylla
Group=scylla
ExecStart=/opt/scylladb/jmx/scylla-jmx $SCYLLA_JMX_PORT $SCYLLA_API_PORT $SCYLLA_API_ADDR $SCYLLA_JMX_ADDR $SCYLLA_JMX_FILE $SCYLLA_JMX_LOCAL $SCYLLA_JMX_REMOTE $SCYLLA_JMX_DEBUG
KillMode=process
Restart=on-abnormal
Slice=scylla-helper.slice
WorkingDirectory=/var/lib/scylla

[Install]
WantedBy=multi-user.target
dist/common/systemd/scylla-jmx.service.in (vendored; new file, 16 lines)
[Unit]
Description=Scylla JMX
Requires=scylla-server.service
After=scylla-server.service

[Service]
Type=simple
EnvironmentFile=@@SYSCONFDIR@@/scylla-jmx
User=scylla
Group=scylla
ExecStart=/usr/lib/scylla/jmx/scylla-jmx $SCYLLA_JMX_PORT $SCYLLA_API_PORT $SCYLLA_API_ADDR $SCYLLA_JMX_ADDR $SCYLLA_JMX_FILE $SCYLLA_JMX_LOCAL $SCYLLA_JMX_REMOTE $SCYLLA_JMX_DEBUG
KillMode=process
Restart=on-abnormal

[Install]
WantedBy=multi-user.target
dist/debian/build_deb.sh (vendored; new executable file, 126 lines)
#!/bin/bash -e

. /etc/os-release
print_usage() {
  echo "build_deb.sh -target <codename>"
  echo "  --target target distribution codename"
  exit 1
}
TARGET=
while [ $# -gt 0 ]; do
  case "$1" in
    "--target")
      TARGET=$2
      shift 2
      ;;
    *)
      print_usage
      ;;
  esac
done

is_redhat_variant() {
  [ -f /etc/redhat-release ]
}
is_debian_variant() {
  [ -f /etc/debian_version ]
}


pkg_install() {
  if is_redhat_variant; then
    sudo yum install -y $1
  elif is_debian_variant; then
    sudo apt-get install -y $1
  else
    echo "Requires to install following command: $1"
    exit 1
  fi
}

if [ ! -e dist/debian/build_deb.sh ]; then
  echo "run build_deb.sh in top of scylla dir"
  exit 1
fi
if [ "$(arch)" != "x86_64" ]; then
  echo "Unsupported architecture: $(arch)"
  exit 1
fi

if [ -e debian ] || [ -e build/release ]; then
  sudo rm -rf debian build
  mkdir build
fi
if is_debian_variant; then
  sudo apt-get -y update
fi
# this hack is needed since some environment installs 'git-core' package, it's
# subset of the git command and doesn't works for our git-archive-all script.
if is_redhat_variant && [ ! -f /usr/libexec/git-core/git-submodule ]; then
  sudo yum install -y git
fi
if [ ! -f /usr/bin/git ]; then
  pkg_install git
fi
if [ ! -f /usr/bin/python ]; then
  pkg_install python
fi
if [ ! -f /usr/sbin/pbuilder ]; then
  pkg_install pbuilder
fi
if [ ! -f /usr/bin/mvn ]; then
  pkg_install maven
fi
if [ ! -f /usr/bin/dh_testdir ]; then
  pkg_install debhelper
fi


if [ -z "$TARGET" ]; then
  if is_debian_variant; then
    if [ ! -f /usr/bin/lsb_release ]; then
      pkg_install lsb-release
    fi
    TARGET=`lsb_release -c|awk '{print $2}'`
  else
    echo "Please specify target"
    exit 1
  fi
fi

VERSION=$(./SCYLLA-VERSION-GEN)
SCYLLA_VERSION=$(cat build/SCYLLA-VERSION-FILE | sed 's/\.rc/~rc/')
SCYLLA_RELEASE=$(cat build/SCYLLA-RELEASE-FILE)
echo $VERSION > version
./scripts/git-archive-all --extra version --force-submodules --prefix scylla-jmx ../scylla-jmx_$SCYLLA_VERSION-$SCYLLA_RELEASE.orig.tar.gz

cp -a dist/debian/debian debian
cp dist/debian/changelog.in debian/changelog
cp dist/debian/rules.in debian/rules
sed -i -e "s/@@VERSION@@/$SCYLLA_VERSION/g" debian/changelog
sed -i -e "s/@@RELEASE@@/$SCYLLA_RELEASE/g" debian/changelog
sed -i -e "s/@@CODENAME@@/$TARGET/g" debian/changelog
if [ "$TARGET" = "trusty" ] || [ "$TARGET" = "xenial" ] || [ "$TARGET" = "yakkety" ] || [ "$TARGET" = "zesty" ] || [ "$TARGET" = "artful" ]; then
  sed -i -e "s/@@REVISION@@/0ubuntu1~$TARGET/g" debian/changelog
else
  sed -i -e "s/@@REVISION@@/1~$TARGET/g" debian/changelog
fi
if [ "$TARGET" = "trusty" ]; then
  sed -i -e "s/@@DH_INSTALLINIT@@/--upstart-only/g" debian/rules
else
  sed -i -e "s/@@DH_INSTALLINIT@@//g" debian/rules
fi
cp dist/common/systemd/scylla-jmx.service.in debian/scylla-jmx.service
sed -i -e "s#@@SYSCONFDIR@@#/etc/default#g" debian/scylla-jmx.service

cp ./dist/debian/pbuilderrc ~/.pbuilderrc
sudo rm -fv /var/cache/pbuilder/scylla-jmx-$TARGET.tgz
sudo -E DIST=$TARGET /usr/sbin/pbuilder clean
sudo -E DIST=$TARGET /usr/sbin/pbuilder create
sudo -E DIST=$TARGET /usr/sbin/pbuilder update
if [ "$TARGET" = "jessie" ]; then
  echo "apt-get install -y -t jessie-backports ca-certificates-java" > build/jessie-pkginst.sh
  chmod a+rx build/jessie-pkginst.sh
  sudo -E DIST=$TARGET /usr/sbin/pbuilder execute build/jessie-pkginst.sh
fi
sudo -E DIST=$TARGET pdebuild --buildresult build/debs
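A minimal sketch of how the branch-2.1 script above is meant to be driven, based only on its usage text and the pdebuild call; the codename is an example value.

```console
$ ./dist/debian/build_deb.sh --target xenial
# pbuilder creates and updates /var/cache/pbuilder/scylla-jmx-xenial.tgz,
# then pdebuild leaves the resulting .deb packages under build/debs
```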
dist/debian changelog template (master uses %{...} placeholders, branch-2.1 uses @@...@@)
@@ -1,4 +1,4 @@
-%{product}-jmx (%{version}-%{release}-%{revision}) %{codename}; urgency=medium
+scylla-jmx (@@VERSION@@-@@RELEASE@@-@@REVISION@@) @@CODENAME@@; urgency=medium
 
 * Initial release.
 
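To make the placeholder substitutions concrete, here is a sketch with hypothetical values (VERSION=2.1.6, RELEASE=0.20180501.abc1234, TARGET=xenial), mirroring the sed calls in build_deb.sh rather than output from a real build:

```console
$ sed -e 's/@@VERSION@@/2.1.6/g' -e 's/@@RELEASE@@/0.20180501.abc1234/g' \
      -e 's/@@CODENAME@@/xenial/g' -e 's/@@REVISION@@/0ubuntu1~xenial/g' \
      dist/debian/changelog.in | head -n1
scylla-jmx (2.1.6-0.20180501.abc1234-0ubuntu1~xenial) xenial; urgency=medium
```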
dist/debian debian/control template
@@ -1,14 +1,14 @@
-Source: %{product}-jmx
+Source: scylla-jmx
 Maintainer: Takuya ASADA <syuu@scylladb.com>
 Homepage: http://scylladb.com
 Section: database
 Priority: optional
 Standards-Version: 3.9.5
-Rules-Requires-Root: no
+Build-Depends: debhelper (>= 9), maven, openjdk-8-jdk-headless
 
-Package: %{product}-jmx
+Package: scylla-jmx
 Architecture: all
-Depends: ${shlibs:Depends}, ${misc:Depends}, openjdk-8-jre-headless | openjdk-8-jre | oracle-java8-set-default | adoptopenjdk-8-hotspot-jre | openjdk-11-jre-headless | openjdk-11-jre | oracle-java11-set-default, %{product}-server
+Depends: ${shlibs:Depends}, ${misc:Depends}, openjdk-8-jre-headless | openjdk-8-jre | oracle-java8-set-default, scylla-server
 Description: Scylla JMX server binaries
  Scylla is a highly scalable, eventually consistent, distributed,
  partitioned row DB.
dist/debian/debian/rules (vendored; file removed, 23 lines)
#!/usr/bin/make -f

include /usr/share/dpkg/pkg-info.mk

override_dh_auto_build:

override_dh_auto_clean:

override_dh_auto_install:
	dh_auto_install
	cd scylla-jmx; ./install.sh --packaging --root "$(CURDIR)/debian/tmp" --sysconfdir /etc/default

override_dh_installinit:
ifeq ($(DEB_SOURCE),scylla-jmx)
	dh_installinit --no-start
else
	dh_installinit --no-start --name scylla-jmx
endif

override_dh_strip_nondeterminism:

%:
	dh $@
dist/debian/debian/scylla-jmx.install (vendored; file removed, 4 lines)
etc/default/scylla-jmx
etc/systemd/system/scylla-jmx.service.d/sysconfdir.conf
opt/scylladb/jmx/*
usr/lib/scylla/jmx/*
dist/debian/debian/scylla-jmx.postinst (vendored; file removed, 7 lines)
#!/bin/sh

if [ -d /run/systemd/system ]; then
  systemctl --system daemon-reload >/dev/null || true
fi

#DEBHELPER#
dist/debian/debian/scylla-jmx.postrm (vendored; file removed, 7 lines)
#!/bin/sh

if [ -d /run/systemd/system ]; then
  systemctl --system daemon-reload >/dev/null || true
fi

#DEBHELPER#
dist/debian/debian/scylla-jmx.service (vendored; symlink removed, 1 line)
../../common/systemd/scylla-jmx.service
dist/debian/debian/scylla-jmx.upstart (vendored; new file, 21 lines)
# scylla-jmx - ScyllaDB
#
# ScyllaDB

description "ScyllaDB jmx"

start on started scylla-server
stop on stopping scylla-server

umask 022

console log

setuid scylla
setgid scylla

script
  . /etc/default/scylla-jmx
  export SCYLLA_HOME SCYLLA_CONF
  exec /usr/lib/scylla/jmx/scylla-jmx -l /usr/lib/scylla/jmx
end script
dist/debian/debian_files_gen.py (vendored; file removed, 80 lines)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 ScyllaDB
#

#
# This file is part of Scylla.
#
# Scylla is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scylla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scylla. If not, see <http://www.gnu.org/licenses/>.
#

import string
import os
import shutil
import re
from pathlib import Path

class DebianFilesTemplate(string.Template):
    delimiter = '%'

scriptdir = os.path.dirname(__file__)

with open(os.path.join(scriptdir, 'changelog.template')) as f:
    changelog_template = f.read()

with open(os.path.join(scriptdir, 'control.template')) as f:
    control_template = f.read()

with open('build/SCYLLA-PRODUCT-FILE') as f:
    product = f.read().strip()

with open('build/SCYLLA-VERSION-FILE') as f:
    version = f.read().strip().replace('.rc', '~rc').replace('_', '-')

with open('build/SCYLLA-RELEASE-FILE') as f:
    release = f.read().strip()

if os.path.exists('build/debian/debian'):
    shutil.rmtree('build/debian/debian')
shutil.copytree('dist/debian/debian', 'build/debian/debian')

if product != 'scylla':
    for p in Path('build/debian/debian').glob('scylla-*'):
        # pat1: scylla-server.service
        #  -> scylla-enterprise-server.scylla-server.service
        # pat2: scylla-server.scylla-fstrim.service
        #  -> scylla-enterprise-server.scylla-fstrim.service
        # pat3: scylla-conf.install
        #  -> scylla-enterprise-conf.install

        if m := re.match(r'^scylla(-[^.]+)\.service$', p.name):
            p.rename(p.parent / f'{product}{m.group(1)}.{p.name}')
        elif m := re.match(r'^scylla(-[^.]+\.scylla-[^.]+\.[^.]+)$', p.name):
            p.rename(p.parent / f'{product}{m.group(1)}')
        else:
            p.rename(p.parent / p.name.replace('scylla', product, 1))

s = DebianFilesTemplate(changelog_template)
changelog_applied = s.substitute(product=product, version=version, release=release, revision='1', codename='stable')

s = DebianFilesTemplate(control_template)
control_applied = s.substitute(product=product)

with open('build/debian/debian/changelog', 'w') as f:
    f.write(changelog_applied)

with open('build/debian/debian/control', 'w') as f:
    f.write(control_applied)
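To illustrate the master-side rename rules above, a sketch with the hypothetical product name scylla-enterprise; the file names are examples and are not guaranteed to exist in this tree.

```console
# pat1:     scylla-jmx.service   -> scylla-enterprise-jmx.scylla-jmx.service
# fallback: scylla-jmx.install   -> scylla-enterprise-jmx.install
# fallback: scylla-jmx.postinst  -> scylla-enterprise-jmx.postinst
```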
dist/debian/pbuilderrc (vendored; new file, 26 lines)
USENETWORK=yes
BUILD_HOME=/tmp
BASETGZ="/var/cache/pbuilder/scylla-jmx-$DIST.tgz"
DISTRIBUTION="$DIST"
BUILDRESULT="/var/cache/pbuilder/scylla-jmx-$DIST/result/"
APTCACHE="/var/cache/pbuilder/scylla-jmx-$DIST/aptcache/"
ALLOWUNTRUSTED=yes

if [ "$DIST" = "trusty" ] || [ "$DIST" = "xenial" ] || [ "$DIST" = "yakkety" ] || [ "$DIST" = "zesty" ] || [ "$DIST" = "artful" ]; then
  MIRRORSITE="http://archive.ubuntu.com/ubuntu/"
  COMPONENTS="main restricted universe multiverse"
  DEBOOTSTRAPOPTS="--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg"
  if [ "$DIST" = "trusty" ]; then
    OTHERMIRROR="deb http://ppa.launchpad.net/openjdk-r/ppa/ubuntu trusty main"
  fi
elif [ "$DIST" = "jessie" ] || [ "$DIST" = "stretch" ] || [ "$DIST" = "buster" ] || [ "$DIST" = "sid" ]; then
  MIRRORSITE="http://deb.debian.org/debian/"
  COMPONENTS="main contrib non-free"
  DEBOOTSTRAPOPTS="--keyring=/usr/share/keyrings/debian-archive-keyring.gpg"
  if [ "$DIST" = "jessie" ]; then
    OTHERMIRROR="deb http://httpredir.debian.org/debian jessie-backports main"
  fi
else
  echo "Unknown distribution: $DIST"
  exit 1
fi
dist/debian/rules.in (vendored; new executable file, 32 lines)
#!/usr/bin/make -f

DOC = $(CURDIR)/debian/scylla-jmx/usr/share/doc/scylla-jmx
DEST = $(CURDIR)/debian/scylla-jmx/usr/lib/scylla/jmx
M2_REPO= $(CURDIR)/m2

override_dh_auto_build:
	mvn -B -Dmaven.repo.local=$(M2_REPO) install

override_dh_auto_clean:
	rm -rf target m2

override_dh_auto_install:
	mkdir -p $(CURDIR)/debian/scylla-jmx/etc/default/ && \
	cp $(CURDIR)/dist/common/sysconfig/scylla-jmx \
		$(CURDIR)/debian/scylla-jmx/etc/default/

	mkdir -p $(DOC) && \
	cp $(CURDIR)/*.md $(DOC)
	cp $(CURDIR)/NOTICE $(DOC)

	mkdir -p $(DEST)
	cp $(CURDIR)/scripts/scylla-jmx $(DEST)
	cp $(CURDIR)/target/scylla-jmx-1.0.jar $(DEST)
	mkdir $(DEST)/symlinks
	ln -sf /usr/bin/java $(DEST)/symlinks/scylla-jmx

override_dh_installinit:
	dh_installinit --no-start @@DH_INSTALLINIT@@

%:
	dh $@
dist/redhat/build_rpm.sh (vendored; new executable file, 71 lines)
#!/bin/bash -e

. /etc/os-release
print_usage() {
  echo "build_rpm.sh -target epel-7-x86_64 --configure-user"
  echo "  --target target distribution in mock cfg name"
  exit 1
}
TARGET=
while [ $# -gt 0 ]; do
  case "$1" in
    "--target")
      TARGET=$2
      shift 2
      ;;
    *)
      print_usage
      ;;
  esac
done

is_redhat_variant() {
  [ -f /etc/redhat-release ]
}
pkg_install() {
  if is_redhat_variant; then
    sudo yum install -y $1
  else
    echo "Requires to install following command: $1"
    exit 1
  fi
}

if [ ! -e dist/redhat/build_rpm.sh ]; then
  echo "run build_rpm.sh in top of scylla-jmx dir"
  exit 1
fi

if [ "$(arch)" != "x86_64" ]; then
  echo "Unsupported architecture: $(arch)"
  exit 1
fi
if [ -z "$TARGET" ]; then
  if [ "$ID" = "centos" -o "$ID" = "rhel" ] && [ "$VERSION_ID" = "7" ]; then
    TARGET=./dist/redhat/mock/scylla-jmx-epel-7-x86_64.cfg
  elif [ "$ID" = "fedora" ]; then
    TARGET=$ID-$VERSION_ID-x86_64
  else
    echo "Please specify target"
    exit 1
  fi
fi

if [ ! -f /usr/bin/mock ]; then
  pkg_install mock
fi
if [ ! -f /usr/bin/git ]; then
  pkg_install git
fi

VERSION=$(./SCYLLA-VERSION-GEN)
SCYLLA_VERSION=$(cat build/SCYLLA-VERSION-FILE)
SCYLLA_RELEASE=$(cat build/SCYLLA-RELEASE-FILE)
git archive --format=tar --prefix=scylla-jmx-$SCYLLA_VERSION/ HEAD -o build/scylla-jmx-$VERSION.tar
cp dist/redhat/scylla-jmx.spec.in build/scylla-jmx.spec
sed -i -e "s/@@VERSION@@/$SCYLLA_VERSION/g" build/scylla-jmx.spec
sed -i -e "s/@@RELEASE@@/$SCYLLA_RELEASE/g" build/scylla-jmx.spec

sudo mock --buildsrpm --root=$TARGET --resultdir=`pwd`/build/srpms --spec=build/scylla-jmx.spec --sources=build/scylla-jmx-$VERSION.tar
sudo mock --rebuild --root=$TARGET --resultdir=`pwd`/build/rpms build/srpms/scylla-jmx-$VERSION*.src.rpm
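A sketch of a typical invocation of the branch-2.1 RPM build, following the script's own usage text; the mock target is the example name used by the script itself.

```console
$ ./dist/redhat/build_rpm.sh --target epel-7-x86_64
# mock --buildsrpm writes the source RPM under build/srpms,
# mock --rebuild then writes the binary RPMs under build/rpms
```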
dist/redhat/mock/scylla-jmx-epel-7-x86_64.cfg (vendored; new file, 53 lines)
config_opts['root'] = 'epel-7-x86_64'
config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
config_opts['dist'] = 'el7'  # only useful for --resultdir variable subst
config_opts['releasever'] = '7'
config_opts['rpmbuild_networking'] = True

config_opts['yum.conf'] = """
[main]
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=
mdpolicy=group:primary
best=1

# repos
[scylla-centos-base]
name=BaseOS
mirrorlist=http://mirrorlist.centos.org/?release=7&arch=x86_64&repo=os
failovermethod=priority
gpgkey=https://www.centos.org/keys/RPM-GPG-KEY-CentOS-7
gpgcheck=1

[scylla-centos-updates]
name=updates
enabled=1
mirrorlist=http://mirrorlist.centos.org/?release=7&arch=x86_64&repo=updates
failovermethod=priority
gpgkey=https://www.centos.org/keys/RPM-GPG-KEY-CentOS-7
gpgcheck=1

[scylla-centos-extras]
name=extras
mirrorlist=http://mirrorlist.centos.org/?release=7&arch=x86_64&repo=extras
failovermethod=priority
gpgkey=https://www.centos.org/keys/RPM-GPG-KEY-CentOS-7
gpgcheck=1

[scylla-epel]
name=epel
mirrorlist=http://mirrors.fedoraproject.org/mirrorlist?repo=epel-7&arch=x86_64
failovermethod=priority
gpgkey=https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-7
gpgcheck=1
"""
dist/redhat/scylla-jmx.spec (vendored; file removed, 75 lines)
Name: %{product}-jmx
Version: %{version}
Release: %{release}%{?dist}
Summary: Scylla JMX
Group: Applications/Databases

License: AGPLv3
URL: http://www.scylladb.com/
Source0: %{reloc_pkg}

BuildArch: noarch
BuildRequires: systemd-units
Requires: %{product}-server jre-1.8.0-headless
AutoReqProv: no

%description


%prep
%setup -q -n scylla-jmx


%build

%install
./install.sh --packaging --root "$RPM_BUILD_ROOT"

%pre
/usr/sbin/groupadd scylla 2> /dev/null || :
/usr/sbin/useradd -g scylla -s /sbin/nologin -r -d ${_sharedstatedir}/scylla scylla 2> /dev/null || :
ping -c1 `hostname` > /dev/null 2>&1
if [ $? -ne 0 ]; then
echo
echo "**************************************************************"
echo "* WARNING: You need to add hostname on /etc/hosts, otherwise *"
echo "* scylla-jmx will not able to start up. *"
echo "**************************************************************"
echo
fi

%post
if [ $1 -eq 1 ] ; then
/usr/bin/systemctl preset scylla-jmx.service ||:
fi

/usr/bin/systemctl daemon-reload ||:

%preun
if [ $1 -eq 0 ] ; then
/usr/bin/systemctl --no-reload disable scylla-jmx.service ||:
/usr/bin/systemctl stop scylla-jmx.service ||:
fi

%postun
/usr/bin/systemctl daemon-reload ||:

%clean
rm -rf $RPM_BUILD_ROOT


%files
%defattr(-,root,root)

%config(noreplace) %{_sysconfdir}/sysconfig/scylla-jmx
%{_unitdir}/scylla-jmx.service
/opt/scylladb/jmx/scylla-jmx
/opt/scylladb/jmx/scylla-jmx-1.1.jar
/opt/scylladb/jmx/symlinks/scylla-jmx
%{_prefix}/lib/scylla/jmx/scylla-jmx
%{_prefix}/lib/scylla/jmx/scylla-jmx-1.1.jar
%{_prefix}/lib/scylla/jmx/symlinks/scylla-jmx

%changelog
* Fri Aug 7 2015 Takuya ASADA Takuya ASADA <syuu@cloudius-systems.com>
- inital version of scylla-tools.spec
dist/redhat/scylla-jmx.spec.in (vendored; new file, 80 lines)
Name: scylla-jmx
Version: @@VERSION@@
Release: @@RELEASE@@%{?dist}
Summary: Scylla JMX
Group: Applications/Databases

License: AGPLv3
URL: http://www.scylladb.com/
Source0: %{name}-@@VERSION@@-@@RELEASE@@.tar

BuildArch: noarch
BuildRequires: maven systemd-units java-1.8.0-openjdk-devel
Requires: scylla-server java-1.8.0-openjdk-headless

%description


%prep
%setup -q


%build
mvn -B install
mkdir build
cp dist/common/systemd/scylla-jmx.service.in build/scylla-jmx.service
sed -i -e "s#@@SYSCONFDIR@@#/etc/sysconfig#g" build/scylla-jmx.service

%install
rm -rf $RPM_BUILD_ROOT
mkdir -p $RPM_BUILD_ROOT%{_sysconfdir}/sysconfig/
mkdir -p $RPM_BUILD_ROOT%{_unitdir}
mkdir -p $RPM_BUILD_ROOT%{_prefix}/lib/scylla/

install -m644 dist/common/sysconfig/scylla-jmx $RPM_BUILD_ROOT%{_sysconfdir}/sysconfig/
install -m644 build/*.service $RPM_BUILD_ROOT%{_unitdir}/
install -d -m755 $RPM_BUILD_ROOT%{_prefix}/lib/scylla
install -d -m755 $RPM_BUILD_ROOT%{_prefix}/lib/scylla/jmx
install -d -m755 $RPM_BUILD_ROOT%{_prefix}/lib/scylla/jmx/symlinks
install -m644 target/scylla-jmx-1.0.jar $RPM_BUILD_ROOT%{_prefix}/lib/scylla/jmx/
install -m755 scripts/scylla-jmx $RPM_BUILD_ROOT%{_prefix}/lib/scylla/jmx
ln -sf /usr/bin/java $RPM_BUILD_ROOT%{_prefix}/lib/scylla/jmx/symlinks/scylla-jmx

%pre
/usr/sbin/groupadd scylla 2> /dev/null || :
/usr/sbin/useradd -g scylla -s /sbin/nologin -r -d ${_sharedstatedir}/scylla scylla 2> /dev/null || :
ping -c1 `hostname` > /dev/null 2>&1
if [ $? -ne 0 ]; then
echo
echo "**************************************************************"
echo "* WARNING: You need to add hostname on /etc/hosts, otherwise *"
echo "* scylla-jmx will not able to start up. *"
echo "**************************************************************"
echo
fi

%post
%systemd_post scylla-jmx.service

%preun
%systemd_preun scylla-jmx.service

%postun
%systemd_postun

%clean
rm -rf $RPM_BUILD_ROOT


%files
%defattr(-,root,root)

%config(noreplace) %{_sysconfdir}/sysconfig/scylla-jmx
%{_unitdir}/scylla-jmx.service
%{_prefix}/lib/scylla/jmx/scylla-jmx
%{_prefix}/lib/scylla/jmx/scylla-jmx-1.0.jar
%{_prefix}/lib/scylla/jmx/symlinks/scylla-jmx

%changelog
* Fri Aug 7 2015 Takuya ASADA Takuya ASADA <syuu@cloudius-systems.com>
- inital version of scylla-tools.spec
install-dependencies.sh (file removed, 26 lines)
#!/bin/bash
#
# This file is open source software, licensed to you under the terms
# of the Apache License, Version 2.0 (the "License"). See the NOTICE file
# distributed with this work for additional information regarding copyright
# ownership. You may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

. /etc/os-release

if [ "$ID" = "ubuntu" ] || [ "$ID" = "debian" ]; then
  apt -y install maven openjdk-8-jdk-headless
elif [ "$ID" = "fedora" ] || [ "$ID" = "centos" ]; then
  dnf install -y maven java-1.8.0-openjdk-devel
fi
install.sh (file removed, 173 lines)
#!/bin/bash
#
# Copyright (C) 2019 ScyllaDB
#

#
# This file is part of Scylla.
#
# Scylla is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scylla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scylla. If not, see <http://www.gnu.org/licenses/>.
#

set -e

print_usage() {
  cat <<EOF
Usage: install.sh [options]

Options:
  --root /path/to/root          alternative install root (default /)
  --prefix /prefix              directory prefix (default /usr)
  --nonroot                     shortcut of '--disttype nonroot'
  --sysconfdir /etc/sysconfig   specify sysconfig directory name
  --packaging                   use install.sh for packaging
  --without-systemd             skip installing systemd units
  --help                        this helpful message
EOF
  exit 1
}

root=/
sysconfdir=/etc/sysconfig
nonroot=false
packaging=false
without_systemd=false

while [ $# -gt 0 ]; do
  case "$1" in
    "--root")
      root="$2"
      shift 2
      ;;
    "--prefix")
      prefix="$2"
      shift 2
      ;;
    "--nonroot")
      nonroot=true
      shift 1
      ;;
    "--sysconfdir")
      sysconfdir="$2"
      shift 2
      ;;
    "--packaging")
      packaging=true
      shift 1
      ;;
    "--without-systemd")
      without_systemd=true
      shift 1
      ;;
    "--help")
      shift 1
      print_usage
      ;;
    *)
      print_usage
      ;;
  esac
done

check_usermode_support() {
  user=$(systemctl --help|grep -e '--user')
  [ -n "$user" ]
}

if ! $packaging; then
  has_java=false
  if [ -x /usr/bin/java ]; then
    javaver=$(/usr/bin/java -version 2>&1|head -n1|cut -f 3 -d " ")
    has_java=true
  fi
  if ! $has_java; then
    echo "Please install openjdk-8, openjdk-11, or openjdk-17 before running install.sh."
    exit 1
  fi
fi

if [ -z "$prefix" ]; then
  if $nonroot; then
    prefix=~/scylladb
  else
    prefix=/opt/scylladb
  fi
fi

rprefix=$(realpath -m "$root/$prefix")
if ! $nonroot; then
  retc="$root/etc"
  rsysconfdir="$root/$sysconfdir"
  rusr="$root/usr"
  rsystemd="$rusr/lib/systemd/system"
else
  retc="$rprefix/etc"
  rsysconfdir="$rprefix/$sysconfdir"
  rsystemd="$HOME/.config/systemd/user"
fi

install -d -m755 "$rsysconfdir"
if ! $without_systemd; then
  install -d -m755 "$rsystemd"
fi
install -d -m755 "$rprefix/scripts" "$rprefix/jmx" "$rprefix/jmx/symlinks"

install -m644 dist/common/sysconfig/scylla-jmx -Dt "$rsysconfdir"
if ! $without_systemd; then
  install -m644 dist/common/systemd/scylla-jmx.service -Dt "$rsystemd"
fi
if ! $nonroot && ! $without_systemd; then
  if [ "$sysconfdir" != "/etc/sysconfig" ]; then
    install -d -m755 "$retc"/systemd/system/scylla-jmx.service.d
    cat << EOS > "$retc"/systemd/system/scylla-jmx.service.d/sysconfdir.conf
[Service]
EnvironmentFile=
EnvironmentFile=$sysconfdir/scylla-jmx
EOS
  fi
elif ! $without_systemd; then
  install -d -m755 "$rsystemd"/scylla-jmx.service.d
  cat << EOS > "$rsystemd"/scylla-jmx.service.d/nonroot.conf
[Service]
EnvironmentFile=
EnvironmentFile=$retc/sysconfig/scylla-jmx
ExecStart=
ExecStart=$rprefix/jmx/scylla-jmx \$SCYLLA_JMX_PORT \$SCYLLA_API_PORT \$SCYLLA_API_ADDR \$SCYLLA_JMX_ADDR \$SCYLLA_JMX_FILE \$SCYLLA_JMX_LOCAL \$SCYLLA_JMX_REMOTE \$SCYLLA_JMX_DEBUG
User=
Group=
WorkingDirectory=$rprefix
EOS
fi

install -m644 scylla-jmx-1.1.jar "$rprefix/jmx"
install -m755 scylla-jmx "$rprefix/jmx"
ln -sf /usr/bin/java "$rprefix/jmx/symlinks/scylla-jmx"
if ! $nonroot; then
  install -m755 -d "$rusr"/lib/scylla/jmx/symlinks
  ln -srf "$rprefix"/jmx/scylla-jmx-1.1.jar "$rusr"/lib/scylla/jmx/
  ln -srf "$rprefix"/jmx/scylla-jmx "$rusr"/lib/scylla/jmx/
  ln -sf /usr/bin/java "$rusr"/lib/scylla/jmx/symlinks/scylla-jmx
fi

if $nonroot; then
  sed -i -e "s#/var/lib/scylla#$rprefix#g" "$rsysconfdir"/scylla-jmx
  sed -i -e "s#/etc/scylla#$rprefix/etc/scylla#g" "$rsysconfdir"/scylla-jmx
  sed -i -e "s#/opt/scylladb/jmx#$rprefix/jmx#g" "$rsysconfdir"/scylla-jmx
  if ! $without_systemd && check_usermode_support; then
    systemctl --user daemon-reload
  fi
  echo "Scylla-JMX non-root install completed."
elif ! $without_systemd && ! $packaging; then
  systemctl --system daemon-reload
fi
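For reference, two hedged examples of how the removed master-side install.sh is driven elsewhere in this diff: packaging mode (as in dist/debian/debian/rules) and a non-root user install. Paths are illustrative.

```console
# packaging mode, staging into a package build root:
$ ./install.sh --packaging --root "$PWD/debian/tmp" --sysconfdir /etc/default
# non-root install into ~/scylladb; the script reloads the user systemd units itself:
$ ./install.sh --nonroot
```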
118
pom.xml
118
pom.xml
@ -2,81 +2,87 @@
|
|||||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||||
<modelVersion>4.0.0</modelVersion>
|
<modelVersion>4.0.0</modelVersion>
|
||||||
|
|
||||||
|
<groupId>com.scylladb.jmx</groupId>
|
||||||
<artifactId>scylla-jmx</artifactId>
|
<artifactId>scylla-jmx</artifactId>
|
||||||
<version>1.1</version>
|
<version>1.0</version>
|
||||||
<packaging>jar</packaging>
|
<packaging>jar</packaging>
|
||||||
|
|
||||||
<parent>
|
|
||||||
<groupId>it.cavallium.scylladb.jmx</groupId>
|
|
||||||
<artifactId>scylla-jmx-parent</artifactId>
|
|
||||||
<version>1.1</version>
|
|
||||||
<relativePath>./scylla-jmx-parent/pom.xml</relativePath>
|
|
||||||
</parent>
|
|
||||||
|
|
||||||
<name>Scylla JMX</name>
|
<name>Scylla JMX</name>
|
||||||
|
|
||||||
|
<properties>
|
||||||
|
<maven.compiler.target>1.8</maven.compiler.target>
|
||||||
|
<maven.compiler.source>1.8</maven.compiler.source>
|
||||||
|
</properties>
|
||||||
|
|
||||||
<dependencies>
|
<dependencies>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>it.cavallium.scylladb.jmx</groupId>
|
<groupId>org.yaml</groupId>
|
||||||
<artifactId>scylla-apiclient</artifactId>
|
<artifactId>snakeyaml</artifactId>
|
||||||
<version>1.1</version>
|
<version>1.16</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.glassfish.jersey.core</groupId>
|
||||||
|
<artifactId>jersey-common</artifactId>
|
||||||
|
<version>2.22.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>javax.ws.rs</groupId>
|
||||||
|
<artifactId>javax.ws.rs-api</artifactId>
|
||||||
|
<version>2.0.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>javax.ws.rs</groupId>
|
||||||
|
<artifactId>jsr311-api</artifactId>
|
||||||
|
<version>1.1.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.glassfish.jersey.core</groupId>
|
||||||
|
<artifactId>jersey-client</artifactId>
|
||||||
|
<version>2.22.1</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.slf4j</groupId>
|
||||||
|
<artifactId>slf4j-api</artifactId>
|
||||||
|
<version>1.7.5</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.slf4j</groupId>
|
||||||
|
<artifactId>slf4j-simple</artifactId>
|
||||||
|
<version>1.6.4</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.slf4j</groupId>
|
||||||
|
<artifactId>slf4j-log4j12</artifactId>
|
||||||
|
<version>1.7.5</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>junit</groupId>
|
<groupId>junit</groupId>
|
||||||
<artifactId>junit</artifactId>
|
<artifactId>junit</artifactId>
|
||||||
<version>4.13.1</version>
|
<version>4.8.2</version>
|
||||||
<scope>test</scope>
|
<scope>test</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>org.glassfish</groupId>
|
||||||
|
<artifactId>javax.json</artifactId>
|
||||||
|
<version>1.0.4</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.google.guava</groupId>
|
||||||
|
<artifactId>guava</artifactId>
|
||||||
|
<version>18.0</version>
|
||||||
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>com.google.collections</groupId>
|
||||||
|
<artifactId>google-collections</artifactId>
|
||||||
|
<version>1.0</version>
|
||||||
|
</dependency>
|
||||||
</dependencies>
|
</dependencies>
|
||||||
<build>
|
<build>
|
||||||
<plugins>
|
<plugins>
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-compiler-plugin</artifactId>
|
|
||||||
<version>3.10.1</version>
|
|
||||||
<configuration>
|
|
||||||
<source>11</source>
|
|
||||||
<target>11</target>
|
|
||||||
<compilerArgs>
|
|
||||||
<arg>--add-exports</arg>
|
|
||||||
<arg>java.management/com.sun.jmx.mbeanserver=scylla.jmx</arg>
|
|
||||||
<arg>--add-exports</arg>
|
|
||||||
<arg>java.management/com.sun.jmx.interceptor=scylla.jmx</arg>
|
|
||||||
</compilerArgs>
|
|
||||||
</configuration>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
<artifactId>maven-shade-plugin</artifactId>
|
<artifactId>maven-shade-plugin</artifactId>
|
||||||
<version>3.4.1</version>
|
<version>2.4.1</version>
|
||||||
<configuration>
|
|
||||||
<artifactSet>
|
|
||||||
<includes>
|
|
||||||
<include>*:*</include>
|
|
||||||
</includes>
|
|
||||||
<excludes>
|
|
||||||
<exclude>com.sun.activation:jakarta.activation</exclude>
|
|
||||||
</excludes>
|
|
||||||
</artifactSet>
|
|
||||||
<filters>
|
|
||||||
<filter>
|
|
||||||
<artifact>*:*</artifact>
|
|
||||||
<excludes>
|
|
||||||
<exclude>module-info.class</exclude>
|
|
||||||
<exclude>META-INF/versions/*/module-info.class</exclude>
|
|
||||||
<exclude>META-INF/*.SF</exclude>
|
|
||||||
<exclude>META-INF/*.DSA</exclude>
|
|
||||||
<exclude>META-INF/*.RSA</exclude>
|
|
||||||
<exclude>META-INF/MANIFEST.MF</exclude>
|
|
||||||
<exclude>META-INF/*.MD</exclude>
|
|
||||||
<exclude>META-INF/*.md</exclude>
|
|
||||||
<exclude>META-INF/LICENSE</exclude>
|
|
||||||
<exclude>META-INF/LICENSE.txt</exclude>
|
|
||||||
<exclude>META-INF/NOTICE</exclude>
|
|
||||||
</excludes>
|
|
||||||
</filter>
|
|
||||||
</filters>
|
|
||||||
</configuration>
|
|
||||||
<executions>
|
<executions>
|
||||||
<execution>
|
<execution>
|
||||||
<phase>package</phase>
|
<phase>package</phase>
|
||||||
|
@ -1,42 +0,0 @@
|
|||||||
#!/bin/bash -e
|
|
||||||
|
|
||||||
print_usage() {
|
|
||||||
echo "build_deb.sh --reloc-pkg build/scylla-jmx-package.tar.gz"
|
|
||||||
echo " --reloc-pkg specify relocatable package path"
|
|
||||||
echo " --builddir specify Debian package build path"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
|
|
||||||
RELOC_PKG=build/scylla-jmx-package.tar.gz
|
|
||||||
BUILDDIR=build/debian
|
|
||||||
while [ $# -gt 0 ]; do
|
|
||||||
case "$1" in
|
|
||||||
"--reloc-pkg")
|
|
||||||
RELOC_PKG=$2
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
"--builddir")
|
|
||||||
BUILDDIR="$2"
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
print_usage
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
RELOC_PKG=$(readlink -f $RELOC_PKG)
|
|
||||||
rm -rf "$BUILDDIR"/scylla-package "$BUILDDIR"/scylla-package.orig "$BUILDDIR"/debian
|
|
||||||
mkdir -p "$BUILDDIR"/scylla-package
|
|
||||||
tar -C "$BUILDDIR"/scylla-package -xpf $RELOC_PKG
|
|
||||||
cd "$BUILDDIR"/scylla-package
|
|
||||||
|
|
||||||
RELOC_PKG=$(readlink -f $RELOC_PKG)
|
|
||||||
|
|
||||||
mv scylla-jmx/debian debian
|
|
||||||
PKG_NAME=$(dpkg-parsechangelog --show-field Source)
|
|
||||||
# XXX: Drop revision number from version string.
|
|
||||||
# Since it always '1', this should be okay for now.
|
|
||||||
PKG_VERSION=$(dpkg-parsechangelog --show-field Version |sed -e 's/-1$//')
|
|
||||||
ln -fv $RELOC_PKG ../"$PKG_NAME"_"$PKG_VERSION".orig.tar.gz
|
|
||||||
debuild -rfakeroot -us -uc
|
|
@ -1,70 +0,0 @@
|
|||||||
#!/bin/bash -e
|
|
||||||
|
|
||||||
. /etc/os-release
|
|
||||||
|
|
||||||
print_usage() {
|
|
||||||
echo "build_reloc.sh --clean --nodeps"
|
|
||||||
echo " --clean clean build directory"
|
|
||||||
echo " --nodeps skip installing dependencies"
|
|
||||||
echo " --version V product-version-release string (overriding SCYLLA-VERSION-GEN)"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
|
|
||||||
CLEAN=
|
|
||||||
NODEPS=
|
|
||||||
VERSION_OVERRIDE=
|
|
||||||
while [ $# -gt 0 ]; do
|
|
||||||
case "$1" in
|
|
||||||
"--clean")
|
|
||||||
CLEAN=yes
|
|
||||||
shift 1
|
|
||||||
;;
|
|
||||||
"--nodeps")
|
|
||||||
NODEPS=yes
|
|
||||||
shift 1
|
|
||||||
;;
|
|
||||||
"--version")
|
|
||||||
VERSION_OVERRIDE="$2"
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
print_usage
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
VERSION=$(./SCYLLA-VERSION-GEN ${VERSION_OVERRIDE:+ --version "$VERSION_OVERRIDE"})
|
|
||||||
# the former command should generate build/SCYLLA-PRODUCT-FILE and some other version
|
|
||||||
# related files
|
|
||||||
PRODUCT=`cat build/SCYLLA-PRODUCT-FILE`
|
|
||||||
DEST="build/$PRODUCT-jmx-$VERSION.noarch.tar.gz"
|
|
||||||
|
|
||||||
is_redhat_variant() {
|
|
||||||
[ -f /etc/redhat-release ]
|
|
||||||
}
|
|
||||||
is_debian_variant() {
|
|
||||||
[ -f /etc/debian_version ]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
if [ ! -e reloc/build_reloc.sh ]; then
|
|
||||||
echo "run build_reloc.sh in top of scylla dir"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "$CLEAN" = "yes" ]; then
|
|
||||||
rm -rf build target
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -f "$DEST" ]; then
|
|
||||||
rm "$DEST"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "$NODEPS" ]; then
|
|
||||||
sudo ./install-dependencies.sh
|
|
||||||
fi
|
|
||||||
|
|
||||||
mvn -B --file scylla-jmx-parent/pom.xml install
|
|
||||||
./SCYLLA-VERSION-GEN ${VERSION_OVERRIDE:+ --version "$VERSION_OVERRIDE"}
|
|
||||||
./dist/debian/debian_files_gen.py
|
|
||||||
scripts/create-relocatable-package.py "$DEST"
|
|
@ -1,52 +0,0 @@
|
|||||||
#!/bin/bash -e
|
|
||||||
|
|
||||||
print_usage() {
|
|
||||||
echo "build_rpm.sh --reloc-pkg build/scylla-jmx-package.tar.gz"
|
|
||||||
echo " --reloc-pkg specify relocatable package path"
|
|
||||||
echo " --builddir specify rpmbuild directory"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
RELOC_PKG=build/scylla-jmx-package.tar.gz
|
|
||||||
BUILDDIR=build/redhat
|
|
||||||
while [ $# -gt 0 ]; do
|
|
||||||
case "$1" in
|
|
||||||
"--reloc-pkg")
|
|
||||||
RELOC_PKG=$2
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
"--builddir")
|
|
||||||
BUILDDIR="$2"
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
print_usage
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
RELOC_PKG=$(readlink -f $RELOC_PKG)
|
|
||||||
RPMBUILD=$(readlink -f $BUILDDIR)
|
|
||||||
mkdir -p "$BUILDDIR"
|
|
||||||
tar -C "$BUILDDIR" -xpf $RELOC_PKG scylla-jmx/SCYLLA-RELEASE-FILE scylla-jmx/SCYLLA-RELOCATABLE-FILE scylla-jmx/SCYLLA-VERSION-FILE scylla-jmx/SCYLLA-PRODUCT-FILE scylla-jmx/dist/redhat
|
|
||||||
cd "$BUILDDIR"/scylla-jmx
|
|
||||||
|
|
||||||
RELOC_PKG_BASENAME=$(basename "$RELOC_PKG")
|
|
||||||
SCYLLA_VERSION=$(cat SCYLLA-VERSION-FILE)
|
|
||||||
SCYLLA_RELEASE=$(cat SCYLLA-RELEASE-FILE)
|
|
||||||
VERSION=$SCYLLA_VERSION-$SCYLLA_RELEASE
|
|
||||||
PRODUCT=$(cat SCYLLA-PRODUCT-FILE)
|
|
||||||
|
|
||||||
mkdir -p $RPMBUILD/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}
|
|
||||||
|
|
||||||
ln -fv $RELOC_PKG $RPMBUILD/SOURCES/
|
|
||||||
|
|
||||||
parameters=(
|
|
||||||
-D"version $SCYLLA_VERSION"
|
|
||||||
-D"release $SCYLLA_RELEASE"
|
|
||||||
-D"product $PRODUCT"
|
|
||||||
-D"reloc_pkg $RELOC_PKG_BASENAME"
|
|
||||||
)
|
|
||||||
|
|
||||||
cp dist/redhat/scylla-jmx.spec $RPMBUILD/SPECS
|
|
||||||
# this rpm can be install on both fedora / centos7, so drop distribution name from the file name
|
|
||||||
rpmbuild -ba "${parameters[@]}" --define '_binary_payload w2.xzdio' --define "_topdir $RPMBUILD" --undefine "dist" $RPMBUILD/SPECS/scylla-jmx.spec
|
|
@ -1,64 +0,0 @@
|
|||||||
#!/usr/bin/python3
|
|
||||||
|
|
||||||
#
|
|
||||||
# Copyright (C) 2018 ScyllaDB
|
|
||||||
#
|
|
||||||
|
|
||||||
#
|
|
||||||
# This file is part of Scylla.
|
|
||||||
#
|
|
||||||
# Scylla is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# Scylla is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with Scylla. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
#
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
import tarfile
|
|
||||||
import pathlib
|
|
||||||
|
|
||||||
RELOC_PREFIX='scylla-jmx'
|
|
||||||
def reloc_add(self, name, arcname=None, recursive=True, *, filter=None):
|
|
||||||
if arcname:
|
|
||||||
return self.add(name, arcname="{}/{}".format(RELOC_PREFIX, arcname))
|
|
||||||
else:
|
|
||||||
return self.add(name, arcname="{}/{}".format(RELOC_PREFIX, name))
|
|
||||||
|
|
||||||
tarfile.TarFile.reloc_add = reloc_add
|
|
||||||
|
|
||||||
ap = argparse.ArgumentParser(description='Create a relocatable scylla package.')
|
|
||||||
ap.add_argument('dest',
|
|
||||||
help='Destination file (tar format)')
|
|
||||||
|
|
||||||
args = ap.parse_args()
|
|
||||||
|
|
||||||
output = args.dest
|
|
||||||
|
|
||||||
ar = tarfile.open(output, mode='w|gz')
|
|
||||||
# relocatable package format version = 2.2
|
|
||||||
with open('build/.relocatable_package_version', 'w') as f:
|
|
||||||
f.write('2.2\n')
|
|
||||||
ar.add('build/.relocatable_package_version', arcname='.relocatable_package_version')
|
|
||||||
|
|
||||||
pathlib.Path('build/SCYLLA-RELOCATABLE-FILE').touch()
|
|
||||||
ar.reloc_add('build/SCYLLA-RELOCATABLE-FILE', arcname='SCYLLA-RELOCATABLE-FILE')
|
|
||||||
ar.reloc_add('build/SCYLLA-RELEASE-FILE', arcname='SCYLLA-RELEASE-FILE')
|
|
||||||
ar.reloc_add('build/SCYLLA-VERSION-FILE', arcname='SCYLLA-VERSION-FILE')
|
|
||||||
ar.reloc_add('build/SCYLLA-PRODUCT-FILE', arcname='SCYLLA-PRODUCT-FILE')
|
|
||||||
ar.reloc_add('dist')
|
|
||||||
ar.reloc_add('install.sh')
|
|
||||||
ar.reloc_add('target/scylla-jmx-1.1.jar', arcname='scylla-jmx-1.1.jar')
|
|
||||||
ar.reloc_add('scripts/scylla-jmx', arcname='scylla-jmx')
|
|
||||||
ar.reloc_add('README.md')
|
|
||||||
ar.reloc_add('NOTICE')
|
|
||||||
ar.reloc_add('build/debian/debian', arcname='debian')
|
|
@ -1,4 +1,4 @@
|
|||||||
#!/bin/bash
|
#!/bin/sh
|
||||||
#
|
#
|
||||||
# Copyright (C) 2015 Cloudius Systems, Ltd.
|
# Copyright (C) 2015 Cloudius Systems, Ltd.
|
||||||
|
|
||||||
@ -131,13 +131,10 @@ else
|
|||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
"$LOCATION_SCRIPTS"/symlinks/scylla-jmx $DEBUG \
|
exec "$LOCATION_SCRIPTS"/symlinks/scylla-jmx $DEBUG \
|
||||||
$API_PORT $API_ADDR $CONF_FILE -Xmx256m -XX:+UseSerialGC \
|
$API_PORT $API_ADDR $CONF_FILE -Xmx256m -XX:+UseSerialGC \
|
||||||
-XX:+HeapDumpOnOutOfMemoryError \
|
|
||||||
$JMX_AUTH $JMX_SSL $JMX_ADDR $JMX_LOCAL \
|
$JMX_AUTH $JMX_SSL $JMX_ADDR $JMX_LOCAL \
|
||||||
--add-exports java.management/com.sun.jmx.mbeanserver=ALL-UNNAMED \
|
|
||||||
--add-exports java.management/com.sun.jmx.interceptor=ALL-UNNAMED \
|
|
||||||
-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=$JMX_PORT \
|
-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=$JMX_PORT \
|
||||||
-Djava.rmi.server.hostname=$HOSTNAME -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT \
|
-Djava.rmi.server.hostname=$HOSTNAME -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT \
|
||||||
-Djavax.management.builder.initial=com.scylladb.jmx.utils.APIBuilder \
|
-Djavax.management.builder.initial=com.scylladb.jmx.utils.APIBuilder \
|
||||||
$PROPERTIES -jar $LOCATION/scylla-jmx-1.1.jar
|
$PROPERTIES -jar $LOCATION/scylla-jmx-1.0.jar
|
||||||
|
@ -1,99 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <artifactId>scylla-apiclient</artifactId>
    <packaging>jar</packaging>
    <version>1.1</version>

    <parent>
        <relativePath>../scylla-jmx-parent/pom.xml</relativePath>
        <groupId>it.cavallium.scylladb.jmx</groupId>
        <artifactId>scylla-jmx-parent</artifactId>
        <version>1.1</version>
    </parent>

    <name>Scylla REST API client</name>

    <properties>
        <jackson.version>2.14.0</jackson.version>
        <jackson.databind.version>2.14.0</jackson.databind.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.eclipse.parsson</groupId>
            <artifactId>parsson</artifactId>
            <version>1.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.yaml</groupId>
            <artifactId>snakeyaml</artifactId>
            <version>1.33</version>
        </dependency>
        <dependency>
            <groupId>org.glassfish.jersey.core</groupId>
            <artifactId>jersey-common</artifactId>
            <version>3.1.0</version>
        </dependency>
        <dependency>
            <groupId>jakarta.ws.rs</groupId>
            <artifactId>jakarta.ws.rs-api</artifactId>
            <version>3.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.glassfish.jersey.core</groupId>
            <artifactId>jersey-client</artifactId>
            <version>3.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.glassfish.jersey.inject</groupId>
            <artifactId>jersey-hk2</artifactId>
            <version>3.1.0</version>
        </dependency>
        <dependency>
            <groupId>jakarta.json</groupId>
            <artifactId>jakarta.json-api</artifactId>
            <version>2.1.1</version>
        </dependency>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>31.1-jre</version>
        </dependency>
        <dependency>
            <groupId>jakarta.activation</groupId>
            <artifactId>jakarta.activation-api</artifactId>
            <version>2.1.1</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-annotations</artifactId>
            <version>${jackson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>${jackson.databind.version}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.jakarta.rs</groupId>
            <artifactId>jackson-jakarta-rs-json-provider</artifactId>
            <version>2.14.1</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.10.1</version>
                <configuration>
                    <release>11</release>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>
@ -1,15 +0,0 @@
module scylla.apiclient {
    exports com.scylladb.jmx.api;
    exports com.scylladb.jmx.api.utils;
    requires org.eclipse.parsson;
    requires jakarta.ws.rs;
    requires com.fasterxml.jackson.jakarta.rs.json;
    requires jersey.client;
    requires java.logging;
    requires jakarta.json;
    requires java.management;
    requires org.yaml.snakeyaml;
    requires com.google.common;
    requires jersey.common;
    requires jersey.hk2;
}
@ -1,29 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>it.cavallium.scylladb.jmx</groupId>
    <artifactId>scylla-jmx-parent</artifactId>
    <version>1.1</version>
    <packaging>pom</packaging>

    <modules>
        <module>../</module>
        <module>../scylla-apiclient</module>
    </modules>

    <name>Scylla JMX Parent</name>

    <distributionManagement>
        <repository>
            <id>mchv-release-distribution</id>
            <name>MCHV Release Apache Maven Packages Distribution</name>
            <url>https://mvn.mchv.eu/repository/mchv</url>
        </repository>
        <snapshotRepository>
            <id>mchv-snapshot-distribution</id>
            <name>MCHV Snapshot Apache Maven Packages Distribution</name>
            <url>https://mvn.mchv.eu/repository/mchv-snapshot</url>
        </snapshotRepository>
    </distributionManagement>
</project>
@ -3,24 +3,7 @@
  */
 package com.scylladb.jmx.api;
 
-import com.fasterxml.jackson.jakarta.rs.json.JacksonJsonProvider;
-import jakarta.json.Json;
-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
-import jakarta.json.JsonReader;
-import jakarta.json.JsonReaderFactory;
-import jakarta.json.JsonString;
-import jakarta.ws.rs.ProcessingException;
-import jakarta.ws.rs.client.Client;
-import jakarta.ws.rs.client.ClientBuilder;
-import jakarta.ws.rs.client.Entity;
-import jakarta.ws.rs.client.Invocation;
-import jakarta.ws.rs.client.WebTarget;
-import jakarta.ws.rs.core.MediaType;
-import jakarta.ws.rs.core.MultivaluedMap;
-import jakarta.ws.rs.core.Response;
 import java.io.StringReader;
-import java.lang.System.Logger.Level;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@ -35,28 +18,30 @@ import java.util.Set;
 import java.util.function.BiFunction;
 import java.util.logging.Logger;
 
+import javax.json.Json;
+import javax.json.JsonArray;
+import javax.json.JsonObject;
+import javax.json.JsonReader;
+import javax.json.JsonReaderFactory;
+import javax.json.JsonString;
 import javax.management.openmbean.TabularData;
 import javax.management.openmbean.TabularDataSupport;
+import javax.ws.rs.ProcessingException;
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.Invocation;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.Response;
 
 import org.glassfish.jersey.client.ClientConfig;
 
-import com.scylladb.jmx.api.utils.SnapshotDetailsTabularData;
+import com.scylladb.jmx.utils.SnapshotDetailsTabularData;
 
 public class APIClient {
     private Map<String, CacheEntry> cache = new HashMap<String, CacheEntry>();
-    private final APIConfig config;
-    private final ClientConfig clientConfig;
-    private final Client client;
-    private JsonReaderFactory factory = Json.createReaderFactory(null);
-
-    private static final Logger logger = Logger.getLogger(APIClient.class.getName());
-
-    public APIClient(APIConfig config) {
-        this.config = config;
-        this.clientConfig = new ClientConfig();
-        clientConfig.register(new JacksonJsonProvider());
-        this.client = ClientBuilder.newClient(clientConfig);
-    }
 
     private String getCacheKey(String key, MultivaluedMap<String, String> param, long duration) {
         if (duration <= 0) {
@ -89,12 +74,21 @@ public class APIClient {
         return (value != null && value.valid(duration)) ? value.jsonObject() : null;
     }
 
+    private JsonReaderFactory factory = Json.createReaderFactory(null);
+    private static final Logger logger = Logger.getLogger(APIClient.class.getName());
+
+    private final APIConfig config;
+
+    public APIClient(APIConfig config) {
+        this.config = config;
+    }
+
     private String getBaseUrl() {
         return config.getBaseUrl();
     }
 
     public Invocation.Builder get(String path, MultivaluedMap<String, String> queryParams) {
+        Client client = ClientBuilder.newClient(new ClientConfig());
         WebTarget webTarget = client.target(getBaseUrl()).path(path);
         if (queryParams != null) {
             for (Entry<String, List<String>> qp : queryParams.entrySet()) {
@ -158,11 +152,7 @@ public class APIClient {
             get(path, queryParams).delete();
             return;
         }
-        Response response = get(path).delete();
-        if (response.getStatus() != Response.Status.OK.getStatusCode()) {
-            throw getException("Scylla API server HTTP delete to URL '" + path + "' failed",
-                    response.readEntity(String.class));
-        }
+        get(path).delete();
     }
 
     public void delete(String path) {
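The import change above is purely a namespace migration (master has moved the JSON-P and JAX-RS types to the Jakarta packages, branch-2.1 still uses the javax ones); the reader API used by APIClient is the same on both. A minimal hedged sketch of that reader pattern, assuming only a JSON-P implementation such as Parsson is on the classpath (the payload is made up):

```java
import java.io.StringReader;

import jakarta.json.Json;            // on branch-2.1 this would be javax.json.Json
import jakarta.json.JsonObject;
import jakarta.json.JsonReaderFactory;

public class JsonReadSketch {
    public static void main(String[] args) {
        // Same shape as APIClient: one shared reader factory, one reader per payload.
        JsonReaderFactory factory = Json.createReaderFactory(null);
        JsonObject obj = factory.createReader(new StringReader("{\"release\":\"1.1\"}")).readObject();
        System.out.println(obj.getString("release"));
    }
}
```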
@ -21,8 +21,7 @@
 
 package com.scylladb.jmx.api;
 
-import jakarta.json.JsonObject;
+import javax.json.JsonObject;
 
 class CacheEntry {
     private long time;
@ -27,27 +27,12 @@ import com.scylladb.jmx.api.APIConfig;
 import com.scylladb.jmx.metrics.APIMBean;
 
 public class Main {
-    private static APIConfig config;
-    private static APIClient client;
-
-    public static synchronized APIConfig getApiConfig() {
-        if (config == null) {
-            config = new APIConfig();
-        }
-        return config;
-    }
-
-    public static synchronized APIClient getApiClient() {
-        if (client == null) {
-            client = new APIClient(getApiConfig());
-        }
-        return client;
-    }
+    // todo: command line options. Make us an agent class (also)
+    private static final APIConfig config = new APIConfig();
+    public static final APIClient client = new APIClient(config);
 
     public static void main(String[] args) throws Exception {
-        System.out.printf("Java %s%n", System.getProperty("java.version"));
-        System.out.printf("Connecting to %s%n", getApiConfig().getBaseUrl());
+        System.out.println("Connecting to " + config.getBaseUrl());
         System.out.println("Starting the JMX server");
 
         MBeanServer server = getPlatformMBeanServer();
@ -55,7 +40,7 @@ public class Main {
                 CommitLog.class, Gossiper.class, EndpointSnitchInfo.class, FailureDetector.class, CacheService.class,
                 CompactionManager.class, GCInspector.class, StreamManager.class)) {
             Constructor<? extends APIMBean> c = clazz.getDeclaredConstructor(APIClient.class);
-            APIMBean m = c.newInstance(getApiClient());
+            APIMBean m = c.newInstance(client);
             server.registerMBean(m, null);
         }
 
@ -1,7 +1,6 @@
 package com.scylladb.jmx.metrics;
 
 import java.lang.reflect.Field;
-import java.util.EnumSet;
 import java.util.Set;
 import java.util.function.Function;
 import java.util.function.Predicate;
@ -21,7 +20,6 @@ import javax.management.ObjectName;
 import javax.management.QueryExp;
 
 import com.scylladb.jmx.api.APIClient;
-import com.sun.jmx.mbeanserver.JmxMBeanServer;
 
 /**
  * Base type for MBeans in scylla-jmx. Wraps auto naming and {@link APIClient}
@ -56,39 +54,35 @@ public class APIMBean implements MBeanRegistration {
      * @param generator
      *            {@link Function} to create a new MBean instance for a given
      *            {@link ObjectName}
+     *
      * @return
      * @throws MalformedObjectNameException
      */
-    public static boolean checkRegistration(JmxMBeanServer server, Set<ObjectName> all,
-            EnumSet<RegistrationMode> mode, final Predicate<ObjectName> predicate,
-            Function<ObjectName, Object> generator) throws MalformedObjectNameException {
+    public static boolean checkRegistration(MBeanServer server, Set<ObjectName> all,
+            final Predicate<ObjectName> predicate, Function<ObjectName, Object> generator)
+            throws MalformedObjectNameException {
         Set<ObjectName> registered = queryNames(server, predicate);
-        if (mode.contains(RegistrationMode.Remove)) {
-            for (ObjectName name : registered) {
-                if (!all.contains(name)) {
-                    try {
-                        server.getMBeanServerInterceptor().unregisterMBean(name);
-                    } catch (MBeanRegistrationException | InstanceNotFoundException e) {
-                    }
-                }
-            }
-        }
+        for (ObjectName name : registered) {
+            if (!all.contains(name)) {
+                try {
+                    server.unregisterMBean(name);
+                } catch (MBeanRegistrationException | InstanceNotFoundException e) {
+                }
+            }
+        }
 
         int added = 0;
-        if (mode.contains(RegistrationMode.Add)) {
-            for (ObjectName name : all) {
-                if (!registered.contains(name)) {
-                    try {
-                        server.getMBeanServerInterceptor().registerMBean(generator.apply(name), name);
-                        added++;
-                    } catch (InstanceAlreadyExistsException | MBeanRegistrationException
-                            | NotCompliantMBeanException e) {
-                    }
-                }
-            }
-        }
-
+        for (ObjectName name : all) {
+            if (!registered.contains(name)) {
+                try {
+                    server.registerMBean(generator.apply(name), name);
+                    added++;
+                } catch (InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) {
+                }
+            }
+        }
         return added > 0;
     }
 
     /**
      * Helper method to query {@link ObjectName}s from an {@link MBeanServer}
@ -98,7 +92,7 @@ public class APIMBean implements MBeanRegistration {
      * @param predicate
      * @return
      */
-    public static Set<ObjectName> queryNames(JmxMBeanServer server, final Predicate<ObjectName> predicate) {
+    public static Set<ObjectName> queryNames(MBeanServer server, final Predicate<ObjectName> predicate) {
         @SuppressWarnings("serial")
         Set<ObjectName> registered = server.queryNames(null, new QueryExp() {
             @Override
@ -114,7 +108,7 @@ public class APIMBean implements MBeanRegistration {
         return registered;
     }
 
-    JmxMBeanServer server;
+    MBeanServer server;
     ObjectName name;
 
     protected final ObjectName getBoundName() {
@ -168,7 +162,7 @@ public class APIMBean implements MBeanRegistration {
         if (this.server != null) {
             throw new IllegalStateException("Can only exist in a single MBeanServer");
         }
-        this.server = (JmxMBeanServer) server;
+        this.server = server;
         if (name == null) {
             name = generateName();
         }
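checkRegistration is the generic "reconcile registered MBeans with what the REST API currently reports" helper used by ColumnFamilyStore and StreamingMetrics further down. A hedged usage sketch against the branch-2.1 signature; the keyspace, table names and the placeholder bean are made up for illustration and are not from the repository:

```java
import java.util.HashSet;
import java.util.Set;

import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;

import com.scylladb.jmx.metrics.APIMBean;

public class RegistrationSketch {
    // Hypothetical reconciliation pass: "wanted" is what the API reports right now.
    static void sync(MBeanServer server, Set<String> tables) throws MalformedObjectNameException {
        Set<ObjectName> wanted = new HashSet<>();
        for (String t : tables) {
            wanted.add(new ObjectName("org.apache.cassandra.db:type=Tables,keyspace=ks1,table=" + t));
        }
        APIMBean.checkRegistration(server, wanted,
                n -> "Tables".equals(n.getKeyProperty("type")),   // names this subsystem owns
                n -> new Object() /* placeholder; a real caller returns a compliant MBean */);
    }
}
```

Beans matching the predicate but missing from `wanted` are unregistered, missing ones are generated and registered, and the return value says whether anything was added.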
@ -3,11 +3,6 @@ package com.scylladb.jmx.metrics;
 import static java.util.Arrays.asList;
 
 import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
 import java.util.function.Predicate;
 import java.util.function.Supplier;
 
@ -21,7 +16,6 @@ import org.apache.cassandra.metrics.Metrics;
 import org.apache.cassandra.metrics.MetricsRegistry;
 
 import com.scylladb.jmx.api.APIClient;
-import com.sun.jmx.mbeanserver.JmxMBeanServer;
 
 /**
  * Base type for MBeans containing {@link Metrics}.
@ -30,9 +24,6 @@ import com.sun.jmx.mbeanserver.JmxMBeanServer;
  *
  */
 public abstract class MetricsMBean extends APIMBean {
-    private static final Map<JmxMBeanServer, Map<String, Integer>> registered = new HashMap<>();
-    private static final Object registrationLock = new Object();
-
     private final Collection<Metrics> metrics;
 
     public MetricsMBean(APIClient client, Metrics... metrics) {
@ -56,50 +47,13 @@ public abstract class MetricsMBean extends APIMBean {
         };
     }
 
-    // Has to be called with registrationLock hold
-    private static boolean shouldRegisterGlobals(JmxMBeanServer server, String domainAndType, boolean reversed) {
-        Map<String, Integer> serverMap = registered.get(server);
-        if (serverMap == null) {
-            assert !reversed;
-            serverMap = new HashMap<>();
-            serverMap.put(domainAndType, 1);
-            registered.put(server, serverMap);
-            return true;
-        }
-        Integer count = serverMap.get(domainAndType);
-        if (count == null) {
-            assert !reversed;
-            serverMap.put(domainAndType, 1);
-            return true;
-        }
-        if (reversed) {
-            --count;
-            if (count == 0) {
-                serverMap.remove(domainAndType);
-                if (serverMap.isEmpty()) {
-                    registered.remove(server);
-                }
-                return true;
-            }
-            serverMap.put(domainAndType, count);
-            return false;
-        } else {
-            serverMap.put(domainAndType, count + 1);
-        }
-        return false;
-    }
-
-    private void register(MetricsRegistry registry, JmxMBeanServer server, boolean reversed) throws MalformedObjectNameException {
+    private void register(MetricsRegistry registry, MBeanServer server) throws MalformedObjectNameException {
         // Check if we're the first/last of our type bound/removed.
-        synchronized (registrationLock) {
-            boolean registerGlobals = shouldRegisterGlobals(server, name.getDomain() + ":" + name.getKeyProperty("type"), reversed);
-            if (registerGlobals) {
-                for (Metrics m : metrics) {
-                    m.registerGlobals(registry);
-                }
-            }
-        }
+        boolean empty = queryNames(server, getTypePredicate()).isEmpty();
         for (Metrics m : metrics) {
+            if (empty) {
+                m.registerGlobals(registry);
+            }
             m.register(registry);
         }
     }
@ -109,7 +63,7 @@ public abstract class MetricsMBean extends APIMBean {
         // Get name etc.
         name = super.preRegister(server, name);
         // Register all metrics in server
-        register(new MetricsRegistry(client, (JmxMBeanServer) server), (JmxMBeanServer) server, false);
+        register(new MetricsRegistry(client, server), server);
         return name;
     }
 
@ -123,12 +77,12 @@ public abstract class MetricsMBean extends APIMBean {
                 public void register(Supplier<MetricMBean> s, ObjectName... objectNames) {
                     for (ObjectName name : objectNames) {
                         try {
-                            server.getMBeanServerInterceptor().unregisterMBean(name);
+                            server.unregisterMBean(name);
                         } catch (MBeanRegistrationException | InstanceNotFoundException e) {
                        }
                    }
                }
-            }, server, true);
+            }, server);
         } catch (MalformedObjectNameException e) {
             // TODO : log?
         }
@ -1,69 +0,0 @@
package com.scylladb.jmx.metrics;

import static com.scylladb.jmx.metrics.RegistrationMode.Remove;
import static com.scylladb.jmx.metrics.RegistrationMode.Wait;
import static java.util.EnumSet.allOf;
import static java.util.EnumSet.of;

import java.net.UnknownHostException;
import java.util.EnumSet;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import javax.management.OperationsException;

import com.scylladb.jmx.api.APIClient;
import com.sun.jmx.mbeanserver.JmxMBeanServer;

/**
 * Helper type to do optional locking for registration. Allows for
 * per-bind-point locks and registration, instead of per-type or per-instance
 * locks which may be misguiding, since for example one instance can be bound to
 * many MBeanServers etc.
 *
 * Also allows for polled checks, i.e. try-lock and either wait or skip. Wait,
 * because we probably should not repeat things hidden by this type too often,
 * and skip because for example a periodic task checking can just skip if a
 * user-initiated registration check is being done.
 *
 * @author calle
 *
 */
@SuppressWarnings("restriction")
public abstract class RegistrationChecker {
    private final Lock lock = new ReentrantLock();

    public static final EnumSet<RegistrationMode> REMOVE_NO_WAIT = of(Remove);
    public static final EnumSet<RegistrationMode> ADD_AND_REMOVE = allOf(RegistrationMode.class);

    public final void reap(APIClient client, JmxMBeanServer server) throws OperationsException, UnknownHostException {
        check(client, server, REMOVE_NO_WAIT);
    }

    public final void check(APIClient client, JmxMBeanServer server) throws OperationsException, UnknownHostException {
        check(client, server, ADD_AND_REMOVE);
    }

    public final void check(APIClient client, JmxMBeanServer server, EnumSet<RegistrationMode> mode)
            throws OperationsException, UnknownHostException {
        if (!lock.tryLock()) {
            if (mode.contains(Wait)) {
                // someone is doing update.
                // since this is jmx, and sloppy, we'll just
                // assume that once he is done, things are
                // good enough.
                lock.lock();
                lock.unlock();
            }
            return;
        }
        try {
            doCheck(client, server, mode);
        } finally {
            lock.unlock();
        }
    }

    protected abstract void doCheck(APIClient client, JmxMBeanServer server, EnumSet<RegistrationMode> mode)
            throws OperationsException, UnknownHostException;
}
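The javadoc of the class removed above describes a "try-lock, then either wait or skip" discipline per bind point. A minimal, self-contained sketch of that pattern, with names chosen for illustration rather than taken from the repository:

```java
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

public class TryLockSketch {
    private final Lock lock = new ReentrantLock();

    // A periodic caller passes wait=false and simply skips if a check is already running;
    // a user-initiated caller passes wait=true and blocks until the in-flight check finishes.
    void check(boolean wait, Runnable doCheck) {
        if (!lock.tryLock()) {
            if (wait) {
                lock.lock();   // wait for the other check...
                lock.unlock(); // ...and assume its result is fresh enough
            }
            return;
        }
        try {
            doCheck.run();
        } finally {
            lock.unlock();
        }
    }
}
```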
@ -1,5 +0,0 @@
package com.scylladb.jmx.metrics;

public enum RegistrationMode {
    Wait, Add, Remove,
}
@ -3,19 +3,7 @@ package com.scylladb.jmx.utils;
  * Copyright 2016 ScyllaDB
  */
 
-import static com.scylladb.jmx.main.Main.getApiClient;
-import static com.sun.jmx.mbeanserver.Util.wildmatch;
-import static java.util.logging.Level.SEVERE;
-import static javax.management.MBeanServerDelegate.DELEGATE_NAME;
-
-import java.security.AccessController;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.logging.Logger;
+import static com.scylladb.jmx.main.Main.client;
 
 /*
  * This file is part of Scylla.
@ -34,463 +22,14 @@ import java.util.logging.Logger;
  * along with Scylla. If not, see <http://www.gnu.org/licenses/>.
  */
 
-import javax.management.DynamicMBean;
-import javax.management.InstanceAlreadyExistsException;
-import javax.management.InstanceNotFoundException;
 import javax.management.MBeanServer;
 import javax.management.MBeanServerBuilder;
 import javax.management.MBeanServerDelegate;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.QueryExp;
-import javax.management.RuntimeOperationsException;
-
-import com.sun.jmx.interceptor.DefaultMBeanServerInterceptor;
-import com.sun.jmx.mbeanserver.JmxMBeanServer;
-import com.sun.jmx.mbeanserver.NamedObject;
-import com.sun.jmx.mbeanserver.Repository;
-
-/**
- * This class purposly knows way to much of the inner workings
- * of Oracle JDK MBeanServer workings, and pervert it for
- * performance sakes. It is not portable to other MBean implementations.
- *
- */
-@SuppressWarnings("restriction")
 public class APIBuilder extends MBeanServerBuilder {
-
-    private static final Logger logger = Logger.getLogger(APIBuilder.class.getName());
-
private static class TableRepository extends Repository {
|
|
||||||
private static final Logger logger = Logger.getLogger(TableRepository.class.getName());
|
|
||||||
|
|
||||||
private final Repository wrapped;
|
|
||||||
|
|
||||||
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
|
|
||||||
|
|
||||||
private final Map<TableMetricParams, DynamicMBean> tableMBeans = new HashMap<>();
|
|
||||||
|
|
||||||
private static boolean isTableMetricName(ObjectName name) {
|
|
||||||
return isTableMetricDomain(name.getDomain());
|
|
||||||
}
|
|
||||||
|
|
||||||
private static boolean isTableMetricDomain(String domain) {
|
|
||||||
return TableMetricParams.TABLE_METRICS_DOMAIN.equals(domain);
|
|
||||||
}
|
|
||||||
|
|
||||||
public TableRepository(String defaultDomain, final Repository repository) {
|
|
||||||
super(defaultDomain);
|
|
||||||
wrapped = repository;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getDefaultDomain() {
|
|
||||||
return wrapped.getDefaultDomain();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean contains(final ObjectName name) {
|
|
||||||
if (!isTableMetricName(name)) {
|
|
||||||
return wrapped.contains(name);
|
|
||||||
} else {
|
|
||||||
lock.readLock().lock();
|
|
||||||
try {
|
|
||||||
return tableMBeans.containsKey(new TableMetricParams(name));
|
|
||||||
} finally {
|
|
||||||
lock.readLock().unlock();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String[] getDomains() {
|
|
||||||
final String[] domains = wrapped.getDomains();
|
|
||||||
if (tableMBeans.isEmpty()) {
|
|
||||||
return domains;
|
|
||||||
}
|
|
||||||
final String[] res = new String[domains.length + 1];
|
|
||||||
System.arraycopy(domains, 0, res, 0, domains.length);
|
|
||||||
res[domains.length] = TableMetricParams.TABLE_METRICS_DOMAIN;
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Integer getCount() {
|
|
||||||
lock.readLock().lock();
|
|
||||||
try {
|
|
||||||
return wrapped.getCount() + tableMBeans.size();
|
|
||||||
} finally {
|
|
||||||
lock.readLock().unlock();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void addMBean(final DynamicMBean bean, final ObjectName name, final RegistrationContext ctx)
|
|
||||||
throws InstanceAlreadyExistsException {
|
|
||||||
if (!isTableMetricName(name)) {
|
|
||||||
wrapped.addMBean(bean, name, ctx);
|
|
||||||
} else {
|
|
||||||
final TableMetricParams key = new TableMetricParams(name);
|
|
||||||
lock.writeLock().lock();
|
|
||||||
try {
|
|
||||||
if (tableMBeans.containsKey(key)) {
|
|
||||||
throw new InstanceAlreadyExistsException(name.toString());
|
|
||||||
}
|
|
||||||
tableMBeans.put(key, bean);
|
|
||||||
if (ctx == null) return;
|
|
||||||
try {
|
|
||||||
ctx.registering();
|
|
||||||
} catch (RuntimeOperationsException x) {
|
|
||||||
throw x;
|
|
||||||
} catch (RuntimeException x) {
|
|
||||||
throw new RuntimeOperationsException(x);
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
lock.writeLock().unlock();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void remove(final ObjectName name, final RegistrationContext ctx) throws InstanceNotFoundException {
|
|
||||||
if (!isTableMetricName(name)) {
|
|
||||||
wrapped.remove(name, ctx);
|
|
||||||
} else {
|
|
||||||
final TableMetricParams key = new TableMetricParams(name);
|
|
||||||
lock.writeLock().lock();
|
|
||||||
try {
|
|
||||||
if (tableMBeans.remove(key) == null) {
|
|
||||||
throw new InstanceNotFoundException(name.toString());
|
|
||||||
}
|
|
||||||
|
|
||||||
if (ctx == null) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
ctx.unregistered();
|
|
||||||
} catch (Exception x) {
|
|
||||||
logger.log(SEVERE, "Unexpected error.", x);
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
lock.writeLock().unlock();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public DynamicMBean retrieve(final ObjectName name) {
|
|
||||||
if (!isTableMetricName(name)) {
|
|
||||||
return wrapped.retrieve(name);
|
|
||||||
} else {
|
|
||||||
lock.readLock().lock();
|
|
||||||
try {
|
|
||||||
return tableMBeans.get(new TableMetricParams(name));
|
|
||||||
} finally {
|
|
||||||
lock.readLock().unlock();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void addAll(final Set<NamedObject> res) {
|
|
||||||
for (Map.Entry<TableMetricParams, DynamicMBean> e : tableMBeans.entrySet()) {
|
|
||||||
try {
|
|
||||||
res.add(new NamedObject(e.getKey().toName(), e.getValue()));
|
|
||||||
} catch (MalformedObjectNameException e1) {
|
|
||||||
// This should never happen
|
|
||||||
logger.log(SEVERE, "Unexpected error.", e1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void addAllMatching(final Set<NamedObject> res,
|
|
||||||
final ObjectNamePattern pattern) {
|
|
||||||
for (Map.Entry<TableMetricParams, DynamicMBean> e : tableMBeans.entrySet()) {
|
|
||||||
try {
|
|
||||||
ObjectName name = e.getKey().toName();
|
|
||||||
if (pattern.matchKeys(name)) {
|
|
||||||
res.add(new NamedObject(name, e.getValue()));
|
|
||||||
}
|
|
||||||
} catch (MalformedObjectNameException e1) {
|
|
||||||
// This should never happen
|
|
||||||
logger.log(SEVERE, "Unexpected error.", e1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Set<NamedObject> query(final ObjectName pattern, final QueryExp query) {
|
|
||||||
Set<NamedObject> res = wrapped.query(pattern, query);
|
|
||||||
ObjectName name;
|
|
||||||
if (pattern == null ||
|
|
||||||
pattern.getCanonicalName().length() == 0 ||
|
|
||||||
pattern.equals(ObjectName.WILDCARD)) {
|
|
||||||
name = ObjectName.WILDCARD;
|
|
||||||
} else {
|
|
||||||
name = pattern;
|
|
||||||
}
|
|
||||||
|
|
||||||
lock.readLock().lock();
|
|
||||||
try {
|
|
||||||
// If pattern is not a pattern, retrieve this mbean !
|
|
||||||
if (!name.isPattern() && isTableMetricName(name)) {
|
|
||||||
final DynamicMBean bean = tableMBeans.get(new TableMetricParams(name));
|
|
||||||
if (bean != null) {
|
|
||||||
res.add(new NamedObject(name, bean));
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// All names in all domains
|
|
||||||
if (name == ObjectName.WILDCARD) {
|
|
||||||
addAll(res);
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
final String canonical_key_property_list_string =
|
|
||||||
name.getCanonicalKeyPropertyListString();
|
|
||||||
|
|
||||||
final boolean allNames =
|
|
||||||
(canonical_key_property_list_string.length()==0);
|
|
||||||
final ObjectNamePattern namePattern =
|
|
||||||
(allNames?null:new ObjectNamePattern(name));
|
|
||||||
|
|
||||||
// All names in default domain
|
|
||||||
if (name.getDomain().length() == 0) {
|
|
||||||
if (isTableMetricDomain(getDefaultDomain())) {
|
|
||||||
if (allNames) {
|
|
||||||
addAll(res);
|
|
||||||
} else {
|
|
||||||
addAllMatching(res, namePattern);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!name.isDomainPattern()) {
|
|
||||||
if (isTableMetricDomain(getDefaultDomain())) {
|
|
||||||
if (allNames) {
|
|
||||||
addAll(res);
|
|
||||||
} else {
|
|
||||||
addAllMatching(res, namePattern);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pattern matching in the domain name (*, ?)
|
|
||||||
final String dom2Match = name.getDomain();
|
|
||||||
if (wildmatch(TableMetricParams.TABLE_METRICS_DOMAIN, dom2Match)) {
|
|
||||||
if (allNames) {
|
|
||||||
addAll(res);
|
|
||||||
} else {
|
|
||||||
addAllMatching(res, namePattern);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
lock.readLock().unlock();
|
|
||||||
}
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private final static class ObjectNamePattern {
|
|
||||||
private final String[] keys;
|
|
||||||
private final String[] values;
|
|
||||||
private final String properties;
|
|
||||||
private final boolean isPropertyListPattern;
|
|
||||||
private final boolean isPropertyValuePattern;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The ObjectName pattern against which ObjectNames are matched.
|
|
||||||
**/
|
|
||||||
public final ObjectName pattern;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Builds a new ObjectNamePattern object from an ObjectName pattern.
|
|
||||||
* @param pattern The ObjectName pattern under examination.
|
|
||||||
**/
|
|
||||||
public ObjectNamePattern(ObjectName pattern) {
|
|
||||||
this(pattern.isPropertyListPattern(),
|
|
||||||
pattern.isPropertyValuePattern(),
|
|
||||||
pattern.getCanonicalKeyPropertyListString(),
|
|
||||||
pattern.getKeyPropertyList(),
|
|
||||||
pattern);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Builds a new ObjectNamePattern object from an ObjectName pattern
|
|
||||||
* constituents.
|
|
||||||
* @param propertyListPattern pattern.isPropertyListPattern().
|
|
||||||
* @param propertyValuePattern pattern.isPropertyValuePattern().
|
|
||||||
* @param canonicalProps pattern.getCanonicalKeyPropertyListString().
|
|
||||||
* @param keyPropertyList pattern.getKeyPropertyList().
|
|
||||||
* @param pattern The ObjectName pattern under examination.
|
|
||||||
**/
|
|
||||||
ObjectNamePattern(boolean propertyListPattern,
|
|
||||||
boolean propertyValuePattern,
|
|
||||||
String canonicalProps,
|
|
||||||
Map<String,String> keyPropertyList,
|
|
||||||
ObjectName pattern) {
|
|
||||||
this.isPropertyListPattern = propertyListPattern;
|
|
||||||
this.isPropertyValuePattern = propertyValuePattern;
|
|
||||||
this.properties = canonicalProps;
|
|
||||||
final int len = keyPropertyList.size();
|
|
||||||
this.keys = new String[len];
|
|
||||||
this.values = new String[len];
|
|
||||||
int i = 0;
|
|
||||||
for (Map.Entry<String,String> entry : keyPropertyList.entrySet()) {
|
|
||||||
keys[i] = entry.getKey();
|
|
||||||
values[i] = entry.getValue();
|
|
||||||
i++;
|
|
||||||
}
|
|
||||||
this.pattern = pattern;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return true if the given ObjectName matches the ObjectName pattern
|
|
||||||
* for which this object has been built.
|
|
||||||
* WARNING: domain name is not considered here because it is supposed
|
|
||||||
* not to be wildcard when called. PropertyList is also
|
|
||||||
* supposed not to be zero-length.
|
|
||||||
* @param name The ObjectName we want to match against the pattern.
|
|
||||||
* @return true if <code>name</code> matches the pattern.
|
|
||||||
**/
|
|
||||||
public boolean matchKeys(ObjectName name) {
|
|
||||||
// If key property value pattern but not key property list
|
|
||||||
// pattern, then the number of key properties must be equal
|
|
||||||
//
|
|
||||||
if (isPropertyValuePattern &&
|
|
||||||
!isPropertyListPattern &&
|
|
||||||
(name.getKeyPropertyList().size() != keys.length)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If key property value pattern or key property list pattern,
|
|
||||||
// then every property inside pattern should exist in name
|
|
||||||
//
|
|
||||||
if (isPropertyValuePattern || isPropertyListPattern) {
|
|
||||||
for (int i = keys.length - 1; i >= 0 ; i--) {
|
|
||||||
// Find value in given object name for key at current
|
|
||||||
// index in receiver
|
|
||||||
//
|
|
||||||
String v = name.getKeyProperty(keys[i]);
|
|
||||||
// Did we find a value for this key ?
|
|
||||||
//
|
|
||||||
if (v == null) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
// If this property is ok (same key, same value), go to next
|
|
||||||
//
|
|
||||||
if (isPropertyValuePattern &&
|
|
||||||
pattern.isPropertyValuePattern(keys[i])) {
|
|
||||||
// wildmatch key property values
|
|
||||||
// values[i] is the pattern;
|
|
||||||
// v is the string
|
|
||||||
if (wildmatch(v,values[i])) {
|
|
||||||
continue;
|
|
||||||
} else {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (v.equals(values[i])) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If no pattern, then canonical names must be equal
|
|
||||||
//
|
|
||||||
final String p1 = name.getCanonicalKeyPropertyListString();
|
|
||||||
final String p2 = properties;
|
|
||||||
return (p1.equals(p2));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class TableMetricParams {
|
|
||||||
public static final String TABLE_METRICS_DOMAIN = "org.apache.cassandra.metrics";
|
|
||||||
|
|
||||||
private final ObjectName name;
|
|
||||||
|
|
||||||
public TableMetricParams(ObjectName name) {
|
|
||||||
this.name = name;
|
|
||||||
}
|
|
||||||
|
|
||||||
public ObjectName toName() throws MalformedObjectNameException {
|
|
||||||
return name;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static boolean equal(Object a, Object b) {
|
|
||||||
return (a == null) ? b == null : a.equals(b);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean equals(Object o) {
|
|
||||||
if (this == o) return true;
|
|
||||||
if (!(o instanceof TableMetricParams)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
TableMetricParams oo = (TableMetricParams) o;
|
|
||||||
return equal(name.getKeyProperty("keyspace"), oo.name.getKeyProperty("keyspace"))
|
|
||||||
&& equal(name.getKeyProperty("scope"), oo.name.getKeyProperty("scope"))
|
|
||||||
&& equal(name.getKeyProperty("name"), oo.name.getKeyProperty("name"))
|
|
||||||
&& equal(name.getKeyProperty("type"), oo.name.getKeyProperty("type"));
|
|
||||||
}
|
|
||||||
|
|
||||||
private static int hash(Object o) {
|
|
||||||
return o == null ? 0 : o.hashCode();
|
|
||||||
}
|
|
||||||
|
|
||||||
private static int safeAdd(int ... nums) {
|
|
||||||
long res = 0;
|
|
||||||
for (int n : nums) {
|
|
||||||
res = (res + n) % Integer.MAX_VALUE;
|
|
||||||
}
|
|
||||||
return (int)res;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int hashCode() {
|
|
||||||
return safeAdd(hash(name.getKeyProperty("keyspace")),
|
|
||||||
hash(name.getKeyProperty("scope")),
|
|
||||||
hash(name.getKeyProperty("name")),
|
|
||||||
hash(name.getKeyProperty("type")));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
     @Override
     public MBeanServer newMBeanServer(String defaultDomain, MBeanServer outer, MBeanServerDelegate delegate) {
-        // It is important to set |interceptors| to true while creating the
-        // JmxMBeanSearver. It is required for calls to
-        // JmxMBeanServer.setMBeanServerInterceptor() to be allowed.
-        JmxMBeanServer nested = (JmxMBeanServer) JmxMBeanServer.newMBeanServer(defaultDomain, outer, delegate, true);
-        // This is not very clean, we depend on knowledge of how the Sun/Oracle
-        // MBean chain looks internally. But we need haxxor support, so
-        // lets replace the interceptor.
-        // Note: Removed reflection gunk to eliminate jdk9+ warnings on
-        // execution. Also, if we can get by without reflection, it is
-        // better.
-        final DefaultMBeanServerInterceptor interceptor = new DefaultMBeanServerInterceptor(outer != null ? outer : nested,
-                delegate, nested.getMBeanInstantiator(),
-                new TableRepository(defaultDomain, new Repository(defaultDomain)));
-        nested.setMBeanServerInterceptor(interceptor);
-        final MBeanServerDelegate d = nested.getMBeanServerDelegate();
-
-        try {
-            // Interceptor needs the delegate present. Normally done
-            // by inaccessible method in JmxMBeanServer
-            AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
-                public Object run() throws Exception {
-                    interceptor.registerMBean(d, DELEGATE_NAME);
-                    return null;
-                }
-            });
-        } catch (PrivilegedActionException e) {
-            logger.log(SEVERE, "Unexpected error.", e);
-            throw new RuntimeException(e);
-        }
-
-        return new APIMBeanServer(getApiClient(), nested);
+        MBeanServer nested = super.newMBeanServer(defaultDomain, outer, delegate);
+        return new APIMBeanServer(client, nested);
     }
 }
@ -1,16 +1,10 @@
 package com.scylladb.jmx.utils;
 
-import static java.util.Arrays.asList;
-import static java.util.concurrent.Executors.newScheduledThreadPool;
-import static java.util.concurrent.TimeUnit.MINUTES;
-
 import java.io.ObjectInputStream;
 import java.net.UnknownHostException;
 import java.util.Set;
-import java.util.concurrent.ScheduledExecutorService;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import java.util.stream.Collectors;
 
 import javax.management.Attribute;
 import javax.management.AttributeList;
@ -39,84 +33,50 @@ import org.apache.cassandra.db.ColumnFamilyStore;
 import org.apache.cassandra.metrics.StreamingMetrics;
 
 import com.scylladb.jmx.api.APIClient;
-import com.scylladb.jmx.metrics.RegistrationChecker;
-import com.sun.jmx.mbeanserver.JmxMBeanServer;
 
-@SuppressWarnings("restriction")
 public class APIMBeanServer implements MBeanServer {
     @SuppressWarnings("unused")
     private static final Logger logger = Logger.getLogger(APIMBeanServer.class.getName());
-    private static final ScheduledExecutorService executor = newScheduledThreadPool(1);
-
-    private final RegistrationChecker columnFamilyStoreChecker = ColumnFamilyStore.createRegistrationChecker();
-    private final RegistrationChecker streamingMetricsChecker = StreamingMetrics.createRegistrationChecker();
-
     private final APIClient client;
-    private final JmxMBeanServer server;
+    private final MBeanServer server;
 
-    public APIMBeanServer(APIClient client, JmxMBeanServer server) {
+    public APIMBeanServer(APIClient client, MBeanServer server) {
         this.client = client;
         this.server = server;
-
-        executor.scheduleWithFixedDelay(() -> {
-            for (RegistrationChecker c : asList(columnFamilyStoreChecker, streamingMetricsChecker)) {
-                try {
-                    c.reap(client, server);
-                } catch (OperationsException | UnknownHostException e) {
-                    // TODO: log?
-                }
-            }
-        }, 1, 5, MINUTES);
-    }
-
-    private static ObjectInstance prepareForRemote(final ObjectInstance i) {
-        return new ObjectInstance(prepareForRemote(i.getObjectName()), i.getClassName());
-    }
-
-    private static ObjectName prepareForRemote(final ObjectName n) {
-        /*
-         * ObjectName.getInstance has changed in JDK (micro) updates so it no longer applies
-         * overridable methods -> wrong name published.
-         * Fix by doing explicit ObjectName instansiation.
-         */
-        try {
-            return new ObjectName(n.getCanonicalName());
-        } catch (MalformedObjectNameException e) {
-            throw new IllegalArgumentException(n.toString());
-        }
     }
 
     @Override
     public ObjectInstance createMBean(String className, ObjectName name) throws ReflectionException,
             InstanceAlreadyExistsException, MBeanRegistrationException, MBeanException, NotCompliantMBeanException {
-        return prepareForRemote(server.createMBean(className, name));
+        return server.createMBean(className, name);
     }
 
     @Override
     public ObjectInstance createMBean(String className, ObjectName name, ObjectName loaderName)
             throws ReflectionException, InstanceAlreadyExistsException, MBeanRegistrationException, MBeanException,
             NotCompliantMBeanException, InstanceNotFoundException {
-        return prepareForRemote(server.createMBean(className, name, loaderName));
+        return server.createMBean(className, name, loaderName);
    }
 
     @Override
     public ObjectInstance createMBean(String className, ObjectName name, Object[] params, String[] signature)
             throws ReflectionException, InstanceAlreadyExistsException, MBeanRegistrationException, MBeanException,
             NotCompliantMBeanException {
-        return prepareForRemote(server.createMBean(className, name, params, signature));
+        return server.createMBean(className, name, params, signature);
     }
 
     @Override
     public ObjectInstance createMBean(String className, ObjectName name, ObjectName loaderName, Object[] params,
             String[] signature) throws ReflectionException, InstanceAlreadyExistsException, MBeanRegistrationException,
             MBeanException, NotCompliantMBeanException, InstanceNotFoundException {
-        return prepareForRemote(server.createMBean(className, name, loaderName, params, signature));
+        return server.createMBean(className, name, loaderName, params, signature);
     }
 
     @Override
     public ObjectInstance registerMBean(Object object, ObjectName name)
             throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException {
-        return prepareForRemote(server.registerMBean(object, name));
+        return server.registerMBean(object, name);
     }
 
     @Override
@ -127,19 +87,19 @@ public class APIMBeanServer implements MBeanServer {
     @Override
     public ObjectInstance getObjectInstance(ObjectName name) throws InstanceNotFoundException {
         checkRegistrations(name);
-        return prepareForRemote(server.getObjectInstance(name));
+        return server.getObjectInstance(name);
     }
 
     @Override
     public Set<ObjectName> queryNames(ObjectName name, QueryExp query) {
         checkRegistrations(name);
-        return server.queryNames(name, query).stream().map(n -> prepareForRemote(n)).collect(Collectors.toSet());
+        return server.queryNames(name, query);
     }
 
     @Override
     public Set<ObjectInstance> queryMBeans(ObjectName name, QueryExp query) {
         checkRegistrations(name);
-        return server.queryMBeans(name, query).stream().map(i -> prepareForRemote(i)).collect(Collectors.toSet());
+        return server.queryMBeans(name, query);
     }
 
     @Override
@ -305,23 +265,26 @@ public class APIMBeanServer implements MBeanServer {
         return server.getClassLoaderRepository();
     }
 
-    static final Pattern tables = Pattern.compile("^\\*?((Index)?ColumnFamil(ies|y)|(Index)?(Table(s)?)?)$");
+    static final Pattern tables = Pattern.compile("^(ColumnFamil(ies|y)|(Index)?Tables?)$");
 
-    private void checkRegistrations(ObjectName name) {
+    private boolean checkRegistrations(ObjectName name) {
         if (name != null && server.isRegistered(name)) {
-            return;
+            return false;
         }
 
+        boolean result = false;
+
         try {
             String type = name != null ? name.getKeyProperty("type") : null;
             if (type == null || tables.matcher(type).matches()) {
-                columnFamilyStoreChecker.check(client, server);
+                result |= ColumnFamilyStore.checkRegistration(client, server);
             }
             if (type == null || StreamingMetrics.TYPE_NAME.equals(type)) {
-                streamingMetricsChecker.check(client, server);
+                result |= StreamingMetrics.checkRegistration(client, server);
             }
-        } catch (OperationsException | UnknownHostException e) {
+        } catch (MalformedObjectNameException | UnknownHostException e) {
             // TODO: log
         }
+        return result;
     }
 }
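Both versions of the `tables` pattern above gate the on-demand registration check on the `type` key of the requested ObjectName. A hedged sketch of what the two variants accept, using the regexes verbatim from the hunk and a few illustrative type strings:

```java
import java.util.regex.Pattern;

public class TablePatternSketch {
    public static void main(String[] args) {
        Pattern master = Pattern.compile("^\\*?((Index)?ColumnFamil(ies|y)|(Index)?(Table(s)?)?)$");
        Pattern branch21 = Pattern.compile("^(ColumnFamil(ies|y)|(Index)?Tables?)$");

        for (String type : new String[] { "ColumnFamilies", "Table", "IndexTables", "*", "Caches" }) {
            System.out.printf("%-14s master=%-5s branch-2.1=%s%n",
                    type, master.matcher(type).matches(), branch21.matcher(type).matches());
        }
    }
}
```

The master-side pattern additionally tolerates a leading `*` and an empty table/column-family suffix, which is why a wildcard type still triggers the check there.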
@ -1,18 +0,0 @@
package com.scylladb.jmx.utils;

import jakarta.xml.bind.annotation.adapters.XmlAdapter;
import java.time.Instant;
import java.util.Date;

public class DateXmlAdapter extends XmlAdapter<String, Date> {
    @Override
    public String marshal(Date v) throws Exception {
        return Instant.ofEpochMilli(v.getTime()).toString();
    }

    @Override
    public Date unmarshal(String v) throws Exception {
        return new Date(Instant.parse(v).toEpochMilli());
    }

}
@ -22,7 +22,7 @@
|
|||||||
* Modified by Cloudius Systems
|
* Modified by Cloudius Systems
|
||||||
*/
|
*/
|
||||||
|
|
||||||
package com.scylladb.jmx.api.utils;
|
package com.scylladb.jmx.utils;
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.text.DecimalFormat;
|
import java.text.DecimalFormat;
|
@ -22,7 +22,7 @@
|
|||||||
* Modified by Cloudius Systems
|
* Modified by Cloudius Systems
|
||||||
*/
|
*/
|
||||||
|
|
||||||
package com.scylladb.jmx.api.utils;
|
package com.scylladb.jmx.utils;
|
||||||
|
|
||||||
import com.google.common.base.Objects;
|
import com.google.common.base.Objects;
|
||||||
|
|
@ -20,7 +20,7 @@
|
|||||||
*
|
*
|
||||||
* Modified by Cloudius Systems
|
* Modified by Cloudius Systems
|
||||||
*/
|
*/
|
||||||
package com.scylladb.jmx.api.utils;
|
package com.scylladb.jmx.utils;
|
||||||
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
@ -1,16 +0,0 @@
|
|||||||
module scylla.jmx {
|
|
||||||
opens com.scylladb.jmx.utils;
|
|
||||||
exports com.scylladb.jmx.utils;
|
|
||||||
opens com.scylladb.jmx.main;
|
|
||||||
exports com.scylladb.jmx.main;
|
|
||||||
opens com.scylladb.jmx.metrics;
|
|
||||||
exports com.scylladb.jmx.metrics;
|
|
||||||
requires java.logging;
|
|
||||||
requires java.management;
|
|
||||||
requires scylla.apiclient;
|
|
||||||
requires jakarta.json;
|
|
||||||
requires jakarta.ws.rs;
|
|
||||||
requires com.google.common;
|
|
||||||
requires jakarta.xml.bind;
|
|
||||||
requires com.fasterxml.jackson.annotation;
|
|
||||||
}
|
|
@ -23,55 +23,37 @@
  */
 package org.apache.cassandra.db;

-import static jakarta.json.Json.createObjectBuilder;
 import static java.lang.String.valueOf;
 import static java.util.Arrays.asList;
 import static java.util.stream.Collectors.toMap;
+import static javax.json.Json.createObjectBuilder;

-import jakarta.json.Json;
-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
-import jakarta.json.JsonObjectBuilder;
-import jakarta.json.JsonReader;
-import jakarta.ws.rs.core.MultivaluedHashMap;
-import jakarta.ws.rs.core.MultivaluedMap;
 import java.io.StringReader;
-import java.io.OutputStream;
 import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
 import java.util.logging.Logger;

+import javax.json.Json;
+import javax.json.JsonArray;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+import javax.json.JsonReader;
 import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
-import javax.management.OperationsException;
 import javax.management.openmbean.CompositeData;
-import javax.management.openmbean.CompositeDataSupport;
-import javax.management.openmbean.CompositeType;
 import javax.management.openmbean.OpenDataException;
-import javax.management.openmbean.OpenType;
-import javax.management.openmbean.SimpleType;
-import javax.management.openmbean.TabularDataSupport;
-import javax.management.openmbean.TabularType;
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;

 import org.apache.cassandra.metrics.TableMetrics;

 import com.scylladb.jmx.api.APIClient;
 import com.scylladb.jmx.metrics.MetricsMBean;
-import com.scylladb.jmx.metrics.RegistrationChecker;
-import com.scylladb.jmx.metrics.RegistrationMode;
-import com.sun.jmx.mbeanserver.JmxMBeanServer;
-import com.google.common.base.Throwables;

 public class ColumnFamilyStore extends MetricsMBean implements ColumnFamilyStoreMBean {
     private static final Logger logger = Logger.getLogger(ColumnFamilyStore.class.getName());
@ -79,72 +61,6 @@ public class ColumnFamilyStore extends MetricsMBean implements ColumnFamilyStore
     private final String type;
     private final String keyspace;
     private final String name;
-    private static final String[] COUNTER_NAMES = new String[]{"raw", "count", "error", "string"};
-    private static final String[] COUNTER_DESCS = new String[]
-    { "partition key in raw hex bytes", // Table name and comments match Cassandra, we will use the partition key
-      "value of this partition for given sampler",
-      "value is within the error bounds plus or minus of this",
-      "the partition key turned into a human readable format" };
-    private static final CompositeType COUNTER_COMPOSITE_TYPE;
-    private static final TabularType COUNTER_TYPE;
-
-    private static final String[] SAMPLER_NAMES = new String[]{"cardinality", "partitions"};
-    private static final String[] SAMPLER_DESCS = new String[]
-    { "cardinality of partitions",
-      "list of counter results" };
-
-    private static final String SAMPLING_RESULTS_NAME = "SAMPLING_RESULTS";
-    private static final CompositeType SAMPLING_RESULT;
-
-    public static final String SNAPSHOT_TRUNCATE_PREFIX = "truncated";
-    public static final String SNAPSHOT_DROP_PREFIX = "dropped";
-    private JsonObject tableSamplerResult = null;
-
-    private Future<JsonObject> futureTableSamperResult = null;
-    private ExecutorService service = null;
-
-    static
-    {
-        try
-        {
-            OpenType<?>[] counterTypes = new OpenType[] { SimpleType.STRING, SimpleType.LONG, SimpleType.LONG, SimpleType.STRING };
-            COUNTER_COMPOSITE_TYPE = new CompositeType(SAMPLING_RESULTS_NAME, SAMPLING_RESULTS_NAME, COUNTER_NAMES, COUNTER_DESCS, counterTypes);
-            COUNTER_TYPE = new TabularType(SAMPLING_RESULTS_NAME, SAMPLING_RESULTS_NAME, COUNTER_COMPOSITE_TYPE, COUNTER_NAMES);
-
-            OpenType<?>[] samplerTypes = new OpenType[] { SimpleType.LONG, COUNTER_TYPE };
-            SAMPLING_RESULT = new CompositeType(SAMPLING_RESULTS_NAME, SAMPLING_RESULTS_NAME, SAMPLER_NAMES, SAMPLER_DESCS, samplerTypes);
-        } catch (OpenDataException e)
-        {
-            throw Throwables.propagate(e);
-        }
-    }
-
-    protected synchronized void startTableSampling(MultivaluedMap<String, String> queryParams) {
-        if (futureTableSamperResult != null) {
-            return;
-        }
-        futureTableSamperResult = service.submit(() -> {
-            tableSamplerResult = client.getJsonObj("column_family/toppartitions/" + getCFName(), queryParams);
-            return null;
-        });
-    }
-
-    /*
-     * Wait until the action is completed
-     * It is safe to call this method multiple times
-     */
-    public synchronized void waitUntilSamplingCompleted() {
-        try {
-            if (futureTableSamperResult != null) {
-                futureTableSamperResult.get();
-                futureTableSamperResult = null;
-            }
-        } catch (InterruptedException | ExecutionException e) {
-            futureTableSamperResult = null;
-            throw new RuntimeException("Failed getting table statistics", e);
-        }
-    }
-
     public static final Set<String> TYPE_NAMES = new HashSet<>(asList("ColumnFamilies", "IndexTables", "Tables"));

@ -158,7 +74,6 @@ public class ColumnFamilyStore extends MetricsMBean implements ColumnFamilyStore
         this.type = type;
         this.keyspace = keyspace;
         this.name = name;
-        service = Executors.newSingleThreadExecutor();
     }

     public ColumnFamilyStore(APIClient client, ObjectName name) {
@ -187,22 +102,15 @@ public class ColumnFamilyStore extends MetricsMBean implements ColumnFamilyStore
                 "org.apache.cassandra.db:type=" + type + ",keyspace=" + keyspace + ",columnfamily=" + name);
     }

-    public static RegistrationChecker createRegistrationChecker() {
-        return new RegistrationChecker() {
-            @Override
-            protected void doCheck(APIClient client, JmxMBeanServer server, EnumSet<RegistrationMode> mode)
-                    throws OperationsException {
-                JsonArray mbeans = client.getJsonArray("/column_family/");
-                Set<ObjectName> all = new HashSet<ObjectName>();
-                for (int i = 0; i < mbeans.size(); i++) {
-                    JsonObject mbean = mbeans.getJsonObject(i);
-                    all.add(getName(mbean.getString("type"), mbean.getString("ks"), mbean.getString("cf")));
-                }
-                checkRegistration(server, all, mode,
-                        n -> TYPE_NAMES.contains(n.getKeyProperty("type")), n -> new ColumnFamilyStore(client, n));
-            }
-        };
-    }
+    public static boolean checkRegistration(APIClient client, MBeanServer server) throws MalformedObjectNameException {
+        JsonArray mbeans = client.getJsonArray("/column_family/");
+        Set<ObjectName> all = new HashSet<ObjectName>();
+        for (int i = 0; i < mbeans.size(); i++) {
+            JsonObject mbean = mbeans.getJsonObject(i);
+            all.add(getName(mbean.getString("type"), mbean.getString("ks"), mbean.getString("cf")));
+        }
+        return checkRegistration(server, all, n -> TYPE_NAMES.contains(n.getKeyProperty("type")), n -> new ColumnFamilyStore(client, n));
+    }

     /**
      * @return the name of the column family
@ -332,7 +240,7 @@ public class ColumnFamilyStore extends MetricsMBean implements ColumnFamilyStore
     @Override
     public boolean isAutoCompactionDisabled() {
         log(" isAutoCompactionDisabled()");
-        return !client.getBooleanValue("column_family/autocompaction/" + getCFName());
+        return client.getBooleanValue("column_family/autocompaction/" + getCFName());
     }

     /** Number of tombstoned cells retreived during the last slicequery */
@ -380,23 +288,6 @@ public class ColumnFamilyStore extends MetricsMBean implements ColumnFamilyStore
         return client.getListStrValue("column_family/sstables/by_key/" + getCFName(), queryParams);
     }

-    /**
-     * Returns a list of filenames that contain the given key on this node
-     * @param key
-     * @param hexFormat if key is in hex string format
-     * @return list of filenames containing the key
-     */
-    @Override
-    public List<String> getSSTablesForKey(String key, boolean hexFormat)
-    {
-        log(" getSSTablesForKey(String key)");
-        MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
-        queryParams.add("key", key);
-        if (hexFormat) {
-            queryParams.add("format", "hex");
-        }
-        return client.getListStrValue("column_family/sstables/by_key/" + getCFName(), queryParams);
-    }
     /**
      * Scan through Keyspace/ColumnFamily's data directory determine which
      * SSTables should be loaded and load them
@ -524,41 +415,16 @@ public class ColumnFamilyStore extends MetricsMBean implements ColumnFamilyStore
     }

     @Override
-    public void beginLocalSampling(String sampler_base, int capacity) {
-        MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
-        queryParams.add("capacity", Integer.toString(capacity));
-        if (sampler_base.contains(":")) {
-            String[] parts = sampler_base.split(":");
-            queryParams.add("duration", parts[1]);
-        } else {
-            queryParams.add("duration", "10000");
-        }
-        startTableSampling(queryParams);
+    public void beginLocalSampling(String sampler, int capacity) {
+        // TODO Auto-generated method stub
         log(" beginLocalSampling()");

     }

     @Override
-    public CompositeData finishLocalSampling(String samplerType, int count) throws OpenDataException {
+    public CompositeData finishLocalSampling(String sampler, int count) throws OpenDataException {
+        // TODO Auto-generated method stub
         log(" finishLocalSampling()");
-
-        waitUntilSamplingCompleted();
-
-        TabularDataSupport result = new TabularDataSupport(COUNTER_TYPE);
-
-        JsonArray counters = tableSamplerResult.getJsonArray((samplerType.equalsIgnoreCase("reads")) ? "read" : "write");
-        long cardinality = tableSamplerResult.getJsonNumber((samplerType.equalsIgnoreCase("reads")) ? "read_cardinality" : "write_cardinality").longValue();
-        long size = 0;
-        if (counters != null) {
-            size = (count > counters.size()) ? counters.size() : count;
-            for (int i = 0; i < size; i++) {
-                JsonObject counter = counters.getJsonObject(i);
-                result.put(new CompositeDataSupport(COUNTER_COMPOSITE_TYPE, COUNTER_NAMES,
-                        new Object[] { counter.getString("partition"), // raw
-                                counter.getJsonNumber("count").longValue(), // count
-                                counter.getJsonNumber("error").longValue(), // error
-                                counter.getString("partition") })); // string
-            }
-        }
-        return new CompositeDataSupport(SAMPLING_RESULT, SAMPLER_NAMES, new Object[] { cardinality, result });
+        return null;
     }
 }
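For orientation, a hedged JMX client sketch that drives the toppartitions sampling shown in the master-side code above (begin, wait out the window, then fetch the `CompositeData` result). The host, port, keyspace and table names are placeholders, and the `"reads:10000"` sampler string assumes the master-side parsing of a duration after `:`; on branch-2.1 these operations are stubs.

```java
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

// Hypothetical client; assumes scylla-jmx listening on localhost:7199 and a table ks1.cf1.
public class TopPartitionsDemo {
    public static void main(String[] args) throws Exception {
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:7199/jmxrmi");
        try (JMXConnector jmxc = JMXConnectorFactory.connect(url)) {
            MBeanServerConnection mbs = jmxc.getMBeanServerConnection();
            ObjectName cf = new ObjectName(
                    "org.apache.cassandra.db:type=Tables,keyspace=ks1,columnfamily=cf1");
            // Start sampling: capacity 100, duration in ms taken from the part after ':'.
            mbs.invoke(cf, "beginLocalSampling",
                    new Object[] { "reads:10000", 100 },
                    new String[] { "java.lang.String", "int" });
            Thread.sleep(10_000); // let the sampling window elapse
            CompositeData result = (CompositeData) mbs.invoke(cf, "finishLocalSampling",
                    new Object[] { "reads", 10 },
                    new String[] { "java.lang.String", "int" });
            System.out.println("cardinality = " + result.get("cardinality"));
        }
    }
}
```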
@ -17,7 +17,6 @@
  */
 package org.apache.cassandra.db;

-import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutionException;
@ -46,11 +45,6 @@ public interface ColumnFamilyStoreMBean {
      */
     public void forceMajorCompaction(boolean splitOutput) throws ExecutionException, InterruptedException;

-    // NOT even default-throw implementing
-    // forceCompactionForTokenRange
-    // as this is clearly a misplaced method that should not be in the mbean interface
-    // (uses internal cassandra types)
-
     /**
      * Gets the minimum number of sstables in queue before compaction kicks off
      */
@ -141,14 +135,6 @@ public interface ColumnFamilyStoreMBean {
      */
     public List<String> getSSTablesForKey(String key);

-    /**
-     * Returns a list of filenames that contain the given key on this node
-     * @param key
-     * @param hexFormat if key is in hex string format
-     * @return list of filenames containing the key
-     */
-    public List<String> getSSTablesForKey(String key, boolean hexFormat);
-
     /**
      * Scan through Keyspace/ColumnFamily's data directory determine which
      * SSTables should be loaded and load them
@ -168,14 +154,6 @@ public interface ColumnFamilyStoreMBean {
      */
     public int[] getSSTableCountPerLevel();

-    /**
-     * @return sstable fanout size for level compaction strategy.
-     */
-    default public int getLevelFanoutSize() {
-        // TODO: implement for real. This is sort of default.
-        return 10;
-    }
-
     /**
      * Get the ratio of droppable tombstones to real columns (and non-droppable
      * tombstones)
@ -22,8 +22,8 @@
  */
 package org.apache.cassandra.db.compaction;

-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
+import javax.json.JsonArray;
+import javax.json.JsonObject;
 import javax.management.openmbean.CompositeDataSupport;
 import javax.management.openmbean.CompositeType;
 import javax.management.openmbean.OpenDataException;
@ -17,18 +17,18 @@
  */
 package org.apache.cassandra.db.compaction;

-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
-import jakarta.ws.rs.core.MultivaluedHashMap;
-import jakarta.ws.rs.core.MultivaluedMap;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;

+import javax.json.JsonArray;
+import javax.json.JsonObject;
 import javax.management.openmbean.OpenDataException;
 import javax.management.openmbean.TabularData;
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;

 import org.apache.cassandra.metrics.CompactionMetrics;

@ -75,7 +75,7 @@ public class CompactionManager extends MetricsMBean implements CompactionManager
             result.put("keyspace", compaction.getString("ks"));
             result.put("columnfamily", compaction.getString("cf"));
             result.put("unit", compaction.getString("unit"));
-            result.put("compactionId", (compaction.containsKey("id"))? compaction.getString("id") : "<none>");
+            result.put("compactionId", "<none>");
             results.add(result);
         }
         return results;
@ -43,18 +43,6 @@ public interface CompactionManagerMBean {
      */
     public void forceUserDefinedCompaction(String dataFiles);

-    /**
-     * Triggers the cleanup of user specified sstables.
-     * You can specify files from various keyspaces and columnfamilies.
-     * If you do so, cleanup is performed each file individually
-     *
-     * @param dataFiles a comma separated list of sstable file to cleanup.
-     *                  must contain keyspace and columnfamily name in path(for 2.1+) or file name itself.
-     */
-    default public void forceUserDefinedCleanup(String dataFiles) {
-        throw new UnsupportedOperationException();
-    }
-
     /**
      * Stop all running compaction-like tasks having the provided {@code type}.
      *
@ -24,13 +24,13 @@
 
 package org.apache.cassandra.gms;

-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
-import jakarta.json.JsonValue;
 import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.Map;

+import javax.json.JsonArray;
+import javax.json.JsonObject;
+import javax.json.JsonValue;
 import javax.management.openmbean.CompositeData;
 import javax.management.openmbean.CompositeDataSupport;
 import javax.management.openmbean.CompositeType;
@ -23,11 +23,12 @@
  */
 package org.apache.cassandra.gms;

-import jakarta.ws.rs.core.MultivaluedHashMap;
-import jakarta.ws.rs.core.MultivaluedMap;
 import java.net.UnknownHostException;
 import java.util.logging.Logger;

+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;

 import com.scylladb.jmx.api.APIClient;
 import com.scylladb.jmx.metrics.APIMBean;

@ -19,12 +19,13 @@ package org.apache.cassandra.locator;
 
 import static java.util.Collections.singletonMap;

-import jakarta.ws.rs.core.MultivaluedHashMap;
-import jakarta.ws.rs.core.MultivaluedMap;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.logging.Logger;

+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;

 import com.scylladb.jmx.api.APIClient;
 import com.scylladb.jmx.metrics.APIMBean;

@ -23,11 +23,6 @@
  */
 package org.apache.cassandra.metrics;

-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
-import java.util.HashMap;
-import java.util.Map;
-
 import javax.management.MalformedObjectNameException;

 /**
@ -52,25 +47,6 @@ public class CompactionMetrics implements Metrics {
         /** Total number of bytes compacted since server [re]start */
         registry.register(() -> registry.meter("/compaction_manager/metrics/bytes_compacted"),
                 factory.createMetricName("BytesCompacted"));
-
-        registry.register(() -> registry.gauge((client) -> {
-            Map<String, Map<String, Integer>> result = new HashMap<>();
-            JsonArray compactions = client.getJsonArray("compaction_manager/metrics/pending_tasks_by_table");
-
-            for (int i = 0; i < compactions.size(); i++) {
-                JsonObject c = compactions.getJsonObject(i);
-
-                String ks = c.getString("ks");
-                String cf = c.getString("cf");
-
-                if (!result.containsKey(ks)) {
-                    result.put(ks, new HashMap<>());
-                }
-
-                Map<String, Integer> map = result.get(ks);
-                map.put(cf, (int)(c.getJsonNumber("task").longValue()));
-            }
-            return result;
-        }), factory.createMetricName("PendingTasksByTableName"));
     }

 }
@ -21,9 +21,6 @@ import static com.scylladb.jmx.api.APIClient.getReader;
 import static java.lang.Math.floor;
 import static java.util.logging.Level.SEVERE;

-import jakarta.json.JsonArray;
-import jakarta.json.JsonNumber;
-import jakarta.json.JsonObject;
 import java.util.Arrays;
 import java.util.Locale;
 import java.util.concurrent.TimeUnit;
@ -32,6 +29,9 @@ import java.util.function.Function;
 import java.util.function.Supplier;
 import java.util.logging.Logger;

+import javax.json.JsonArray;
+import javax.json.JsonNumber;
+import javax.json.JsonObject;
 import javax.management.InstanceAlreadyExistsException;
 import javax.management.MBeanRegistrationException;
 import javax.management.MBeanServer;
@ -39,7 +39,6 @@ import javax.management.NotCompliantMBeanException;
 import javax.management.ObjectName;

 import com.scylladb.jmx.api.APIClient;
-import com.sun.jmx.mbeanserver.JmxMBeanServer;

 /**
  * Makes integrating 3.0 metrics API with 2.0.
@ -54,9 +53,9 @@ public class MetricsRegistry {
     private static final Logger logger = Logger.getLogger(MetricsRegistry.class.getName());

     private final APIClient client;
-    private final JmxMBeanServer mBeanServer;
+    private final MBeanServer mBeanServer;

-    public MetricsRegistry(APIClient client, JmxMBeanServer mBeanServer) {
+    public MetricsRegistry(APIClient client, MBeanServer mBeanServer) {
         this.client = client;
         this.mBeanServer = mBeanServer;
     }
@ -109,7 +108,7 @@ public class MetricsRegistry {
         MetricMBean bean = f.get();
         for (ObjectName name : objectNames) {
             try {
-                mBeanServer.getMBeanServerInterceptor().registerMBean(bean, name);
+                mBeanServer.registerMBean(bean, name);
             } catch (InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) {
                 logger.log(SEVERE, "Could not register mbean", e);
             }
@ -131,15 +130,7 @@ public class MetricsRegistry {
     }

     public MetricMBean counter(final String url) {
-        if (url != null) {
-            return new JmxCounter(url);
-        }
-        return new JmxCounter(url) {
-            @Override
-            public long getCount() {
-                return 0;
-            }
-        };
+        return new JmxCounter(url);
     }

     private abstract class IntermediatelyUpdated {
@ -24,24 +24,21 @@
 package org.apache.cassandra.metrics;

 import static java.util.Arrays.asList;
+import static java.util.Collections.emptySet;
 import static org.apache.cassandra.metrics.DefaultNameFactory.createMetricName;

-import jakarta.json.JsonArray;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.Set;

+import javax.json.JsonArray;
+import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
-import javax.management.OperationsException;

 import com.scylladb.jmx.api.APIClient;
 import com.scylladb.jmx.metrics.APIMBean;
-import com.scylladb.jmx.metrics.RegistrationChecker;
-import com.scylladb.jmx.metrics.RegistrationMode;
-import com.sun.jmx.mbeanserver.JmxMBeanServer;

 /**
  * Metrics for streaming.
@ -68,44 +65,45 @@ public class StreamingMetrics {
         return TYPE_NAME.equals(n.getKeyProperty("type"));
     }

-    public static RegistrationChecker createRegistrationChecker() {
-        return new RegistrationChecker() {
-            @Override
-            protected void doCheck(APIClient client, JmxMBeanServer server, EnumSet<RegistrationMode> mode) throws OperationsException, UnknownHostException {
-                Set<ObjectName> all = new HashSet<ObjectName>(globalNames);
-                JsonArray streams = client.getJsonArray("/stream_manager/");
-                for (int i = 0; i < streams.size(); i++) {
-                    JsonArray sessions = streams.getJsonObject(i).getJsonArray("sessions");
-                    for (int j = 0; j < sessions.size(); j++) {
-                        String peer = sessions.getJsonObject(j).getString("peer");
-                        String scope = InetAddress.getByName(peer).getHostAddress().replaceAll(":", ".");
-                        all.add(createMetricName(TYPE_NAME, "IncomingBytes", scope));
-                        all.add(createMetricName(TYPE_NAME, "OutgoingBytes", scope));
-                    }
-                }
-
-                MetricsRegistry registry = new MetricsRegistry(client, server);
-                APIMBean.checkRegistration(server, all, mode, StreamingMetrics::isStreamingName, n -> {
-                    String scope = n.getKeyProperty("scope");
-                    String name = n.getKeyProperty("name");
-
-                    String url = null;
-                    if ("ActiveOutboundStreams".equals(name)) {
-                        url = "/stream_manager/metrics/outbound";
-                    } else if ("IncomingBytes".equals(name) || "TotalIncomingBytes".equals(name)) {
-                        url = "/stream_manager/metrics/incoming";
-                    } else if ("OutgoingBytes".equals(name) || "TotalOutgoingBytes".equals(name)) {
-                        url = "/stream_manager/metrics/outgoing";
-                    }
-                    if (url == null) {
-                        throw new IllegalArgumentException();
-                    }
-                    if (scope != null) {
-                        url = url + "/" + scope;
-                    }
-                    return registry.counter(url);
-                });
-            }
-        };
-    }
+    public static void unregister(APIClient client, MBeanServer server) throws MalformedObjectNameException {
+        APIMBean.checkRegistration(server, emptySet(), StreamingMetrics::isStreamingName, (n) -> null);
+    }
+
+    public static boolean checkRegistration(APIClient client, MBeanServer server)
+            throws MalformedObjectNameException, UnknownHostException {
+
+        Set<ObjectName> all = new HashSet<ObjectName>(globalNames);
+        JsonArray streams = client.getJsonArray("/stream_manager/");
+        for (int i = 0; i < streams.size(); i++) {
+            JsonArray sessions = streams.getJsonObject(i).getJsonArray("sessions");
+            for (int j = 0; j < sessions.size(); j++) {
+                String peer = sessions.getJsonObject(j).getString("peer");
+                String scope = InetAddress.getByName(peer).getHostAddress().replaceAll(":", ".");
+                all.add(createMetricName(TYPE_NAME, "IncomingBytes", scope));
+                all.add(createMetricName(TYPE_NAME, "OutgoingBytes", scope));
+            }
+        }
+
+        MetricsRegistry registry = new MetricsRegistry(client, server);
+        return APIMBean.checkRegistration(server, all, StreamingMetrics::isStreamingName, n -> {
+            String scope = n.getKeyProperty("scope");
+            String name = n.getKeyProperty("name");
+
+            String url = null;
+            if ("ActiveOutboundStreams".equals(name)) {
+                url = "/stream_manager/metrics/outbound";
+            } else if ("IncomingBytes".equals(name) || "TotalIncomingBytes".equals(name)) {
+                url = "/stream_manager/metrics/incoming";
+            } else if ("OutgoingBytes".equals(name) || "TotalOutgoingBytes".equals(name)) {
+                url = "/stream_manager/metrics/outgoing";
+            }
+            if (url == null) {
+                throw new IllegalArgumentException();
+            }
+            if (scope != null) {
+                url = url + "/" + scope;
+            }
+            return registry.counter(url);
+        });
+    }
 }
@ -19,9 +19,6 @@ package org.apache.cassandra.metrics;
 
 import static com.scylladb.jmx.api.APIClient.getReader;

-import java.io.InvalidObjectException;
-import java.io.ObjectStreamException;
-import java.util.Hashtable;
 import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.function.Supplier;
@ -30,6 +27,7 @@ import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;

 import org.apache.cassandra.db.ColumnFamilyStore;
+import org.apache.cassandra.metrics.MetricsRegistry.MetricMBean;

 import com.scylladb.jmx.api.APIClient;

@ -157,24 +155,6 @@ public class TableMetrics implements Metrics {
                 aliasFactory.createMetricName(alias));
     }

-    private static <T> BiFunction<APIClient, String, T> getDummy(Class<T> type) {
-        if (type == String.class) {
-            return (c, s) -> type.cast("");
-        } else if (type == Integer.class) {
-            return (c, s) -> type.cast(0);
-        } else if (type == Double.class) {
-            return (c, s) -> type.cast(0.0);
-        } else if (type == Long.class) {
-            return (c, s) -> type.cast(0L);
-        }
-        throw new IllegalArgumentException(type.getName());
-    }
-
-    public <T> void createDummyTableGauge(Class<T> c, String name) throws MalformedObjectNameException {
-        register(() -> gauge(newGauge(getDummy(c), null)), factory.createMetricName(name),
-                aliasFactory.createMetricName(name));
-    }
-
     public <L, G> void createTableGauge(Class<L> c1, Class<G> c2, String name, String alias, String uri)
             throws MalformedObjectNameException {
         if (cfName != null) {
@ -193,11 +173,6 @@ public class TableMetrics implements Metrics {
                 aliasFactory.createMetricName(alias));
     }

-    public void createDummyTableCounter(String name) throws MalformedObjectNameException {
-        register(() -> counter(null), factory.createMetricName(name),
-                aliasFactory.createMetricName(name));
-    }
-
     public void createTableHistogram(String name, String uri, boolean considerZeros)
             throws MalformedObjectNameException {
         createTableHistogram(name, name, uri, considerZeros);
@ -230,9 +205,6 @@ public class TableMetrics implements Metrics {
         for (LatencyMetrics l : latencyMetrics) {
             l.register(registry);
         }
-
-        // TODO: implement
-        registry.createDummyTableCounter("DroppedMutations");
     }

     private static void registerCommon(Registry registry) throws MalformedObjectNameException {
@ -292,138 +264,9 @@ public class TableMetrics implements Metrics {
         registry.createTableCounter("RowCacheHitOutOfRange", "row_cache_hit_out_of_range");
         registry.createTableCounter("RowCacheHit", "row_cache_hit");
         registry.createTableCounter("RowCacheMiss", "row_cache_miss");
-
-        // TODO: implement
-        registry.createDummyTableGauge(Double.class, "PercentRepaired");
     }

-    static class TableMetricObjectName extends javax.management.ObjectName {
-        private final TableMetricStringNameFactory factory;
-        private final String metricName;
-
-        public TableMetricObjectName(TableMetricStringNameFactory factory, String metricName) throws MalformedObjectNameException {
-            super("");
-            this.factory = factory;
-            this.metricName = metricName;
-        }
-
-        @Override
-        public boolean isPropertyValuePattern(String property) {
-            return false;
-        }
-
-        @Override
-        public String getCanonicalName() {
-            return factory.createMetricStringName(metricName);
-        }
-
-        @Override
-        public String getDomain() {
-            return factory.getDomain();
-        }
-
-        @Override
-        public String getKeyProperty(String property) {
-            if (property == "name") {
-                return metricName;
-            }
-            return factory.getKeyProperty(property);
-        }
-
-        @Override
-        public Hashtable<String,String> getKeyPropertyList() {
-            Hashtable<String, String> res = factory.getKeyPropertyList();
-            res.put("name", metricName);
-            return res;
-        }
-
-        @Override
-        public String getKeyPropertyListString() {
-            return factory.getKeyPropertyListString(metricName);
-        }
-
-        @Override
-        public String getCanonicalKeyPropertyListString() {
-            return getKeyPropertyListString();
-        }
-
-        @Override
-        public String toString() {
-            return getCanonicalName();
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            return getCanonicalName().equals(((ObjectName) o).getCanonicalName());
-        }
-
-        @Override
-        public int hashCode() {
-            return getCanonicalName().hashCode();
-        }
-
-        @Override
-        public boolean apply(ObjectName name) {
-            if (name.isDomainPattern() || name.isPropertyListPattern() || name.isPropertyValuePattern()) {
-                return false;
-            }
-            return getCanonicalName().equals(name.getCanonicalName());
-        }
-
-        @Override
-        public boolean isPattern() {
-            return false;
-        }
-
-        @Override
-        public boolean isDomainPattern() {
-            return false;
-        }
-
-        @Override
-        public boolean isPropertyPattern() {
-            return false;
-        }
-
-        @Override
-        public boolean isPropertyListPattern() {
-            return false;
-        }
-
-        @Override
-        public boolean isPropertyValuePattern() {
-            return false;
-        }
-
-        /**
-         * This type is not really serializable.
-         * Replace it with vanilla objectname.
-         */
-        private Object writeReplace() throws ObjectStreamException {
-            try {
-                return new ObjectName(getDomain(), getKeyPropertyList());
-            } catch (MalformedObjectNameException e) {
-                throw new InvalidObjectException(toString());
-            }
-        }
-    }
-
-    static interface TableMetricStringNameFactory {
-        String createMetricStringName(String metricName);
-        String getDomain();
-        String getKeyProperty(String property);
-        Hashtable<String,String> getKeyPropertyList();
-        String getKeyPropertyListString(String metricName);
-    }
-
-    static class TableMetricNameFactory implements MetricNameFactory, TableMetricStringNameFactory {
+    static class TableMetricNameFactory implements MetricNameFactory {
         private final String keyspaceName;
         private final String tableName;
         private final boolean isIndex;
@ -436,114 +279,37 @@ public class TableMetrics implements Metrics {
             this.type = type;
         }

-        private void appendKeyPropertyListString(final StringBuilder sb, final String metricName) {
-            String type = isIndex ? "Index" + this.type : this.type;
-            // Order matters here - keys have to be sorted
-            sb.append("keyspace=").append(keyspaceName);
-            sb.append(",name=").append(metricName);
-            sb.append(",scope=").append(tableName);
-            sb.append(",type=").append(type);
-        }
-
         @Override
-        public String createMetricStringName(String metricName) {
+        public ObjectName createMetricName(String metricName) throws MalformedObjectNameException {
             String groupName = TableMetrics.class.getPackage().getName();
+            String type = isIndex ? "Index" + this.type : this.type;

             StringBuilder mbeanName = new StringBuilder();
             mbeanName.append(groupName).append(":");
-            appendKeyPropertyListString(mbeanName, metricName);
-            return mbeanName.toString();
-        }
-
-        @Override
-        public String getDomain() {
-            return TableMetrics.class.getPackage().getName();
-        }
-
-        @Override
-        public String getKeyProperty(String property) {
-            switch (property) {
-            case "keyspace": return keyspaceName;
-            case "scope": return tableName;
-            case "type": return type;
-            default: return null;
-            }
-        }
-
-        @Override
-        public Hashtable<String,String> getKeyPropertyList() {
-            Hashtable<String, String> res = new Hashtable<>();
-            res.put("keyspace", keyspaceName);
-            res.put("scope", tableName);
-            res.put("type", type);
-            return res;
-        }
-
-        @Override
-        public String getKeyPropertyListString(String metricName) {
-            final StringBuilder sb = new StringBuilder();
-            appendKeyPropertyListString(sb, metricName);
-            return sb.toString();
-        }
-
-        @Override
-        public ObjectName createMetricName(String metricName) throws MalformedObjectNameException {
-            return new TableMetricObjectName(this, metricName);
+            mbeanName.append("type=").append(type);
+            mbeanName.append(",keyspace=").append(keyspaceName);
+            mbeanName.append(",scope=").append(tableName);
+            mbeanName.append(",name=").append(metricName);
+
+            return new ObjectName(mbeanName.toString());
         }
     }

-    static class AllTableMetricNameFactory implements MetricNameFactory, TableMetricStringNameFactory {
+    static class AllTableMetricNameFactory implements MetricNameFactory {
         private final String type;

         public AllTableMetricNameFactory(String type) {
             this.type = type;
         }

-        private void appendKeyPropertyListString(final StringBuilder sb, final String metricName) {
-            // Order matters here - keys have to be sorted
-            sb.append("name=").append(metricName);
-            sb.append(",type=" + type);
-        }
-
         @Override
-        public String createMetricStringName(String metricName) {
+        public ObjectName createMetricName(String metricName) throws MalformedObjectNameException {
             String groupName = TableMetrics.class.getPackage().getName();
             StringBuilder mbeanName = new StringBuilder();
             mbeanName.append(groupName).append(":");
-            appendKeyPropertyListString(mbeanName, metricName);
-            return mbeanName.toString();
-        }
-
-        @Override
-        public String getDomain() {
-            return TableMetrics.class.getPackage().getName();
-        }
-
-        @Override
-        public String getKeyProperty(String property) {
-            switch (property) {
-            case "type": return type;
-            default: return null;
-            }
-        }
-
-        @Override
-        public Hashtable<String,String> getKeyPropertyList() {
-            Hashtable<String, String> res = new Hashtable<>();
-            res.put("type", type);
-            return res;
-        }
-
-        @Override
-        public String getKeyPropertyListString(String metricName) {
-            final StringBuilder sb = new StringBuilder();
-            appendKeyPropertyListString(sb, metricName);
-            return sb.toString();
-        }
-
-        @Override
-        public ObjectName createMetricName(String metricName) throws MalformedObjectNameException {
-            return new TableMetricObjectName(this, metricName);
+            mbeanName.append("type=" + type);
+            mbeanName.append(",name=").append(metricName);
+            return new ObjectName(mbeanName.toString());
         }
     }

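The removed name-factory code above builds the metric name as a hand-sorted string; a small, hedged standalone check (not from either branch, with a made-up class name and placeholder keyspace/table values) shows the underlying reason: `ObjectName` canonicalizes key-property order itself, so any canonical-name comparison only matches if both sides agree on that alphabetical ordering.

```java
import javax.management.ObjectName;

// Hypothetical demo of ObjectName canonicalization for a table metric name.
public class MetricNameOrderingDemo {
    public static void main(String[] args) throws Exception {
        ObjectName n = new ObjectName(
                "org.apache.cassandra.metrics:type=Table,keyspace=ks1,scope=cf1,name=ReadLatency");
        System.out.println(n);                    // key order as written above
        System.out.println(n.getCanonicalName()); // keys re-sorted: keyspace, name, scope, type
    }
}
```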
@ -24,8 +24,6 @@ package org.apache.cassandra.net;
 
 import static java.util.Collections.emptyMap;

-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
 import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.Map;
@ -34,6 +32,9 @@ import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

+import javax.json.JsonArray;
+import javax.json.JsonObject;
+
 import org.apache.cassandra.metrics.DroppedMessageMetrics;

 import com.scylladb.jmx.api.APIClient;
@ -122,20 +122,6 @@ public interface MessagingServiceMBean {
      */
     public Map<String, Long> getTimeoutsPerHost();

-    /**
-     * Back-pressure rate limiting per host
-     */
-    default public Map<String, Double> getBackPressurePerHost() {
-        throw new UnsupportedOperationException();
-    }
-
-    /**
-     * Enable/Disable back-pressure
-     */
-    default public void setBackPressureEnabled(boolean enabled) {
-        throw new UnsupportedOperationException();
-    }
-
     /**
      * Number of timeouts since last check.
      */
@ -24,11 +24,12 @@
 
 package org.apache.cassandra.service;

-import jakarta.ws.rs.core.MultivaluedHashMap;
-import jakarta.ws.rs.core.MultivaluedMap;
 import java.util.concurrent.ExecutionException;
 import java.util.logging.Logger;

+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;

 import org.apache.cassandra.metrics.CacheMetrics;

 import com.scylladb.jmx.api.APIClient;
@ -1,66 +0,0 @@
-package org.apache.cassandra.service;
-
-import static com.sun.jmx.mbeanserver.MXBeanMappingFactory.DEFAULT;
-
-import jakarta.xml.bind.annotation.XmlRootElement;
-import java.io.InvalidObjectException;
-import java.util.List;
-
-import javax.management.openmbean.CompositeData;
-import javax.management.openmbean.OpenDataException;
-
-import com.sun.jmx.mbeanserver.MXBeanMapping;
-
-@SuppressWarnings("restriction")
-@XmlRootElement
-public class PerTableSSTableInfo {
-    private static final MXBeanMapping mxBeanMapping;
-
-    static {
-        try {
-            mxBeanMapping = DEFAULT.mappingForType(PerTableSSTableInfo.class, DEFAULT);
-        } catch (OpenDataException e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    private String keyspace;
-    private List<SSTableInfo> sstables;
-    private String table;
-
-    public String getTable() {
-        return table;
-    }
-
-    public void setTable(String table) {
-        this.table = table;
-    }
-
-    public String getKeyspace() {
-        return keyspace;
-    }
-
-    public void setKeyspace(String keyspace) {
-        this.keyspace = keyspace;
-    }
-
-    public List<SSTableInfo> getSSTables() {
-        return sstables;
-    }
-
-    public void setSSTableInfos(List<SSTableInfo> sstableInfos) {
-        this.sstables = sstableInfos;
-    }
-
-    public CompositeData toCompositeData() {
-        try {
-            return (CompositeData) mxBeanMapping.toOpenValue(this);
-        } catch (OpenDataException e) {
-            throw new Error(e); // should not reach.
-        }
-    }
-
-    public static PerTableSSTableInfo from(CompositeData data) throws InvalidObjectException {
-        return (PerTableSSTableInfo) mxBeanMapping.fromOpenValue(data);
-    }
-}
@ -1,153 +0,0 @@
|
|||||||
package org.apache.cassandra.service;
|
|
||||||
|
|
||||||
import jakarta.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Date;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonIgnore;
|
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
|
||||||
import com.scylladb.jmx.utils.DateXmlAdapter;
|
|
||||||
|
|
||||||
public class SSTableInfo {
|
|
||||||
private long size;
|
|
||||||
|
|
||||||
@JsonProperty("data_size")
|
|
||||||
    private long dataSize;

    @JsonProperty("index_size")
    private long indexSize;

    @JsonProperty("filter_size")
    private long filterSize;

    @XmlJavaTypeAdapter(type = Date.class, value = DateXmlAdapter.class)
    private Date timestamp;

    private long generation;

    private long level;

    private String version;

    private Map<String, String> properties;

    public void setProperties(Map<String, String> properties) {
        this.properties = properties;
    }

    @JsonIgnore
    private Map<String, Map<String, String>> extendedProperties;

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public long getSize() {
        return size;
    }

    public void setSize(long size) {
        this.size = size;
    }

    public long getDataSize() {
        return dataSize;
    }

    public void setDataSize(long dataSize) {
        this.dataSize = dataSize;
    }

    public long getIndexSize() {
        return indexSize;
    }

    public void setIndexSize(long indexSize) {
        this.indexSize = indexSize;
    }

    public long getFilterSize() {
        return filterSize;
    }

    public void setFilterSize(long filterSize) {
        this.filterSize = filterSize;
    }

    public Date getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
    }

    public long getGeneration() {
        return generation;
    }

    public void setGeneration(long generation) {
        this.generation = generation;
    }

    public long getLevel() {
        return level;
    }

    public void setLevel(long level) {
        this.level = level;
    }

    public Map<String, String> getProperties() {
        return properties;
    }

    public Map<String, Map<String, String>> getExtendedProperties() {
        return extendedProperties;
    }

    public void setExtendedProperties(Map<String, Map<String, String>> extendedProperties) {
        this.extendedProperties = extendedProperties;
    }

    @JsonProperty("properties")
    private void unpackProperties(List<Map<String, String>> maps) {
        Map<String, String> result = new HashMap<>();
        for (Map<String, String> map : maps) {
            String key = map.get("key");
            String value = map.get("value");
            result.put(key, value);
        }
        properties = result;
    }

    @JsonProperty("extended_properties")
    private void unpackNested(List<Map<String, Object>> properties) {
        Map<String, Map<String, String>> result = new HashMap<String, Map<String, String>>();

        for (Map<String, Object> map : properties) {
            Object name = map.get("group");
            if (name != null) {
                Map<String, String> dst = new HashMap<>();
                List<?> value = (List<?>) map.get("attributes");
                for (Object v : value) {
                    Map<?, ?> subMap = (Map<?, ?>) v;
                    dst.put(String.valueOf(subMap.get("key")), String.valueOf(subMap.get("value")));
                }
                result.put(String.valueOf(name), dst);
            } else {
                for (Map.Entry<String, Object> e : map.entrySet()) {
                    result.put(e.getKey(), Collections.singletonMap(String.valueOf(e.getValue()), ""));
                }
            }
        }
        extendedProperties = result;
    }
}
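The two `@JsonProperty` unpackers above flatten the REST representation (a list of `{key, value}` entries, optionally grouped under `group`/`attributes`) into plain Java maps. A minimal standalone sketch of what `unpackProperties` does with a typical payload; the property names and values are illustrative, not taken from the patch:

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hand-built stand-in for a deserialized "properties" array such as
// [{"key": "compression", "value": "LZ4Compressor"}, {"key": "level", "value": "0"}].
public class UnpackPropertiesSketch {
    public static void main(String[] args) {
        List<Map<String, String>> maps = Arrays.asList(
                Map.of("key", "compression", "value", "LZ4Compressor"),
                Map.of("key", "level", "value", "0"));

        // Same flattening loop as unpackProperties above.
        Map<String, String> properties = new HashMap<>();
        for (Map<String, String> map : maps) {
            properties.put(map.get("key"), map.get("value"));
        }
        System.out.println(properties); // two entries: compression and level
    }
}
```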
@@ -25,16 +25,14 @@ package org.apache.cassandra.service;
 
 import static java.util.Collections.emptySet;
 
-import jakarta.json.JsonArray;
-import jakarta.ws.rs.core.MultivaluedHashMap;
-import jakarta.ws.rs.core.MultivaluedMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
 
-import org.apache.cassandra.db.ColumnFamilyStore;
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.cassandra.metrics.CASClientRequestMetrics;
 import org.apache.cassandra.metrics.ClientRequestMetrics;
@@ -56,8 +54,7 @@ public class StorageProxy extends MetricsMBean implements StorageProxyMBean {
                 new ClientRequestMetrics("RangeSlice", "/storage_proxy/metrics/range"),
                 new ClientRequestMetrics("Write", "storage_proxy/metrics/write"),
                 new CASClientRequestMetrics("CASWrite", "storage_proxy/metrics/cas_write"),
-                new CASClientRequestMetrics("CASRead", "storage_proxy/metrics/cas_read"),
-                new ClientRequestMetrics("ViewWrite", "storage_proxy/metrics/view_write"));
+                new CASClientRequestMetrics("CASRead", "storage_proxy/metrics/cas_read"));
     }
 
     @Override
@@ -289,11 +286,4 @@ public class StorageProxy extends MetricsMBean implements StorageProxyMBean {
         log(" getHintedHandoffDisabledDCs()");
         return emptySet();
     }
-
-    @Override
-    public int getNumberOfTables() {
-        // TODO: could be like 1000% more efficient
-        JsonArray mbeans = client.getJsonArray("/column_family/");
-        return mbeans.size();
-    }
 }
@@ -95,15 +95,6 @@ public interface StorageProxyMBean {
 
     public long getReadRepairRepairedBackground();
 
-    default public int getOtcBacklogExpirationInterval() {
-        throw new UnsupportedOperationException();
-    }
-    default void setOtcBacklogExpirationInterval(int intervalInMillis) {
-        throw new UnsupportedOperationException();
-    }
-
     /** Returns each live node's schema version */
     public Map<String, List<String>> getSchemaVersions();
 
-    public int getNumberOfTables();
 }
@@ -24,20 +24,16 @@ package org.apache.cassandra.service;
 
 import static java.util.Arrays.asList;
 
-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
-import jakarta.ws.rs.core.GenericType;
-import jakarta.ws.rs.core.MultivaluedHashMap;
-import jakarta.ws.rs.core.MultivaluedMap;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
-import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -48,8 +44,9 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Logger;
-import java.util.stream.Collectors;
 
+import javax.json.JsonArray;
+import javax.json.JsonObject;
 import javax.management.ListenerNotFoundException;
 import javax.management.MBeanNotificationInfo;
 import javax.management.Notification;
@@ -57,15 +54,9 @@ import javax.management.NotificationBroadcaster;
 import javax.management.NotificationBroadcasterSupport;
 import javax.management.NotificationFilter;
 import javax.management.NotificationListener;
-import javax.management.openmbean.CompositeData;
-import javax.management.openmbean.CompositeDataSupport;
-import javax.management.openmbean.CompositeType;
-import javax.management.openmbean.OpenDataException;
-import javax.management.openmbean.OpenType;
-import javax.management.openmbean.SimpleType;
 import javax.management.openmbean.TabularData;
-import javax.management.openmbean.TabularDataSupport;
-import javax.management.openmbean.TabularType;
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.cassandra.metrics.StorageMetrics;
 import org.apache.cassandra.repair.RepairParallelism;
@@ -73,8 +64,7 @@ import org.apache.cassandra.repair.RepairParallelism;
 import com.google.common.base.Joiner;
 import com.scylladb.jmx.api.APIClient;
 import com.scylladb.jmx.metrics.MetricsMBean;
-import com.scylladb.jmx.api.utils.FileUtils;
-import com.google.common.base.Throwables;
+import com.scylladb.jmx.utils.FileUtils;
 
 /**
  * This abstraction contains the token/identifier of this node on the identifier
@@ -85,41 +75,6 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
     private static final Logger logger = Logger.getLogger(StorageService.class.getName());
    private static final Timer timer = new Timer("Storage Service Repair", true);
 
-    private static final String[] COUNTER_NAMES = new String[]{"raw", "count", "error", "string"};
-    private static final String[] COUNTER_DESCS = new String[]
-    { "partition key in raw hex bytes",
-      "value of this partition for given sampler",
-      "value is within the error bounds plus or minus of this",
-      "the partition key turned into a human readable format" };
-    private static final CompositeType COUNTER_COMPOSITE_TYPE;
-    private static final TabularType COUNTER_TYPE;
-
-    private static final String[] OPERATION_NAMES = new String[]{"read", "write"};
-
-    private static final String[] SAMPLER_NAMES = new String[]{"cardinality", "partitions"};
-    private static final String[] SAMPLER_DESCS = new String[]
-    { "cardinality of partitions",
-      "list of counter results" };
-
-    private static final String SAMPLING_RESULTS_NAME = "SAMPLING_RESULTS";
-    private static final CompositeType SAMPLING_RESULT;
-
-    static
-    {
-        try
-        {
-            OpenType<?>[] counterTypes = new OpenType[] { SimpleType.STRING, SimpleType.LONG, SimpleType.LONG, SimpleType.STRING };
-            COUNTER_COMPOSITE_TYPE = new CompositeType(SAMPLING_RESULTS_NAME, SAMPLING_RESULTS_NAME, COUNTER_NAMES, COUNTER_DESCS, counterTypes);
-            COUNTER_TYPE = new TabularType(SAMPLING_RESULTS_NAME, SAMPLING_RESULTS_NAME, COUNTER_COMPOSITE_TYPE, COUNTER_NAMES);
-
-            OpenType<?>[] samplerTypes = new OpenType[] { SimpleType.LONG, COUNTER_TYPE };
-            SAMPLING_RESULT = new CompositeType(SAMPLING_RESULTS_NAME, SAMPLING_RESULTS_NAME, SAMPLER_NAMES, SAMPLER_DESCS, samplerTypes);
-        } catch (OpenDataException e)
-        {
-            throw Throwables.propagate(e);
-        }
-    }
-
     private final NotificationBroadcasterSupport notificationBroadcasterSupport = new NotificationBroadcasterSupport();
 
     @Override
@@ -314,7 +269,7 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
     @Override
     public Map<List<String>, List<String>> getRangeToEndpointMap(String keyspace) {
         log(" getRangeToEndpointMap(String keyspace)");
-        return client.getMapListStrValue("/storage_service/range_to_endpoint_map/" + keyspace);
+        return client.getMapListStrValue("/storage_service/range/" + keyspace);
     }
 
     /**
@@ -520,12 +475,6 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
         return client.getListInetAddressValue("");
     }
 
-    @Override
-    public void checkAndRepairCdcStreams() throws IOException {
-        log(" checkAndRepairCdcStreams() throws IOException");
-        client.post("/storage_service/cdc_streams_check_and_repair");
-    }
-
     /**
      * Takes the snapshot for the given keyspaces. A snapshot name must be
      * specified.
@@ -537,24 +486,10 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
      */
     @Override
     public void takeSnapshot(String tag, String... keyspaceNames) throws IOException {
-        takeSnapshot(tag, null, keyspaceNames);
-    }
-
-    @Override
-    public void takeSnapshot(String tag, Map<String, String> options, String... keyspaceNames) throws IOException {
         log(" takeSnapshot(String tag, String... keyspaceNames) throws IOException");
         MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
         APIClient.set_query_param(queryParams, "tag", tag);
-
-        if (keyspaceNames.length == 1 && keyspaceNames[0].indexOf('.') != -1) {
-            String[] parts = keyspaceNames[0].split("\\.");
-            keyspaceNames = new String[] { parts[0] };
-            APIClient.set_query_param(queryParams, "cf", parts[1]);
-        }
         APIClient.set_query_param(queryParams, "kn", APIClient.join(keyspaceNames));
-        if (options.containsKey("skipFlush")) {
-            APIClient.set_query_param(queryParams, "sf", options.get("skipFlush"));
-        }
         client.post("/storage_service/snapshots", queryParams);
     }
 
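The overload removed above is where snapshot options become REST query parameters: a single `ks.cf` entity is split into the `kn` and `cf` parameters and `skipFlush` becomes `sf`. A hedged caller-side sketch; `storageService` is assumed to be a proxy for the StorageService MBean, and the tag, keyspace and table names are illustrative, not taken from the patch:

```java
// Hypothetical invocation of the overload removed in this hunk.
Map<String, String> options = Collections.singletonMap("skipFlush", "true");
storageService.takeSnapshot("nightly-backup", options, "ks1.table1");
// Per the code above this issues:
// POST /storage_service/snapshots?tag=nightly-backup&cf=table1&kn=ks1&sf=true
```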
@@ -656,13 +591,6 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
         client.post("/storage_service/keyspace_compaction/" + keyspaceName, queryParams);
     }
 
-    @Override
-    public void forceKeyspaceCompactionForTokenRange(String keyspaceName, String startToken, String endToken,
-            String... tableNames) throws IOException, ExecutionException, InterruptedException {
-        // TODO: actually handle token ranges.
-        forceKeyspaceCompaction(keyspaceName, tableNames);
-    }
-
     /**
      * Trigger a cleanup of keys on a single keyspace
      */
@@ -697,7 +625,7 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
         APIClient.set_bool_query_param(queryParams, "disable_snapshot", disableSnapshot);
         APIClient.set_bool_query_param(queryParams, "skip_corrupted", skipCorrupted);
         APIClient.set_query_param(queryParams, "cf", APIClient.join(columnFamilies));
-        return client.getIntValue("/storage_service/keyspace_scrub/" + keyspaceName, queryParams);
+        return client.getIntValue("/storage_service/keyspace_scrub/" + keyspaceName);
     }
 
     /**
@@ -711,7 +639,7 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
         MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
         APIClient.set_bool_query_param(queryParams, "exclude_current_version", excludeCurrentVersion);
         APIClient.set_query_param(queryParams, "cf", APIClient.join(columnFamilies));
-        return client.getIntValue("/storage_service/keyspace_upgrade_sstables/" + keyspaceName, queryParams);
+        return client.getIntValue("/storage_service/keyspace_upgrade_sstables/" + keyspaceName);
     }
 
     /**
@@ -923,7 +851,7 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
     @Deprecated
     public int forceRepairAsync(String keyspace, boolean isSequential, Collection<String> dataCenters,
             Collection<String> hosts, boolean primaryRange, boolean repairedAt, String... columnFamilies)
             throws IOException {
         log(" forceRepairAsync(String keyspace, boolean isSequential, Collection<String> dataCenters, Collection<String> hosts, boolean primaryRange, boolean repairedAt, String... columnFamilies) throws IOException");
         return repairRangeAsync(null, null, keyspace, isSequential, dataCenters, hosts, primaryRange, repairedAt,
                 columnFamilies);
@@ -991,21 +919,13 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
      *            the host id to remove
      */
     @Override
-    public void removeNode(String hostIdString, String ignoreNodes) {
-        log(" removeNode(String token, String ignoreNodes)");
+    public void removeNode(String hostIdString) {
+        log(" removeNode(String token)");
         MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
         APIClient.set_query_param(queryParams, "host_id", hostIdString);
-        if (ignoreNodes != null) {
-            APIClient.set_query_param(queryParams, "ignore_nodes", ignoreNodes);
-        }
         client.post("/storage_service/remove_node", queryParams);
     }
 
-    public void removeNode(String hostIdString) {
-        String ignoreNodes = null;
-        removeNode(hostIdString, ignoreNodes);
-    }
-
     /**
      * Get the status of a token removal.
      */
@@ -1175,12 +1095,6 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
         return client.getListStrValue("/storage_service/keyspaces", queryParams);
     }
 
-    @Override
-    public Map<String, String> getViewBuildStatuses(String keyspace, String view) {
-        log(" getViewBuildStatuses()");
-        return client.getMapStrValue("storage_service/view_build_statuses/" + keyspace + "/" + view);
-    }
-
     /**
      * Change endpointsnitch class and dynamic-ness (and dynamic attributes) at
      * runtime
@@ -1356,36 +1270,7 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
      */
     @Override
     public void rebuild(String sourceDc) {
-        rebuild(sourceDc, null, null, null);
-    }
-
-    /**
-     * Same as {@link #rebuild(String)}, but only for specified keyspace and
-     * ranges.
-     *
-     * @param sourceDc
-     *            Name of DC from which to select sources for streaming or null
-     *            to pick any node
-     * @param keyspace
-     *            Name of the keyspace which to rebuild or null to rebuild all
-     *            keyspaces.
-     * @param tokens
-     *            Range of tokens to rebuild or null to rebuild all token
-     *            ranges. In the format of:
-     *            "(start_token_1,end_token_1],(start_token_2,end_token_2],...(start_token_n,end_token_n]"
-     */
-    @Override
-    public void rebuild(String sourceDc, String keyspace, String tokens, String specificSources) {
-        log(" rebuild(String sourceDc, String keyspace, String tokens, String specificSources)");
-        if (keyspace != null) {
-            throw new UnsupportedOperationException("Rebuild: 'keyspace' not yet supported");
-        }
-        if (tokens != null) {
-            throw new UnsupportedOperationException("Rebuild: 'token range' not yet supported");
-        }
-        if (specificSources != null) {
-            throw new UnsupportedOperationException("Rebuild: 'specific sources' not yet supported");
-        }
+        log(" rebuild(String sourceDc)");
         if (sourceDc != null) {
             MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
             APIClient.set_query_param(queryParams, "source_dc", sourceDc);
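The extended `rebuild` overload removed above accepts only the `sourceDc` argument and rejects keyspace, token-range and specific-source restrictions, so both variants end up issuing the same request. A hypothetical caller-side sketch; `storageService` and the DC name are illustrative:

```java
// Both calls select DC1 as the streaming source; the extra arguments of the
// removed overload are rejected with UnsupportedOperationException anyway.
storageService.rebuild("DC1");                    // single-argument form
storageService.rebuild("DC1", null, null, null);  // removed extended form
```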
@@ -1425,25 +1310,15 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
      *            The parent keyspace name
      * @param cfName
      *            The ColumnFamily name where SSTables belong
-     * @param isLoadAndStream
-     *            Whether or not arbitrary SSTables should be loaded (and streamed to the owning nodes)
      */
     @Override
-    public void loadNewSSTables(String ksName, String cfName, boolean isLoadAndStream) {
-        log(" loadNewSSTables(String ksName, String cfName, boolean isLoadAndStream)");
+    public void loadNewSSTables(String ksName, String cfName) {
+        log(" loadNewSSTables(String ksName, String cfName)");
         MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
         queryParams.add("cf", cfName);
-        if (isLoadAndStream) {
-            queryParams.add("load_and_stream", "true");
-        }
         client.post("/storage_service/sstables/" + ksName, queryParams);
     }
 
-    @Override
-    public void loadNewSSTables(String ksName, String cfName) {
-        loadNewSSTables(ksName, cfName, false);
-    }
-
     /**
      * Return a List of Tokens representing a sample of keys across all
      * ColumnFamilyStores.
@@ -1514,7 +1389,13 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
         log("enableAutoCompaction(String ks, String... columnFamilies)");
         MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
         APIClient.set_query_param(queryParams, "cf", APIClient.join(columnFamilies));
-        client.post("/storage_service/auto_compaction/" + ks, queryParams);
+        try {
+            client.post("/storage_service/auto_compaction/" + ks, queryParams);
+        } catch (RuntimeException e) {
+            // FIXME should throw the right exception
+            throw new IOException(e.getMessage());
+        }
+
     }
 
     @Override
@@ -1733,7 +1614,7 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
     public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, int jobs, String keyspaceName,
             String... columnFamilies) throws IOException, ExecutionException, InterruptedException {
         // "jobs" not (yet) relevant for scylla. (though possibly useful...)
-        return scrub(disableSnapshot, skipCorrupted, checkData, keyspaceName, columnFamilies);
+        return scrub(disableSnapshot, skipCorrupted, checkData, 0, keyspaceName, columnFamilies);
     }
 
     @Override
@@ -1777,99 +1658,4 @@ public class StorageService extends MetricsMBean implements StorageServiceMBean,
         log(" resumeBootstrap");
         return false;
     }
-
-    @Override
-    public List<CompositeData> getSSTableInfo(String keyspace, String table) {
-        if (keyspace == null && table != null) {
-            throw new IllegalArgumentException("Missing keyspace name");
-        }
-        MultivaluedMap<String, String> queryParams = null;
-
-        if (keyspace != null) {
-            queryParams = new MultivaluedHashMap<String, String>();
-            queryParams.add("keyspace", keyspace);
-        }
-        if (table != null) {
-            queryParams.add("cf", table);
-        }
-
-        return client.get("/storage_service/sstable_info", queryParams)
-                .get(new GenericType<List<PerTableSSTableInfo>>() {
-                }).stream().map((i) -> i.toCompositeData()).collect(Collectors.toList());
-    }
-
-    @Override
-    public List<CompositeData> getSSTableInfo() {
-        return getSSTableInfo(null, null);
-    }
-
-    @Override
-    public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, boolean reinsertOverflowedTTL,
-            int jobs, String keyspaceName, String... columnFamilies)
-            throws IOException, ExecutionException, InterruptedException {
-        MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
-        APIClient.set_bool_query_param(queryParams, "disable_snapshot", disableSnapshot);
-        APIClient.set_bool_query_param(queryParams, "skip_corrupted", skipCorrupted);
-        APIClient.set_query_param(queryParams, "cf", APIClient.join(columnFamilies));
-        return client.getIntValue("/storage_service/keyspace_scrub/" + keyspaceName, queryParams);
-    }
-
-    @Override
-    public int scrub(boolean disableSnapshot, String scrubMode, boolean checkData, boolean reinsertOverflowedTTL,
-            int jobs, String keyspaceName, String... columnFamilies)
-            throws IOException, ExecutionException, InterruptedException {
-        MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
-        APIClient.set_bool_query_param(queryParams, "disable_snapshot", disableSnapshot);
-        if (!"".equals(scrubMode)) {
-            APIClient.set_query_param(queryParams, "scrub_mode", scrubMode);
-        }
-        APIClient.set_query_param(queryParams, "cf", APIClient.join(columnFamilies));
-        return client.getIntValue("/storage_service/keyspace_scrub/" + keyspaceName, queryParams);
-    }
-
-    @Override
-    public long getUptime() {
-        log("getUptime()");
-        return client.getLongValue("/system/uptime_ms");
-    }
-
-    @Override
-    public CompositeData getToppartitions(String sampler, List<String> keyspaceFilters, List<String> tableFilters, int duration, int capacity, int count) throws OpenDataException {
-        return getToppartitions(Arrays.asList(sampler), keyspaceFilters, tableFilters, duration, capacity, count).get(sampler.toLowerCase());
-    }
-
-    @Override
-    public Map<String, CompositeData> getToppartitions(List<String> samplers, List<String> keyspaceFilters, List<String> tableFilters, int duration, int capacity, int count) throws OpenDataException {
-        MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<String, String>();
-        APIClient.set_query_param(queryParams, "duration", Integer.toString(duration));
-        APIClient.set_query_param(queryParams, "capacity", Integer.toString(capacity));
-        APIClient.set_query_param(queryParams, "keyspace_filters", keyspaceFilters != null ? APIClient.join(keyspaceFilters.toArray(new String[0])) : null);
-        APIClient.set_query_param(queryParams, "table_filters", tableFilters != null ? APIClient.join(tableFilters.toArray(new String[0])) : null);
-        JsonObject result = client.getJsonObj("/storage_service/toppartitions", queryParams);
-
-        Map<String, CompositeData> resultsMap = new HashMap<>();
-
-        for (String operation : OPERATION_NAMES) {
-            JsonArray counters = result.getJsonArray(operation);
-            long cardinality = result.getJsonNumber(operation + "_cardinality").longValue();
-            long size = 0;
-            TabularDataSupport tabularResult = new TabularDataSupport(COUNTER_TYPE);
-
-            if (counters != null) {
-                size = (count > counters.size()) ? counters.size() : count;
-                for (int i = 0; i < size; i++) {
-                    JsonObject counter = counters.getJsonObject(i);
-                    tabularResult.put(new CompositeDataSupport(COUNTER_COMPOSITE_TYPE, COUNTER_NAMES,
-                            new Object[] { counter.getString("partition"), // raw
-                                    counter.getJsonNumber("count").longValue(), // count
-                                    counter.getJsonNumber("error").longValue(), // error
-                                    counter.getString("partition") })); // string
-                }
-            }
-
-            resultsMap.put(operation + "s", new CompositeDataSupport(SAMPLING_RESULT, SAMPLER_NAMES, new Object[] { cardinality, tabularResult }));
-        }
-
-        return resultsMap;
-    }
 }
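A hedged client-side sketch of how the removed `getToppartitions` result is meant to be read, following the composite and tabular layout defined by the static types removed earlier in this file: the result map is keyed by `"reads"`/`"writes"`, and each entry holds a `cardinality` value plus a `partitions` table with `raw`, `count`, `error` and `string` columns. The JMX URL, MBean name lookup, sampling duration and counts below are assumptions for illustration, not taken from the patch:

```java
import java.util.Arrays;
import java.util.Map;

import javax.management.JMX;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.TabularData;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import org.apache.cassandra.service.StorageServiceMBean;

// Connects to a local scylla-jmx instance and prints the "read" sampler results
// produced by the getToppartitions operation shown above.
public class ToppartitionsSketch {
    public static void main(String[] args) throws Exception {
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://127.0.0.1:7199/jmxrmi");
        try (JMXConnector connector = JMXConnectorFactory.connect(url)) {
            MBeanServerConnection connection = connector.getMBeanServerConnection();
            StorageServiceMBean storageService = JMX.newMBeanProxy(connection,
                    new ObjectName("org.apache.cassandra.db:type=StorageService"),
                    StorageServiceMBean.class);

            // Sample for 5 seconds, keep up to 256 partitions, report the top 10.
            Map<String, CompositeData> top = storageService.getToppartitions(
                    Arrays.asList("reads"), null, null, 5000, 256, 10);

            CompositeData reads = top.get("reads");
            System.out.println("cardinality=" + reads.get("cardinality"));
            TabularData partitions = (TabularData) reads.get("partitions");
            for (Object row : partitions.values()) {
                CompositeData counter = (CompositeData) row;
                System.out.printf("%s count=%d error=%d%n",
                        counter.get("string"), (Long) counter.get("count"), (Long) counter.get("error"));
            }
        }
    }
}
```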
@@ -35,9 +35,7 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeoutException;
 
 import javax.management.NotificationEmitter;
-import javax.management.openmbean.CompositeData;
 import javax.management.openmbean.TabularData;
-import javax.management.openmbean.OpenDataException;
 
 public interface StorageServiceMBean extends NotificationEmitter {
     /**
@@ -214,7 +212,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
 
     public List<InetAddress> getNaturalEndpoints(String keyspaceName, ByteBuffer key);
 
-    public void checkAndRepairCdcStreams() throws IOException;
     /**
      * Takes the snapshot for the given keyspaces. A snapshot name must be
      * specified.
@@ -250,14 +247,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
      */
     public void takeColumnFamilySnapshot(String keyspaceName, String columnFamilyName, String tag) throws IOException;
 
-    /**
-     * @deprecated use {@link #takeSnapshot(String tag, Map options, String... entities)} instead.
-     */
-    @Deprecated
-    default public void takeMultipleTableSnapshot(String tag, String... tableList) throws IOException {
-        takeMultipleColumnFamilySnapshot(tag, tableList);
-    }
-
     /**
      * Takes the snapshot of a multiple column family from different keyspaces.
      * A snapshot name must be specified.
@@ -270,18 +259,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
      */
     public void takeMultipleColumnFamilySnapshot(String tag, String... columnFamilyList) throws IOException;
 
-    /**
-     * Takes the snapshot of a multiple column family from different keyspaces. A snapshot name must be specified.
-     *
-     * @param tag
-     *            the tag given to the snapshot; may not be null or empty
-     * @param options
-     *            Map of options (skipFlush is the only supported option for now)
-     * @param entities
-     *            list of keyspaces / tables in the form of empty | ks1 ks2 ... | ks1.cf1,ks2.cf2,...
-     */
-    public void takeSnapshot(String tag, Map<String, String> options, String... entities) throws IOException;
-
     /**
      * Remove the snapshot with the given name from the given keyspaces. If no
      * tag is specified we will remove all snapshots.
@@ -320,20 +297,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
     public void forceKeyspaceCompaction(String keyspaceName, String... tableNames)
             throws IOException, ExecutionException, InterruptedException;
 
-    @Deprecated
-    default public int relocateSSTables(String keyspace, String ... cfnames) throws IOException, ExecutionException, InterruptedException {
-        return relocateSSTables(0, keyspace, cfnames);
-    }
-    default public int relocateSSTables(int jobs, String keyspace, String ... cfnames) throws IOException, ExecutionException, InterruptedException {
-        // Node tool op disabled in scylla
-        throw new UnsupportedOperationException("relocateSSTables");
-    }
-
-    /**
-     * Forces major compaction of specified token range in a single keyspace
-     */
-    public void forceKeyspaceCompactionForTokenRange(String keyspaceName, String startToken, String endToken, String... tableNames) throws IOException, ExecutionException, InterruptedException;
-
     /**
      * Trigger a cleanup of keys on a single keyspace
      */
@@ -350,10 +313,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
      * scrubbed.
      *
      * Scrubbed CFs will be snapshotted first, if disableSnapshot is false
-     *
-     * scrubMode controls what scrub does when encountering corruption.
-     * It replaces skipCorrupted where skipCorrupted is equivalent to scrubMode="SKIP".
-     * Can be one of: "ABORT", "SKIP", "SEGREGATE", or "VALIDATE".
      */
     @Deprecated
     public int scrub(boolean disableSnapshot, boolean skipCorrupted, String keyspaceName, String... tableNames)
@@ -363,19 +322,9 @@ public interface StorageServiceMBean extends NotificationEmitter {
     public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, String keyspaceName,
             String... tableNames) throws IOException, ExecutionException, InterruptedException;
 
-    @Deprecated
     public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, int jobs, String keyspaceName,
             String... columnFamilies) throws IOException, ExecutionException, InterruptedException;
 
-    @Deprecated
-    public int scrub(boolean disableSnapshot, boolean skipCorrupted, boolean checkData, boolean reinsertOverflowedTTL,
-            int jobs, String keyspaceName, String... columnFamilies)
-            throws IOException, ExecutionException, InterruptedException;
-
-    public int scrub(boolean disableSnapshot, String scrubMode, boolean checkData, boolean reinsertOverflowedTTL,
-            int jobs, String keyspaceName, String... columnFamilies)
-            throws IOException, ExecutionException, InterruptedException;
-
     /**
      * Verify (checksums of) the given keyspace. If tableNames array is empty,
      * all CFs are verified.
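The javadoc removed above documents the `scrubMode` variant ("ABORT", "SKIP", "SEGREGATE" or "VALIDATE") that supersedes the boolean `skipCorrupted`. A hypothetical caller-side sketch of the removed overload; `storageService` is an assumed MBean proxy and the keyspace and table names are made up:

```java
// Scrub ks1.table1, segregating corrupt data rather than skipping it.
int status = storageService.scrub(
        false,        // disableSnapshot
        "SEGREGATE",  // scrubMode
        true,         // checkData
        false,        // reinsertOverflowedTTL
        0,            // jobs ("jobs" is not (yet) relevant for Scylla)
        "ks1", "table1");
```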
@@ -397,15 +346,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
     public int upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, int jobs, String... tableNames)
             throws IOException, ExecutionException, InterruptedException;
 
-    /**
-     * Rewrites all sstables from the given tables to remove deleted data.
-     * The tombstone option defines the granularity of the procedure: ROW removes deleted partitions and rows, CELL also removes overwritten or deleted cells.
-     */
-    default public int garbageCollect(String tombstoneOption, int jobs, String keyspaceName, String... tableNames) throws IOException, ExecutionException, InterruptedException {
-        // Node tool op disabled in scylla
-        throw new UnsupportedOperationException("garbageCollect");
-    }
-
     /**
      * Flush all memtables for the given column families, or all columnfamilies
      * for the given keyspace if none are explicitly listed.
@@ -513,11 +453,8 @@ public interface StorageServiceMBean extends NotificationEmitter {
      * removeToken removes token (and all data associated with enpoint that had
      * it) from the ring
      */
-    @Deprecated
     public void removeNode(String token);
 
-    public void removeNode(String token, String ignoreNodes);
-
     /**
      * Get the status of a token removal.
      */
@@ -603,8 +540,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
 
     public List<String> getNonSystemKeyspaces();
 
-    public Map<String, String> getViewBuildStatuses(String keyspace, String view);
-
     public List<String> getNonLocalStrategyKeyspaces();
 
     /**
@@ -626,21 +561,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
     public void updateSnitch(String epSnitchClassName, Boolean dynamic, Integer dynamicUpdateInterval,
             Integer dynamicResetInterval, Double dynamicBadnessThreshold) throws ClassNotFoundException;
 
-    /*
-     * Update dynamic_snitch_update_interval_in_ms
-     */
-    default public void setDynamicUpdateInterval(int dynamicUpdateInterval) {
-        // afaict not used by nodetool.
-        throw new UnsupportedOperationException("setDynamicUpdateInterval");
-    }
-
-    /*
-     * Get dynamic_snitch_update_interval_in_ms
-     */
-    default public int getDynamicUpdateInterval() {
-        throw new UnsupportedOperationException("getDynamicUpdateInterval");
-    }
-
     // allows a user to forcibly 'kill' a sick node
     public void stopGossiping();
 
@@ -676,78 +596,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
 
     public boolean isJoined();
 
-    default public boolean isDrained() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public boolean isDraining() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setRpcTimeout(long value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public long getRpcTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setReadRpcTimeout(long value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public long getReadRpcTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setRangeRpcTimeout(long value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public long getRangeRpcTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setWriteRpcTimeout(long value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public long getWriteRpcTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setCounterWriteRpcTimeout(long value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public long getCounterWriteRpcTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setCasContentionTimeout(long value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public long getCasContentionTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setTruncateRpcTimeout(long value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public long getTruncateRpcTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
-    default public void setStreamingSocketTimeout(int value) {
-        throw new UnsupportedOperationException();
-    }
-
-    default public int getStreamingSocketTimeout() {
-        throw new UnsupportedOperationException();
-    }
-
     public void setStreamThroughputMbPerSec(int value);
 
     public int getStreamThroughputMbPerSec();
@@ -759,13 +607,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
     public int getCompactionThroughputMbPerSec();
     public void setCompactionThroughputMbPerSec(int value);
 
-    default public int getConcurrentCompactors() {
-        throw new UnsupportedOperationException();
-    }
-    default public void setConcurrentCompactors(int value) {
-        throw new UnsupportedOperationException();
-    }
-
     public boolean isIncrementalBackupsEnabled();
 
     public void setIncrementalBackupsEnabled(boolean value);
@@ -782,16 +623,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
      */
     public void rebuild(String sourceDc);
 
-    /**
-     * Same as {@link #rebuild(String)}, but only for specified keyspace and ranges.
-     *
-     * @param sourceDc Name of DC from which to select sources for streaming or null to pick any node
-     * @param keyspace Name of the keyspace which to rebuild or null to rebuild all keyspaces.
-     * @param tokens Range of tokens to rebuild or null to rebuild all token ranges. In the format of:
-     *     "(start_token_1,end_token_1],(start_token_2,end_token_2],...(start_token_n,end_token_n]"
-     */
-    public void rebuild(String sourceDc, String keyspace, String tokens, String specificSources);
-
     /** Starts a bulk load and blocks until it completes. */
     public void bulkLoad(String directory);
 
@@ -810,16 +641,7 @@ public interface StorageServiceMBean extends NotificationEmitter {
      *            The parent keyspace name
      * @param cfName
      *            The ColumnFamily name where SSTables belong
-     * @param isLoadAndStream
-     *            Whether or not arbitrary SSTables should be loaded (and streamed to the owning nodes)
      */
-    public void loadNewSSTables(String ksName, String cfName, boolean isLoadAndStream);
-
-    /**
-     * @deprecated use {@link #loadNewSSTables(String ksName, String cfName, boolean isLoadAndStream)} instead.
-     * This is kept for backward compatibility.
-     */
-    @Deprecated
     public void loadNewSSTables(String ksName, String cfName);
 
     /**
@@ -841,10 +663,6 @@ public interface StorageServiceMBean extends NotificationEmitter {
 
     public void resetLocalSchema() throws IOException;
 
-    default public void reloadLocalSchema() {
-        throw new UnsupportedOperationException();
-    }
-
     /**
      * Enables/Disables tracing for the whole system. Only thrift requests can
     * start tracing currently.
@@ -901,15 +719,4 @@ public interface StorageServiceMBean extends NotificationEmitter {
      * Sets the hinted handoff throttle in kb per second, per delivery thread.
      */
     public boolean resumeBootstrap();
-
-    public List<CompositeData> getSSTableInfo(String keyspace, String table);
-
-    public List<CompositeData> getSSTableInfo();
-
-    /** retun the system uptime */
-    public long getUptime();
-
-    public CompositeData getToppartitions(String sampler, List<String> keyspaceFilters, List<String> tableFilters, int duration, int capacity, int count) throws OpenDataException;
-
-    public Map<String, CompositeData> getToppartitions(List<String> samplers, List<String> keyspaceFilters, List<String> tableFilters, int duration, int capacity, int count) throws OpenDataException;
 }
@@ -24,14 +24,15 @@
 
 package org.apache.cassandra.streaming;
 
-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
 import java.io.Serializable;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.Map;
 
+import javax.json.JsonArray;
+import javax.json.JsonObject;
+
 import com.google.common.base.Objects;
 
 /**
@@ -24,8 +24,6 @@
 
 package org.apache.cassandra.streaming;
 
-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
 import java.io.Serializable;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
@@ -34,6 +32,9 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
+import javax.json.JsonArray;
+import javax.json.JsonObject;
+
 import com.google.common.collect.Iterables;
 
 /**
@@ -24,12 +24,12 @@
 
 package org.apache.cassandra.streaming;
 
-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.logging.Logger;
 
+import javax.json.JsonArray;
+import javax.json.JsonObject;
 import javax.management.ListenerNotFoundException;
 import javax.management.MBeanNotificationInfo;
 import javax.management.NotificationBroadcasterSupport;
@@ -24,12 +24,13 @@
 
 package org.apache.cassandra.streaming;
 
-import jakarta.json.JsonArray;
-import jakarta.json.JsonObject;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.UUID;
 
+import javax.json.JsonArray;
+import javax.json.JsonObject;
+
 import com.google.common.base.Objects;
 
 /**