@@ -0,0 +1,9 @@ | |||
*.iml | |||
.idea | |||
target | |||
tmp | |||
logs | |||
dependency-reduced-pom.xml | |||
velocity.log | |||
*~ | |||
build.log |
@@ -0,0 +1,202 @@ | |||
Apache License | |||
Version 2.0, January 2004 | |||
http://www.apache.org/licenses/ | |||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | |||
1. Definitions. | |||
"License" shall mean the terms and conditions for use, reproduction, | |||
and distribution as defined by Sections 1 through 9 of this document. | |||
"Licensor" shall mean the copyright owner or entity authorized by | |||
the copyright owner that is granting the License. | |||
"Legal Entity" shall mean the union of the acting entity and all | |||
other entities that control, are controlled by, or are under common | |||
control with that entity. For the purposes of this definition, | |||
"control" means (i) the power, direct or indirect, to cause the | |||
direction or management of such entity, whether by contract or | |||
otherwise, or (ii) ownership of fifty percent (50%) or more of the | |||
outstanding shares, or (iii) beneficial ownership of such entity. | |||
"You" (or "Your") shall mean an individual or Legal Entity | |||
exercising permissions granted by this License. | |||
"Source" form shall mean the preferred form for making modifications, | |||
including but not limited to software source code, documentation | |||
source, and configuration files. | |||
"Object" form shall mean any form resulting from mechanical | |||
transformation or translation of a Source form, including but | |||
not limited to compiled object code, generated documentation, | |||
and conversions to other media types. | |||
"Work" shall mean the work of authorship, whether in Source or | |||
Object form, made available under the License, as indicated by a | |||
copyright notice that is included in or attached to the work | |||
(an example is provided in the Appendix below). | |||
"Derivative Works" shall mean any work, whether in Source or Object | |||
form, that is based on (or derived from) the Work and for which the | |||
editorial revisions, annotations, elaborations, or other modifications | |||
represent, as a whole, an original work of authorship. For the purposes | |||
of this License, Derivative Works shall not include works that remain | |||
separable from, or merely link (or bind by name) to the interfaces of, | |||
the Work and Derivative Works thereof. | |||
"Contribution" shall mean any work of authorship, including | |||
the original version of the Work and any modifications or additions | |||
to that Work or Derivative Works thereof, that is intentionally | |||
submitted to Licensor for inclusion in the Work by the copyright owner | |||
or by an individual or Legal Entity authorized to submit on behalf of | |||
the copyright owner. For the purposes of this definition, "submitted" | |||
means any form of electronic, verbal, or written communication sent | |||
to the Licensor or its representatives, including but not limited to | |||
communication on electronic mailing lists, source code control systems, | |||
and issue tracking systems that are managed by, or on behalf of, the | |||
Licensor for the purpose of discussing and improving the Work, but | |||
excluding communication that is conspicuously marked or otherwise | |||
designated in writing by the copyright owner as "Not a Contribution." | |||
"Contributor" shall mean Licensor and any individual or Legal Entity | |||
on behalf of whom a Contribution has been received by Licensor and | |||
subsequently incorporated within the Work. | |||
2. Grant of Copyright License. Subject to the terms and conditions of | |||
this License, each Contributor hereby grants to You a perpetual, | |||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |||
copyright license to reproduce, prepare Derivative Works of, | |||
publicly display, publicly perform, sublicense, and distribute the | |||
Work and such Derivative Works in Source or Object form. | |||
3. Grant of Patent License. Subject to the terms and conditions of | |||
this License, each Contributor hereby grants to You a perpetual, | |||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |||
(except as stated in this section) patent license to make, have made, | |||
use, offer to sell, sell, import, and otherwise transfer the Work, | |||
where such license applies only to those patent claims licensable | |||
by such Contributor that are necessarily infringed by their | |||
Contribution(s) alone or by combination of their Contribution(s) | |||
with the Work to which such Contribution(s) was submitted. If You | |||
institute patent litigation against any entity (including a | |||
cross-claim or counterclaim in a lawsuit) alleging that the Work | |||
or a Contribution incorporated within the Work constitutes direct | |||
or contributory patent infringement, then any patent licenses | |||
granted to You under this License for that Work shall terminate | |||
as of the date such litigation is filed. | |||
4. Redistribution. You may reproduce and distribute copies of the | |||
Work or Derivative Works thereof in any medium, with or without | |||
modifications, and in Source or Object form, provided that You | |||
meet the following conditions: | |||
(a) You must give any other recipients of the Work or | |||
Derivative Works a copy of this License; and | |||
(b) You must cause any modified files to carry prominent notices | |||
stating that You changed the files; and | |||
(c) You must retain, in the Source form of any Derivative Works | |||
that You distribute, all copyright, patent, trademark, and | |||
attribution notices from the Source form of the Work, | |||
excluding those notices that do not pertain to any part of | |||
the Derivative Works; and | |||
(d) If the Work includes a "NOTICE" text file as part of its | |||
distribution, then any Derivative Works that You distribute must | |||
include a readable copy of the attribution notices contained | |||
within such NOTICE file, excluding those notices that do not | |||
pertain to any part of the Derivative Works, in at least one | |||
of the following places: within a NOTICE text file distributed | |||
as part of the Derivative Works; within the Source form or | |||
documentation, if provided along with the Derivative Works; or, | |||
within a display generated by the Derivative Works, if and | |||
wherever such third-party notices normally appear. The contents | |||
of the NOTICE file are for informational purposes only and | |||
do not modify the License. You may add Your own attribution | |||
notices within Derivative Works that You distribute, alongside | |||
or as an addendum to the NOTICE text from the Work, provided | |||
that such additional attribution notices cannot be construed | |||
as modifying the License. | |||
You may add Your own copyright statement to Your modifications and | |||
may provide additional or different license terms and conditions | |||
for use, reproduction, or distribution of Your modifications, or | |||
for any such Derivative Works as a whole, provided Your use, | |||
reproduction, and distribution of the Work otherwise complies with | |||
the conditions stated in this License. | |||
5. Submission of Contributions. Unless You explicitly state otherwise, | |||
any Contribution intentionally submitted for inclusion in the Work | |||
by You to the Licensor shall be under the terms and conditions of | |||
this License, without any additional terms or conditions. | |||
Notwithstanding the above, nothing herein shall supersede or modify | |||
the terms of any separate license agreement you may have executed | |||
with Licensor regarding such Contributions. | |||
6. Trademarks. This License does not grant permission to use the trade | |||
names, trademarks, service marks, or product names of the Licensor, | |||
except as required for reasonable and customary use in describing the | |||
origin of the Work and reproducing the content of the NOTICE file. | |||
7. Disclaimer of Warranty. Unless required by applicable law or | |||
agreed to in writing, Licensor provides the Work (and each | |||
Contributor provides its Contributions) on an "AS IS" BASIS, | |||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | |||
implied, including, without limitation, any warranties or conditions | |||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | |||
PARTICULAR PURPOSE. You are solely responsible for determining the | |||
appropriateness of using or redistributing the Work and assume any | |||
risks associated with Your exercise of permissions under this License. | |||
8. Limitation of Liability. In no event and under no legal theory, | |||
whether in tort (including negligence), contract, or otherwise, | |||
unless required by applicable law (such as deliberate and grossly | |||
negligent acts) or agreed to in writing, shall any Contributor be | |||
liable to You for damages, including any direct, indirect, special, | |||
incidental, or consequential damages of any character arising as a | |||
result of this License or out of the use or inability to use the | |||
Work (including but not limited to damages for loss of goodwill, | |||
work stoppage, computer failure or malfunction, or any and all | |||
other commercial damages or losses), even if such Contributor | |||
has been advised of the possibility of such damages. | |||
9. Accepting Warranty or Additional Liability. While redistributing | |||
the Work or Derivative Works thereof, You may choose to offer, | |||
and charge a fee for, acceptance of support, warranty, indemnity, | |||
or other liability obligations and/or rights consistent with this | |||
License. However, in accepting such obligations, You may act only | |||
on Your own behalf and on Your sole responsibility, not on behalf | |||
of any other Contributor, and only if You agree to indemnify, | |||
defend, and hold each Contributor harmless for any liability | |||
incurred by, or claims asserted against, such Contributor by reason | |||
of your accepting any such warranty or additional liability. | |||
END OF TERMS AND CONDITIONS | |||
APPENDIX: How to apply the Apache License to your work. | |||
To apply the Apache License to your work, attach the following | |||
boilerplate notice, with the fields enclosed by brackets "[]" | |||
replaced with your own identifying information. (Don't include | |||
the brackets!) The text should be enclosed in the appropriate | |||
comment syntax for the file format. We also recommend that a | |||
file or class name and description of purpose be included on the | |||
same "printed page" as the copyright notice for easier | |||
identification within third-party archives. | |||
Copyright [yyyy] [name of copyright owner] | |||
Licensed under the Apache License, Version 2.0 (the "License"); | |||
you may not use this file except in compliance with the License. | |||
You may obtain a copy of the License at | |||
http://www.apache.org/licenses/LICENSE-2.0 | |||
Unless required by applicable law or agreed to in writing, software | |||
distributed under the License is distributed on an "AS IS" BASIS, | |||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
See the License for the specific language governing permissions and | |||
limitations under the License. |
@@ -0,0 +1,52 @@ | |||
#!/bin/bash | |||
# | |||
# Put this script in the same directory as your djbdns data file, usually /etc/tinydns/root | |||
# | |||
# Break up your monolithic data file into one-file per domain, and put them in the "domains" subdirectory. | |||
# | |||
# When this script runs, it concatenates all files in the "domains" subdirectory into a monolithic data | |||
# file and tries to build it via make. If that fails, the old file is kept.
# | |||
# Note that you may still want to restart the tinydns service (svc -h /path/to/tinydns) | |||
# | |||
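#
# Example layout (hypothetical domain names; the base path is the usual /etc/tinydns/root):
#   /etc/tinydns/root/data                  <- the monolithic data file built by make
#   /etc/tinydns/root/domains/example.com   <- one file of tinydns records per domain
#   /etc/tinydns/root/domains/example.org
#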
function die () { | |||
echo "${1}" | |||
exit 1 | |||
} | |||
BASE=$(cd $(dirname $0) && pwd) | |||
if [ ! -f data ] ; then | |||
echo "No data file found!" | |||
exit 1 | |||
fi | |||
if [ ! -d ${BASE}/domains ] ; then | |||
echo "No domains dir found!" | |||
exit 1 | |||
fi | |||
mkdir -p ${BASE}/backups || die "Error creating backups dir" | |||
TODAY=$(date +%Y-%m-%d) | |||
BACKUP=$(mktemp ${BASE}/backups/data.backup.${TODAY}.XXXXXXX) | |||
cp data ${BACKUP} || die "Error backing up data file" | |||
CHANGED=$(mktemp data.tried.${TODAY}.XXXXXX) | |||
for domain in $(find ${BASE}/domains -type f) ; do | |||
echo "" >> ${CHANGED} | |||
echo "####################" >> ${CHANGED} | |||
echo "# $(basename ${domain})" >> ${CHANGED} | |||
echo "####################" >> ${CHANGED} | |||
cat ${domain} >> ${CHANGED} | |||
echo "" >> ${CHANGED} | |||
done | |||
mv ${CHANGED} data | |||
make | |||
if [ $? -ne 0 ] ; then | |||
echo "Error rebuilding data file, rolling back. Failed data file is in ${CHANGED}" | |||
mv data ${CHANGED} | |||
mv ${BACKUP} data | |||
fi |
@@ -0,0 +1,76 @@ | |||
#!/bin/bash | |||
# | |||
# Usage: gsync [rsync-options] source destination | |||
# | |||
# Synchronize a git source directory with a remote directory, excluding files according to the
# rules found in .gitignore. If subdirectories also contain .gitignore files, then those rules | |||
# will be applied (but only in each respective subdirectory). | |||
# | |||
# Note that ONLY .gitignore files at or below the directory where this command is run will be considered.
# Thus, when the source or destination is a local path, it should be specified relative to the current | |||
# directory. | |||
# | |||
# There will be two rsync statements - one to exclude everything that should be excluded, | |||
# and a second to handle the exceptions to the exclusion rules - the lines in .gitignore that begin with ! | |||
# | |||
# The exceptions to the exclusions are rsync'd first, and if that succeeds, the second rsync | |||
# copies everything else. | |||
# | |||
# | |||
# --- SUPPORT OPEN SOURCE --- | |||
# If you find this script has saved you a decent amount of time, please consider dropping me some coin.
# I will be forever grateful and your name will be permanently emblazoned on my Wall of Honor. | |||
# My bitcoin wallet address is 1HoiSHKxYM4EtsP3xFGsY2xWYvh4hAuJ2q | |||
# Paypal or Dwolla: jonathan (replace this with the 'AT' sign on your keyboard) kyuss.org | |||
# | |||
# Thank You. | |||
# | |||
# - jonathan. | |||
# | |||
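# A hedged example (the rsync options, source and destination here are assumptions):
#   cd ~/src/myproject && gsync -avz ./ deploy@host:/var/www/myproject/
#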
if [[ -z "${1}" || -z "${2}" || "${1}" == "--help" || "${1}" == "-help" || "${1}" == "-h" ]] ; then | |||
echo "Usage: gsync [rsync-options] source destination" | |||
exit 1 | |||
fi | |||
includes="" | |||
excludes='--exclude=.git*' | |||
base="$(pwd)" | |||
function process_git_ignore () { | |||
git_ignore="${1}" | |||
if [ "$(dirname ${git_ignore})" = "${base}" ] ; then | |||
prefix="" | |||
else | |||
prefix=".$(echo -n "$(dirname ${git_ignore})" | sed -e 's,^'${base}',,')" | |||
fi | |||
while read -r line || [[ -n "${line}" ]] ; do | |||
    # exception rules begin with "!"
    if [[ "${line:0:1}" == "!" ]] ; then
includes="${includes} | |||
--include='${prefix}$(echo "${line}" | sed -e 's/^!//' | sed -e 's/ /\\ /g')'" | |||
else | |||
excludes="${excludes} | |||
--exclude='${prefix}$(echo "${line}" | sed -e 's/ /\\ /g')'" | |||
fi | |||
done < ${git_ignore} | |||
} | |||
# root .gitignore file | |||
if [ -f .gitignore ] ; then | |||
process_git_ignore "$(pwd)/.gitignore" | |||
fi | |||
# check for other .gitignore files | |||
for i in $(find $(pwd) -mindepth 2 -type f -name .gitignore) ; do | |||
process_git_ignore "${i}" | |||
done | |||
rsync ${includes} --exclude="*" ${@} && rsync ${excludes} ${@} | |||
# for debugging | |||
#echo "rsync ${includes} --exclude=\"*\" ${@}" && echo "rsync ${excludes} ${@}" | |||
#echo "rsync ${excludes} ${@}" |
@@ -0,0 +1,63 @@ | |||
#!/usr/bin/python | |||
import sys | |||
import json | |||
import re | |||
from optparse import OptionParser | |||
parser = OptionParser() | |||
parser.add_option("-i", "--infile", dest="infile", | |||
help="read JSON from here (stdin if omitted)", metavar="FILE") | |||
parser.add_option("-o", "--outfile", dest="outfile", | |||
help="write JSON to here (stdout if omitted)", metavar="FILE") | |||
parser.add_option("-p", "--path", dest="json_path", | |||
help="JSON path to read", metavar="path.to.data") | |||
parser.add_option("-v", "--value", dest="json_value", | |||
help="Value to write to the JSON path", metavar="value") | |||
(options, args) = parser.parse_args() | |||
if options.infile is None: | |||
data = json.load(sys.stdin) | |||
else: | |||
with open(options.infile) as infile: | |||
data = json.load(infile) | |||
ref = data | |||
if options.json_value is None: | |||
# READ mode | |||
if options.json_path is not None: | |||
for token in re.split('\.', options.json_path): | |||
try: | |||
ref = ref[token] | |||
except KeyError: | |||
# JSON path does not exist, we treat that as "empty" | |||
ref = None | |||
break | |||
data = ref | |||
else: | |||
# WRITE mode | |||
if options.json_path is not None: | |||
token_path = re.split('\.', options.json_path) | |||
if len(token_path) == 0: | |||
data = options.json_value | |||
else: | |||
for token in token_path[0:-1]: | |||
try: | |||
ref = ref[token] | |||
except KeyError: | |||
# JSON path does not exist, create it | |||
ref[token] = {} | |||
ref = ref[token] | |||
ref[token_path[-1]] = options.json_value | |||
else: | |||
data = options.json_value | |||
if ref is not None: | |||
if options.outfile is None: | |||
        print json.dumps(data, indent=2)
else: | |||
with open(options.outfile, 'w') as outfile: | |||
outfile.write(json.dumps(data, indent=2)) |
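
# Example usage (the command name, file and path names are assumptions):
#   jsonedit -i config.json -p server.port                           # prints the value at server.port
#   jsonedit -i config.json -p server.port -v 8080 -o config.json    # writes "8080" (as a string) at server.port
#   cat config.json | jsonedit -p server.port                        # reads JSON from stdin when -i is omitted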
@@ -0,0 +1,3 @@ | |||
#!/bin/bash | |||
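# Print the unique (outer) class names seen in JVM class-loading log output, e.g. from
# java -Xlog:class+load (an assumption about the log source); reads the log from stdin.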
grep class,load | awk '{print $2}' | awk -F '$' '{print $1}' | sort | uniq |
@@ -0,0 +1,2 @@ | |||
#!/bin/bash | |||
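# Print the unique package names seen in JVM class-loading log output, e.g. from
# java -Xlog:class+load (an assumption about the log source); reads the log from stdin.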
grep class,load | awk '{print $2}' | awk -F '$' '{print $1}' | awk -F '.' 'BEGIN {OFS="."} {$(NF--)=""; print}' | sed -e 's/\.$//' | sort | uniq |
@@ -0,0 +1,18 @@ | |||
#!/bin/bash | |||
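#
# Build a trimmed JRE with jlink.
# A hedged example (the script name, paths and module list are assumptions):
#   JAVA_HOME=/usr/lib/jvm/jdk-11 ./mkjre.sh /opt/myapp/jre java.base,java.logging,java.sql
#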
function die () { | |||
echo >&2 "${1}" | |||
exit 1 | |||
} | |||
if [[ -z "${JAVA_HOME}" ]] ; then | |||
die "No JAVA_HOME env var defined" | |||
fi | |||
OUTPUT_JRE=${1:?no output JRE dir provided} | |||
if [[ -e ${OUTPUT_JRE} ]] ; then | |||
die "Output JRE dir already exists: ${OUTPUT_JRE}" | |||
fi | |||
MODS=${2:-java.base} | |||
${JAVA_HOME}/bin/jlink --module-path ${JAVA_HOME}/jmods --add-modules ${MODS} --output ${OUTPUT_JRE} |
@@ -0,0 +1,42 @@ | |||
#!/bin/bash | |||
# | |||
# patchjar - for quick patching jar files in a world of maven projects | |||
# | |||
# Usage: patchjar <jar-file> <destination> <dir> [<dir2> <dir3> ...] | |||
# | |||
# jar-file: path to a jar file. you can use relative paths | |||
# destination: a local path or user@host:path | |||
# dirs: any directory with a maven pom.xml file | |||
# | |||
# Rationale: building uber-jars is time-consuming. Uber-jars are wonderful for | |||
# deployment, but can sometimes feel like they slow down iteration cycles.
# | |||
# patchjar runs "mvn compile" in each maven directory, then directly updates | |||
# the jar via "jar uvf". Only the class files that changed are updated. | |||
# | |||
# Once all patches are applied, patchjar copies the jar file to the destination. | |||
# | |||
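# A hedged example (the jar, host and module paths are assumptions):
#   patchjar target/myapp.jar deploy@host:/opt/app/lib/ ./core ./server
#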
function die () { | |||
echo >&2 "${1}" | |||
exit 1 | |||
} | |||
START_DIR=$(pwd) | |||
JAR="$(cd $(dirname ${1}) && pwd)/$(basename ${1})" | |||
DESTINATION="${2}" | |||
shift 2 | |||
MAVEN="mvn -DskipTests=true -Dcheckstyle.skip=true" | |||
for dir in ${@} ; do | |||
cd ${START_DIR} && cd $(cd ${dir} && pwd) \ | |||
&& ${MAVEN} compile \ | |||
&& cd target/classes \ | |||
&& classes="$(find . -type f -mmin -3)" && if [ -z "${classes}" ] ; then classes="./*" ; fi \ | |||
&& jar uvf ${JAR} ${classes} \ | |||
|| die "Error building: ${dir}" | |||
done | |||
cd ${START_DIR} | |||
scp ${JAR} ${DESTINATION} |
@@ -0,0 +1,368 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<!-- | |||
(c) Copyright 2013-2015 Jonathan Cobb | |||
cobbzilla-utils is available under the Apache License, version 2: http://www.apache.org/licenses/LICENSE-2.0.html | |||
--> | |||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | |||
<modelVersion>4.0.0</modelVersion> | |||
<parent> | |||
<groupId>org.cobbzilla</groupId> | |||
<artifactId>cobbzilla-parent</artifactId> | |||
<version>1.0.0-SNAPSHOT</version> | |||
</parent> | |||
<artifactId>cobbzilla-utils</artifactId> | |||
<name>cobbzilla-utils</name> | |||
<version>1.0.0-SNAPSHOT</version> | |||
<packaging>jar</packaging> | |||
<licenses> | |||
<license> | |||
<name>The Apache Software License, Version 2.0</name> | |||
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> | |||
<distribution>repo</distribution> | |||
</license> | |||
</licenses> | |||
<profiles> | |||
<profile> | |||
<id>uberjar</id> | |||
<build> | |||
<plugins> | |||
<plugin> | |||
<groupId>org.apache.maven.plugins</groupId> | |||
<artifactId>maven-shade-plugin</artifactId> | |||
<version>2.1</version> | |||
<configuration> | |||
<finalName>zilla-utils</finalName> | |||
</configuration> | |||
<executions> | |||
<execution> | |||
<phase>package</phase> | |||
<goals> | |||
<goal>shade</goal> | |||
</goals> | |||
<configuration> | |||
<transformers> | |||
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> | |||
<mainClass>org.cobbzilla.util.main.IndexMain</mainClass> | |||
</transformer> | |||
</transformers> | |||
<!-- Exclude signed jars to avoid errors | |||
see: http://stackoverflow.com/a/6743609/1251543 | |||
--> | |||
<filters> | |||
<filter> | |||
<artifact>*:*</artifact> | |||
<excludes> | |||
<exclude>META-INF/*.SF</exclude> | |||
<exclude>META-INF/*.DSA</exclude> | |||
<exclude>META-INF/*.RSA</exclude> | |||
</excludes> | |||
</filter> | |||
</filters> | |||
</configuration> | |||
</execution> | |||
</executions> | |||
</plugin> | |||
</plugins> | |||
</build> | |||
</profile> | |||
</profiles> | |||
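    <!-- The shaded uber-jar is not built by default; presumably it is produced with
         "mvn -P uberjar package", which should leave target/zilla-utils.jar (per finalName above). -->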
<dependencies> | |||
<!-- https://mvnrepository.com/artifact/org.graalvm.js/js --> | |||
<dependency> | |||
<groupId>org.graalvm.js</groupId> | |||
<artifactId>js</artifactId> | |||
<version>19.2.0</version> | |||
</dependency> | |||
<!-- https://mvnrepository.com/artifact/org.graalvm.js/js-scriptengine --> | |||
<dependency> | |||
<groupId>org.graalvm.js</groupId> | |||
<artifactId>js-scriptengine</artifactId> | |||
<version>19.2.0</version> | |||
</dependency> | |||
<!-- https://mvnrepository.com/artifact/org.graalvm.truffle/truffle-api --> | |||
<dependency> | |||
<groupId>org.graalvm.truffle</groupId> | |||
<artifactId>truffle-api</artifactId> | |||
<version>19.2.0</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>com.fasterxml.jackson.core</groupId> | |||
<artifactId>jackson-core</artifactId> | |||
<version>${jackson.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>com.fasterxml.jackson.core</groupId> | |||
<artifactId>jackson-annotations</artifactId> | |||
<version>${jackson.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>com.fasterxml.jackson.core</groupId> | |||
<artifactId>jackson-databind</artifactId> | |||
<version>${jackson.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>com.fasterxml.jackson.dataformat</groupId> | |||
<artifactId>jackson-dataformat-xml</artifactId> | |||
<version>${jackson.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>commons-beanutils</groupId> | |||
<artifactId>commons-beanutils</artifactId> | |||
<version>${commons-beanutils.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.commons</groupId> | |||
<artifactId>commons-collections4</artifactId> | |||
<version>${commons-collections.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.commons</groupId> | |||
<artifactId>commons-lang3</artifactId> | |||
<version>${commons-lang3.version}</version> | |||
</dependency> | |||
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-text --> | |||
<dependency> | |||
<groupId>org.apache.commons</groupId> | |||
<artifactId>commons-text</artifactId> | |||
<version>1.7</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.ant</groupId> | |||
<artifactId>ant</artifactId> | |||
<version>${ant.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>commons-io</groupId> | |||
<artifactId>commons-io</artifactId> | |||
<version>${commons-io.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.commons</groupId> | |||
<artifactId>commons-compress</artifactId> | |||
<version>${commons-compress.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.commons</groupId> | |||
<artifactId>commons-exec</artifactId> | |||
<version>${commons-exec.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.httpcomponents</groupId> | |||
<artifactId>httpcore</artifactId> | |||
<version>${httpcore.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.httpcomponents</groupId> | |||
<artifactId>httpclient</artifactId> | |||
<version>${httpclient.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.apache.httpcomponents</groupId> | |||
<artifactId>httpmime</artifactId> | |||
<version>${httpmime.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>com.google.guava</groupId> | |||
<artifactId>guava</artifactId> | |||
<version>${guava.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>joda-time</groupId> | |||
<artifactId>joda-time</artifactId> | |||
<version>${joda-time.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>com.github.jknack</groupId> | |||
<artifactId>handlebars</artifactId> | |||
<version>${handlebars.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>com.github.jknack</groupId> | |||
<artifactId>handlebars-jackson2</artifactId> | |||
<version>${handlebars.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.slf4j</groupId> | |||
<artifactId>slf4j-api</artifactId> | |||
<version>${slf4j.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.slf4j</groupId> | |||
<artifactId>jul-to-slf4j</artifactId> | |||
<version>${slf4j.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>ch.qos.logback</groupId> | |||
<artifactId>logback-classic</artifactId> | |||
<version>${logback.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.quartz-scheduler</groupId> | |||
<artifactId>quartz</artifactId> | |||
<version>${quartz.version}</version> | |||
<exclusions> | |||
<exclusion> | |||
<groupId>c3p0</groupId> | |||
<artifactId>c3p0</artifactId> | |||
</exclusion> | |||
</exclusions> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.quartz-scheduler</groupId> | |||
<artifactId>quartz-jobs</artifactId> | |||
<version>${quartz.version}</version> | |||
</dependency> | |||
<!-- auto-generate java boilerplate --> | |||
<dependency> | |||
<groupId>org.projectlombok</groupId> | |||
<artifactId>lombok</artifactId> | |||
<version>${lombok.version}</version> | |||
<scope>compile</scope> | |||
</dependency> | |||
<!--<dependency>--> | |||
<!--<groupId>org.projectlombok</groupId>--> | |||
<!--<artifactId>lombok-maven-plugin</artifactId>--> | |||
<!--<version>1.18.0.0</version>--> | |||
<!--<!–<scope>compile</scope>–>--> | |||
<!--</dependency>--> | |||
<!-- HTML/XPath utilities --> | |||
<dependency> | |||
<groupId>jtidy</groupId> | |||
<artifactId>jtidy</artifactId> | |||
<version>${jtidy.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>xalan</groupId> | |||
<artifactId>xalan</artifactId> | |||
<version>${xalan.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>net.sf.saxon</groupId> | |||
<artifactId>Saxon-HE</artifactId> | |||
<version>9.7.0-10</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>args4j</groupId> | |||
<artifactId>args4j</artifactId> | |||
<version>${args4j.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>net.java.dev.jna</groupId> | |||
<artifactId>jna</artifactId> | |||
<version>4.1.0</version> | |||
</dependency> | |||
<!-- web/html screenshots --> | |||
<dependency> | |||
<groupId>com.codeborne</groupId> | |||
<artifactId>phantomjsdriver</artifactId> | |||
<!-- upgrading to 1.4.x breaks cglib for some reason --> | |||
<!-- we should switch to headless-chrome, phantomjs has been deprecated --> | |||
<version>1.3.0</version> | |||
</dependency> | |||
<!-- merge word doc templates --> | |||
<dependency> | |||
<groupId>fr.opensagres.xdocreport</groupId> | |||
<artifactId>fr.opensagres.xdocreport.document</artifactId> | |||
<version>${xdocreport.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>fr.opensagres.xdocreport</groupId> | |||
<artifactId>fr.opensagres.xdocreport.template.velocity</artifactId> | |||
<version>${xdocreport.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>fr.opensagres.xdocreport</groupId> | |||
<artifactId>fr.opensagres.xdocreport.document.docx</artifactId> | |||
<version>${xdocreport.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>fr.opensagres.xdocreport</groupId> | |||
<artifactId>fr.opensagres.xdocreport.converter.docx.xwpf</artifactId> | |||
<version>${xdocreport.version}</version> | |||
<exclusions> | |||
<exclusion> | |||
<groupId>fr.opensagres.xdocreport</groupId> | |||
<artifactId>org.apache.poi.xwpf.converter.pdf</artifactId> | |||
</exclusion> | |||
</exclusions> | |||
</dependency> | |||
<dependency> | |||
<groupId>fr.opensagres.xdocreport</groupId> | |||
<artifactId>org.apache.poi.xwpf.converter.pdf.itext5</artifactId> | |||
<version>${xdocreport.version}</version> | |||
</dependency> | |||
<!-- merge PDF templates --> | |||
<dependency> | |||
<groupId>org.apache.pdfbox</groupId> | |||
<artifactId>pdfbox</artifactId> | |||
<version>2.0.16</version> | |||
</dependency> | |||
<!-- merge XML documents --> | |||
<dependency> | |||
<groupId>org.atteo</groupId> | |||
<artifactId>xml-combiner</artifactId> | |||
<version>2.2</version> | |||
</dependency> | |||
<!-- Allows us to embed phantomjs --> | |||
<dependency> | |||
<groupId>io.github.bonigarcia</groupId> | |||
<artifactId>webdrivermanager</artifactId> | |||
<version>2.1.0</version> | |||
</dependency> | |||
<!-- https://mvnrepository.com/artifact/org.bouncycastle/bcprov-jdk15on --> | |||
<dependency> | |||
<groupId>org.bouncycastle</groupId> | |||
<artifactId>bcprov-jdk15on</artifactId> | |||
<version>1.64</version> | |||
</dependency> | |||
</dependencies> | |||
<build> | |||
<plugins> | |||
<plugin> | |||
<groupId>org.apache.maven.plugins</groupId> | |||
<artifactId>maven-compiler-plugin</artifactId> | |||
                <version>3.8.1</version>
<configuration> | |||
<source>11</source> | |||
<target>11</target> | |||
<showWarnings>true</showWarnings> | |||
</configuration> | |||
</plugin> | |||
</plugins> | |||
</build> | |||
</project> |
@@ -0,0 +1,59 @@ | |||
package org.cobbzilla.util.bean; | |||
import org.apache.commons.beanutils.PropertyUtilsBean; | |||
import java.beans.PropertyDescriptor; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
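/**
 * Shallow-copies readable/writeable JavaBean properties from one object onto another.
 *
 * A minimal sketch (the bean type and loaders are hypothetical):
 *
 *   ServerConfig defaults = loadDefaults();
 *   ServerConfig overrides = parseUserConfig();              // only some properties set
 *   BeanMerger.mergeNotNullProperties(defaults, overrides);  // non-null values from overrides win
 */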
public class BeanMerger { | |||
private static final PropertyUtilsBean propertyUtils = new PropertyUtilsBean(); | |||
public static void mergeProperties(Object dest, Object orig) { | |||
merge(dest, orig, AlwaysCopy.INSTANCE); | |||
} | |||
public static void mergeNotNullProperties(Object dest, Object orig) { | |||
merge(dest, orig, NotNull.INSTANCE); | |||
} | |||
private static void merge(Object dest, Object orig, CopyEvaluator evaluator) { | |||
if (dest == null) throw new IllegalArgumentException ("No destination bean specified"); | |||
if (orig == null) throw new IllegalArgumentException("No origin bean specified"); | |||
PropertyDescriptor[] origDescriptors = propertyUtils.getPropertyDescriptors(orig); | |||
for (PropertyDescriptor origDescriptor : origDescriptors) { | |||
String name = origDescriptor.getName(); | |||
if ("class".equals(name)) { | |||
continue; // No point in trying to set an object's class | |||
} | |||
if (propertyUtils.isReadable(orig, name) && | |||
propertyUtils.isWriteable(dest, name)) { | |||
try { | |||
Object value = propertyUtils.getSimpleProperty(orig, name); | |||
if (evaluator.shouldCopy(name, value)) { | |||
propertyUtils.setProperty(dest, name, value); | |||
} | |||
} catch (NoSuchMethodException e) { | |||
// Should not happen | |||
} catch (Exception e) { | |||
die("Error copying properties: " + e, e); | |||
} | |||
} | |||
} | |||
} | |||
private interface CopyEvaluator { | |||
boolean shouldCopy(String name, Object value); | |||
} | |||
static class AlwaysCopy implements CopyEvaluator { | |||
static final AlwaysCopy INSTANCE = new AlwaysCopy(); | |||
@Override public boolean shouldCopy(String name, Object value) { return true; } | |||
} | |||
static class NotNull implements CopyEvaluator { | |||
static final NotNull INSTANCE = new NotNull(); | |||
@Override public boolean shouldCopy(String name, Object value) { return value != null; } | |||
} | |||
} |
@@ -0,0 +1,43 @@ | |||
package org.cobbzilla.util.cache; | |||
import lombok.Getter; | |||
import java.util.concurrent.atomic.AtomicLong; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
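/**
 * Holds a value that is recomputed via refresh() whenever it is missing or older than getTimeout().
 *
 * A minimal sketch of a subclass (the loader is hypothetical):
 *
 *   AutoRefreshingReference<Properties> config = new AutoRefreshingReference<Properties>() {
 *       public Properties refresh() { return loadConfigFromDisk(); }
 *       public long getTimeout() { return TimeUnit.MINUTES.toMillis(10); }
 *   };
 *   Properties p = config.get(); // re-reads from disk at most every 10 minutes
 */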
public abstract class AutoRefreshingReference<T> { | |||
@Getter private final AtomicReference<T> object = new AtomicReference<>(); | |||
@Getter private final AtomicLong lastSet = new AtomicLong(); | |||
public abstract T refresh(); | |||
public abstract long getTimeout(); | |||
public T get() { | |||
synchronized (object) { | |||
if (isEmpty() || now() - lastSet.get() > getTimeout()) update(); | |||
return object.get(); | |||
} | |||
} | |||
public boolean isEmpty() { synchronized (object) { return object.get() == null; } } | |||
public void update() { | |||
synchronized (object) { | |||
object.set(refresh()); | |||
lastSet.set(now()); | |||
} | |||
} | |||
public void flush() { set(null); } | |||
public void set(T thing) { | |||
synchronized (object) { | |||
object.set(thing); | |||
lastSet.set(now()); | |||
} | |||
} | |||
} |
@@ -0,0 +1,52 @@ | |||
package org.cobbzilla.util.cache; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.cobbzilla.util.system.Sleep; | |||
import java.util.concurrent.TimeUnit; | |||
import java.util.concurrent.atomic.AtomicBoolean; | |||
import java.util.concurrent.atomic.AtomicInteger; | |||
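/**
 * An AutoRefreshingReference whose refresh() runs on a background thread, so callers of get()
 * are never blocked by a slow refresh; they see the previous (possibly stale) value while a
 * refresh is in progress. After a failed refresh, the next attempt is delayed by an exponential
 * backoff of 2^min(errorCount, 6) seconds (2s, 4s, ... capped at 64s).
 */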
@Slf4j | |||
public abstract class BackgroundRefreshingReference<T> extends AutoRefreshingReference<T> { | |||
private final AtomicBoolean updateInProgress = new AtomicBoolean(false); | |||
private final Refresher refresher = new Refresher(); | |||
private final AtomicInteger errorCount = new AtomicInteger(0); | |||
public boolean initialize () { return true; } | |||
public BackgroundRefreshingReference() { | |||
if (initialize()) update(); | |||
} | |||
@Override public void update() { | |||
synchronized (updateInProgress) { | |||
if (updateInProgress.get()) return; | |||
updateInProgress.set(true); | |||
new Thread(refresher).start(); | |||
} | |||
} | |||
private class Refresher implements Runnable { | |||
@Override public void run() { | |||
try { | |||
int errCount = errorCount.get(); | |||
if (errCount > 0) { | |||
Sleep.sleep(TimeUnit.SECONDS.toMillis(1) * (long) Math.pow(2, Math.min(errCount, 6))); | |||
} | |||
set(refresh()); | |||
errorCount.set(0); | |||
} catch (Exception e) { | |||
log.warn("error refreshing: "+e); | |||
errorCount.incrementAndGet(); | |||
} finally { | |||
synchronized (updateInProgress) { | |||
updateInProgress.set(false); | |||
} | |||
} | |||
} | |||
} | |||
} |
@@ -0,0 +1,39 @@ | |||
package org.cobbzilla.util.chef; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import org.cobbzilla.util.security.ShaUtil; | |||
import java.util.ArrayList; | |||
import java.util.List; | |||
@Accessors(chain=true) | |||
public class VendorDatabag { | |||
public static final VendorDatabag NULL = new VendorDatabag(); | |||
@Getter @Setter private String service_key_endpoint; | |||
@Getter @Setter private String ssl_key_sha; | |||
@Getter @Setter private List<VendorDatabagSetting> settings = new ArrayList<>(); | |||
public VendorDatabag addSetting (VendorDatabagSetting setting) { settings.add(setting); return this; } | |||
public VendorDatabagSetting getSetting(String path) { | |||
for (VendorDatabagSetting s : settings) { | |||
if (s.getPath().equals(path)) return s; | |||
} | |||
return null; | |||
} | |||
public boolean containsSetting (String path) { return getSetting(path) != null; } | |||
public boolean isDefault (String path, String value) { | |||
final VendorDatabagSetting setting = getSetting(path); | |||
if (setting == null) return false; | |||
final String shasum = setting.getShasum(); | |||
return shasum != null && ShaUtil.sha256_hex(value).equals(shasum); | |||
} | |||
} |
@@ -0,0 +1,20 @@ | |||
package org.cobbzilla.util.chef; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
@NoArgsConstructor @AllArgsConstructor | |||
public class VendorDatabagSetting { | |||
@Getter @Setter private String path; | |||
@Getter @Setter private String shasum; | |||
@Getter @Setter private boolean block_ssh = false; | |||
public VendorDatabagSetting(String path, String shasum) { | |||
setPath(path); | |||
setShasum(shasum); | |||
} | |||
} |
@@ -0,0 +1,20 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.Comparator; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
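/**
 * A Comparator that tolerates nulls: null sorts before any non-null value, and two nulls are equal.
 * For example, Collections.sort(names, AllowNullComparator.STRING) puts nulls first, then sorts
 * the remaining strings naturally.
 */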
public class AllowNullComparator<E> implements Comparator<E> { | |||
public static final AllowNullComparator<String> STRING = new AllowNullComparator<>(); | |||
public static final AllowNullComparator<Integer> INT = new AllowNullComparator<>(); | |||
public static final AllowNullComparator<Long> LONG = new AllowNullComparator<>(); | |||
@Override public int compare(E o1, E o2) { | |||
if (o1 == null) return o2 == null ? 0 : -1; | |||
if (o2 == null) return 1; | |||
if (o1 instanceof Comparable && o2 instanceof Comparable) return ((Comparable) o1).compareTo(o2); | |||
return die("compare: incomparable objects: "+o1+", "+o2); | |||
} | |||
} |
@@ -0,0 +1,145 @@ | |||
package org.cobbzilla.util.collection; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import java.lang.reflect.Array; | |||
import java.util.*; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.arrayClass; | |||
public class ArrayUtil { | |||
public static final Object[] SINGLE_NULL_OBJECT = new Object[]{null}; | |||
public static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; | |||
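/**
 * Append elements to an array, returning a new array (neither input is modified).
 * For example, append(new String[]{"a", "b"}, "c") returns a new array ["a", "b", "c"].
 */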
public static <T> T[] append (T[] array, T... elements) { | |||
if (array == null || array.length == 0) { | |||
if (elements.length == 0) return (T[]) new Object[]{}; // punt, it's empty anyway | |||
final T[] newArray = (T[]) Array.newInstance(elements[0].getClass(), elements.length); | |||
System.arraycopy(elements, 0, newArray, 0, elements.length); | |||
return newArray; | |||
} else { | |||
if (elements.length == 0) return Arrays.copyOf(array, array.length); | |||
final T[] copy = Arrays.copyOf(array, array.length + elements.length); | |||
System.arraycopy(elements, 0, copy, array.length, elements.length); | |||
return copy; | |||
} | |||
} | |||
public static <T> T[] concat (T[]... arrays) { | |||
int size = 0; | |||
for (T[] array : arrays) { | |||
size += array == null ? 0 : array.length; | |||
} | |||
final Class<?> componentType = arrays.getClass().getComponentType().getComponentType(); | |||
final T[] newArray = (T[]) Array.newInstance(componentType, size); | |||
int destPos = 0; | |||
for (T[] array : arrays) { | |||
System.arraycopy(array, 0, newArray, destPos, array.length); | |||
destPos += array.length; | |||
} | |||
return newArray; | |||
} | |||
public static <T> T[] remove(T[] array, int indexToRemove) { | |||
if (array == null) throw new NullPointerException("remove: array was null"); | |||
if (indexToRemove >= array.length || indexToRemove < 0) throw new IndexOutOfBoundsException("remove: cannot remove element "+indexToRemove+" from array of length "+array.length); | |||
final List<T> list = new ArrayList<>(Arrays.asList(array)); | |||
list.remove(indexToRemove); | |||
final T[] newArray = (T[]) Array.newInstance(array.getClass().getComponentType(), array.length-1); | |||
return list.toArray(newArray); | |||
} | |||
/** | |||
* Return a slice of an array. If from == to then an empty array will be returned. | |||
* @param array the source array | |||
* @param from the start index, inclusive. If less than zero or greater than the length of the array, an Exception is thrown | |||
* @param to the end index, NOT inclusive. If less than zero or greater than the length of the array, an Exception is thrown | |||
* @param <T> the type of the array elements
* @return A slice of the array. The source array is not modified. | |||
*/ | |||
public static <T> T[] slice(T[] array, int from, int to) { | |||
if (array == null) throw new NullPointerException("slice: array was null"); | |||
if (from < 0 || from > array.length) die("slice: invalid 'from' index ("+from+") for array of size "+array.length); | |||
if (to < 0 || to < from || to > array.length) die("slice: invalid 'to' index ("+to+") for array of size "+array.length); | |||
final T[] newArray = (T[]) Array.newInstance(array.getClass().getComponentType(), to-from); | |||
if (to == from) return newArray; | |||
System.arraycopy(array, from, newArray, 0, to-from); | |||
return newArray; | |||
} | |||
public static <T> List<T> merge(Collection<T>... collections) { | |||
if (empty(collections)) return Collections.emptyList(); | |||
final Set<T> result = new HashSet<>(); | |||
for (Collection<T> c : collections) result.addAll(c); | |||
return new ArrayList<>(result); | |||
} | |||
/** | |||
* Produce a delimited string from an array. Null values will appear as "null" | |||
* @param array the array to consider | |||
* @param delim the delimiter to put in between each element | |||
* @return the result of calling .toString on each array element, or "null" for null elements, separated by the given delimiter. | |||
*/ | |||
public static String arrayToString(Object[] array, String delim) { | |||
return arrayToString(array, delim, "null"); | |||
} | |||
/** | |||
* Produce a delimited string from an array. | |||
* @param array the array to consider | |||
* @param delim the delimiter to put in between each element | |||
* @param nullValue the value to write if an array entry is null. if this parameter is null, then null array entries will not be included in the output. | |||
* @return a string that starts with [ and ends with ] and within is the result of calling .toString on each non-null element (and printing nullValue for each null element, unless nullValue == null in which case null elements are omitted), with 'delim' in between each entry.
*/ | |||
public static String arrayToString(Object[] array, String delim, String nullValue) { | |||
return arrayToString(array, delim, nullValue, true); | |||
} | |||
/** | |||
* Produce a delimited string from an array. | |||
* @param array the array to consider | |||
* @param delim the delimiter to put in between each element | |||
* @param nullValue the value to write if an array entry is null. if this parameter is null, then null array entries will not be included in the output. | |||
* @param includeBrackets if false, the return value will not start/end with [] | |||
* @return the result of calling .toString on each non-null element (and printing nullValue for each null element, unless nullValue == null in which case null elements are omitted), with 'delim' in between each entry; wrapped in [ and ] if includeBrackets is true.
*/ | |||
public static String arrayToString(Object[] array, String delim, String nullValue, boolean includeBrackets) { | |||
if (array == null) return "null"; | |||
final StringBuilder b = new StringBuilder(); | |||
for (Object o : array) { | |||
if (b.length() > 0) b.append(delim); | |||
if (o == null) { | |||
if (nullValue == null) continue; | |||
b.append(nullValue); | |||
} else if (o.getClass().isArray()) { | |||
b.append(arrayToString((Object[]) o, delim, nullValue)); | |||
} else if (o instanceof Map) { | |||
b.append(StringUtil.toString((Map) o)); | |||
} else { | |||
b.append(o.toString()); | |||
} | |||
} | |||
return includeBrackets ? b.insert(0, "[").append("]").toString() : b.toString(); | |||
} | |||
public static <T> T[] shift(T[] args) { | |||
if (args == null) return null; | |||
if (args.length == 0) return args; | |||
final T[] newArgs = (T[]) Array.newInstance(args[0].getClass(), args.length-1); | |||
System.arraycopy(args, 1, newArgs, 0, args.length-1); | |||
return newArgs; | |||
} | |||
public static <T> T[] singletonArray (T thing) { return singletonArray(thing, (Class<T>) thing.getClass()); } | |||
public static <T> T[] singletonArray (T thing, Class<T> clazz) { | |||
final T[] array = (T[]) Array.newInstance(arrayClass(clazz), 1); | |||
array[0] = thing; | |||
return array; | |||
} | |||
} |
@@ -0,0 +1,55 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.Collections; | |||
import java.util.Enumeration; | |||
import java.util.concurrent.ConcurrentHashMap; | |||
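/**
 * A ConcurrentHashMap keyed by Strings where lookups ignore case: keys are lower-cased for
 * storage, while the originally supplied keys are preserved for keys().
 *
 * For example:
 *   CaseInsensitiveStringKeyMap<String> headers = new CaseInsensitiveStringKeyMap<>();
 *   headers.put("Content-Type", "text/html");
 *   headers.get("content-type");   // returns "text/html"
 */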
public class CaseInsensitiveStringKeyMap<V> extends ConcurrentHashMap<String, V> { | |||
private ConcurrentHashMap<String, String> origKeys = new ConcurrentHashMap<>(); | |||
public String key(Object key) { return key == null ? null : key.toString().toLowerCase(); } | |||
@Override public KeySetView<String, V> keySet() { return super.keySet(); } | |||
@Override public Enumeration<String> keys() { return Collections.enumeration(origKeys.values()); } | |||
@Override public V get(Object key) { return super.get(key(key)); } | |||
@Override public boolean containsKey(Object key) { return super.containsKey(key(key)); } | |||
@Override public V put(String key, V value) { | |||
final String ciKey = key(key); | |||
origKeys.put(ciKey, key); | |||
return super.put(ciKey, value); | |||
} | |||
@Override public V putIfAbsent(String key, V value) { | |||
final String ciKey = key(key); | |||
origKeys.putIfAbsent(ciKey, key); | |||
return super.putIfAbsent(ciKey, value); | |||
} | |||
@Override public V remove(Object key) { | |||
final String ciKey = key(key); | |||
origKeys.remove(ciKey); | |||
return super.remove(ciKey); | |||
} | |||
@Override public boolean remove(Object key, Object value) { | |||
final String ciKey = key(key); | |||
origKeys.remove(ciKey, value); | |||
return super.remove(ciKey, value); | |||
} | |||
@Override public boolean replace(String key, V oldValue, V newValue) { | |||
final String ciKey = key(key); | |||
return super.replace(ciKey, oldValue, newValue); | |||
} | |||
@Override public V replace(String key, V value) { | |||
final String ciKey = key(key); | |||
return super.replace(ciKey, value); | |||
} | |||
} |
@@ -0,0 +1,12 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.Collection; | |||
import java.util.TreeSet; | |||
public class CaseInsensitiveStringSet extends TreeSet<String> { | |||
public CaseInsensitiveStringSet() { super(String.CASE_INSENSITIVE_ORDER); } | |||
public CaseInsensitiveStringSet(Collection<String> c) { this(); addAll(c); } // use the case-insensitive comparator, then copy
} |
@@ -0,0 +1,12 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.Collection; | |||
public interface CollectionSource<T> { | |||
void addValue (T val); | |||
void addValues (Collection<T> vals); | |||
Collection<T> getValues(); | |||
} |
@@ -0,0 +1,55 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.HashSet; | |||
import java.util.LinkedHashSet; | |||
import java.util.Set; | |||
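/**
 * Generates every combination of the input elements of size n-1 down to 1 (i.e. all non-empty
 * proper subsets). For example, generateCombinations of {"a", "b", "c"} yields
 * {a,b}, {a,c}, {b,c}, {a}, {b}, {c}.
 */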
public class CombinationsGenerator { | |||
// The main function that gets all combinations of size n-1 to 1, in set of size n. | |||
// This function mainly uses combinationUtil() | |||
public static Set<Set<String>> generateCombinations(Set<String> elements) { | |||
Set<Set<String>> result = new LinkedHashSet<>(); | |||
// i - A number of elements which will be used in the combination in this iteration | |||
for (int i = elements.size() - 1; i >= 1 ; i--) { | |||
// A temporary array to store all combinations one by one | |||
String[] data = new String[i]; | |||
// Get all combination using temporary array 'data' | |||
result = _generate(result, elements.toArray(new String[elements.size()]), | |||
data, 0, elements.size() - 1, 0, i); | |||
} | |||
return result; | |||
} | |||
/** | |||
* @param combinations - Resulting set with all combinations of arr
* @param arr - Input Array | |||
* @param data - Temporary array to store current combination | |||
* @param start - Starting index in arr[]
* @param end - Ending index in arr[] | |||
* @param index - Current index in data[] | |||
* @param r - Size of a combination | |||
*/ | |||
private static Set<Set<String>> _generate(Set<Set<String>> combinations, String[] arr, | |||
String[] data, int start, int end, int index, int r) { | |||
// Current combination is ready | |||
if (index == r) { | |||
Set<String> current = new HashSet<>(); | |||
for (int j = 0; j < r; j++) { | |||
current.add(data[j]); | |||
} | |||
combinations.add(current); | |||
return combinations; | |||
} | |||
// replace index with all possible elements. The condition `end - i + 1 >= r - index` makes sure that including | |||
// one element at index will make a combination with remaining elements at remaining positions | |||
for (int i = start; i <= end && end - i + 1 >= r - index; i++) { | |||
data[index] = arr[i]; | |||
combinations = _generate(combinations, arr, data, i + 1, end, index + 1, r); | |||
} | |||
return combinations; | |||
} | |||
} |
@@ -0,0 +1,23 @@ | |||
package org.cobbzilla.util.collection; | |||
import com.fasterxml.jackson.annotation.JsonCreator; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
@AllArgsConstructor | |||
public enum ComparisonOperator { | |||
lt ("<", "<", "-lt"), | |||
le ("<=", "<=", "-le"), | |||
eq ("=", "==", "-eq"), | |||
ge (">=", ">=", "-ge"), | |||
gt (">", ">", "-gt"), | |||
ne ("!=", "!=", "-ne"); | |||
@Getter public final String sql; | |||
@Getter public final String java; | |||
@Getter public final String shell; | |||
@JsonCreator public static ComparisonOperator fromString(String val) { return valueOf(val.toLowerCase()); } | |||
} |
@@ -0,0 +1,94 @@ | |||
package org.cobbzilla.util.collection; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import java.util.*; | |||
import java.util.concurrent.ConcurrentHashMap; | |||
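/**
 * A Set whose notion of element identity is delegated to a Hasher that maps each element to a
 * String key (rather than relying on equals/hashCode).
 *
 * A minimal sketch (the element type and hasher are hypothetical):
 *   CustomHashSet<User> users = new CustomHashSet<>(User.class, u -> u.getEmail());
 *   // two User instances with the same email are treated as the same element
 */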
@NoArgsConstructor @Accessors(chain=true) | |||
public class CustomHashSet<E> implements Set<E> { | |||
public interface Hasher<E> { String hash (E thing); } | |||
@Getter @Setter private Class<E> elementClass; | |||
@Getter @Setter private Hasher hasher; | |||
private Map<String, E> map = new ConcurrentHashMap<>(); | |||
public CustomHashSet(Class<E> clazz, Hasher<E> hasher, Collection<E> collection) { | |||
this(clazz, hasher); | |||
addAll(collection); | |||
} | |||
public CustomHashSet(Class<E> elementClass, Hasher<E> hasher) { | |||
this.elementClass = elementClass; | |||
this.hasher = hasher; | |||
} | |||
@Override public int size() { return map.size(); } | |||
@Override public boolean isEmpty() { return map.isEmpty(); } | |||
@Override public boolean contains(Object o) { | |||
if (o == null) return false; | |||
if (getElementClass().isAssignableFrom(o.getClass())) { | |||
return map.containsKey(hasher.hash(o)); | |||
} else if (o instanceof String) { | |||
return map.containsKey(o); | |||
} | |||
return false; | |||
} | |||
@Override public Iterator<E> iterator() { return map.values().iterator(); } | |||
@Override public Object[] toArray() { return map.values().toArray(); } | |||
@Override public <T> T[] toArray(T[] a) { return map.values().toArray(a); }
@Override public boolean add(E e) { return map.put(hasher.hash(e), e) == null; } | |||
public E find(E e) { return map.get(hasher.hash(e)); } | |||
@Override public boolean remove(Object o) { | |||
if (getElementClass().isAssignableFrom(o.getClass())) { | |||
return map.remove(hasher.hash(o)) != null; | |||
} else if (o instanceof String) { | |||
return map.remove(o) != null; | |||
} | |||
return false; | |||
} | |||
@Override public boolean containsAll(Collection<?> c) { | |||
for (Object o : c) if (!contains(o)) return false; | |||
return true; | |||
} | |||
@Override public boolean addAll(Collection<? extends E> c) { | |||
boolean anyAdded = false; | |||
for (E o : c) if (add(o)) anyAdded = true;
return anyAdded; | |||
} | |||
@Override public boolean retainAll(Collection<?> c) { | |||
final Set<String> toRemove = new HashSet<>(); | |||
for (Map.Entry<String, E> entry : map.entrySet()) { | |||
if (!c.contains(entry.getValue())) toRemove.add(entry.getKey()); | |||
} | |||
for (String k : toRemove) remove(k); | |||
return !toRemove.isEmpty(); | |||
} | |||
@Override public boolean removeAll(Collection<?> c) { | |||
boolean anyRemoved = false; | |||
for (Object o : c) if (remove(o)) anyRemoved = true;
return anyRemoved; | |||
} | |||
@Override public void clear() { map.clear(); } | |||
} |
@@ -0,0 +1,10 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.List; | |||
import java.util.Map; | |||
public interface Expandable<T> { | |||
List<T> expand(Map<String, Object> context); | |||
} |
@@ -0,0 +1,130 @@ | |||
package org.cobbzilla.util.collection; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import lombok.experimental.Accessors; | |||
import java.util.Collection; | |||
import java.util.HashSet; | |||
import java.util.Map; | |||
import java.util.Set; | |||
import java.util.concurrent.ConcurrentHashMap; | |||
import java.util.concurrent.TimeUnit; | |||
import java.util.stream.Collectors; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.notSupported; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
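/**
 * A Map whose entries expire after a period of disuse: get() refreshes an entry's access time,
 * and entries untouched for longer than the expiration window are swept out periodically
 * (from put(), once the clean interval has elapsed).
 *
 * For example (a 10-minute expiration; the Session value type is hypothetical):
 *   Map<String, Session> sessions = new ExpirationMap<>(TimeUnit.MINUTES.toMillis(10));
 */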
public class ExpirationMap<K, V> implements Map<K, V> { | |||
private final Map<K, ExpirationMapEntry<V>> map; | |||
private long expiration = TimeUnit.HOURS.toMillis(1); | |||
private long cleanInterval = TimeUnit.HOURS.toMillis(4); | |||
private long lastCleaned = 0; | |||
public ExpirationMap() { | |||
this.map = new ConcurrentHashMap<>(); | |||
} | |||
public ExpirationMap(long expiration) { | |||
this.map = new ConcurrentHashMap<>(); | |||
this.expiration = expiration; | |||
} | |||
public ExpirationMap(long expiration, long cleanInterval) { | |||
this(expiration); | |||
this.cleanInterval = cleanInterval; | |||
} | |||
public ExpirationMap(long expiration, long cleanInterval, int initialCapacity) { | |||
this.map = new ConcurrentHashMap<>(initialCapacity); | |||
this.expiration = expiration; | |||
this.cleanInterval = cleanInterval; | |||
} | |||
public ExpirationMap(long expiration, long cleanInterval, int initialCapacity, float loadFactor) { | |||
this.map = new ConcurrentHashMap<>(initialCapacity, loadFactor); | |||
this.expiration = expiration; | |||
this.cleanInterval = cleanInterval; | |||
} | |||
public ExpirationMap(long expiration, long cleanInterval, int initialCapacity, float loadFactor, int concurrencyLevel) { | |||
this.map = new ConcurrentHashMap<>(initialCapacity, loadFactor, concurrencyLevel); | |||
this.expiration = expiration; | |||
this.cleanInterval = cleanInterval; | |||
} | |||
@Accessors(chain=true) | |||
private class ExpirationMapEntry<VAL> { | |||
public final VAL value; | |||
public volatile long atime = now(); | |||
public ExpirationMapEntry(VAL value) { this.value = value; } | |||
public VAL touch() { atime = now(); return value; } | |||
public boolean expired() { return now() > atime+expiration; } | |||
} | |||
@Override public int size() { return map.size(); } | |||
@Override public boolean isEmpty() { return map.isEmpty(); } | |||
@Override public boolean containsKey(Object key) { return map.containsKey(key); } | |||
@Override public boolean containsValue(Object value) { | |||
for (ExpirationMapEntry<V> val : map.values()) { | |||
            if (val.value == value || (val.value != null && val.value.equals(value))) return true; // compare by equals(), not just identity
} | |||
return false; | |||
} | |||
@Override public V get(Object key) { | |||
final ExpirationMapEntry<V> value = map.get(key); | |||
return value == null ? null : value.touch(); | |||
} | |||
@Override public V put(K key, V value) { | |||
        if (lastCleaned + cleanInterval < now()) cleanExpired(); // clean once the interval has elapsed
final ExpirationMapEntry<V> previous = map.put(key, new ExpirationMapEntry<>(value)); | |||
return previous == null ? null : previous.value; | |||
} | |||
@Override public V remove(Object key) { | |||
final ExpirationMapEntry<V> previous = map.remove(key); | |||
return previous == null ? null : previous.value; | |||
} | |||
@Override public void putAll(Map<? extends K, ? extends V> m) { | |||
for (Map.Entry<? extends K, ? extends V> entry : m.entrySet()) { | |||
put(entry.getKey(), entry.getValue()); | |||
} | |||
} | |||
@Override public void clear() { map.clear(); } | |||
@Override public Set<K> keySet() { return map.keySet(); } | |||
@Override public Collection<V> values() { | |||
return map.values().stream().map(v -> v.value).collect(Collectors.toList()); | |||
} | |||
@AllArgsConstructor | |||
private static class EMEntry<K, V> implements Entry<K, V> { | |||
@Getter private K key; | |||
@Getter private V value; | |||
@Override public V setValue(V value) { return notSupported("setValue"); } | |||
} | |||
@Override public Set<Entry<K, V>> entrySet() { | |||
return map.entrySet().stream().map(e -> new EMEntry<>(e.getKey(), e.getValue().value)).collect(Collectors.toSet()); | |||
} | |||
private synchronized void cleanExpired () { | |||
        if (lastCleaned + cleanInterval > now()) return; // not yet time to clean
lastCleaned = now(); | |||
final Set<K> toRemove = new HashSet<>(); | |||
for (Map.Entry<K, ExpirationMapEntry<V>> entry : map.entrySet()) { | |||
if (entry.getValue().expired()) toRemove.add(entry.getKey()); | |||
} | |||
for (K k : toRemove) map.remove(k); | |||
} | |||
} |
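A quick usage sketch (not part of the source; key names and durations are illustrative, and the project's Sleep.sleep helper is used for brevity): values stay readable until they expire, and expired entries are purged lazily once a put() happens after the clean interval has elapsed.

    final ExpirationMap<String, String> cache = new ExpirationMap<>(100, 200); // 100ms TTL, clean every 200ms
    cache.put("session", "abc123");
    cache.get("session");            // "abc123" -- also refreshes the entry's access time
    sleep(250);                      // illustrative pause past both intervals
    cache.put("other", "x");         // this put triggers cleanExpired(), which removes "session"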
@@ -0,0 +1,47 @@ | |||
package org.cobbzilla.util.collection; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import lombok.extern.slf4j.Slf4j; | |||
import java.util.Iterator; | |||
import java.util.Map; | |||
import java.util.concurrent.ConcurrentHashMap; | |||
import java.util.concurrent.TimeUnit; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
@NoArgsConstructor @AllArgsConstructor @Accessors(chain=true) @Slf4j | |||
public class FailedOperationCounter<T> extends ConcurrentHashMap<T, Map<Long, Long>> { | |||
@Getter @Setter private long expiration = TimeUnit.MINUTES.toMillis(5); | |||
@Getter @Setter private int maxFailures = 1; | |||
public void fail(T value) { | |||
Map<Long, Long> failures = get(value); | |||
if (failures == null) { | |||
failures = new ConcurrentHashMap<>(); | |||
put(value, failures); | |||
} | |||
final long ftime = now(); | |||
failures.put(ftime, ftime); | |||
} | |||
public boolean tooManyFailures(T value) { | |||
final Map<Long, Long> failures = get(value); | |||
if (failures == null) return false; | |||
int count = 0; | |||
for (Iterator<Long> iter = failures.keySet().iterator(); iter.hasNext();) { | |||
Long ftime = iter.next(); | |||
if (now() - ftime > expiration) { | |||
iter.remove(); | |||
} else { | |||
if (++count >= maxFailures) return true; | |||
} | |||
} | |||
return false; | |||
} | |||
} |
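A minimal sketch of the intended use (the key names are illustrative): record failures per key and gate further attempts once the threshold is hit within the expiration window. Note that fail() keys each failure by its timestamp, so failures landing in the same millisecond collapse into one entry.

    final FailedOperationCounter<String> failures = new FailedOperationCounter<String>()
            .setExpiration(TimeUnit.MINUTES.toMillis(1))
            .setMaxFailures(3);
    failures.fail("login:bob");
    failures.fail("login:bob");
    failures.tooManyFailures("login:bob"); // false while fewer than 3 failures are on record
    failures.fail("login:bob");
    failures.tooManyFailures("login:bob"); // true, until the failures age past one minute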
@@ -0,0 +1,39 @@ | |||
package org.cobbzilla.util.collection; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import org.apache.commons.collections.CollectionUtils; | |||
import org.apache.commons.collections.Transformer; | |||
import org.cobbzilla.util.reflect.ReflectionUtil; | |||
import java.lang.reflect.Array; | |||
import java.util.Collection; | |||
import java.util.HashSet; | |||
import java.util.List; | |||
import java.util.Set; | |||
import static org.cobbzilla.util.collection.ArrayUtil.EMPTY_OBJECT_ARRAY; | |||
@AllArgsConstructor | |||
public class FieldTransformer implements Transformer { | |||
public static final FieldTransformer TO_NAME = new FieldTransformer("name"); | |||
public static final FieldTransformer TO_ID = new FieldTransformer("id"); | |||
public static final FieldTransformer TO_UUID = new FieldTransformer("uuid"); | |||
@Getter private final String field; | |||
@Override public Object transform(Object o) { return ReflectionUtil.get(o, field); } | |||
public <E> List<E> collect (Collection c) { return c == null ? null : (List<E>) CollectionUtils.collect(c, this); } | |||
public <E> Set<E> collectSet (Collection c) { return c == null ? null : new HashSet<>(CollectionUtils.collect(c, this)); } | |||
public <E> E[] array (Collection c) { | |||
if (c == null) return null; | |||
if (c.isEmpty()) return (E[]) EMPTY_OBJECT_ARRAY; | |||
final List<E> collect = (List<E>) CollectionUtils.collect(c, this); | |||
final Class<E> elementType = (Class<E>) ReflectionUtil.getterType(c.iterator().next(), field); | |||
return collect.toArray((E[]) Array.newInstance(elementType, collect.size())); | |||
} | |||
} |
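An illustrative sketch (the Widget bean and its getName() getter are hypothetical): pull a single property out of a collection of beans.

    final List<Widget> widgets = Arrays.asList(new Widget("a"), new Widget("b"));
    final List<String> names = FieldTransformer.TO_NAME.collect(widgets);   // ["a", "b"]
    final String[] nameArray = FieldTransformer.TO_NAME.array(widgets);     // {"a", "b"}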
@@ -0,0 +1,35 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.ArrayList; | |||
import java.util.Collection; | |||
import java.util.Comparator; | |||
public interface HasPriority { | |||
Integer getPriority (); | |||
default boolean hasPriority () { return getPriority() != null; } | |||
Comparator<HasPriority> SORT_PRIORITY = (r1, r2) -> { | |||
if (!r2.hasPriority()) return r1.hasPriority() ? -1 : 0; | |||
if (!r1.hasPriority()) return 1; | |||
return r1.getPriority().compareTo(r2.getPriority()); | |||
}; | |||
static int compare(Object o1, Object o2) { | |||
return o1 instanceof HasPriority && o2 instanceof HasPriority ? SORT_PRIORITY.compare((HasPriority) o1, (HasPriority) o2) : 0; | |||
} | |||
static <T extends HasPriority> Collection<T> priorityAsc (Collection<T> c) { | |||
final Collection<T> sorted = new ArrayList<>(c); | |||
((ArrayList<T>) sorted).sort(SORT_PRIORITY); | |||
return sorted; | |||
} | |||
static <T extends HasPriority> Collection<T> priorityDesc (Collection<T> c) { | |||
final Collection<T> sorted = new ArrayList<>(c); | |||
((ArrayList<T>) sorted).sort(SORT_PRIORITY.reversed()); | |||
return sorted; | |||
} | |||
} |
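A short sketch (Task is a hypothetical implementor of HasPriority): items with a priority sort ahead of items whose priority is null when sorting ascending.

    final List<Task> tasks = Arrays.asList(new Task(3), new Task(null), new Task(1));
    HasPriority.priorityAsc(tasks);   // priorities 1, 3, then the null-priority task
    HasPriority.priorityDesc(tasks);  // the null-priority task first, then 3, then 1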
@@ -0,0 +1,39 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.*; | |||
public class InspectCollection { | |||
public static boolean containsCircularReference(String start, Map<String, List<String>> graph) { | |||
return containsCircularReference(new HashSet<String>(), start, graph); | |||
} | |||
public static boolean containsCircularReference(Set<String> found, String start, Map<String, List<String>> graph) { | |||
        final List<String> descendants = graph.get(start);
        if (descendants == null) return false; // special case: our starting point is outside the graph.
        for (String target : descendants) {
if (found.contains(target)) { | |||
// we've seen this target already, we have a circular reference | |||
return true; | |||
} | |||
if (graph.containsKey(target)) { | |||
// this target is also a member of the graph -- add to found and recurse | |||
found.add(target); | |||
if (containsCircularReference(new HashSet<>(found), target, graph)) return true; | |||
} | |||
// no "else" clause here: we don't care about anything not in the graph, it can't create a circular reference. | |||
} | |||
return false; | |||
} | |||
public static boolean isLargerThan (Collection c, int size) { | |||
int count = 0; | |||
final Iterator i = c.iterator(); | |||
while (i.hasNext() && count <= size) { | |||
i.next(); | |||
count++; | |||
} | |||
return count > size; | |||
} | |||
} |
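A small sketch of both helpers (the graph contents are illustrative):

    final Map<String, List<String>> graph = new HashMap<>();
    graph.put("a", Arrays.asList("b"));
    graph.put("b", Arrays.asList("c"));
    graph.put("c", Arrays.asList("a"));                       // a -> b -> c -> a
    InspectCollection.containsCircularReference("a", graph);  // true
    InspectCollection.isLargerThan(graph.keySet(), 2);        // true -- stops counting after 3 items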
@@ -0,0 +1,78 @@ | |||
package org.cobbzilla.util.collection; | |||
import com.google.common.collect.Lists; | |||
import org.cobbzilla.util.reflect.ReflectionUtil; | |||
import java.util.*; | |||
public class ListUtil { | |||
public static <T> List<T> concat(List<T> list1, List<T> list2) { | |||
if (list1 == null || list1.isEmpty()) return list2 == null ? null : new ArrayList<>(list2); | |||
if (list2 == null || list2.isEmpty()) return new ArrayList<>(list1); | |||
final List<T> newList = new ArrayList<>(list1.size() + list2.size()); | |||
newList.addAll(list1); | |||
newList.addAll(list2); | |||
return newList; | |||
} | |||
// adapted from: https://stackoverflow.com/a/23870892/1251543 | |||
/** | |||
     * Combines several collections of elements and creates permutations of all of them, taking one element from each
     * collection and keeping the same order in the resulting lists as in the original list of collections.
* <p/> | |||
* <ul>Example | |||
* <li>Input = { {a,b,c} , {1,2,3,4} }</li> | |||
* <li>Output = { {a,1} , {a,2} , {a,3} , {a,4} , {b,1} , {b,2} , {b,3} , {b,4} , {c,1} , {c,2} , {c,3} , {c,4} }</li> | |||
* </ul> | |||
* | |||
     * @param collections Original list of collections whose elements have to be combined.
     * @return Resulting collection of lists with all permutations of the original list.
*/ | |||
public static <T> List<List<T>> permutations(List<List<T>> collections) { | |||
if (collections == null || collections.isEmpty()) { | |||
return Collections.emptyList(); | |||
} else { | |||
List<List<T>> res = Lists.newLinkedList(); | |||
permutationsImpl(collections, res, 0, new LinkedList<T>()); | |||
return res; | |||
} | |||
} | |||
private static <T> void permutationsImpl(List<List<T>> ori, Collection<List<T>> res, int d, List<T> current) { | |||
        // if depth equals the number of original collections, the end has been reached: add the permutation and return
if (d == ori.size()) { | |||
res.add(current); | |||
return; | |||
} | |||
        // iterate over the current collection, copying 'current' once per element and recursing one level deeper
Collection<T> currentCollection = ori.get(d); | |||
for (T element : currentCollection) { | |||
List<T> copy = Lists.newLinkedList(current); | |||
copy.add(element); | |||
permutationsImpl(ori, res, d + 1, copy); | |||
} | |||
} | |||
public static List<Object> expand(Object[] things, Map<String, Object> context) { | |||
final List<Object> results = new ArrayList<>(); | |||
for (Object thing : things) { | |||
if (thing instanceof Expandable) { | |||
results.addAll(((Expandable) thing).expand(context)); | |||
} else { | |||
results.add(thing); | |||
} | |||
} | |||
return results; | |||
} | |||
public static <T> List<T> deepCopy(List<T> list) { | |||
if (list == null) return null; | |||
final List<T> copy = new ArrayList<>(); | |||
for (T item : list) copy.add(item == null ? null : ReflectionUtil.copy(item)); | |||
return copy; | |||
} | |||
} |
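A brief sketch of concat and permutations (the values are illustrative):

    ListUtil.concat(Arrays.asList("x"), Arrays.asList("y", "z"));   // ["x", "y", "z"]
    final List<List<String>> input = Arrays.asList(
            Arrays.asList("a", "b"),
            Arrays.asList("1", "2"));
    ListUtil.permutations(input);   // [[a,1], [a,2], [b,1], [b,2]]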
@@ -0,0 +1,71 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.HashMap; | |||
import java.util.LinkedHashMap; | |||
import java.util.Map; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.instantiate; | |||
/** | |||
* A handy utility for creating and initializing Maps in a single statement. | |||
* @author Jonathan Cobb. | |||
*/ | |||
public class MapBuilder { | |||
/** | |||
* Most common create/init case. Usage: | |||
* | |||
* Map<String, Boolean> myPremadeMap = MapBuilder.build(new Object[][]{ | |||
* { "a", true }, { "b", false }, { "c", true }, { "d", true }, | |||
* { "e", "yes, still dangerous but at least it's not an anonymous class" } | |||
* }); | |||
* | |||
* If your keys and values are of the same type, it will even be typesafe: | |||
* Map<String, String> someProperties = MapBuilder.build(new String[][]{ | |||
* {"propA", "valueA" }, { "propB", "valueB" } | |||
* }); | |||
* | |||
* @param values [x][2] array. items at [x][0] are keys and [x][1] are values. | |||
* @return a LinkedHashMap (to preserve order of declaration) with the "values" mappings | |||
*/ | |||
public static <K,V> Map<K,V> build(Object[][] values) { | |||
return build((Map<K,V>) new LinkedHashMap<>(), values); | |||
} | |||
/** | |||
* Usage: | |||
* Map<K,V> myMap = MapBuilder.build(new MyMapClass(options), | |||
* new Object[][]{ {k,v}, {k,v}, ... }); | |||
* @param map add key/value pairs to this map | |||
* @return the map passed in, now containing new "values" mappings | |||
*/ | |||
public static <K,V> Map<K,V> build(Map<K,V> map, Object[][] values) { | |||
for (Object[] value : values) { | |||
map.put((K) value[0], (V) value[1]); | |||
} | |||
return map; | |||
} | |||
/** Same as above, for single-value maps */ | |||
public static <K,V> Map<K,V> build(Map<K,V> map, K key, V value) { | |||
return build(map, new Object[][]{{key,value}}); | |||
} | |||
/** | |||
* Usage: | |||
* Map<K,V> myMap = MapBuilder.build(MyMapClass.class, new Object[][]{ {k,v}, {k,v}, ... }); | |||
* @param mapClass a Class that implements Map | |||
* @return the map passed in, now containing new "values" mappings | |||
*/ | |||
public static <K,V> Map<K,V> build(Class<? extends Map<K,V>> mapClass, Object[][] values) { | |||
return build(instantiate(mapClass), values); | |||
} | |||
/** Usage: Map<K,V> myMap = MapBuilder.build(key, value); */ | |||
public static <K,V> Map<K, V> build(K key, V value) { | |||
Map<K,V> map = new HashMap<>(); | |||
map.put(key, value); | |||
return map; | |||
} | |||
} |
@@ -0,0 +1,42 @@ | |||
package org.cobbzilla.util.collection; | |||
import com.fasterxml.jackson.core.type.TypeReference; | |||
import java.util.*; | |||
public class MapUtil { | |||
public static final TypeReference<HashMap<String, Object>> JSON_STRING_OBJECT_MAP = new TypeReference<HashMap<String, Object>>() {}; | |||
public static final TypeReference<HashMap<String, String>> JSON_STRING_STRING_MAP = new TypeReference<HashMap<String, String>>() {}; | |||
public static Map<String, String> toMap (Properties props) { | |||
if (props == null || props.isEmpty()) return Collections.emptyMap(); | |||
final Map<String, String> map = new LinkedHashMap<>(props.size()); | |||
for (String name : props.stringPropertyNames()) map.put(name, props.getProperty(name)); | |||
return map; | |||
} | |||
public static <K, V> boolean deepEquals (Map<K, V> m1, Map<K, V> m2) { | |||
if (m1 == null) return m2 == null; | |||
if (m2 == null) return false; | |||
if (m1.size() != m2.size()) return false; | |||
final Set<Map.Entry<K, V>> set = m1.entrySet(); | |||
        for (Map.Entry<K, V> e : set) {
            final V m1v = e.getValue();
            final V m2v = m2.get(e.getKey());
            if (m1v == null || m2v == null) return false;
            // nested maps are compared recursively; everything else falls back to equals()
            if (m1v instanceof Map) {
                if (!(m2v instanceof Map) || !deepEquals((Map<K, V>) m1v, (Map<K, V>) m2v)) return false;
            } else if (!m1v.equals(m2v)) {
                return false;
            }
        }
return true; | |||
} | |||
public static <K, V> int deepHash(Map<K, V> m) { | |||
int hash = 0; | |||
for (Map.Entry<K, V> e : m.entrySet()) { | |||
hash = (31 * hash) + e.getKey().hashCode() + (31 * e.getValue().hashCode()); | |||
} | |||
return hash; | |||
} | |||
} |
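A small sketch of deepEquals on nested maps (keys and values are illustrative):

    final Map<String, Object> a = new HashMap<>();
    a.put("nested", Collections.singletonMap("x", "1"));
    final Map<String, Object> b = new HashMap<>();
    b.put("nested", Collections.singletonMap("x", "1"));
    MapUtil.deepEquals(a, b);                                       // true -- nested maps compared by content
    MapUtil.deepEquals(a, Collections.<String, Object>emptyMap());  // false -- sizes differ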
@@ -0,0 +1,102 @@ | |||
package org.cobbzilla.util.collection; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import org.cobbzilla.util.javascript.JsEngine; | |||
import java.util.*; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
@NoArgsConstructor @AllArgsConstructor @Accessors(chain=true) | |||
public class NameAndValue { | |||
public static final NameAndValue[] EMPTY_ARRAY = new NameAndValue[0]; | |||
public static final Comparator<NameAndValue> NAME_COMPARATOR = Comparator.comparing(NameAndValue::getName); | |||
public static List<NameAndValue> map2list(Map<String, Object> map) { | |||
final List<NameAndValue> list = new ArrayList<>(map.size()); | |||
for (Map.Entry<String, Object> entry : map.entrySet()) { | |||
list.add(new NameAndValue(entry.getKey(), entry.getValue() == null ? null : entry.getValue().toString())); | |||
} | |||
return list; | |||
} | |||
@Getter @Setter private String name; | |||
public static Integer findInt(NameAndValue[] pairs, String name) { | |||
final String val = find(pairs, name); | |||
return val == null ? null : Integer.parseInt(val); | |||
} | |||
public static String find(NameAndValue[] pairs, String name) { | |||
if (pairs == null) return null; | |||
for (NameAndValue pair : pairs) if (pair.getName().equals(name)) return pair.getValue(); | |||
return null; | |||
} | |||
public static String find(Collection<NameAndValue> pairs, String name) { | |||
if (pairs == null || pairs.isEmpty()) return null; | |||
return pairs.stream() | |||
.filter(p -> p.getName().equals(name)) | |||
.findFirst() | |||
.map(NameAndValue::getValue) | |||
.orElse(null); | |||
} | |||
public static NameAndValue[] update(NameAndValue[] params, String name, String value) { | |||
if (params == null) return new NameAndValue[] { new NameAndValue(name, value) }; | |||
for (NameAndValue pair : params) { | |||
if (pair.getName().equals(name)) { | |||
pair.setValue(value); | |||
return params; | |||
} | |||
} | |||
return ArrayUtil.append(params, new NameAndValue(name, value)); | |||
} | |||
public boolean hasName () { return !empty(name); } | |||
@JsonIgnore public boolean getHasName () { return !empty(name); } | |||
@Getter @Setter private String value; | |||
public boolean hasValue () { return !empty(value); } | |||
@JsonIgnore public boolean getHasValue () { return !empty(value); } | |||
@Override public String toString() { return getName()+": "+getValue(); } | |||
public static NameAndValue[] evaluate (NameAndValue[] pairs, Map<String, Object> context) { | |||
return evaluate(pairs, context, new JsEngine()); | |||
} | |||
public static NameAndValue[] evaluate (NameAndValue[] pairs, Map<String, Object> context, JsEngine engine) { | |||
if (empty(context) || empty(pairs)) return pairs; | |||
final NameAndValue[] results = new NameAndValue[pairs.length]; | |||
for (int i=0; i<pairs.length; i++) { | |||
final boolean isCode = pairs[i].getHasValue() && pairs[i].getValue().trim().startsWith("@"); | |||
if (isCode) { | |||
results[i] = new NameAndValue(pairs[i].getName(), engine.evaluateString(pairs[i].getValue().trim().substring(1), context)); | |||
} else { | |||
results[i] = pairs[i]; | |||
} | |||
} | |||
return results; | |||
} | |||
public static Map<String, String> toMap(NameAndValue[] attrs) { | |||
final Map<String, String> map = new HashMap<>(); | |||
if (!empty(attrs)) { | |||
for (NameAndValue attr : attrs) { | |||
map.put(attr.getName(), attr.value); | |||
} | |||
} | |||
return map; | |||
} | |||
} |
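A short sketch of working with an array of pairs (the names and values are illustrative):

    NameAndValue[] pairs = { new NameAndValue("color", "red") };
    NameAndValue.find(pairs, "color");                 // "red"
    pairs = NameAndValue.update(pairs, "size", "L");   // appends a new pair
    NameAndValue.toMap(pairs).get("size");             // "L"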
@@ -0,0 +1,15 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.math.BigDecimal; | |||
import java.util.Comparator; | |||
// adapted from: https://stackoverflow.com/a/2683388/1251543 | |||
public class NumberComparator implements Comparator<Number> { | |||
public static final NumberComparator INSTANCE = new NumberComparator(); | |||
public int compare(Number a, Number b){ | |||
return new BigDecimal(a.toString()).compareTo(new BigDecimal(b.toString())); | |||
} | |||
} |
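For example, mixed Number subtypes compare by numeric value rather than by type (illustrative values):

    NumberComparator.INSTANCE.compare(2, 2.0d);    // 0 -- BigDecimal ignores scale in compareTo
    NumberComparator.INSTANCE.compare(1L, 1.5f);   // negative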
@@ -0,0 +1,28 @@ | |||
package org.cobbzilla.util.collection; | |||
import lombok.ToString; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import java.util.Collection; | |||
import java.util.HashSet; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
public class SetSourceBase<T> implements CollectionSource<T> { | |||
private final AtomicReference<Collection<T>> values = new AtomicReference(new HashSet<T>()); | |||
@Override public Collection<T> getValues() { | |||
synchronized (values) { return new HashSet<>(values.get()); } | |||
} | |||
@Override public void addValue (T val) { | |||
synchronized (values) { values.get().add(val); } | |||
} | |||
@Override public void addValues (Collection<T> vals) { | |||
synchronized (values) { values.get().addAll(vals); } | |||
} | |||
@Override public String toString () { return StringUtil.toString(values.get(), ", "); } | |||
} |
@@ -0,0 +1,24 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.ArrayList; | |||
import java.util.Collection; | |||
public class SingletonList<E> extends ArrayList<E> { | |||
public SingletonList (E element) { super.add(element); } | |||
@Override public E set(int index, E element) { throw unsupported(); } | |||
@Override public boolean add(E e) { throw unsupported(); } | |||
@Override public void add(int index, E element) { throw unsupported(); } | |||
@Override public E remove(int index) { throw unsupported(); } | |||
@Override public boolean remove(Object o) { throw unsupported(); } | |||
@Override public void clear() { throw unsupported(); } | |||
@Override public boolean addAll(Collection<? extends E> c) { throw unsupported(); } | |||
@Override public boolean addAll(int index, Collection<? extends E> c) { throw unsupported(); } | |||
@Override protected void removeRange(int fromIndex, int toIndex) { throw unsupported(); } | |||
@Override public boolean removeAll(Collection<?> c) { throw unsupported(); } | |||
@Override public boolean retainAll(Collection<?> c) { throw unsupported(); } | |||
private UnsupportedOperationException unsupported () { return new UnsupportedOperationException("singleton list is immutable"); } | |||
} |
@@ -0,0 +1,18 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.Collection; | |||
import java.util.HashSet; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.notSupported; | |||
public class SingletonSet<E> extends HashSet<E> { | |||
public SingletonSet (E element) { super.add(element); } | |||
@Override public boolean add(E e) { return notSupported(); } | |||
@Override public boolean remove(Object o) { return notSupported(); } | |||
@Override public void clear() { notSupported(); } | |||
@Override public boolean addAll(Collection<? extends E> c) { return notSupported(); } | |||
@Override public boolean retainAll(Collection<?> c) { return notSupported(); } | |||
} |
@@ -0,0 +1,18 @@ | |||
package org.cobbzilla.util.collection; | |||
import java.util.Collection; | |||
import java.util.Comparator; | |||
import java.util.TreeSet; | |||
public class Sorter { | |||
public static <E> Collection<E> sort (Collection<E> things, Comparator sorter) { | |||
return sort(things, new TreeSet<>(sorter)); | |||
} | |||
public static <C extends Collection> C sort (Collection things, C rval) { | |||
rval.addAll(things); | |||
return rval; | |||
} | |||
} |
@@ -0,0 +1,14 @@ | |||
package org.cobbzilla.util.collection; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import org.apache.commons.collections.Transformer; | |||
@AllArgsConstructor | |||
public class StringPrefixTransformer implements Transformer { | |||
@Getter private String prefix; | |||
@Override public Object transform(Object input) { return prefix + input.toString(); } | |||
} |
@@ -0,0 +1,3 @@ | |||
package org.cobbzilla.util.collection; | |||
public class StringSetSource extends SetSourceBase<String> {} |
@@ -0,0 +1,11 @@ | |||
package org.cobbzilla.util.collection; | |||
import org.apache.commons.collections.Transformer; | |||
public class ToStringTransformer implements Transformer { | |||
public static final ToStringTransformer instance = new ToStringTransformer(); | |||
@Override public Object transform(Object o) { return o == null ? "null" : o.toString(); } | |||
} |
@@ -0,0 +1,114 @@ | |||
package org.cobbzilla.util.collection; | |||
import lombok.AllArgsConstructor; | |||
import lombok.NoArgsConstructor; | |||
import java.util.*; | |||
import java.util.stream.Collectors; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
// adapted from: https://en.wikipedia.org/wiki/Topological_sorting | |||
@NoArgsConstructor | |||
public class Topology<T> { | |||
private final List<Node<T>> nodes = new ArrayList<>(); | |||
public void addNode(T thing, Collection<T> refs) { | |||
final Node<T> node = nodes.stream() | |||
.filter(n -> n.thing.equals(thing)) | |||
.findFirst() | |||
.orElse(new Node<>(thing)); | |||
// add refs as edges; skip self-references | |||
refs.stream().filter(ref -> !ref.equals(thing)).forEach(ref -> { | |||
final Node<T> existingEdgeNode = nodes.stream() | |||
.filter(n -> n.thing.equals(ref)) | |||
.findFirst() | |||
.orElse(null); | |||
if (existingEdgeNode != null) { | |||
node.addEdge(existingEdgeNode); | |||
} else { | |||
final Node<T> newEdgeNode = new Node<>(ref); | |||
nodes.add(newEdgeNode); | |||
node.addEdge(newEdgeNode); | |||
} | |||
}); | |||
        if (!nodes.contains(node)) nodes.add(node); // don't re-add a node that was already registered (e.g., first seen as an edge)
} | |||
static class Node<T> { | |||
public final T thing; | |||
public final HashSet<Edge<T>> inEdges; | |||
public final HashSet<Edge<T>> outEdges; | |||
public Node(T thing) { | |||
this.thing = thing; | |||
inEdges = new HashSet<>(); | |||
outEdges = new HashSet<>(); | |||
} | |||
public Node addEdge(Node<T> node){ | |||
final Edge<T> e = new Edge<>(this, node); | |||
outEdges.add(e); | |||
node.inEdges.add(e); | |||
return this; | |||
} | |||
public String toString() { return thing.toString(); } | |||
} | |||
@AllArgsConstructor | |||
static class Edge<T> { | |||
public final Node<T> from; | |||
public final Node<T> to; | |||
        @Override public boolean equals(Object obj) {
            if (!(obj instanceof Edge)) return false;
            final Edge<T> e = (Edge<T>) obj;
            return e.from == from && e.to == to;
        }
        // hashCode must agree with the identity-based equals above, since edges are stored in HashSets
        @Override public int hashCode() { return 31 * System.identityHashCode(from) + System.identityHashCode(to); }
} | |||
public List<T> sort() { | |||
// L <- Empty list that will contain the sorted elements | |||
final ArrayList<Node<T>> L = new ArrayList<>(); | |||
// S <- Set of all nodes with no incoming edges | |||
final HashSet<Node<T>> S = new HashSet<>(); | |||
nodes.stream().filter(n -> n.inEdges.isEmpty()).forEach(S::add); | |||
// while S is non-empty do | |||
while (!S.isEmpty()) { | |||
// remove a node n from S | |||
final Node<T> n = S.iterator().next(); | |||
S.remove(n); | |||
// insert n into L | |||
L.add(n); | |||
// for each node m with an edge e from n to m do | |||
for (Iterator<Edge<T>> it = n.outEdges.iterator(); it.hasNext();) { | |||
// remove edge e from the graph | |||
final Edge<T> e = it.next(); | |||
final Node<T> m = e.to; | |||
it.remove();//Remove edge from n | |||
m.inEdges.remove(e);//Remove edge from m | |||
// if m has no other incoming edges then insert m into S | |||
if (m.inEdges.isEmpty()) S.add(m); | |||
} | |||
} | |||
// Check to see if all edges are removed | |||
for (Node<T> n : nodes) { | |||
if (!n.inEdges.isEmpty()) { | |||
return die("Cycle present, topological sort not possible"); | |||
} | |||
} | |||
return L.stream().map(n -> n.thing).collect(Collectors.toList()); | |||
} | |||
public List<T> sortReversed() { | |||
final List<T> sorted = sort(); | |||
Collections.reverse(sorted); | |||
return sorted; | |||
} | |||
} |
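A dependency-ordering sketch (the node names are illustrative). Note that sort() removes edges from the graph as it runs, so a Topology instance is effectively single-use.

    final Topology<String> topology = new Topology<>();
    topology.addNode("app", Arrays.asList("lib"));
    topology.addNode("lib", Arrays.asList("core"));
    topology.sortReversed();   // ["core", "lib", "app"] -- dependencies before dependents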
@@ -0,0 +1,246 @@ | |||
package org.cobbzilla.util.collection.mappy; | |||
import lombok.Getter; | |||
import lombok.experimental.Accessors; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import java.util.*; | |||
import java.util.concurrent.ConcurrentHashMap; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.getTypeParam; | |||
/** | |||
* Mappy is a map of keys to collections of values. The collection type is configurable and there are several | |||
* subclasses available. See MappyList, MappySet, MappySortedSet, and MappyConcurrentSortedSet | |||
* | |||
* It can be viewed either as a mapping of K->V or as K->C->V | |||
* | |||
* Mappy objects are meant to be short-lived. While methods are generally thread-safe, the getter will create a new empty | |||
* collection every time a key is not found. So it makes a horrible cache. Mappy instances are best suited to be value | |||
* objects of limited scope. | |||
* | |||
* @param <K> key class | |||
* @param <V> value class | |||
* @param <C> collection class | |||
*/ | |||
@Accessors(chain=true) | |||
public abstract class Mappy<K, V, C extends Collection<V>> implements Map<K, V> { | |||
private final ConcurrentHashMap<K, C> map; | |||
@Getter(lazy=true) private final Class<C> valueClass = initValueClass(); | |||
private Class<C> initValueClass() { return getTypeParam(getClass(), 2); } | |||
public Mappy () { map = new ConcurrentHashMap<>(); } | |||
public Mappy (int size) { map = new ConcurrentHashMap<>(size); } | |||
public Mappy(Map<K, Collection<V>> other) { | |||
this(); | |||
for (Map.Entry<K, Collection<V>> entry : other.entrySet()) { | |||
putAll(entry.getKey(), entry.getValue()); | |||
} | |||
} | |||
/** | |||
* For subclasses to override and provide their own collection types | |||
* @return A new (empty) instance of the collection type | |||
*/ | |||
protected abstract C newCollection(); | |||
/** | |||
* @return the number of key mappings | |||
*/ | |||
@Override public int size() { return map.size(); } | |||
/** | |||
* @return the total number of values (may be higher than # of keys) | |||
*/ | |||
public int totalSize () { | |||
int count = 0; | |||
for (Collection<V> c : allValues()) count += c.size(); | |||
return count; | |||
} | |||
/** | |||
* @return true if this Mappy contains no values. It may contain keys whose collections have no values. | |||
*/ | |||
@Override public boolean isEmpty() { return flatten().isEmpty(); } | |||
@Override public boolean containsKey(Object key) { return map.containsKey(key); } | |||
/** | |||
* @param value the value to check | |||
* @return true if the Mappy contains any collection that contains the value, which should be of type V | |||
*/ | |||
@Override public boolean containsValue(Object value) { | |||
for (C collection : allValues()) { | |||
//noinspection SuspiciousMethodCalls | |||
if (collection.contains(value)) return true; | |||
} | |||
return false; | |||
} | |||
/** | |||
* @param key the key to find | |||
     * @return the first value in the collection for the key, or null if the collection is empty
*/ | |||
@Override public V get(Object key) { | |||
final C collection = getAll((K) key); | |||
return collection.isEmpty() ? null : firstInCollection(collection); | |||
} | |||
protected V firstInCollection(C collection) { return collection.iterator().next(); } | |||
/** | |||
* Get the collection of values for a key. This method never returns null. | |||
* @param key the key to find | |||
* @return the collection of values for the key, which may be empty | |||
*/ | |||
public C getAll (K key) { | |||
C collection = map.get(key); | |||
if (collection == null) { | |||
collection = newCollection(); | |||
map.put(key, collection); | |||
} | |||
return collection; | |||
} | |||
/** | |||
* Add a mapping. | |||
* @param key the key to add | |||
* @param value the value to add | |||
* @return the value passed in, if the map already contained the item. null otherwise. | |||
*/ | |||
@Override public V put(K key, V value) { | |||
V rval = null; | |||
synchronized (map) { | |||
C group = map.get(key); | |||
if (group == null) { | |||
group = newCollection(); | |||
map.put(key, group); | |||
} else { | |||
rval = group.contains(value) ? value : null; | |||
} | |||
group.add(value); | |||
} | |||
return rval; | |||
} | |||
/** | |||
* Remove a key | |||
* @param key the key to remove | |||
* @return The first value in the collection that was referenced by the key | |||
*/ | |||
@Override public V remove(Object key) { | |||
final C group = map.remove(key); | |||
if (group == null || group.isEmpty()) return null; // empty case should never happen, but just in case | |||
return group.iterator().next(); | |||
} | |||
/** | |||
* Put a bunch of stuff into the map | |||
* @param m mappings to add | |||
*/ | |||
@Override public void putAll(Map<? extends K, ? extends V> m) { | |||
for (Entry<? extends K, ? extends V> e : m.entrySet()) { | |||
put(e.getKey(), e.getValue()); | |||
} | |||
} | |||
/** | |||
* Put a bunch of stuff into the map | |||
* @param key the key to add | |||
* @param values the values to add to the key's collection | |||
*/ | |||
public void putAll(K key, Collection<V> values) { | |||
synchronized (map) { | |||
C collection = getAll(key); | |||
if (collection == null) collection = newCollection(); | |||
collection.addAll(values); | |||
map.put(key, collection); | |||
} | |||
} | |||
/** | |||
* Erase the entire map. | |||
*/ | |||
@Override public void clear() { map.clear(); } | |||
@Override public Set<K> keySet() { return map.keySet(); } | |||
@Override public Collection<V> values() { | |||
final List<V> vals = new ArrayList<>(); | |||
for (C collection : map.values()) vals.addAll(collection); | |||
return vals; | |||
} | |||
@Override public Set<Entry<K, V>> entrySet() { | |||
final Set<Entry<K, V>> entries = new HashSet<>(); | |||
for (Entry<K, C> entry : map.entrySet()) { | |||
for (V item : entry.getValue()) { | |||
entries.add(new AbstractMap.SimpleEntry<K, V>(entry.getKey(), item)); | |||
} | |||
} | |||
return entries; | |||
} | |||
public Collection<C> allValues() { return map.values(); } | |||
public Set<Entry<K, C>> allEntrySets() { return map.entrySet(); } | |||
public List<V> flatten() { | |||
final List<V> values = new ArrayList<>(); | |||
for (C collection : allValues()) values.addAll(collection); | |||
return values; | |||
} | |||
public List<V> flatten(Collection<V> values) { | |||
for (C collection : allValues()) values.addAll(collection); | |||
return new ArrayList<>(values); | |||
} | |||
@Override public boolean equals(Object o) { | |||
if (this == o) return true; | |||
if (o == null || getClass() != o.getClass()) return false; | |||
final Mappy other = (Mappy) o; | |||
if (totalSize() != other.totalSize()) return false; | |||
for (K key : keySet()) { | |||
if (!other.containsKey(key)) return false; | |||
final Collection otherValues = other.getAll(key); | |||
final Collection thisValues = getAll(key); | |||
if (otherValues.size() != thisValues.size()) return false; | |||
for (Object value : thisValues) { | |||
if (!otherValues.contains(value)) return false; | |||
} | |||
} | |||
return true; | |||
} | |||
@Override public int hashCode() { | |||
int result = Integer.valueOf(totalSize()).hashCode(); | |||
        result = 31 * result + (getValueClass() != null ? getValueClass().hashCode() : 0); // use the lazy getter, not the backing field
for (K key : keySet()) { | |||
result = 31 * result + (key.hashCode() + 13); | |||
for (V value : getAll(key)) { | |||
result = 31 * result + (value == null ? 0 : value.hashCode()); | |||
} | |||
} | |||
return result; | |||
} | |||
@Override public String toString() { | |||
final StringBuilder b = new StringBuilder(); | |||
for (K key : keySet()) { | |||
if (b.length() > 0) b.append(" | "); | |||
b.append(key).append("->(").append(StringUtil.toString(getAll(key), ", ")).append(")"); | |||
} | |||
return "{"+b.toString()+"}"; | |||
} | |||
public Map<K, C> toMap() { | |||
final HashMap<K, C> m = new HashMap<>(); | |||
for (K key : keySet()) m.put(key, getAll(key)); | |||
return m; | |||
} | |||
} |
@@ -0,0 +1,24 @@ | |||
package org.cobbzilla.util.collection.mappy; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import java.util.Comparator; | |||
import java.util.concurrent.ConcurrentSkipListSet; | |||
@NoArgsConstructor @AllArgsConstructor | |||
public class MappyConcurrentSortedSet<K, V> extends Mappy<K, V, ConcurrentSkipListSet<V>> { | |||
public MappyConcurrentSortedSet(int size) { super(size); } | |||
@Getter @Setter private Comparator<? super V> comparator; | |||
@Override protected ConcurrentSkipListSet<V> newCollection() { | |||
return comparator == null ? new ConcurrentSkipListSet<V>() : new ConcurrentSkipListSet<>(comparator); | |||
} | |||
@Override protected V firstInCollection(ConcurrentSkipListSet<V> collection) { return collection.first(); } | |||
} |
@@ -0,0 +1,29 @@ | |||
package org.cobbzilla.util.collection.mappy; | |||
import lombok.NoArgsConstructor; | |||
import java.util.ArrayList; | |||
import java.util.Collection; | |||
import java.util.List; | |||
import java.util.Map; | |||
@NoArgsConstructor | |||
public class MappyList<K, V> extends Mappy<K, V, List<V>> { | |||
protected Integer subSize; | |||
public MappyList (int size) { super(size); } | |||
public MappyList (int size, int subSize) { super(size); this.subSize = subSize; } | |||
public MappyList(Map<K, Collection<V>> other, Integer subSize) { | |||
super(other); | |||
this.subSize = subSize; | |||
} | |||
public MappyList(Map<K, Collection<V>> other) { this(other, null); } | |||
@Override protected List<V> newCollection() { return subSize != null ? new ArrayList<V>(subSize) : new ArrayList<V>(); } | |||
@Override protected V firstInCollection(List<V> collection) { return collection.get(0); } | |||
} |
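A short usage sketch of the list-backed variant (keys and values are illustrative):

    final MappyList<String, String> tags = new MappyList<>();
    tags.put("fruit", "apple");
    tags.put("fruit", "banana");
    tags.get("fruit");       // "apple" -- the first value for the key
    tags.getAll("fruit");    // ["apple", "banana"]
    tags.totalSize();        // 2, while size() (the number of keys) is 1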
@@ -0,0 +1,16 @@ | |||
package org.cobbzilla.util.collection.mappy; | |||
import lombok.NoArgsConstructor; | |||
import java.util.HashSet; | |||
import java.util.Set; | |||
@NoArgsConstructor | |||
public class MappySet<K, V> extends Mappy<K, V, Set<V>> { | |||
public MappySet (int size) { super(size); } | |||
@Override protected Set<V> newCollection() { return new HashSet<>(); } | |||
} | |||
@@ -0,0 +1,22 @@ | |||
package org.cobbzilla.util.collection.mappy; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import java.util.Comparator; | |||
import java.util.TreeSet; | |||
@NoArgsConstructor @AllArgsConstructor | |||
public class MappySortedSet<K, V> extends Mappy<K, V, TreeSet<V>> { | |||
public MappySortedSet(int size) { super(size); } | |||
@Getter @Setter private Comparator<? super V> comparator; | |||
@Override protected TreeSet<V> newCollection() { return comparator == null ? new TreeSet<V>() : new TreeSet<>(comparator); } | |||
@Override protected V firstInCollection(TreeSet<V> collection) { return collection.first(); } | |||
} |
@@ -0,0 +1,39 @@ | |||
package org.cobbzilla.util.collection.mappy; | |||
import com.fasterxml.jackson.databind.JsonNode; | |||
import com.fasterxml.jackson.databind.node.ObjectNode; | |||
import lombok.NoArgsConstructor; | |||
import java.util.Collection; | |||
import java.util.Iterator; | |||
import java.util.Map; | |||
import static java.util.Arrays.asList; | |||
import static org.cobbzilla.util.json.JsonUtil.json; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.arrayClass; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.getFirstTypeParam; | |||
@NoArgsConstructor | |||
public class MappyStringKeyList<V> extends MappyList<String, V> { | |||
public MappyStringKeyList(int size) { super(size); } | |||
public MappyStringKeyList(int size, int subSize) { super(size, subSize); } | |||
public MappyStringKeyList(Map<String, Collection<V>> other, Integer subSize) { | |||
super(other); | |||
this.subSize = subSize; | |||
} | |||
public MappyStringKeyList(Map other) { this(other, null); } | |||
public MappyStringKeyList(String json) { | |||
final ObjectNode object = json(json, ObjectNode.class); | |||
final Class<?> arrayClass = arrayClass(getFirstTypeParam(getClass())); | |||
for (Iterator<Entry<String, JsonNode>> iter = object.fields(); iter.hasNext(); ) { | |||
final Map.Entry<String, JsonNode> entry = iter.next(); | |||
putAll(entry.getKey(), asList((V[]) json(entry.getValue(), arrayClass))); | |||
} | |||
} | |||
} |
@@ -0,0 +1,46 @@ | |||
package org.cobbzilla.util.collection.multi; | |||
import java.util.ArrayList; | |||
import java.util.LinkedHashMap; | |||
import java.util.List; | |||
import java.util.Map; | |||
public class MultiResult { | |||
public List<String> successes = new ArrayList<>(); | |||
public Map<String, String> failures = new LinkedHashMap<>(); | |||
public int successCount() { return successes.size(); } | |||
public int failCount() { return failures.size(); } | |||
public void success(String name) { successes.add(name); } | |||
public void fail(String name, String reason) { failures.put(name, reason); } | |||
public boolean hasFailures () { return !failures.isEmpty(); } | |||
public String getHeader() { return "TEST RESULTS"; } | |||
public String toString() { | |||
final StringBuilder b = new StringBuilder(); | |||
b.append("\n\n").append(getHeader()).append("\n--------------------\n") | |||
.append(successCount()).append("\tsucceeded\n") | |||
.append(failCount()).append("\tfailed"); | |||
if (!failures.isEmpty()) { | |||
b.append(":\n"); | |||
for (String fail : failures.keySet()) { | |||
b.append(fail).append("\n"); | |||
} | |||
b.append("--------------------\n"); | |||
b.append("\nfailure details:\n"); | |||
for (Map.Entry<String, String> fail : failures.entrySet()) { | |||
b.append(fail.getKey()).append(":\t").append(fail.getValue()).append("\n"); | |||
b.append("--------\n"); | |||
} | |||
} else { | |||
b.append("\n"); | |||
} | |||
b.append("--------------------\n"); | |||
return b.toString(); | |||
} | |||
} |
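A brief sketch of accumulating outcomes (task names and failure reasons are illustrative):

    final MultiResult result = new MultiResult();
    result.success("task-1");
    result.fail("task-2", "connection refused");
    result.hasFailures();         // true
    System.out.println(result);   // prints the "TEST RESULTS" summary block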
@@ -0,0 +1,30 @@ | |||
package org.cobbzilla.util.collection.multi; | |||
import java.util.Map; | |||
public interface MultiResultDriver { | |||
MultiResult getResult (); | |||
    // called before trying to calculate the result
void before (); | |||
void exec (Object task); | |||
// called if calculation was a success | |||
void success (String message); | |||
// called if calculation failed | |||
void failure (String message, Exception e); | |||
    // called at the end (should be invoked from a finally block)
void after (); | |||
// allows the caller/user to stash things for use during execution | |||
Map<String, Object> getContext(); | |||
void setContext(Map<String, Object> context); | |||
int getMaxConcurrent(); | |||
long getTimeout(); | |||
} |
@@ -0,0 +1,32 @@ | |||
package org.cobbzilla.util.collection.multi; | |||
import lombok.Getter; | |||
import org.apache.commons.lang3.exception.ExceptionUtils; | |||
public abstract class MultiResultDriverBase implements MultiResultDriver { | |||
@Getter protected MultiResult result = new MultiResult(); | |||
protected abstract String successMessage(Object task); | |||
protected abstract String failureMessage(Object task); | |||
protected abstract void run(Object task) throws Exception; | |||
@Override public void before() {} | |||
@Override public void after() {} | |||
@Override public void exec(Object task) { | |||
try { | |||
before(); | |||
run(task); | |||
success(successMessage(task)); | |||
} catch (Exception e) { | |||
failure(failureMessage(task), e); | |||
} finally { | |||
after(); | |||
} | |||
} | |||
@Override public void success(String message) { result.success(message); } | |||
@Override public void failure(String message, Exception e) { result.fail(message, e.toString() + "\n- stack -\n" + ExceptionUtils.getStackTrace(e) + "\n- end stack -\n"); } | |||
} |
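A minimal concrete driver might look like the hypothetical sketch below, which treats each task as a Runnable; the class name, concurrency limit, and timeout are illustrative assumptions, not part of the source.

    import lombok.Getter;
    import lombok.Setter;
    import java.util.Map;
    import java.util.concurrent.TimeUnit;

    // hypothetical example subclass, not part of the library
    public class RunnableResultDriver extends MultiResultDriverBase {
        @Getter @Setter private Map<String, Object> context;
        @Override public int getMaxConcurrent() { return 1; }
        @Override public long getTimeout() { return TimeUnit.MINUTES.toMillis(1); }
        @Override protected String successMessage(Object task) { return "ok: " + task; }
        @Override protected String failureMessage(Object task) { return "failed: " + task; }
        @Override protected void run(Object task) throws Exception { ((Runnable) task).run(); }
    }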
@@ -0,0 +1,12 @@ | |||
package org.cobbzilla.util.cron; | |||
import java.util.Map; | |||
import java.util.Properties; | |||
public interface CronCommand { | |||
void init (Properties properties); | |||
void exec (Map<String, Object> context) throws Exception; | |||
} |
@@ -0,0 +1,12 @@ | |||
package org.cobbzilla.util.cron; | |||
import java.util.Properties; | |||
public abstract class CronCommandBase implements CronCommand { | |||
protected Properties properties; | |||
@Override | |||
public void init(Properties properties) { this.properties = properties; } | |||
} |
@@ -0,0 +1,13 @@ | |||
package org.cobbzilla.util.cron; | |||
public interface CronDaemon { | |||
void start () throws Exception; | |||
void stop() throws Exception; | |||
void addJob(final CronJob job) throws Exception; | |||
void removeJob(final String id) throws Exception; | |||
} |
@@ -0,0 +1,29 @@ | |||
package org.cobbzilla.util.cron; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import org.cobbzilla.util.reflect.ReflectionUtil; | |||
import java.util.Properties; | |||
public class CronJob { | |||
@Getter @Setter private String id; | |||
@Getter @Setter private String cronTimeString; | |||
@Getter @Setter private boolean startNow = false; | |||
@Getter @Setter private String commandClass; | |||
@Getter @Setter private Properties properties = new Properties(); | |||
// todo | |||
// @Getter @Setter private String user; | |||
// @Getter @Setter private String shellCommand; | |||
public CronCommand getCommandInstance() { | |||
CronCommand command = ReflectionUtil.instantiate(commandClass); | |||
command.init(properties); | |||
return command; | |||
} | |||
} |
@@ -0,0 +1,74 @@ | |||
package org.cobbzilla.util.cron.quartz; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import org.cobbzilla.util.cron.CronCommand; | |||
import org.cobbzilla.util.cron.CronDaemon; | |||
import org.cobbzilla.util.cron.CronJob; | |||
import org.quartz.*; | |||
import org.quartz.impl.StdSchedulerFactory; | |||
import java.util.List; | |||
import java.util.Map; | |||
import java.util.TimeZone; | |||
import static org.quartz.CronScheduleBuilder.cronSchedule; | |||
import static org.quartz.JobBuilder.newJob; | |||
import static org.quartz.TriggerBuilder.newTrigger; | |||
public class QuartzMaster implements CronDaemon { | |||
private static final String CAL_SUFFIX = "_calendar"; | |||
private static final String JOB_SUFFIX = "_jobDetail"; | |||
private static final String TRIGGER_SUFFIX = "_trigger"; | |||
private Scheduler scheduler; | |||
@Getter @Setter private TimeZone timeZone; | |||
@Getter @Setter private List<? extends CronJob> jobs; | |||
public void start () throws Exception { | |||
scheduler = StdSchedulerFactory.getDefaultScheduler(); | |||
scheduler.start(); | |||
if (jobs != null) { | |||
for (final CronJob job : jobs) { | |||
addJob(job); | |||
} | |||
} | |||
} | |||
public void addJob(final CronJob job) throws SchedulerException { | |||
String id = job.getId(); | |||
Job specialJob = new Job () { | |||
@Override | |||
public void execute(JobExecutionContext context) throws JobExecutionException { | |||
Map<String, Object> map = context.getMergedJobDataMap(); | |||
try { | |||
CronCommand command = job.getCommandInstance(); | |||
command.init(job.getProperties()); | |||
command.exec(map); | |||
} catch (Exception e) { | |||
throw new JobExecutionException(e); | |||
} | |||
} | |||
}; | |||
final JobDetail jobDetail = newJob(specialJob.getClass()).withIdentity(id+JOB_SUFFIX).build(); | |||
TriggerBuilder<Trigger> builder = newTrigger().withIdentity(id+TRIGGER_SUFFIX); | |||
if (job.isStartNow()) builder = builder.startNow(); | |||
final CronScheduleBuilder cronSchedule = cronSchedule(job.getCronTimeString()); | |||
final Trigger trigger = builder.withSchedule(timeZone != null ? cronSchedule.inTimeZone(timeZone) : cronSchedule).build(); | |||
scheduler.scheduleJob(jobDetail, trigger); | |||
} | |||
@Override public void removeJob(final String id) throws Exception { | |||
scheduler.deleteJob(new JobKey(id+JOB_SUFFIX)); | |||
} | |||
public void stop () throws Exception { scheduler.shutdown(); } | |||
} |
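A wiring sketch (the ReportCommand class and the cron expression are illustrative assumptions): configure a CronJob and hand it to the daemon before starting it.

    final CronJob job = new CronJob();
    job.setId("hourly-report");
    job.setCronTimeString("0 0 * * * ?");               // Quartz cron: top of every hour
    job.setCommandClass(ReportCommand.class.getName()); // ReportCommand would implement CronCommand
    final QuartzMaster master = new QuartzMaster();
    master.setJobs(Collections.singletonList(job));
    master.start();                                      // declared to throw Exception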
@@ -0,0 +1,170 @@ | |||
package org.cobbzilla.util.daemon; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.cobbzilla.util.time.ClockProvider; | |||
import java.util.*; | |||
import java.util.concurrent.ExecutionException; | |||
import java.util.concurrent.Future; | |||
import java.util.concurrent.TimeUnit; | |||
import java.util.concurrent.TimeoutException; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
import static org.cobbzilla.util.system.Sleep.sleep; | |||
@Slf4j | |||
public class Await { | |||
public static final long DEFAULT_AWAIT_GET_SLEEP = 10; | |||
public static final long DEFAULT_AWAIT_RETRY_SLEEP = 100; | |||
public static <E> E awaitFirst(Collection<Future<E>> futures, long timeout) throws TimeoutException { | |||
return awaitFirst(futures, timeout, DEFAULT_AWAIT_RETRY_SLEEP); | |||
} | |||
public static <E> E awaitFirst(Collection<Future<E>> futures, long timeout, long retrySleep) throws TimeoutException { | |||
return awaitFirst(futures, timeout, retrySleep, DEFAULT_AWAIT_GET_SLEEP); | |||
} | |||
public static <E> E awaitFirst(Collection<Future<E>> futures, long timeout, long retrySleep, long getSleep) throws TimeoutException { | |||
long start = now(); | |||
while (!futures.isEmpty() && now() - start < timeout) { | |||
for (Iterator<Future<E>> iter = futures.iterator(); iter.hasNext(); ) { | |||
Future<E> future = iter.next(); | |||
try { | |||
final E value = future.get(getSleep, TimeUnit.MILLISECONDS); | |||
if (value != null) return value; | |||
iter.remove(); | |||
if (futures.isEmpty()) break; | |||
} catch (InterruptedException e) { | |||
die("await: interrupted: " + e); | |||
} catch (ExecutionException e) { | |||
die("await: execution error: " + e); | |||
} catch (TimeoutException e) { | |||
// noop | |||
} | |||
sleep(retrySleep); | |||
} | |||
} | |||
if (now() - start > timeout) throw new TimeoutException("await: timed out"); | |||
return null; // all futures had a null result | |||
} | |||
public static List awaitAndCollect(Collection<Future<List>> futures, int maxResults, long timeout) throws TimeoutException { | |||
return awaitAndCollect(futures, maxResults, timeout, DEFAULT_AWAIT_RETRY_SLEEP); | |||
} | |||
public static List awaitAndCollect(Collection<Future<List>> futures, int maxResults, long timeout, long retrySleep) throws TimeoutException { | |||
return awaitAndCollect(futures, maxResults, timeout, retrySleep, DEFAULT_AWAIT_GET_SLEEP); | |||
} | |||
public static List awaitAndCollect(Collection<Future<List>> futures, int maxResults, long timeout, long retrySleep, long getSleep) throws TimeoutException { | |||
return awaitAndCollect(futures, maxResults, timeout, retrySleep, getSleep, new ArrayList()); | |||
} | |||
public static List awaitAndCollect(List<Future<List>> futures, int maxQueryResults, long timeout, List results) throws TimeoutException { | |||
return awaitAndCollect(futures, maxQueryResults, timeout, DEFAULT_AWAIT_RETRY_SLEEP, DEFAULT_AWAIT_GET_SLEEP, results); | |||
} | |||
public static List awaitAndCollect(Collection<Future<List>> futures, int maxResults, long timeout, long retrySleep, long getSleep, List results) throws TimeoutException { | |||
long start = now(); | |||
int size = futures.size(); | |||
while (!futures.isEmpty() && now() - start < timeout) { | |||
for (Iterator<Future<List>> iter = futures.iterator(); iter.hasNext(); ) { | |||
Future future = iter.next(); | |||
try { | |||
results.addAll((List) future.get(getSleep, TimeUnit.MILLISECONDS)); | |||
iter.remove(); | |||
if (--size <= 0 || results.size() >= maxResults) return results; | |||
break; | |||
} catch (InterruptedException e) { | |||
die("await: interrupted: " + e); | |||
} catch (ExecutionException e) { | |||
die("await: execution error: " + e); | |||
} catch (TimeoutException e) { | |||
// noop | |||
} | |||
sleep(retrySleep); | |||
} | |||
} | |||
if (now() - start > timeout) throw new TimeoutException("await: timed out"); | |||
return results; | |||
} | |||
public static Set awaitAndCollectSet(Collection<Future<List>> futures, int maxResults, long timeout) throws TimeoutException { | |||
return awaitAndCollectSet(futures, maxResults, timeout, DEFAULT_AWAIT_RETRY_SLEEP); | |||
} | |||
public static Set awaitAndCollectSet(Collection<Future<List>> futures, int maxResults, long timeout, long retrySleep) throws TimeoutException { | |||
return awaitAndCollectSet(futures, maxResults, timeout, retrySleep, DEFAULT_AWAIT_GET_SLEEP); | |||
} | |||
public static Set awaitAndCollectSet(Collection<Future<List>> futures, int maxResults, long timeout, long retrySleep, long getSleep) throws TimeoutException { | |||
return awaitAndCollectSet(futures, maxResults, timeout, retrySleep, getSleep, new HashSet()); | |||
} | |||
public static Set awaitAndCollectSet(List<Future<List>> futures, int maxQueryResults, long timeout, Set results) throws TimeoutException { | |||
return awaitAndCollectSet(futures, maxQueryResults, timeout, DEFAULT_AWAIT_RETRY_SLEEP, DEFAULT_AWAIT_GET_SLEEP, results); | |||
} | |||
public static Set awaitAndCollectSet(Collection<Future<List>> futures, int maxResults, long timeout, long retrySleep, long getSleep, Set results) throws TimeoutException { | |||
long start = now(); | |||
int size = futures.size(); | |||
while (!futures.isEmpty() && now() - start < timeout) { | |||
for (Iterator<Future<List>> iter = futures.iterator(); iter.hasNext(); ) { | |||
Future future = iter.next(); | |||
try { | |||
results.addAll((Collection) future.get(getSleep, TimeUnit.MILLISECONDS)); | |||
iter.remove(); | |||
if (--size <= 0 || results.size() >= maxResults) return results; | |||
break; | |||
} catch (InterruptedException e) { | |||
die("await: interrupted: " + e); | |||
} catch (ExecutionException e) { | |||
die("await: execution error: " + e); | |||
} catch (TimeoutException e) { | |||
// noop | |||
} | |||
sleep(retrySleep); | |||
} | |||
} | |||
if (now() - start > timeout) throw new TimeoutException("await: timed out"); | |||
return results; | |||
} | |||
public static <T> AwaitResult<T> awaitAll(Collection<Future<?>> futures, long timeout) { | |||
return awaitAll(futures, timeout, ClockProvider.SYSTEM); | |||
} | |||
public static <T> AwaitResult<T> awaitAll(Collection<Future<?>> futures, long timeout, ClockProvider clock) { | |||
long start = clock.now(); | |||
final AwaitResult<T> result = new AwaitResult<>(); | |||
final Collection<Future<?>> awaiting = new ArrayList<>(futures); | |||
while (clock.now() - start < timeout) { | |||
for (Iterator iter = awaiting.iterator(); iter.hasNext(); ) { | |||
final Future f = (Future) iter.next(); | |||
if (f.isDone()) { | |||
iter.remove(); | |||
try { | |||
final T r = (T) f.get(); | |||
if (r != null) log.info("awaitAll: "+ r); | |||
result.success(f, r); | |||
} catch (Exception e) { | |||
log.warn("awaitAll: "+e, e); | |||
result.fail(f, e); | |||
} | |||
} | |||
} | |||
if (awaiting.isEmpty()) break; | |||
sleep(200); | |||
} | |||
result.timeout(awaiting); | |||
return result; | |||
} | |||
} |
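A usage sketch for awaitAll (the doWork() call is hypothetical): submit a batch of background tasks and wait up to five seconds for all of them to finish.

    final ExecutorService pool = DaemonThreadFactory.fixedPool(4);
    final List<Future<?>> futures = new ArrayList<>();
    for (int i = 0; i < 4; i++) futures.add(pool.submit(() -> doWork()));
    final AwaitResult<Object> result = Await.awaitAll(futures, TimeUnit.SECONDS.toMillis(5));
    if (!result.allSucceeded()) System.err.println("failures or timeouts: " + result);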
@@ -0,0 +1,38 @@ | |||
package org.cobbzilla.util.daemon; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.Getter; | |||
import java.util.*; | |||
import java.util.concurrent.Future; | |||
public class AwaitResult<T> { | |||
@Getter private Map<Future, T> successes = new HashMap<>(); | |||
public void success(Future f, T thing) { successes.put(f, thing); } | |||
public int numSuccesses () { return successes.size(); } | |||
@Getter private Map<Future, Exception> failures = new HashMap<>(); | |||
public void fail(Future f, Exception e) { failures.put(f, e); } | |||
public int numFails () { return failures.size(); } | |||
@Getter private List<Future> timeouts = new ArrayList<>(); | |||
public void timeout (Collection<Future<?>> timedOut) { timeouts.addAll(timedOut); } | |||
public boolean timedOut() { return !timeouts.isEmpty(); } | |||
public int numTimeouts () { return timeouts.size(); } | |||
public boolean allSucceeded() { return failures.isEmpty() && timeouts.isEmpty(); } | |||
@JsonIgnore public List<T> getNotNullSuccesses() { | |||
final List<T> ok = new ArrayList<>(); | |||
for (T t : getSuccesses().values()) if (t != null) ok.add(t); | |||
return ok; | |||
} | |||
public String toString() { | |||
return "successes=" + successes.size() | |||
+ ", failures=" + failures.size() | |||
+ ", timeouts=" + timeouts.size(); | |||
} | |||
} |
@@ -0,0 +1,83 @@ | |||
package org.cobbzilla.util.daemon; | |||
import lombok.AllArgsConstructor; | |||
import lombok.extern.slf4j.Slf4j; | |||
import java.util.concurrent.atomic.AtomicBoolean; | |||
import java.util.concurrent.atomic.AtomicLong; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
import static java.util.concurrent.TimeUnit.HOURS; | |||
import static java.util.concurrent.TimeUnit.SECONDS; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.background; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
import static org.cobbzilla.util.system.Sleep.nap; | |||
import static org.cobbzilla.util.time.TimeUtil.formatDuration; | |||
@AllArgsConstructor @Slf4j | |||
public class BufferedRunDaemon implements Runnable { | |||
public static final long IDLE_SYNC_INTERVAL = HOURS.toMillis(1); | |||
public static final long MIN_SYNC_WAIT = SECONDS.toMillis(10); | |||
private final String logPrefix; | |||
private final Runnable action; | |||
private final AtomicReference<Thread> daemonThread = new AtomicReference<>(); | |||
private final AtomicLong lastRun = new AtomicLong(0); | |||
private final AtomicLong lastRunRequested = new AtomicLong(0); | |||
private final AtomicBoolean done = new AtomicBoolean(false); | |||
protected long getIdleSyncInterval() { return IDLE_SYNC_INTERVAL; } | |||
protected long getMinSyncWait () { return MIN_SYNC_WAIT; } | |||
public void start () { daemonThread.set(background(this)); } | |||
protected void interrupt() { if (daemonThread.get() != null) daemonThread.get().interrupt(); } | |||
public void poke () { lastRunRequested.set(now()); interrupt(); } | |||
public void done () { done.set(true); interrupt(); } | |||
@Override public void run () { | |||
long napTime; | |||
while (!done.get()) {
napTime = getIdleSyncInterval(); | |||
log.info(logPrefix+": sleep for "+formatDuration(napTime)+" awaiting activity"); | |||
if (!nap(napTime, logPrefix+" napping for "+formatDuration(napTime)+" awaiting activity")) { | |||
log.info(logPrefix + " interrupted during initial pause, continuing"); | |||
} else { | |||
boolean shouldDoIdleSleep = lastRunRequested.get() == 0; | |||
if (shouldDoIdleSleep) { | |||
shouldDoIdleSleep = lastRunRequested.get() == 0; | |||
while (shouldDoIdleSleep && lastRun.get() > 0 && now() - lastRun.get() < getIdleSyncInterval()) { | |||
log.info(logPrefix + " napping for " + formatDuration(napTime) + " due to no activity"); | |||
if (!nap(napTime, logPrefix + " idle loop sleep")) { | |||
log.info(logPrefix + " nap was interrupted, breaking out"); | |||
break; | |||
} | |||
shouldDoIdleSleep = lastRunRequested.get() == 0; | |||
} | |||
} | |||
} | |||
final long minSyncWait = getMinSyncWait(); | |||
while (lastRunRequested.get() > 0 && now() - lastRunRequested.get() < minSyncWait) { | |||
napTime = minSyncWait / 4; | |||
log.info(logPrefix+" napping for "+formatDuration(napTime)+", waiting for at least "+formatDuration(minSyncWait)+" of no activity before starting sync"); | |||
nap(napTime, logPrefix + " waiting for inactivity"); | |||
} | |||
try { | |||
action.run(); | |||
} catch (Exception e) { | |||
log.error(logPrefix+" sync: " + e, e); | |||
} finally { | |||
lastRun.set(now()); | |||
lastRunRequested.set(0); | |||
} | |||
} | |||
} | |||
} |
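// Illustrative usage sketch (not part of this class). Because the remaining final fields are
// initialized inline, Lombok's @AllArgsConstructor yields a (logPrefix, action) constructor;
// the doSync() call below is a hypothetical placeholder.
//
//     final BufferedRunDaemon syncer = new BufferedRunDaemon("my-sync", () -> doSync());
//     syncer.start();   // spawns the loop on a background thread (ZillaRuntime.background)
//     syncer.poke();    // request a run; the daemon waits for getMinSyncWait() of quiet first
//     syncer.done();    // signal shutdown and interrupt the daemon thread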
@@ -0,0 +1,28 @@ | |||
package org.cobbzilla.util.daemon; | |||
import lombok.extern.slf4j.Slf4j; | |||
import java.util.concurrent.ExecutorService; | |||
import java.util.concurrent.Executors; | |||
import java.util.concurrent.ThreadFactory; | |||
@Slf4j | |||
public class DaemonThreadFactory implements ThreadFactory { | |||
public static final DaemonThreadFactory instance = new DaemonThreadFactory(); | |||
@Override public Thread newThread(Runnable r) { | |||
final Thread t = new Thread(r); | |||
t.setDaemon(true); | |||
return t; | |||
} | |||
public static ExecutorService fixedPool (int count) { | |||
if (count <= 0) { | |||
log.warn("fixedPool: invalid count ("+count+"), using single thread"); | |||
count = 1; | |||
} | |||
return Executors.newFixedThreadPool(count, instance); | |||
} | |||
} |
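// Illustrative usage sketch (not part of this class); doWork() is a hypothetical placeholder:
//
//     final ExecutorService pool = DaemonThreadFactory.fixedPool(4);
//     pool.submit(() -> doWork());   // pool threads are daemons, so they never block JVM exit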
@@ -0,0 +1,12 @@ | |||
package org.cobbzilla.util.daemon; | |||
/** | |||
* A generic interface for error reporting services like Errbit and Airbrake | |||
*/ | |||
public interface ErrorApi { | |||
void report(Exception e); | |||
void report(String s); | |||
void report(String s, Exception e); | |||
} |
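// A minimal sketch (not part of this interface) of an implementation that only logs; a real
// implementation would forward to a service such as Errbit or Airbrake. The "log" field is
// assumed. Install it globally with ZillaRuntime.setErrorApi(new LoggingErrorApi()).
//
//     public class LoggingErrorApi implements ErrorApi {
//         @Override public void report(Exception e)           { log.error("error: " + e, e); }
//         @Override public void report(String s)              { log.error(s); }
//         @Override public void report(String s, Exception e) { log.error(s + ": " + e, e); }
//     }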
@@ -0,0 +1,181 @@ | |||
package org.cobbzilla.util.daemon; | |||
import lombok.Getter; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.joda.time.format.DateTimeFormat; | |||
import org.joda.time.format.DateTimeFormatter; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
import static org.cobbzilla.util.system.Sleep.sleep; | |||
@Slf4j | |||
public abstract class SimpleDaemon implements Runnable { | |||
public static final DateTimeFormatter DFORMAT = DateTimeFormat.forPattern("yyyy-MMM-dd HH:mm:ss"); | |||
public SimpleDaemon () { this.name = getClass().getSimpleName(); } | |||
public SimpleDaemon (String name) { this.name = name; } | |||
@Getter private String name; | |||
@Getter private long lastProcessTime = 0; | |||
private volatile Thread mainThread = null; | |||
private final Object lock = new Object(); | |||
private volatile boolean isDone = false; | |||
/** Called right after daemon has started */ | |||
public void onStart () {} | |||
/** Called right before daemon is about to exit */ | |||
public void onStop () {} | |||
public void start() { | |||
log.info(name+": Starting daemon"); | |||
synchronized (lock) { | |||
if (mainThread != null) { | |||
log.warn(name+": daemon is already running, not starting it again"); | |||
return; | |||
} | |||
mainThread = new Thread(this); | |||
mainThread.setName(name); | |||
} | |||
mainThread.setDaemon(true); | |||
mainThread.start(); | |||
} | |||
private boolean alreadyStopped() { | |||
if (mainThread == null) { | |||
log.warn(name+": daemon is already stopped"); | |||
return true; | |||
} | |||
return false; | |||
} | |||
public void stop() { | |||
if (alreadyStopped()) return; | |||
isDone = true; | |||
mainThread.interrupt(); | |||
// Let's leave it at that, this thread is a daemon anyway. | |||
} | |||
public void interrupt() { | |||
if (alreadyStopped()) return; | |||
mainThread.interrupt(); | |||
} | |||
/** | |||
* @deprecated USE WITH CAUTION -- calls Thread.stop() !! | |||
*/ | |||
private void kill() { | |||
if (alreadyStopped()) return; | |||
isDone = true; | |||
mainThread.stop(); | |||
} | |||
/** | |||
* Tries to stop the daemon. If it doesn't stop within "wait" millis, | |||
* it gets killed. | |||
*/ | |||
public void stopWithPossibleKill(long wait) { | |||
stop(); | |||
long start = now(); | |||
while (getIsAlive() | |||
&& (now() - start < wait)) { | |||
wait(25, "stopWithPossibleKill"); | |||
} | |||
if (getIsAlive()) { | |||
kill(); | |||
} | |||
} | |||
protected void init() throws Exception {} | |||
public void run() { | |||
onStart(); | |||
long delay = getStartupDelay(); | |||
if (delay > 0) { | |||
log.debug(name + ": Delaying daemon startup for " + delay + "ms..."); | |||
if (!wait(delay, "run[startup-delay]")) { | |||
if (!canInterruptSleep()) return; | |||
} | |||
} | |||
log.debug(name + ": Daemon thread now running"); | |||
try { | |||
log.debug(name + ": Daemon thread invoking init"); | |||
init(); | |||
while (!isDone) { | |||
log.debug(name + ": Daemon thread invoking process"); | |||
try { | |||
process(); | |||
lastProcessTime = now(); | |||
} catch (Exception e) { | |||
processException(e); | |||
continue; | |||
} | |||
if (isDone) return; | |||
if (!wait(getSleepTime(), "run[post-processing]")) { | |||
if (canInterruptSleep()) continue; | |||
return; | |||
} | |||
} | |||
} catch (Exception e) { | |||
log.error(name + ": Error in daemon, exiting: " + e, e); | |||
} finally { | |||
cleanup(); | |||
try { | |||
onStop(); | |||
} catch (Exception e) { | |||
log.error(name + ": Error in onStop, exiting and ignoring error: " + e, e); | |||
} | |||
} | |||
} | |||
public void processException(Exception e) throws Exception { throw e; } | |||
protected boolean wait(long delay, String reason) { | |||
try { | |||
sleep(delay, reason); | |||
return true; | |||
} catch (RuntimeException e) { | |||
if (isDone) { | |||
log.info("sleep("+delay+") interrupted but daemon is done"); | |||
} else { | |||
log.error("sleep("+delay+") interrupted, exiting: "+e); | |||
} | |||
return false; | |||
} | |||
} | |||
protected boolean canInterruptSleep() { return false; } | |||
protected long getStartupDelay() { return 0; } | |||
protected abstract long getSleepTime(); | |||
protected abstract void process(); | |||
public boolean getIsDone() { return isDone; } | |||
public boolean getIsAlive() { | |||
try { | |||
return mainThread != null && mainThread.isAlive(); | |||
} catch (NullPointerException npe) { | |||
return false; | |||
} | |||
} | |||
private void cleanup() { | |||
mainThread = null; | |||
isDone = true; | |||
} | |||
public String getStatus() { | |||
return "isDone=" + getIsDone() | |||
+ "\nlastProcessTime=" + DFORMAT.print(lastProcessTime) | |||
+ "\nsleepTime=" + getSleepTime()+"ms"; | |||
} | |||
} |
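// Illustrative subclass sketch (not part of this class): implement process() and getSleepTime(),
// then call start(). The logger is assumed.
//
//     public class HeartbeatDaemon extends SimpleDaemon {
//         @Override protected long getSleepTime() { return 30_000; }                    // every 30s
//         @Override protected void process()      { log.info("heartbeat at " + now()); }
//     }
//
//     new HeartbeatDaemon().start();   // runs on a daemon thread until stop() is called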
@@ -0,0 +1,407 @@ | |||
package org.cobbzilla.util.daemon; | |||
import com.fasterxml.jackson.databind.JsonNode; | |||
import com.fasterxml.jackson.databind.node.ArrayNode; | |||
import com.fasterxml.jackson.databind.node.ObjectNode; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.apache.commons.lang3.RandomUtils; | |||
import org.apache.commons.lang3.SystemUtils; | |||
import org.cobbzilla.util.collection.ToStringTransformer; | |||
import org.cobbzilla.util.error.GeneralErrorHandler; | |||
import org.cobbzilla.util.io.StreamUtil; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import org.slf4j.Logger; | |||
import java.io.*; | |||
import java.lang.management.ManagementFactory; | |||
import java.lang.reflect.Array; | |||
import java.math.BigDecimal; | |||
import java.math.BigInteger; | |||
import java.math.RoundingMode; | |||
import java.util.*; | |||
import java.util.concurrent.Callable; | |||
import java.util.concurrent.atomic.AtomicLong; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
import java.util.function.Function; | |||
import static java.lang.Long.toHexString; | |||
import static java.util.concurrent.TimeUnit.SECONDS; | |||
import static java.util.stream.LongStream.range; | |||
import static org.apache.commons.collections.CollectionUtils.collect; | |||
import static org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace; | |||
import static org.cobbzilla.util.io.FileUtil.abs; | |||
import static org.cobbzilla.util.io.FileUtil.list; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.instantiate; | |||
import static org.cobbzilla.util.string.StringUtil.truncate; | |||
import static org.cobbzilla.util.system.Sleep.sleep; | |||
import static org.cobbzilla.util.time.TimeUtil.formatDuration; | |||
/** | |||
* the Zilla doesn't mess around. | |||
*/ | |||
@Slf4j | |||
public class ZillaRuntime { | |||
public static final String CLASSPATH_PREFIX = "classpath:"; | |||
public static String getJava() { return System.getProperty("java.home") + "/bin/java"; } | |||
public static boolean terminate(Thread thread, long timeout) { | |||
if (thread == null || !thread.isAlive()) return true; | |||
thread.interrupt(); | |||
final long start = realNow(); | |||
while (thread.isAlive() && realNow() - start < timeout) { | |||
sleep(100, "terminate: waiting for thread to die: "+thread); | |||
} | |||
if (thread.isAlive()) { | |||
log.warn("terminate: thread did not die voluntarily, killing it: "+thread); | |||
thread.stop(); | |||
} | |||
return false; | |||
} | |||
public static boolean bool(Boolean b) { return b != null && b; } | |||
public static boolean bool(Boolean b, boolean val) { return b != null ? b : val; } | |||
public interface ExceptionRunnable { void handle(Exception e); } | |||
public static final ExceptionRunnable DEFAULT_EX_RUNNABLE = e -> log.error("Error: " + e); | |||
public static ExceptionRunnable exceptionRunnable (Class<? extends Throwable>[] fatalExceptionClasses) { | |||
return e -> { | |||
for (Class<? extends Throwable> c : fatalExceptionClasses) { | |||
if (c.isAssignableFrom(e.getClass())) { | |||
if (e instanceof RuntimeException) throw (RuntimeException) e; | |||
die("fatal exception: "+e); | |||
} | |||
} | |||
DEFAULT_EX_RUNNABLE.handle(e); | |||
}; | |||
} | |||
public static Thread background (Runnable r) { return background(r, DEFAULT_EX_RUNNABLE); } | |||
public static Thread background (Runnable r, ExceptionRunnable ex) { | |||
final Thread t = new Thread(() -> { | |||
try { | |||
r.run(); | |||
} catch (Exception e) { | |||
ex.handle(e); | |||
} | |||
}); | |||
t.start(); | |||
return t; | |||
} | |||
public static final Function<Integer, Long> DEFAULT_RETRY_BACKOFF = SECONDS::toMillis; | |||
public static <T> T retry (Callable<T> func, int tries) { | |||
return retry(func, tries, DEFAULT_RETRY_BACKOFF, DEFAULT_EX_RUNNABLE); | |||
} | |||
public static <T> T retry (Callable<T> func, int tries, Function<Integer, Long> backoff) { | |||
return retry(func, tries, backoff, DEFAULT_EX_RUNNABLE); | |||
} | |||
public static <T> T retry (Callable<T> func, | |||
int tries, | |||
Logger logger) { | |||
return retry(func, tries, DEFAULT_RETRY_BACKOFF, e -> logger.error("Error: "+e)); | |||
} | |||
public static <T> T retry (Callable<T> func, | |||
int tries, | |||
Function<Integer, Long> backoff, | |||
Logger logger) { | |||
return retry(func, tries, backoff, e -> logger.error("Error: "+e)); | |||
} | |||
public static <T> T retry (Callable<T> func, | |||
int tries, | |||
Function<Integer, Long> backoff, | |||
ExceptionRunnable ex) { | |||
Exception lastEx = null; | |||
try { | |||
for (int i = 0; i < tries; i++) { | |||
try { | |||
final T rVal = func.call(); | |||
log.debug("retry: successful, returning: " + rVal); | |||
return rVal; | |||
} catch (Exception e) { | |||
lastEx = e; | |||
log.debug("retry: failed (attempt " + (i + 1) + "/" + tries + "): " + e); | |||
ex.handle(e); | |||
sleep(backoff.apply(i), "waiting to retry " + func.getClass().getSimpleName()); | |||
} | |||
} | |||
} catch (Exception e) { | |||
return die("retry: fatal exception, exiting: "+e); | |||
} | |||
return die("retry: max tries ("+tries+") exceeded. last exception: "+lastEx); | |||
} | |||
public static Thread daemon (Runnable r) { | |||
final Thread t = new Thread(r); | |||
t.setDaemon(true); | |||
t.start(); | |||
return t; | |||
} | |||
@Getter @Setter private static ErrorApi errorApi; | |||
public static <T> T die(String message) { return _throw(new IllegalStateException(message, null)); } | |||
public static <T> T die(String message, Exception e) { return _throw(new IllegalStateException(message, e)); } | |||
public static <T> T die(Exception e) { return _throw(new IllegalStateException("(no message)", e)); } | |||
public static <T> T notSupported() { return notSupported("not supported"); } | |||
public static <T> T notSupported(String message) { return _throw(new UnsupportedOperationException(message)); } | |||
private static <T> T _throw (RuntimeException e) { | |||
final String message = e.getMessage(); | |||
final Throwable cause = e.getCause(); | |||
if (errorApi != null) { | |||
if (cause != null && cause instanceof Exception) errorApi.report(message, (Exception) cause); | |||
else errorApi.report(e); | |||
} | |||
if (cause != null) log.error("Inner exception: " + message, cause); | |||
throw e; | |||
} | |||
public static String errorString(Exception e) { return errorString(e, 1000); } | |||
public static String errorString(Exception e, int maxlen) { | |||
return truncate(e.getClass().getName()+": "+e.getMessage()+"\n"+ getStackTrace(e), maxlen); | |||
} | |||
public static boolean empty(String s) { return s == null || s.length() == 0; } | |||
/** | |||
* Determines if the parameter is "empty", by criteria described in @return | |||
* Tries to avoid throwing exceptions, handling just about any case in a true/false fashion. | |||
* | |||
* @param o anything | |||
* @return true if and only if o is:
* * null | |||
* * a collection, map, iterable or array that contains no objects | |||
* * a file that does not exist or whose size is zero | |||
* * a directory that does not exist or that contains no files | |||
* * any object whose .toString method returns a zero-length string | |||
*/ | |||
public static boolean empty(Object o) { | |||
if (o == null) return true; | |||
if (o instanceof String) return o.toString().length() == 0; | |||
if (o instanceof Collection) return ((Collection)o).isEmpty(); | |||
if (o instanceof Map) return ((Map)o).isEmpty(); | |||
if (o instanceof JsonNode) { | |||
if (o instanceof ObjectNode) return ((ObjectNode) o).size() == 0; | |||
if (o instanceof ArrayNode) return ((ArrayNode) o).size() == 0; | |||
final String json = ((JsonNode) o).textValue(); | |||
return json == null || json.length() == 0; | |||
} | |||
if (o instanceof Iterable) return !((Iterable)o).iterator().hasNext(); | |||
if (o instanceof File) { | |||
final File f = (File) o; | |||
return !f.exists() || f.length() == 0 || (f.isDirectory() && list(f).length == 0); | |||
} | |||
if (o.getClass().isArray()) { | |||
if (o.getClass().getComponentType().isPrimitive()) { | |||
switch (o.getClass().getComponentType().getName()) { | |||
case "boolean": return ((boolean[]) o).length == 0; | |||
case "byte": return ((byte[]) o).length == 0; | |||
case "short": return ((short[]) o).length == 0; | |||
case "char": return ((char[]) o).length == 0; | |||
case "int": return ((int[]) o).length == 0; | |||
case "long": return ((long[]) o).length == 0; | |||
case "float": return ((float[]) o).length == 0; | |||
case "double": return ((double[]) o).length == 0; | |||
default: return o.toString().length() == 0; | |||
} | |||
} else { | |||
return ((Object[]) o).length == 0; | |||
} | |||
} | |||
return o.toString().length() == 0; | |||
} | |||
public static <T> T first (Iterable<T> o) { return o.iterator().next(); }
public static <K, T> T first (Map<K, T> o) { return first(o.values()); } | |||
public static <T> T first (T[] o) { return o[0]; } | |||
public static <T> T sorted(T o) { | |||
if (empty(o)) return o; | |||
if (o.getClass().isArray()) { | |||
final Object[] copy = (Object[]) Array.newInstance(o.getClass().getComponentType(), | |||
((Object[])o).length); | |||
System.arraycopy(o, 0, copy, 0 , copy.length); | |||
Arrays.sort(copy); | |||
return (T) copy; | |||
} | |||
if (o instanceof Collection) { | |||
final List list = new ArrayList((Collection) o); | |||
Collections.sort(list); | |||
final Collection copy = (Collection) instantiate(o.getClass()); | |||
copy.addAll(list); | |||
return (T) copy; | |||
} | |||
return die("sorted: cannot sort a "+o.getClass().getSimpleName()+", can only sort arrays and Collections"); | |||
} | |||
public static <T> List sortedList(T o) { | |||
if (o == null) return null; | |||
if (o instanceof Collection) return new ArrayList((Collection) o); | |||
if (o instanceof Object[]) return Arrays.asList((Object[]) o); | |||
return die("sortedList: cannot sort a "+o.getClass().getSimpleName()+", can only sort arrays and Collections"); | |||
} | |||
public static Boolean safeBoolean(String val, Boolean ifNull) { return empty(val) ? ifNull : Boolean.valueOf(val); } | |||
public static Boolean safeBoolean(String val) { return safeBoolean(val, null); } | |||
public static Integer safeInt(String val, Integer ifNull) { return empty(val) ? ifNull : Integer.valueOf(val); } | |||
public static Integer safeInt(String val) { return safeInt(val, null); } | |||
public static Long safeLong(String val, Long ifNull) { return empty(val) ? ifNull : Long.valueOf(val); } | |||
public static Long safeLong(String val) { return safeLong(val, null); } | |||
public static BigInteger bigint(long val) { return new BigInteger(String.valueOf(val)); } | |||
public static BigInteger bigint(int val) { return new BigInteger(String.valueOf(val)); } | |||
public static BigInteger bigint(byte val) { return new BigInteger(String.valueOf(val)); } | |||
public static BigDecimal big(String val) { return new BigDecimal(val); } | |||
public static BigDecimal big(double val) { return new BigDecimal(String.valueOf(val)); } | |||
public static BigDecimal big(float val) { return new BigDecimal(String.valueOf(val)); } | |||
public static BigDecimal big(long val) { return new BigDecimal(String.valueOf(val)); } | |||
public static BigDecimal big(int val) { return new BigDecimal(String.valueOf(val)); } | |||
public static BigDecimal big(byte val) { return new BigDecimal(String.valueOf(val)); } | |||
public static int percent(int value, double pct) { return percent(value, pct, RoundingMode.HALF_UP); } | |||
public static int percent(int value, double pct, RoundingMode rounding) { | |||
return big(value).multiply(big(pct)).setScale(0, rounding).intValue(); | |||
} | |||
public static int percent(BigDecimal value, BigDecimal pct) { | |||
return percent(value.intValue(), pct.multiply(big(0.01)).doubleValue(), RoundingMode.HALF_UP); | |||
} | |||
public static String uuid() { return UUID.randomUUID().toString(); } | |||
@Getter @Setter private static volatile long systemTimeOffset = 0; | |||
public static long now() { return System.currentTimeMillis() + systemTimeOffset; } | |||
public static String hexnow() { return toHexString(now()); } | |||
public static String hexnow(long now) { return toHexString(now); } | |||
public static long realNow() { return System.currentTimeMillis(); } | |||
public static <T> T pickRandom(T[] things) { return things[RandomUtils.nextInt(0, things.length)]; } | |||
public static <T> T pickRandom(List<T> things) { return things.get(RandomUtils.nextInt(0, things.size())); } | |||
public static BufferedReader stdin() { return new BufferedReader(new InputStreamReader(System.in)); } | |||
public static BufferedWriter stdout() { return new BufferedWriter(new OutputStreamWriter(System.out)); } | |||
public static String readStdin() { return StreamUtil.toStringOrDie(System.in); } | |||
public static int envInt (String name, int defaultValue) { return envInt(name, defaultValue, null, null); } | |||
public static int envInt (String name, int defaultValue, Integer maxValue) { return envInt(name, defaultValue, null, maxValue); } | |||
public static int envInt (String name, int defaultValue, Integer minValue, Integer maxValue) { | |||
return envInt(name, defaultValue, minValue, maxValue, System.getenv()); | |||
} | |||
public static int envInt (String name, int defaultValue, Integer minValue, Integer maxValue, Map<String, String> env) { | |||
final String s = env.get(name); | |||
if (!empty(s)) { | |||
try { | |||
final int val = Integer.parseInt(s); | |||
if (val <= 0) { | |||
log.warn("envInt: invalid value("+name+"): " +val+", returning "+defaultValue); | |||
return defaultValue; | |||
} else if (maxValue != null && val > maxValue) { | |||
log.warn("envInt: value too large ("+name+"): " +val+ ", returning " + maxValue); | |||
return maxValue; | |||
} else if (minValue != null && val < minValue) { | |||
log.warn("envInt: value too small ("+name+"): " +val+ ", returning " + minValue); | |||
return minValue; | |||
} | |||
return val; | |||
} catch (Exception e) { | |||
log.warn("envInt: invalid value("+name+"): " +s+", returning "+defaultValue); | |||
return defaultValue; | |||
} | |||
} | |||
return defaultValue; | |||
} | |||
public static int processorCount() { return Runtime.getRuntime().availableProcessors(); } | |||
public static String hashOf (Object... things) { | |||
final StringBuilder b = new StringBuilder(); | |||
for (Object thing : things) { | |||
b.append(thing == null ? "null" : thing).append(":::"); | |||
} | |||
return b.toString(); | |||
} | |||
public static Collection<String> stringRange(Number start, Number end) { | |||
return collect(range(start.longValue(), end.longValue()).boxed().iterator(), ToStringTransformer.instance); | |||
} | |||
public static String zcat() { return SystemUtils.IS_OS_MAC ? "gzcat" : "zcat"; } | |||
public static String zcat(File f) { return (SystemUtils.IS_OS_MAC ? "gzcat" : "zcat") + " " + abs(f); } | |||
public static final String[] OMIT_DEBUG_OPTIONS = {"-Xdebug", "-agentlib", "-Xrunjdwp"}; | |||
public static boolean isDebugOption (String arg) { | |||
for (String opt : OMIT_DEBUG_OPTIONS) if (arg.startsWith(opt)) return true; | |||
return false; | |||
} | |||
public static String javaOptions() { return javaOptions(true); } | |||
public static String javaOptions(boolean excludeDebugOptions) { | |||
final List<String> opts = new ArrayList<>(); | |||
for (String arg : ManagementFactory.getRuntimeMXBean().getInputArguments()) { | |||
if (excludeDebugOptions && isDebugOption(arg)) continue; | |||
opts.add(arg); | |||
} | |||
return StringUtil.toString(opts, " "); | |||
} | |||
public static <T> T dcl (AtomicReference<T> target, Callable<T> init) { | |||
return dcl(target, init, null); | |||
} | |||
@SuppressWarnings("SynchronizationOnLocalVariableOrMethodParameter") | |||
public static <T> T dcl (AtomicReference<T> target, Callable<T> init, GeneralErrorHandler error) { | |||
if (target.get() == null) { | |||
synchronized (target) { | |||
if (target.get() == null) { | |||
try { | |||
target.set(init.call()); | |||
} catch (Exception e) { | |||
if (error != null) { | |||
error.handleError("dcl: error initializing: "+e, e); | |||
} else { | |||
log.warn("dcl: "+e); | |||
return null; | |||
} | |||
} | |||
} | |||
} | |||
} | |||
return target.get(); | |||
} | |||
public static String stacktrace() { return getStackTrace(new Exception()); } | |||
private static final AtomicLong selfDestructInitiated = new AtomicLong(-1); | |||
public static void setSelfDestruct (long t) { setSelfDestruct(t, 0); } | |||
public static void setSelfDestruct (long t, int status) { | |||
synchronized (selfDestructInitiated) { | |||
final long dieTime = selfDestructInitiated.get(); | |||
if (dieTime == -1) { | |||
selfDestructInitiated.set(now()+t); | |||
daemon(() -> { sleep(t); System.exit(status); }); | |||
} else { | |||
log.warn("setSelfDestruct: already set: self-destructing in "+formatDuration(dieTime-now())); | |||
} | |||
} | |||
} | |||
} |
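// Illustrative usage sketches (not part of this class); fetchUrl(), Config and loadConfig()
// are hypothetical placeholders.
//
//     // retry a flaky call up to 3 times; the default backoff sleeps SECONDS.toMillis(attempt)
//     // between attempts, and die() is called if every attempt fails
//     final String body = retry(() -> fetchUrl("https://example.com/"), 3);
//
//     // lazily initialize a shared value exactly once via double-checked locking
//     private static final AtomicReference<Config> CONFIG = new AtomicReference<>();
//     public static Config config() { return dcl(CONFIG, () -> loadConfig()); }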
@@ -0,0 +1,33 @@ | |||
package org.cobbzilla.util.dns; | |||
import java.util.List; | |||
public interface DnsManager { | |||
/** | |||
* List matching DNS records | |||
* @param match The DnsRecordMatch query | |||
* @return a List of DnsRecords that match | |||
*/ | |||
List<DnsRecord> list(DnsRecordMatch match) throws Exception; | |||
/** | |||
* Write a DNS record | |||
* @param record a DNS record to create or update | |||
* @return true if the record was written, false if it was not (it may have been unchanged) | |||
*/ | |||
boolean write(DnsRecord record) throws Exception; | |||
/** | |||
* Publish changes to DNS records. Must be called after calling write if you want to see the changes publicly. | |||
*/ | |||
void publish() throws Exception; | |||
/** | |||
* Delete matching DNS records | |||
* @param match The DnsRecordMatch query | |||
* @return A count of the number of records deleted, or -1 if this DnsManager does not support returning counts | |||
*/ | |||
int remove(DnsRecordMatch match) throws Exception; | |||
} |
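// Illustrative usage sketch (not part of this interface); the concrete DnsManager instance is
// assumed, and handling of the declared exceptions is omitted:
//
//     dns.write(DnsRecord.A("www.example.com", "203.0.113.10"));
//     dns.publish();   // changes only become visible after publish()
//     final List<DnsRecord> records = dns.list(new DnsRecordMatch("www.example.com"));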
@@ -0,0 +1,131 @@ | |||
package org.cobbzilla.util.dns; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.ToString; | |||
import lombok.experimental.Accessors; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import java.util.Comparator; | |||
import java.util.HashMap; | |||
import java.util.Map; | |||
import java.util.concurrent.TimeUnit; | |||
import static java.util.Comparator.comparing; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.dns.DnsType.A; | |||
import static org.cobbzilla.util.dns.DnsType.SOA; | |||
@NoArgsConstructor @Accessors(chain=true) @ToString(callSuper=true) | |||
public class DnsRecord extends DnsRecordBase { | |||
public static final int DEFAULT_TTL = (int) TimeUnit.HOURS.toSeconds(1); | |||
public static final String OPT_MX_RANK = "rank"; | |||
public static final String OPT_NS_NAME = "ns"; | |||
public static final String OPT_SOA_MNAME = "mname"; | |||
public static final String OPT_SOA_RNAME = "rname"; | |||
public static final String OPT_SOA_SERIAL = "serial"; | |||
public static final String OPT_SOA_REFRESH = "refresh"; | |||
public static final String OPT_SOA_RETRY = "retry"; | |||
public static final String OPT_SOA_EXPIRE = "expire"; | |||
public static final String OPT_SOA_MINIMUM = "minimum"; | |||
public static final String[] MX_REQUIRED_OPTIONS = {OPT_MX_RANK}; | |||
public static final String[] NS_REQUIRED_OPTIONS = {OPT_NS_NAME}; | |||
public static final String[] SOA_REQUIRED_OPTIONS = { | |||
OPT_SOA_MNAME, OPT_SOA_RNAME, OPT_SOA_SERIAL, OPT_SOA_REFRESH, OPT_SOA_EXPIRE, OPT_SOA_RETRY | |||
}; | |||
public static final Comparator<? super DnsRecord> DUPE_COMPARATOR = comparing(DnsRecord::dnsUniq); | |||
@Getter @Setter private int ttl = DEFAULT_TTL; | |||
@Getter @Setter private Map<String, String> options; | |||
public boolean hasOptions () { return options != null && !options.isEmpty(); } | |||
public DnsRecord (DnsType type, String fqdn, String value, int ttl) { | |||
setType(type); | |||
setFqdn(fqdn); | |||
setValue(value); | |||
setTtl(ttl); | |||
} | |||
public static DnsRecord A(String host, String ip) { | |||
return (DnsRecord) new DnsRecord().setType(A).setFqdn(host).setValue(ip); | |||
} | |||
public DnsRecord setOption(String optName, String value) { | |||
if (options == null) options = new HashMap<>(); | |||
options.put(optName, value); | |||
return this; | |||
} | |||
public String getOption(String optName) { return options == null ? null : options.get(optName); } | |||
public int getIntOption(String optName, int defaultValue) { | |||
try { | |||
return Integer.parseInt(options.get(optName)); | |||
} catch (Exception ignored) { | |||
return defaultValue; | |||
} | |||
} | |||
@JsonIgnore public String[] getRequiredOptions () { | |||
switch (getType()) { | |||
case MX: return MX_REQUIRED_OPTIONS; | |||
case NS: return NS_REQUIRED_OPTIONS; | |||
case SOA: return SOA_REQUIRED_OPTIONS; | |||
default: return StringUtil.EMPTY_ARRAY; | |||
} | |||
} | |||
@JsonIgnore public boolean hasAllRequiredOptions () { | |||
for (String opt : getRequiredOptions()) { | |||
if (options == null || !options.containsKey(opt)) return false; | |||
} | |||
return true; | |||
} | |||
public String getOptions_string(String sep) { | |||
final StringBuilder b = new StringBuilder(); | |||
if (options != null) { | |||
for (Map.Entry<String, String> e : options.entrySet()) { | |||
if (b.length() > 0) b.append(sep); | |||
if (empty(e.getValue())) { | |||
b.append(e.getKey()).append("=true"); | |||
} else { | |||
b.append(e.getKey()).append("=").append(e.getValue()); | |||
} | |||
} | |||
} | |||
return b.toString(); | |||
} | |||
public DnsRecord setOptions_string(String arg) { | |||
if (options == null) options = new HashMap<>(); | |||
if (empty(arg)) return this; | |||
for (String kvPair : arg.split(",")) { | |||
int eqPos = kvPair.indexOf("="); | |||
if (eqPos == kvPair.length()-1) throw new IllegalArgumentException("Option cannot end in '=' character");
if (eqPos == -1) { | |||
options.put(kvPair.trim(), "true"); | |||
} else { | |||
options.put(kvPair.substring(0, eqPos).trim(), kvPair.substring(eqPos+1).trim()); | |||
} | |||
} | |||
return this; | |||
} | |||
public String dnsUniq() { return type == SOA ? SOA+":"+fqdn : dnsFormat(",", "|"); } | |||
public String dnsFormat() { | |||
return dnsFormat(",", "|"); | |||
} | |||
public String dnsFormat(String fieldSep, String optionsSep) { | |||
return getType().name().toUpperCase()+fieldSep+getFqdn()+fieldSep+getValue()+fieldSep+getTtl()+fieldSep+(!hasOptions() ? "" : getOptions_string(optionsSep)); | |||
} | |||
} |
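// Illustrative usage sketch (not part of this class):
//
//     final DnsRecord mx = new DnsRecord(DnsType.MX, "example.com", "mail.example.com", 3600)
//             .setOption(OPT_MX_RANK, "10");
//     mx.hasAllRequiredOptions();   // true: MX records only require the "rank" option
//     mx.dnsFormat();               // "MX,example.com,mail.example.com,3600,rank=10"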
@@ -0,0 +1,39 @@ | |||
package org.cobbzilla.util.dns; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.*; | |||
import lombok.experimental.Accessors; | |||
import lombok.extern.slf4j.Slf4j; | |||
import static org.apache.commons.lang3.StringUtils.chop; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
@NoArgsConstructor @AllArgsConstructor @Accessors(chain=true) | |||
@ToString @EqualsAndHashCode @Slf4j | |||
public class DnsRecordBase { | |||
@Getter @Setter protected String fqdn; | |||
public boolean hasFqdn() { return !empty(fqdn); } | |||
@JsonIgnore public String getNormalFqdn() { return empty(fqdn) ? fqdn : fqdn.endsWith(".") ? chop(fqdn) : fqdn; } | |||
public String getHost (String suffix) { | |||
if (!hasFqdn()) return die("getHost: fqdn not set"); | |||
if (getFqdn().endsWith(suffix)) return getFqdn().substring(0, getFqdn().length() - suffix.length() - 1); | |||
log.warn("getHost: suffix mismatch: fqdn "+getFqdn()+" does not end with "+suffix); | |||
return getFqdn(); | |||
} | |||
@Getter @Setter protected DnsType type; | |||
public boolean hasType () { return type != null; } | |||
@Getter @Setter protected String value; | |||
public boolean hasValue () { return !empty(value); } | |||
@JsonIgnore | |||
public DnsRecordMatch getMatcher() { | |||
return (DnsRecordMatch) new DnsRecordMatch().setFqdn(fqdn).setType(type).setValue(value); | |||
} | |||
} |
@@ -0,0 +1,47 @@ | |||
package org.cobbzilla.util.dns; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.ToString; | |||
import lombok.experimental.Accessors; | |||
import java.util.Set; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
@NoArgsConstructor @Accessors(chain=true) @ToString(of={"pattern", "fqdns", "subdomain"}, callSuper=true) | |||
public class DnsRecordMatch extends DnsRecordBase { | |||
@Getter @Setter private String pattern; | |||
@Getter @Setter private Set<String> fqdns; | |||
public boolean hasFqdns () { return fqdns != null && !fqdns.isEmpty(); } | |||
public DnsRecordMatch(Set<String> fqdns) { this.fqdns = fqdns; } | |||
public boolean hasPattern() { return !empty(pattern); } | |||
@Getter @Setter private String subdomain; | |||
public boolean hasSubdomain() { return !empty(subdomain); } | |||
public DnsRecordMatch(DnsRecordBase record) { | |||
super(record.getFqdn(), record.getType(), record.getValue()); | |||
} | |||
public DnsRecordMatch(DnsType type, String fqdn) { | |||
setType(type); | |||
setFqdn(fqdn); | |||
} | |||
public DnsRecordMatch(String fqdn) { this(null, fqdn); } | |||
public boolean matches (DnsRecord record) { | |||
if (hasType() && !getType().equals(record.getType())) return false; | |||
if (hasFqdn() && !getFqdn().equals(record.getFqdn())) return false; | |||
if (hasSubdomain() && record.hasFqdn() && !record.getFqdn().endsWith(getSubdomain())) return false; | |||
if (hasPattern() && record.hasFqdn() && !record.getFqdn().matches(getPattern())) return false; | |||
if (hasFqdns() && record.hasFqdn() && !getFqdns().contains(record.getFqdn())) return false; | |||
return true; | |||
} | |||
} |
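// Illustrative usage sketch (not part of this class):
//
//     final DnsRecordMatch match = new DnsRecordMatch(DnsType.A, "www.example.com");
//     match.matches(DnsRecord.A("www.example.com", "203.0.113.10"));    // true
//     match.matches(DnsRecord.A("mail.example.com", "203.0.113.11"));   // false: fqdn differs
//
//     // match an entire subdomain instead of a single fqdn
//     new DnsRecordMatch().setSubdomain("example.com")
//             .matches(DnsRecord.A("www.example.com", "203.0.113.10"));  // true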
@@ -0,0 +1,11 @@ | |||
package org.cobbzilla.util.dns; | |||
import com.fasterxml.jackson.annotation.JsonCreator; | |||
public enum DnsServerType { | |||
dyn, namecheap, djbdns, bind; | |||
@JsonCreator public static DnsServerType create (String v) { return valueOf(v.toLowerCase()); } | |||
} |
@@ -0,0 +1,17 @@ | |||
package org.cobbzilla.util.dns; | |||
import com.fasterxml.jackson.annotation.JsonCreator; | |||
public enum DnsType { | |||
A, AAAA, CNAME, MX, NS, TXT, SOA, PTR, // very common record types | |||
RP, LOC, SIG, SPF, SRV, TSIG, TKEY, CERT, // sometimes used | |||
KEY, DS, DNSKEY, NSEC, NSEC3, NSEC3PARAM, RRSIG, IPSECKEY, DLV, // DNSSEC and other security-related types | |||
DNAME, DHCID, HIP, NAPTR, SSHFP, TLSA, // infrequently used
IXFR, AXFR, OPT; // pseudo-record types | |||
public static final DnsType[] A_TYPES = new DnsType[] {A, AAAA}; | |||
@JsonCreator public static DnsType fromString(String value) { return valueOf(value.toUpperCase()); } | |||
} |
@@ -0,0 +1,13 @@ | |||
package org.cobbzilla.util.error; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import java.util.List; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
public interface GeneralErrorHandler { | |||
default <T> T handleError(String message) { return die(message); } | |||
default <T> T handleError(String message, Exception e) { return die(message, e); } | |||
default <T> T handleError(List<String> validationErrors) { return die("validation errors: "+ StringUtil.toString(validationErrors)); } | |||
} |
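// A minimal sketch (not part of this interface) of a handler that collects messages instead of
// calling die(), e.g. for validation passes that should report every problem:
//
//     public class CollectingErrorHandler implements GeneralErrorHandler {
//         private final List<String> errors = new ArrayList<>();
//         @Override public <T> T handleError(String message) { errors.add(message); return null; }
//         public List<String> getErrors() { return errors; }
//     }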
@@ -0,0 +1,11 @@ | |||
package org.cobbzilla.util.error; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
public class GeneralErrorHandlerBase implements GeneralErrorHandler { | |||
public static final GeneralErrorHandlerBase instance = new GeneralErrorHandlerBase(); | |||
public static AtomicReference<GeneralErrorHandler> defaultErrorHandler() { | |||
return new AtomicReference<>(GeneralErrorHandlerBase.instance); | |||
} | |||
} |
@@ -0,0 +1,12 @@ | |||
package org.cobbzilla.util.error; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
public interface HasGeneralErrorHandler { | |||
AtomicReference<GeneralErrorHandler> getErrorHandler (); | |||
default <T> T error(String message) { return getErrorHandler().get().handleError(message); } | |||
default <T> T error(String message, Exception e) { return getErrorHandler().get().handleError(message, e); } | |||
} |
@@ -0,0 +1,11 @@ | |||
package org.cobbzilla.util.graphics; | |||
import com.fasterxml.jackson.annotation.JsonCreator; | |||
public enum ColorMode { | |||
rgb, ansi; | |||
@JsonCreator public static ColorMode fromString (String val) { return valueOf(val.toLowerCase()); } | |||
} |
@@ -0,0 +1,68 @@ | |||
package org.cobbzilla.util.graphics; | |||
import org.apache.commons.lang3.RandomUtils; | |||
import java.awt.*; | |||
import java.util.Collection; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.string.StringUtil.getHexValue; | |||
public class ColorUtil { | |||
public static final String ANSI_RESET = "\\033[0m"; | |||
public static int parseRgb(String colorString) { return parseRgb(colorString, null); } | |||
public static int parseRgb(String colorString, Integer defaultRgb) { | |||
try { | |||
if (empty(colorString)) return defaultRgb; | |||
if (colorString.startsWith("0x")) return Integer.parseInt(colorString.substring(2), 16); | |||
if (colorString.startsWith("#")) return Integer.parseInt(colorString.substring(1), 16); | |||
return Integer.parseInt(colorString, 16); | |||
} catch (Exception e) { | |||
if (defaultRgb == null) { | |||
return die("parseRgb: '' was unparseable and no default value provided: "+e.getClass().getSimpleName()+": "+e.getMessage(), e); | |||
} | |||
return defaultRgb; | |||
} | |||
} | |||
public static int rgb2ansi(int color) { return rgb2ansi(new Color(color)); } | |||
public static int rgb2ansi(Color c) { | |||
return 16 + (36 * (c.getRed() / 51)) + (6 * (c.getGreen() / 51)) + c.getBlue() / 51; | |||
} | |||
public static String rgb2hex(int color) { | |||
final Color c = new Color(color); | |||
return getHexValue((byte) c.getRed()) | |||
+ getHexValue((byte) c.getGreen()) | |||
+ getHexValue((byte) c.getBlue()); | |||
} | |||
public static int randomColor() { return randomColor(null, ColorMode.rgb); } | |||
public static int randomColor(ColorMode mode) { return randomColor(null, mode); } | |||
public static int randomColor(Collection<Integer> usedColors) { return randomColor(usedColors, ColorMode.rgb); } | |||
public static int randomColor(Collection<Integer> usedColors, ColorMode mode) { | |||
int val; | |||
do { | |||
val = RandomUtils.nextInt(0x000000, 0xffffff); | |||
} while (usedColors != null && usedColors.contains(val)); | |||
return mode == ColorMode.rgb ? val : rgb2ansi(val); | |||
} | |||
public static String ansiColor(int fg) { return ansiColor(fg, null); } | |||
public static String ansiColor(int fg, Integer bg) { | |||
final StringBuilder b = new StringBuilder(); | |||
b.append("\\033[38;5;") | |||
.append(rgb2ansi(fg)) | |||
.append(bg == null ? "" : ";48;5;"+rgb2ansi(bg)) | |||
.append("m"); | |||
return b.toString(); | |||
} | |||
} |
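// Worked examples (illustrative, not part of this class): rgb2ansi maps each 8-bit channel onto
// the 6x6x6 xterm-256 color cube (channel/51 gives an index 0-5), offset by the 16 base colors:
//
//     rgb2ansi(new Color(0xFF0000));   // red:   16 + 36*5 + 6*0 + 0 = 196
//     rgb2ansi(new Color(0x00FF00));   // green: 16 + 36*0 + 6*5 + 0 = 46
//     parseRgb("#336699");             // 0x336699 == 3368601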
@@ -0,0 +1,23 @@ | |||
package org.cobbzilla.util.graphics; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
public class ImageTransformConfig { | |||
@Getter @Setter private int height; | |||
@Getter @Setter private int width; | |||
public ImageTransformConfig(String config) { | |||
final int xpos = config.indexOf('x'); | |||
try { | |||
width = Integer.parseInt(config.substring(0, xpos));
height = Integer.parseInt(config.substring(xpos + 1));
} catch (Exception e) { | |||
die("invalid config (expected WxH): " + config + ": " + e, e); | |||
} | |||
} | |||
} |
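// Illustrative usage sketch (not part of this class), using the WxH format named in the
// error message above:
//
//     final ImageTransformConfig cfg = new ImageTransformConfig("800x600");
//     cfg.getWidth();    // 800
//     cfg.getHeight();   // 600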
@@ -0,0 +1,43 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import org.apache.commons.codec.binary.Base64InputStream; | |||
import org.cobbzilla.util.io.FileUtil; | |||
import java.io.ByteArrayInputStream; | |||
import java.io.File; | |||
import java.io.IOException; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.io.FileUtil.temp; | |||
@NoArgsConstructor @Accessors(chain=true) | |||
public class Base64ImageInsertion extends ImageInsertion { | |||
public static final Base64ImageInsertion[] NO_IMAGE_INSERTIONS = new Base64ImageInsertion[0]; | |||
public Base64ImageInsertion(Base64ImageInsertion other) { super(other); } | |||
public Base64ImageInsertion(String spec) { super(spec); } | |||
@Getter @Setter private String image; // base64-encoded image data | |||
@Override public File getImageFile() throws IOException { | |||
if (empty(getImage())) return null; | |||
final File temp = temp("."+getFormat()); | |||
final Base64InputStream stream = new Base64InputStream(new ByteArrayInputStream(image.getBytes())); | |||
FileUtil.toFile(temp, stream); | |||
return temp; | |||
} | |||
@Override protected void setField(String key, String value) { | |||
switch (key) { | |||
case "image": this.image = value; break; | |||
default: super.setField(key, value); | |||
} | |||
} | |||
} |
@@ -0,0 +1,7 @@ | |||
package org.cobbzilla.util.handlebars; | |||
public interface ContextMessageSender { | |||
void send(String recipient, String subject, String message, String contentType); | |||
} |
@@ -0,0 +1,34 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import java.io.File; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.notSupported; | |||
@NoArgsConstructor @Accessors(chain=true) | |||
public class ESignInsertion extends ImageInsertion { | |||
public static final ESignInsertion[] NO_ESIGN_INSERTIONS = new ESignInsertion[0]; | |||
@Getter @Setter private String role = null; | |||
public ESignInsertion(ESignInsertion other) { super(other); } | |||
public ESignInsertion(String spec) { super(spec); } | |||
@Override protected void setField(String key, String value) { | |||
switch (key) { | |||
case "role": role = value; break; | |||
default: super.setField(key, value); | |||
} | |||
} | |||
@Override public File getImageFile() { | |||
return notSupported("getImageFile not supported for " + this.getClass().getName()); | |||
} | |||
} |
@@ -0,0 +1,940 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import com.fasterxml.jackson.core.io.JsonStringEncoder; | |||
import com.fasterxml.jackson.databind.JsonNode; | |||
import com.fasterxml.jackson.databind.node.ArrayNode; | |||
import com.github.jknack.handlebars.Handlebars; | |||
import com.github.jknack.handlebars.HandlebarsException; | |||
import com.github.jknack.handlebars.Helper; | |||
import com.github.jknack.handlebars.Options; | |||
import com.github.jknack.handlebars.io.AbstractTemplateLoader; | |||
import com.github.jknack.handlebars.io.StringTemplateSource; | |||
import com.github.jknack.handlebars.io.TemplateSource; | |||
import lombok.AllArgsConstructor; | |||
import lombok.Cleanup; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.apache.commons.collections.iterators.ArrayIterator; | |||
import org.apache.commons.lang3.RandomStringUtils; | |||
import org.apache.commons.lang3.StringUtils; | |||
import org.apache.pdfbox.io.IOUtils; | |||
import org.cobbzilla.util.collection.SingletonList; | |||
import org.cobbzilla.util.http.HttpContentTypes; | |||
import org.cobbzilla.util.io.FileResolver; | |||
import org.cobbzilla.util.io.FileUtil; | |||
import org.cobbzilla.util.io.PathListFileResolver; | |||
import org.cobbzilla.util.javascript.JsEngineFactory; | |||
import org.cobbzilla.util.reflect.ReflectionUtil; | |||
import org.cobbzilla.util.string.LocaleUtil; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import org.cobbzilla.util.time.JavaTimezone; | |||
import org.cobbzilla.util.time.TimeUtil; | |||
import org.cobbzilla.util.time.UnicodeTimezone; | |||
import org.joda.time.DateTime; | |||
import org.joda.time.DateTimeZone; | |||
import org.joda.time.Period; | |||
import org.joda.time.format.DateTimeFormat; | |||
import org.joda.time.format.DateTimeFormatter; | |||
import java.io.File; | |||
import java.io.FileNotFoundException; | |||
import java.io.IOException; | |||
import java.io.StringWriter; | |||
import java.lang.reflect.Method; | |||
import java.math.BigDecimal; | |||
import java.math.MathContext; | |||
import java.util.*; | |||
import java.util.concurrent.ExecutorService; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
import java.util.regex.Pattern; | |||
import static java.util.regex.Pattern.quote; | |||
import static org.cobbzilla.util.collection.ArrayUtil.arrayToString; | |||
import static org.cobbzilla.util.daemon.DaemonThreadFactory.fixedPool; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.*; | |||
import static org.cobbzilla.util.io.FileUtil.abs; | |||
import static org.cobbzilla.util.io.FileUtil.touch; | |||
import static org.cobbzilla.util.io.StreamUtil.loadResourceAsStream; | |||
import static org.cobbzilla.util.io.StreamUtil.stream2string; | |||
import static org.cobbzilla.util.json.JsonUtil.getJsonStringEncoder; | |||
import static org.cobbzilla.util.json.JsonUtil.json; | |||
import static org.cobbzilla.util.security.ShaUtil.sha256_hex; | |||
import static org.cobbzilla.util.string.Base64.encodeBytes; | |||
import static org.cobbzilla.util.string.Base64.encodeFromFile; | |||
import static org.cobbzilla.util.string.StringUtil.*; | |||
import static org.cobbzilla.util.system.CommandShell.*; | |||
@AllArgsConstructor @Slf4j | |||
public class HandlebarsUtil extends AbstractTemplateLoader { | |||
public static final char HB_START_CHAR = '{'; | |||
public static final char HB_END_CHAR = '}'; | |||
public static final String HB_START = StringUtils.repeat(HB_START_CHAR, 2); | |||
public static final String HB_END = StringUtils.repeat(HB_END_CHAR, 2); | |||
public static final String HB_LSTART = StringUtils.repeat(HB_START_CHAR, 3); | |||
public static final String HB_LEND = StringUtils.repeat(HB_END_CHAR, 3); | |||
public static final String DEFAULT_FLOAT_FORMAT = "%1$,.3f"; | |||
public static final JsonStringEncoder JSON_STRING_ENCODER = new JsonStringEncoder(); | |||
private String sourceName = "unknown"; | |||
public static Map<String, Object> apply(Handlebars handlebars, Map<String, Object> map, Map<String, Object> ctx) { | |||
return apply(handlebars, map, ctx, HB_START_CHAR, HB_END_CHAR); | |||
} | |||
public static Map<String, Object> apply(Handlebars handlebars, Map<String, Object> map, Map<String, Object> ctx, char altStart, char altEnd) { | |||
if (empty(map)) return map; | |||
final Map<String, Object> merged = new LinkedHashMap<>(); | |||
final String hbStart = StringUtils.repeat(altStart, 2); | |||
final String hbEnd = StringUtils.repeat(altEnd, 2); | |||
for (Map.Entry<String, Object> entry : map.entrySet()) { | |||
final Object value = entry.getValue(); | |||
if (value instanceof String) { | |||
final String val = (String) value; | |||
if (val.contains(hbStart) && val.contains(hbEnd)) { | |||
merged.put(entry.getKey(), apply(handlebars, value.toString(), ctx, altStart, altEnd)); | |||
} else { | |||
merged.put(entry.getKey(), entry.getValue()); | |||
} | |||
} else if (value instanceof Map) { | |||
// recurse | |||
merged.put(entry.getKey(), apply(handlebars, (Map<String, Object>) value, ctx, altStart, altEnd)); | |||
} else { | |||
log.info("apply: "); | |||
merged.put(entry.getKey(), entry.getValue()); | |||
} | |||
} | |||
return merged; | |||
} | |||
public static String apply(Handlebars handlebars, String value, Map<String, Object> ctx) { | |||
return apply(handlebars, value, ctx, (char) 0, (char) 0); | |||
} | |||
public static final String DUMMY_START3 = "~~~___~~~"; | |||
public static final String DUMMY_START2 = "~~__~~"; | |||
public static final String DUMMY_END3 = "%%%\\^\\^\\^%%%"; | |||
public static final String DUMMY_END2 = "\\$\\$\\^\\^\\$\\$"; | |||
public static String apply(Handlebars handlebars, String value, Map<String, Object> ctx, char altStart, char altEnd) { | |||
if (value == null) return null; | |||
if (altStart != 0 && altEnd != 0 && (altStart != HB_START_CHAR && altEnd != HB_END_CHAR)) { | |||
final String s3 = StringUtils.repeat(altStart, 3); | |||
final String s2 = StringUtils.repeat(altStart, 2); | |||
final String e3 = StringUtils.repeat(altEnd, 3); | |||
final String e2 = StringUtils.repeat(altEnd, 2); | |||
// escape existing handlebars delimiters with dummy placeholders (we'll put them back later) | |||
value = value.replaceAll(quote(HB_LSTART), DUMMY_START3).replaceAll(HB_LEND, DUMMY_END3) | |||
.replaceAll(quote(HB_START), DUMMY_START2).replaceAll(HB_END, DUMMY_END2) | |||
// replace our custom start/end delimiters with handlebars standard ones | |||
.replaceAll(quote(s3), HB_LSTART).replaceAll(quote(e3), HB_LEND) | |||
.replaceAll(quote(s2), HB_START).replaceAll(quote(e2), HB_END); | |||
// run handlebars, then put the real handlebars stuff back (removing the dummy placeholders) | |||
value = apply(handlebars, value, ctx) | |||
.replaceAll(DUMMY_START3, HB_LSTART).replaceAll(DUMMY_END3, HB_LEND) | |||
.replaceAll(DUMMY_START2, HandlebarsUtil.HB_START).replaceAll(DUMMY_END2, HB_END); | |||
return value; | |||
} | |||
try { | |||
@Cleanup final StringWriter writer = new StringWriter(value.length()); | |||
handlebars.compile(value).apply(ctx, writer); | |||
return writer.toString(); | |||
} catch (HandlebarsException e) { | |||
final Throwable cause = e.getCause(); | |||
if (cause != null && ((cause instanceof FileNotFoundException) || (cause instanceof RequiredVariableUndefinedException))) { | |||
log.error(e.getMessage()+": \""+value+"\""); | |||
throw e; | |||
} | |||
return die("apply("+value+"): "+e, e); | |||
} catch (Exception e) { | |||
return die("apply("+value+"): "+e, e); | |||
} catch (Error e) { | |||
log.warn("apply: "+e, e); | |||
throw e; | |||
} | |||
} | |||
/** | |||
* Using reflection, we find all public getters of a thing (and if the getter returns an object, find all | |||
* of its public getters, recursively and so on). We limit our results to those getters that have corresponding | |||
* setters: methods whose sole parameter is of a compatible type with the return type of the getter. | |||
* For each such property whose value is a String, we apply handlebars using the provided context. | |||
* @param handlebars the handlebars template processor | |||
* @param thing the object to operate upon | |||
* @param ctx the context to apply | |||
* @param <T> the return type | |||
* @return the thing, possibly with String-valued properties having been modified | |||
*/ | |||
public static <T> T applyReflectively(Handlebars handlebars, T thing, Map<String, Object> ctx) { | |||
return applyReflectively(handlebars, thing, ctx, HB_START_CHAR, HB_END_CHAR); | |||
} | |||
public static <T> T applyReflectively(Handlebars handlebars, T thing, Map<String, Object> ctx, char altStart, char altEnd) { | |||
for (Method getterCandidate : thing.getClass().getMethods()) { | |||
if (!getterCandidate.getName().startsWith("get")) continue; | |||
if (!canApplyReflectively(getterCandidate.getReturnType())) continue; | |||
final String setterName = ReflectionUtil.setterForGetter(getterCandidate.getName()); | |||
for (Method setterCandidate : thing.getClass().getMethods()) { | |||
if (!setterCandidate.getName().equals(setterName) | |||
|| setterCandidate.getParameterTypes().length != 1 | |||
|| !setterCandidate.getParameterTypes()[0].isAssignableFrom(getterCandidate.getReturnType())) { | |||
continue; | |||
} | |||
try { | |||
final Object value = getterCandidate.invoke(thing, (Object[]) null); | |||
if (value == null) break; | |||
if (value instanceof String) { | |||
if (value.toString().contains("" + altStart + altStart)) { | |||
setterCandidate.invoke(thing, apply(handlebars, (String) value, ctx, altStart, altEnd)); | |||
} | |||
} else if (value instanceof JsonNode) { | |||
setterCandidate.invoke(thing, json(apply(handlebars, json(value), ctx, altStart, altEnd), JsonNode.class)); | |||
} else if (value instanceof Map) { | |||
setterCandidate.invoke(thing, apply(handlebars, (Map<String, Object>) value, ctx, altStart, altEnd)); | |||
// } else if (Object[].class.isAssignableFrom(value.getClass())) { | |||
// final Object[] array = (Object[]) value; | |||
// final Object[] rendered = new Object[array.length]; | |||
// for (int i=0; i<array.length; i++) { | |||
// rendered[i] = applyReflectively(handlebars, array[i], ctx, altStart, altEnd); | |||
// } | |||
// try { | |||
// setterCandidate.invoke(thing, rendered); | |||
// } catch (Exception e) { | |||
// die(e); | |||
// } | |||
// | |||
} else { | |||
// recurse | |||
setterCandidate.invoke(thing, applyReflectively(handlebars, value, ctx, altStart, altEnd)); | |||
} | |||
} catch (HandlebarsException e) { | |||
throw e; | |||
} catch (Exception e) { | |||
// no setter for getter | |||
log.warn("applyReflectively: " + e); | |||
} | |||
} | |||
} | |||
return thing; | |||
} | |||
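// Illustrative sketch of the reflective application described above (the Greeting class and
// the handlebars instance are hypothetical):
//
//     public class Greeting { @Getter @Setter private String message; }
//
//     final Greeting g = new Greeting();
//     g.setMessage("Hello {{name}}!");
//     applyReflectively(handlebars, g, Collections.singletonMap("name", (Object) "world"));
//     g.getMessage();   // "Hello world!"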
private static boolean canApplyReflectively(Class<?> returnType) { | |||
if (returnType.equals(String.class)) return true; | |||
try { | |||
return !(returnType.isPrimitive() || (returnType.getPackage() != null && returnType.getPackage().getName().equals("java.lang"))); | |||
} catch (NullPointerException npe) { | |||
log.warn("canApplyReflectively("+returnType+"): "+npe); | |||
return false; | |||
} | |||
} | |||
@Override public TemplateSource sourceAt(String source) throws IOException { | |||
return new StringTemplateSource(sourceName, source); | |||
} | |||
public static final CharSequence EMPTY_SAFE_STRING = ""; | |||
private static final AtomicReference<ContextMessageSender> messageSender = new AtomicReference<>(); | |||
public static void setMessageSender(ContextMessageSender sender) { | |||
synchronized (messageSender) { | |||
final ContextMessageSender current = messageSender.get(); | |||
if (current != null && current != sender && !current.equals(sender)) die("setMessageSender: already set to "+current); | |||
messageSender.set(sender); | |||
} | |||
} | |||
public static void registerUtilityHelpers (final Handlebars hb) { | |||
hb.registerHelper("hostname", (src, options) -> { | |||
switch (src.toString()) { | |||
case "short": return new Handlebars.SafeString(hostname_short()); | |||
case "domain": return new Handlebars.SafeString(domainname()); | |||
case "regular": default: return new Handlebars.SafeString(hostname()); | |||
} | |||
}); | |||
hb.registerHelper("exists", (src, options) -> empty(src) ? null : options.apply(options.fn)); | |||
hb.registerHelper("not_exists", (src, options) -> !empty(src) ? null : options.apply(options.fn)); | |||
hb.registerHelper("sha256", (src, options) -> { | |||
if (empty(src)) return ""; | |||
src = apply(hb, src.toString(), (Map<String, Object>) options.context.model()); | |||
src = sha256_hex(src.toString()); | |||
return new Handlebars.SafeString(src.toString()); | |||
}); | |||
hb.registerHelper("format_epoch", (val, options) -> { | |||
if (empty(val)) return ""; | |||
if (options.params.length != 2) return die("format_epoch: Usage: {{format_epoch expr format timezone}}"); | |||
final String format = options.param(0); | |||
final String timezone = options.param(1); | |||
DateTimeZone tz; | |||
try { | |||
final JavaTimezone jtz = JavaTimezone.fromString(timezone); | |||
if (jtz == null) { | |||
final UnicodeTimezone utz = UnicodeTimezone.fromString(timezone); | |||
if (utz == null) return die("format_epoch: invalid timezone: "+timezone); | |||
tz = DateTimeZone.forTimeZone(utz.toJava().getTimeZone()); | |||
} else { | |||
tz = DateTimeZone.forTimeZone(jtz.getTimeZone()); | |||
} | |||
} catch (Exception e) { | |||
log.warn("format_epoch: timezone error: "+timezone+", will retry with default parsing, or use GMT: "+e); | |||
tz = DateTimeZone.forTimeZone(TimeZone.getTimeZone(timezone)); | |||
} | |||
return new Handlebars.SafeString(DateTimeFormat.forPattern(format).withZone(tz).print(Long.valueOf(val.toString().trim()))); | |||
}); | |||
hb.registerHelper("format_float", (val, options) -> { | |||
if (empty(val)) return ""; | |||
if (options.params.length > 2) return die("format_float: too many parameters. Usage: {{format_float expr [format] [locale]}}"); | |||
final String format = options.params.length > 0 && !empty(options.param(0)) ? options.param(0) : DEFAULT_FLOAT_FORMAT; | |||
final Locale locale = LocaleUtil.fromString(options.params.length > 1 && !empty(options.param(1)) ? options.param(1) : null); | |||
val = apply(hb, val.toString(), (Map<String, Object>) options.context.model()); | |||
val = String.format(locale, format, Double.valueOf(val.toString())); | |||
return new Handlebars.SafeString(val.toString()); | |||
}); | |||
hb.registerHelper("json", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(json(src)); | |||
}); | |||
hb.registerHelper("escaped_json", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(new String(JSON_STRING_ENCODER.quoteAsString(json(src)))); | |||
}); | |||
hb.registerHelper("escaped_regex", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(Pattern.quote(src.toString())); | |||
}); | |||
hb.registerHelper("url_encoded_escaped_regex", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(urlEncode(Pattern.quote(src.toString()))); | |||
}); | |||
hb.registerHelper("escape_js_single_quoted_string", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(src.toString().replace("'", "\\'")); | |||
}); | |||
hb.registerHelper("context", (src, options) -> { | |||
if (empty(src)) return ""; | |||
if (options.params.length > 0) return die("context: too many parameters. Usage: {{context [recipient]}}"); | |||
final String ctxString = options.context.toString(); | |||
final String recipient = src.toString(); | |||
final String subject = options.params.length > 1 ? options.param(0) : null; | |||
sendContext(recipient, subject, ctxString,HttpContentTypes.TEXT_PLAIN); | |||
return new Handlebars.SafeString(ctxString); | |||
}); | |||
hb.registerHelper("context_json", (src, options) -> { | |||
if (empty(src)) return ""; | |||
try { | |||
if (options.params.length > 0) return die("context: too many parameters. Usage: {{context [recipient]}}"); | |||
final String json = json(options.context.model()); | |||
final String recipient = src.toString(); | |||
final String subject = options.params.length > 1 ? options.param(0) : null; | |||
sendContext(recipient, subject, json, HttpContentTypes.APPLICATION_JSON); | |||
return new Handlebars.SafeString(json); | |||
} catch (Exception e) { | |||
return new Handlebars.SafeString("Error calling json(options.context): "+e.getClass()+": "+e.getMessage()); | |||
} | |||
}); | |||
hb.registerHelper("required", (src, options) -> { | |||
if (src == null) throw new RequiredVariableUndefinedException("required: undefined variable"); | |||
return new Handlebars.SafeString(src.toString()); | |||
}); | |||
hb.registerHelper("safe_name", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(safeSnakeName(src.toString())); | |||
}); | |||
hb.registerHelper("urlEncode", (src, options) -> { | |||
if (empty(src)) return ""; | |||
src = apply(hb, src.toString(), (Map<String, Object>) options.context.model()); | |||
src = urlEncode(src.toString()); | |||
return new Handlebars.SafeString(src.toString()); | |||
}); | |||
hb.registerHelper("lastElement", (thing, options) -> { | |||
if (thing == null) return null; | |||
final Iterator iter = getIterator(thing); | |||
final String path = options.param(0); | |||
Object lastElement = null; | |||
while (iter.hasNext()) { | |||
lastElement = iter.next(); | |||
} | |||
final Object val = ReflectionUtil.get(lastElement, path); | |||
if (val != null) return new Handlebars.SafeString(""+val); | |||
return EMPTY_SAFE_STRING; | |||
}); | |||
hb.registerHelper("find", (thing, options) -> { | |||
if (thing == null) return null; | |||
final Iterator iter = getIterator(thing); | |||
final String path = options.param(0); | |||
final String arg = options.param(1); | |||
final String output = options.param(2); | |||
while (iter.hasNext()) { | |||
final Object item = iter.next(); | |||
try { | |||
final Object val = ReflectionUtil.get(item, path); | |||
if (val != null && String.valueOf(val).equals(arg)) { | |||
return new Handlebars.SafeString(""+ReflectionUtil.get(item, output)); | |||
} | |||
} catch (Exception e) { | |||
log.warn("find: "+e); | |||
} | |||
} | |||
return EMPTY_SAFE_STRING; | |||
}); | |||
hb.registerHelper("compare", (val1, options) -> { | |||
final String operator = options.param(0); | |||
final Object val2 = getComparisonArgParam(options); | |||
final Comparable v1 = cval(val1); | |||
final Object v2 = cval(val2); | |||
return (v1 == null && v2 == null) || (v1 != null && compare(operator, v1, v2)) ? options.fn(options) : options.inverse(options); | |||
}); | |||
hb.registerHelper("not", (val1, options) -> | |||
new Handlebars.SafeString(""+(!Boolean.valueOf(val1 == null ? "false" : val1.toString()))) | |||
); | |||
hb.registerHelper("string_compare", (val1, options) -> { | |||
final String operator = options.param(0); | |||
final Object val2 = getComparisonArgParam(options); | |||
final String v1 = val1 == null ? null : val1.toString(); | |||
final String v2 = val2 == null ? null : val2.toString(); | |||
return compare(operator, v1, v2) ? options.fn(options) : options.inverse(options); | |||
}); | |||
hb.registerHelper("long_compare", (val1, options) -> { | |||
final String operator = options.param(0); | |||
final Object val2 = getComparisonArgParam(options); | |||
final Long v1 = val1 == null ? null : Long.valueOf(val1.toString()); | |||
final Long v2 = val2 == null ? null : Long.valueOf(val2.toString()); | |||
return compare(operator, v1, v2) ? options.fn(options) : options.inverse(options); | |||
}); | |||
hb.registerHelper("double_compare", (val1, options) -> { | |||
final String operator = options.param(0); | |||
final Object val2 = getComparisonArgParam(options); | |||
final Double v1 = val1 == null ? null : Double.valueOf(val1.toString()); | |||
final Double v2 = val2 == null ? null : Double.valueOf(val2.toString()); | |||
return compare(operator, v1, v2) ? options.fn(options) : options.inverse(options); | |||
}); | |||
hb.registerHelper("big_compare", (val1, options) -> { | |||
final String operator = options.param(0); | |||
final Object val2 = getComparisonArgParam(options); | |||
final BigDecimal v1 = val1 == null ? null : big(val1.toString()); | |||
final BigDecimal v2 = val2 == null ? null : big(val2.toString()); | |||
return compare(operator, v1, v2) ? options.fn(options) : options.inverse(options); | |||
}); | |||
hb.registerHelper("expr", (val1, options) -> { | |||
final String operator = options.param(0); | |||
final String format = options.params.length > 2 ? options.param(2) : null; | |||
final Object val2 = getComparisonArgParam(options); | |||
final String v1 = val1.toString(); | |||
final String v2 = val2.toString(); | |||
final BigDecimal result; | |||
switch (operator) { | |||
case "+": result = big(v1).add(big(v2)); break; | |||
case "-": result = big(v1).subtract(big(v2)); break; | |||
case "*": result = big(v1).multiply(big(v2)); break; | |||
case "/": result = big(v1).divide(big(v2), MathContext.DECIMAL128); break; | |||
case "%": result = big(v1).remainder(big(v2)).abs(); break; | |||
case "^": result = big(v1).pow(big(v2).intValue()); break; | |||
default: return die("expr: invalid operator: "+operator); | |||
} | |||
// can't use the ternary (?:) operator here: its numeric promotion rules would make rval always end up as a double
final Number rval; | |||
if (v1.contains(".") || v2.contains(".") || operator.equals("/")) { | |||
rval = result.doubleValue(); | |||
} else { | |||
rval = result.intValue(); | |||
} | |||
if (format != null) { | |||
final Locale locale = LocaleUtil.fromString(options.params.length > 3 && !empty(options.param(3)) ? options.param(3) : null); | |||
return new Handlebars.SafeString(String.format(locale, format, rval)); | |||
} else { | |||
return new Handlebars.SafeString(rval.toString()); | |||
} | |||
}); | |||
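// Illustrative template usage (a sketch, not from the original source): {{expr subtotal "+" 5}} adds 5 to a
// numeric context value; an optional third parameter is a String.format pattern, e.g. {{expr total "/" 3 "%.2f"}}
// (division forces a double result, so a floating-point format is safe there).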
hb.registerHelper("rand", (Helper<Integer>) (len, options) -> { | |||
final String kind = options.param(0, "alphanumeric"); | |||
final String alphaCase = options.param(1, "lowercase"); | |||
switch (kind) { | |||
case "alphanumeric": case "alnum": default: return new Handlebars.SafeString(adjustCase(RandomStringUtils.randomAlphanumeric(len), alphaCase)); | |||
case "alpha": case "alphabetic": return new Handlebars.SafeString(adjustCase(RandomStringUtils.randomAlphabetic(len), alphaCase)); | |||
case "num": case "numeric": return new Handlebars.SafeString(RandomStringUtils.randomNumeric(len)); | |||
} | |||
}); | |||
hb.registerHelper("truncate", (Helper<Integer>) (max, options) -> { | |||
final String val = options.param(0, " "); | |||
if (empty(val)) return ""; | |||
if (max == -1 || max >= val.length()) return val; | |||
return new Handlebars.SafeString(val.substring(0, max)); | |||
}); | |||
hb.registerHelper("truncate_and_url_encode", (Helper<Integer>) (max, options) -> { | |||
final String val = options.param(0, " "); | |||
if (empty(val)) return ""; | |||
if (max == -1 || max >= val.length()) return simpleUrlEncode(val); | |||
return new Handlebars.SafeString(simpleUrlEncode(val.substring(0, max))); | |||
}); | |||
hb.registerHelper("truncate_and_double_url_encode", (Helper<Integer>) (max, options) -> { | |||
final String val = options.param(0, " "); | |||
if (empty(val)) return ""; | |||
if (max == -1 || max >= val.length()) return simpleUrlEncode(simpleUrlEncode(val)); | |||
return new Handlebars.SafeString(simpleUrlEncode(simpleUrlEncode(val.substring(0, max)))); | |||
}); | |||
hb.registerHelper("length", (thing, options) -> { | |||
if (empty(thing)) return "0"; | |||
if (thing.getClass().isArray()) return ""+((Object[]) thing).length; | |||
if (thing instanceof Collection) return ""+((Collection) thing).size(); | |||
if (thing instanceof ArrayNode) return ""+((ArrayNode) thing).size(); | |||
return ""; | |||
}); | |||
hb.registerHelper("first_nonempty", (thing, options) -> { | |||
if (!empty(thing)) return new Handlebars.SafeString(thing.toString()); | |||
for (final Object param : options.params) { | |||
if (!empty(param)) return new Handlebars.SafeString(param.toString()); | |||
} | |||
return EMPTY_SAFE_STRING; | |||
}); | |||
hb.registerHelper("key_file", (thing, options) -> { | |||
if (empty(thing)) return die("key_file: no file provided"); | |||
try { | |||
String path = thing.toString(); | |||
final String[] paths = new String[]{ | |||
System.getProperty("user.home") + "/" + path, | |||
System.getProperty("user.dir") + "/" + path, | |||
path | |||
}; | |||
log.debug("key_file: checking paths: "+arrayToString(paths, ",")); | |||
File found = null; | |||
File okToCreate = null; | |||
for (String p : paths) { | |||
try { | |||
final File f = new File(p); | |||
if (f.exists() && f.canRead() && f.length() > 0) { | |||
log.debug("key_file: assigning found="+abs(f)); | |||
found = f; | |||
break; | |||
} | |||
if (f.getParentFile().canWrite()) { | |||
log.debug("key_file: assigning okToCreate="+abs(f)); | |||
okToCreate = f; | |||
break; | |||
} | |||
} catch (Exception e) { | |||
log.warn("key_file: "+e.getClass().getName()+": "+e.getMessage()); | |||
} | |||
} | |||
if (found != null) { | |||
log.debug("key_file: returning contents of found: "+abs(found)); | |||
chmod(found, "o-rwx"); | |||
return new Handlebars.SafeString(FileUtil.toString(found).trim()); | |||
} | |||
if (okToCreate != null) { | |||
touch(okToCreate); | |||
initKey(okToCreate); | |||
chmod(okToCreate, "o-rwx"); | |||
log.info("key_file: chmodding and returning contents of okToCreate: "+abs(okToCreate)); | |||
return new Handlebars.SafeString(FileUtil.toString(okToCreate)); | |||
} | |||
return die("key_file: no file could be found or created"); | |||
} catch (Exception e) { | |||
return die("key_file: "+e, e); | |||
} | |||
}); | |||
} | |||
private static String adjustCase(String val, String alphaCase) { | |||
switch (alphaCase) { | |||
case "lower": case "lowercase": case "lc": return val.toLowerCase(); | |||
case "upper": case "uppercase": case "uc": return val.toUpperCase(); | |||
default: return val; | |||
} | |||
} | |||
private static String initKey(File f) throws IOException { | |||
chmod(f, "600"); | |||
FileUtil.toFile(f, UUID.randomUUID().toString()); | |||
return FileUtil.toString(f); | |||
} | |||
public static Object getComparisonArgParam(Options options) { | |||
if (options.params.length <= 1) return die("getComparisonArgParam: missing argument"); | |||
return options.param(1); | |||
} | |||
public static String getEmailRecipient(Handlebars hb, Options options, int index) { | |||
return options.params.length > index && !empty(options.param(index)) | |||
? apply(hb, options.param(index).toString(), (Map<String, Object>) options.context.model()) | |||
: null; | |||
} | |||
private static final ExecutorService contextSender = fixedPool(10); | |||
public static void sendContext(String recipient, String subject, String message, String contentType) { | |||
contextSender.submit(() -> { | |||
if (!empty(recipient) && !empty(message)) { | |||
synchronized (messageSender) { | |||
final ContextMessageSender sender = messageSender.get(); | |||
if (sender != null) { | |||
try { | |||
sender.send(recipient, subject, message, contentType); | |||
} catch (Exception e) { | |||
log.error("context: error sending message: " + e, e); | |||
} | |||
} | |||
} | |||
} | |||
}); | |||
} | |||
private static Iterator getIterator(Object thing) { | |||
if (thing instanceof Collection) { | |||
return ((Collection) thing).iterator(); | |||
} else if (thing instanceof Map) { | |||
return ((Map) thing).values().iterator(); | |||
} else if (Object[].class.isAssignableFrom(thing.getClass())) { | |||
return new ArrayIterator(thing); | |||
} else { | |||
return die("find: invalid argument type "+thing.getClass().getName()); | |||
} | |||
} | |||
private static Comparable cval(Object v) { | |||
if (v == null) return null; | |||
if (v instanceof Number) return (Comparable) v; | |||
if (v instanceof String) { | |||
final String s = v.toString(); | |||
try { | |||
return Long.parseLong(s); | |||
} catch (Exception e) { | |||
try { | |||
return big(s); | |||
} catch (Exception e2) { | |||
return s; | |||
} | |||
} | |||
} else { | |||
return die("don't know to compare objects of class "+v.getClass()); | |||
} | |||
} | |||
public static <T> boolean compare(String operator, Comparable<T> v1, T v2) { | |||
if (v1 == null) return v2 == null; | |||
if (v2 == null) return false; | |||
boolean result; | |||
final List<String> parts; | |||
switch (operator) { | |||
case "==": result = v1.equals(v2); break; | |||
case "!=": result = !v1.equals(v2); break; | |||
case ">": result = v1.compareTo(v2) > 0; break; | |||
case ">=": result = v1.compareTo(v2) >= 0; break; | |||
case "<": result = v1.compareTo(v2) < 0; break; | |||
case "<=": result = v1.compareTo(v2) <= 0; break; | |||
case "in": | |||
parts = StringUtil.split(v2.toString(), ", \n\t"); | |||
for (String part : parts) { | |||
if (v1.equals(part)) return true; | |||
} | |||
return false; | |||
case "not_in": | |||
parts = StringUtil.split(v2.toString(), ", \n\t"); | |||
for (String part : parts) { | |||
if (v1.equals(part)) return false; | |||
} | |||
return true; | |||
default: result = false; | |||
} | |||
return result; | |||
} | |||
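// Example semantics (illustrative, not from the original source):
//   compare(">", 3L, 2L)            -> true
//   compare("in", "b", "a b c")     -> true   (v2 is split on comma and whitespace)
//   compare("not_in", "d", "a,b,c") -> true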
public static void registerCurrencyHelpers(Handlebars hb) { | |||
hb.registerHelper("dollarsNoSign", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(formatDollarsNoSign(longDollarVal(src))); | |||
}); | |||
hb.registerHelper("dollarsWithSign", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(formatDollarsWithSign(longDollarVal(src))); | |||
}); | |||
hb.registerHelper("dollarsAndCentsNoSign", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(formatDollarsAndCentsNoSign(longDollarVal(src))); | |||
}); | |||
hb.registerHelper("dollarsAndCentsWithSign", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(formatDollarsAndCentsWithSign(longDollarVal(src))); | |||
}); | |||
hb.registerHelper("dollarsAndCentsPlain", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(formatDollarsAndCentsPlain(longDollarVal(src))); | |||
}); | |||
} | |||
@Getter @Setter private static String defaultTimeZone = "US/Eastern"; | |||
private abstract static class DateHelper implements Helper<Object> { | |||
protected DateTimeZone getTimeZone (Options options) { return getTimeZone(options, 0); } | |||
protected DateTimeZone getTimeZone (Options options, int index) { | |||
final String timeZoneName = options.param(index, getDefaultTimeZone()); | |||
try { | |||
return DateTimeZone.forID(timeZoneName); | |||
} catch (Exception e) { | |||
return die("getTimeZone: invalid timezone: "+timeZoneName); | |||
} | |||
} | |||
protected long zonedTimestamp (Object src, Options options) { return zonedTimestamp(src, options, 0); } | |||
protected long zonedTimestamp (Object src, Options options, int index) { | |||
if (empty(src)) src = "now"; | |||
final DateTimeZone timeZone = getTimeZone(options, index); | |||
return longVal(src, timeZone); | |||
} | |||
protected CharSequence print (DateTimeFormatter formatter, Object src, Options options) { | |||
return new Handlebars.SafeString(formatter.print(new DateTime(zonedTimestamp(src, options), getTimeZone(options)))); | |||
} | |||
} | |||
public static void registerDateHelpers(Handlebars hb) { | |||
hb.registerHelper("date_format", new DateHelper() { | |||
public CharSequence apply(Object src, Options options) { | |||
final DateTimeFormatter formatter = DateTimeFormat.forPattern(options.param(0)); | |||
return new Handlebars.SafeString(formatter.print(new DateTime(zonedTimestamp(src, options, 1), | |||
getTimeZone(options, 1)))); | |||
} | |||
}); | |||
hb.registerHelper("date_short", new DateHelper() { | |||
public CharSequence apply(Object src, Options options) { | |||
return print(TimeUtil.DATE_FORMAT_MMDDYYYY, src, options); | |||
} | |||
}); | |||
hb.registerHelper("date_yyyy_mm_dd", new DateHelper() { | |||
public CharSequence apply(Object src, Options options) { | |||
return print(TimeUtil.DATE_FORMAT_YYYY_MM_DD, src, options); | |||
} | |||
}); | |||
hb.registerHelper("date_mmm_dd_yyyy", new DateHelper() { | |||
public CharSequence apply(Object src, Options options) { | |||
return print(TimeUtil.DATE_FORMAT_MMM_DD_YYYY, src, options); | |||
} | |||
}); | |||
hb.registerHelper("date_long", new DateHelper() { | |||
public CharSequence apply(Object src, Options options) { | |||
return print(TimeUtil.DATE_FORMAT_MMMM_D_YYYY, src, options); | |||
} | |||
}); | |||
hb.registerHelper("timestamp", new DateHelper() { | |||
public CharSequence apply(Object src, Options options) { | |||
return new Handlebars.SafeString(Long.toString(zonedTimestamp(src, options))); | |||
} | |||
}); | |||
} | |||
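// Illustrative template usage (a sketch, not from the original source); timestamps are epoch millis or
// "now"-relative expressions, and the trailing parameter is an optional timezone (default US/Eastern):
//   {{date_yyyy_mm_dd someTimestamp "UTC"}}
//   {{date_format someTimestamp "MM/dd/yyyy HH:mm" "US/Pacific"}}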
private static long longVal(Object src, DateTimeZone timeZone) { | |||
return longVal(src, timeZone, true); | |||
} | |||
private static long longVal(Object src, DateTimeZone timeZone, boolean tryAgain) { | |||
if (src == null) return now(); | |||
String srcStr = src.toString().trim(); | |||
if (srcStr.equals("") || srcStr.equals("0") || srcStr.equals("now")) return now(); | |||
if (srcStr.startsWith("now")) { | |||
// One or more comma-separated period offsets may follow "now"; they are applied to the start of the current day (in the given timezone), in the order given.
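// Illustrative only: the exact period syntax depends on TimeUtil.PERIOD_FORMATTER (an assumption here),
// but something like "now1d" or "now1M,2d" would offset from the start of today.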
final String[] splitSrc = srcStr.substring(3).split(","); | |||
DateTime result = new DateTime(now(), timeZone).withTimeAtStartOfDay(); | |||
for (String period : splitSrc) { | |||
result = result.plus(Period.parse(period, TimeUtil.PERIOD_FORMATTER)); | |||
} | |||
return result.getMillis(); | |||
} | |||
try { | |||
return ((Number) src).longValue(); | |||
} catch (Exception e) { | |||
if (!tryAgain) return die("longVal: unparseable long: "+src+": "+e.getClass().getSimpleName()+": "+e.getMessage()); | |||
// try to parse it in different formats | |||
final Object t = TimeUtil.parse(src.toString(), timeZone); | |||
return longVal(t, timeZone, false); | |||
} | |||
} | |||
public static long longDollarVal(Object src) { | |||
final Long val = ReflectionUtil.toLong(src); | |||
return val == null ? 0 : val; | |||
} | |||
public static final String CLOSE_XML_DECL = "?>"; | |||
public static void registerXmlHelpers(final Handlebars hb) { | |||
hb.registerHelper("strip_xml_declaration", (src, options) -> { | |||
if (empty(src)) return ""; | |||
String xml = src.toString().trim(); | |||
if (xml.startsWith("<?xml")) { | |||
final int closeDecl = xml.indexOf(CLOSE_XML_DECL); | |||
if (closeDecl != -1) { | |||
xml = xml.substring(closeDecl + CLOSE_XML_DECL.length()).trim(); | |||
} | |||
} | |||
return new Handlebars.SafeString(xml); | |||
}); | |||
} | |||
public static void registerJurisdictionHelpers(final Handlebars hb, JurisdictionResolver jurisdictionResolver) { | |||
hb.registerHelper("us_state", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(jurisdictionResolver.usState(src.toString())); | |||
}); | |||
hb.registerHelper("us_zip", (src, options) -> { | |||
if (empty(src)) return ""; | |||
return new Handlebars.SafeString(jurisdictionResolver.usZip(src.toString())); | |||
}); | |||
} | |||
public static void registerJavaScriptHelper(final Handlebars hb, JsEngineFactory jsEngineFactory) { | |||
hb.registerHelper("js", (src, options) -> { | |||
if (empty(src)) return ""; | |||
final String format = options.params.length > 0 && !empty(options.param(0)) ? options.param(0) : null; | |||
final Locale locale = LocaleUtil.fromString(options.params.length > 1 && !empty(options.param(1)) ? options.param(1) : null); | |||
final Map<String, Object> ctx = (Map<String, Object>) options.context.model(); | |||
final Object result = jsEngineFactory.getJs().evaluate(src.toString(), ctx); | |||
if (result == null) return new Handlebars.SafeString("null"); | |||
return format != null | |||
? new Handlebars.SafeString(String.format(locale, format, Double.valueOf(result.toString()))) | |||
: new Handlebars.SafeString(result.toString()); | |||
}); | |||
} | |||
public static final String DEFAULT_FILE_RESOLVER = "_"; | |||
private static final Map<String, FileResolver> fileResolverMap = new HashMap<>(); | |||
public static void setFileIncludePath(String path) { setFileIncludePaths(DEFAULT_FILE_RESOLVER, new SingletonList<>(path)); } | |||
public static void setFileIncludePaths(Collection<String> paths) { setFileIncludePaths(DEFAULT_FILE_RESOLVER, paths); } | |||
public static void setFileIncludePaths(String name, Collection<String> paths) { | |||
fileResolverMap.put(name, new PathListFileResolver(paths)); | |||
} | |||
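// Illustrative Java usage (a sketch, not from the original source; the path is hypothetical):
//   setFileIncludePath("/etc/myapp/templates");
// after which templates can pull in files via the helpers registered in registerFileHelpers below:
//   {{textFile "footer.html"}}   or   {{base64File "logo.png"}}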
@AllArgsConstructor | |||
private static class FileLoaderHelper implements Helper<String> { | |||
private boolean isBase64EncoderOn; | |||
@Override public CharSequence apply(String filename, Options options) throws IOException { | |||
if (empty(filename)) return EMPTY_SAFE_STRING; | |||
final String include = options.get("includePath", DEFAULT_FILE_RESOLVER); | |||
final FileResolver fileResolver = fileResolverMap.get(include); | |||
if (fileResolver == null) return die("apply: no file resolve found for includePath="+include); | |||
final boolean escapeSpecialChars = options.get("escape", false); | |||
File f = fileResolver.resolve(filename); | |||
if (f == null && filename.startsWith(File.separator)) { | |||
// looks like an absolute path, try the filesystem | |||
f = new File(filename); | |||
if (!f.exists() || !f.canRead()) f = null; | |||
} | |||
if (f == null) { | |||
// try classpath | |||
try { | |||
String content = isBase64EncoderOn | |||
? encodeBytes(IOUtils.toByteArray(loadResourceAsStream(filename))) | |||
: stream2string(filename); | |||
if (escapeSpecialChars) { | |||
content = new String(getJsonStringEncoder().quoteAsString(content)); | |||
} | |||
return new Handlebars.SafeString(content); | |||
} catch (Exception e) { | |||
throw new FileNotFoundException("Cannot find readable file " + filename + ", resolver: " + fileResolver); | |||
} | |||
} | |||
try { | |||
String string = isBase64EncoderOn ? encodeFromFile(f) : FileUtil.toString(f); | |||
if (escapeSpecialChars) string = new String(getJsonStringEncoder().quoteAsString(string)); | |||
return new Handlebars.SafeString(string); | |||
} catch (IOException e) { | |||
return die("Cannot read file from: " + f, e); | |||
} | |||
} | |||
} | |||
public static void registerFileHelpers(final Handlebars hb) { | |||
hb.registerHelper("rawImagePng", (src, options) -> { | |||
if (empty(src)) return ""; | |||
final String include = options.get("includePath", DEFAULT_FILE_RESOLVER); | |||
final FileResolver fileResolver = fileResolverMap.get(include); | |||
if (fileResolver == null) return die("rawImagePng: no file resolve found for includePath="+include); | |||
final File f = fileResolver.resolve(src.toString()); | |||
String imgSrc = (f == null) ? src.toString() : f.getAbsolutePath(); | |||
final Object width = options.get("width"); | |||
final String widthAttr = empty(width) ? "" : "width=\"" + width + "\" "; | |||
return new Handlebars.SafeString( | |||
"<img " + widthAttr + "src=\"data:image/png;base64," + imgSrc + "\"/>"); | |||
}); | |||
hb.registerHelper("base64File", new FileLoaderHelper(true)); | |||
hb.registerHelper("textFile", new FileLoaderHelper(false)); | |||
} | |||
} |
@@ -0,0 +1,9 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import com.github.jknack.handlebars.Handlebars; | |||
public interface HasHandlebars { | |||
Handlebars getHandlebars(); | |||
} |
@@ -0,0 +1,63 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import java.io.File; | |||
import java.io.IOException; | |||
import java.util.Map; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.copy; | |||
@NoArgsConstructor @Accessors(chain=true) | |||
public abstract class ImageInsertion { | |||
@Getter @Setter private String name = null; | |||
@Getter @Setter private int page = 0; | |||
@Getter @Setter private float x; | |||
@Getter @Setter private float y; | |||
@Getter @Setter private float width = 0; | |||
@Getter @Setter private float height = 0; | |||
@Getter @Setter private String format = "png"; | |||
@Getter @Setter private boolean watermark = false; | |||
@JsonIgnore public abstract File getImageFile() throws IOException; | |||
public ImageInsertion(ImageInsertion other) { copy(this, other); } | |||
public ImageInsertion(String spec) { | |||
for (String part : StringUtil.split(spec, ", ")) { | |||
final int eqPos = part.indexOf("="); | |||
if (eqPos == -1) die("invalid image insertion (missing '='): "+spec); | |||
if (eqPos == part.length()-1) die("invalid image insertion (no value): "+spec); | |||
final String key = part.substring(0, eqPos).trim(); | |||
final String value = part.substring(eqPos+1).trim(); | |||
setField(key, value); | |||
} | |||
} | |||
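// Illustrative spec string (a sketch, not from the original source); keys match setField below:
//   "name=signature, page=0, x=72, y=540, width=180, height=54, format=png"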
public void init (Map<String, Object> map) { | |||
for (Map.Entry<String, Object> entry : map.entrySet()) { | |||
setField(entry.getKey(), entry.getValue().toString()); | |||
} | |||
} | |||
protected void setField(String key, String value) { | |||
switch (key) { | |||
case "name": this.name = value; break; | |||
case "page": this.page = Integer.parseInt(value); break; | |||
case "x": this.x = Float.parseFloat(value); break; | |||
case "y": this.y = Float.parseFloat(value); break; | |||
case "width": this.width = Float.parseFloat(value); break; | |||
case "height": this.height = Float.parseFloat(value); break; | |||
case "format": this.format = value; break; | |||
default: die("invalid parameter: "+key); | |||
} | |||
} | |||
} |
@@ -0,0 +1,13 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
public interface JurisdictionResolver { | |||
String usState (String value); | |||
String usZip (String value); | |||
default boolean isValidUsStateAbbreviation(String a) { return !empty(a) && usState(a) != null; } | |||
} |
@@ -0,0 +1,251 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import com.github.jknack.handlebars.Handlebars; | |||
import lombok.Cleanup; | |||
import lombok.Getter; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.apache.commons.io.FileUtils; | |||
import org.apache.pdfbox.io.MemoryUsageSetting; | |||
import org.apache.pdfbox.multipdf.PDFMergerUtility; | |||
import org.apache.pdfbox.pdmodel.*; | |||
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject; | |||
import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm; | |||
import org.apache.pdfbox.pdmodel.interactive.form.PDCheckBox; | |||
import org.apache.pdfbox.pdmodel.interactive.form.PDField; | |||
import org.apache.pdfbox.pdmodel.interactive.form.PDTextField; | |||
import org.cobbzilla.util.error.GeneralErrorHandler; | |||
import java.io.File; | |||
import java.io.IOException; | |||
import java.io.InputStream; | |||
import java.io.OutputStream; | |||
import java.nio.file.Files; | |||
import java.util.ArrayList; | |||
import java.util.List; | |||
import java.util.Map; | |||
import java.util.concurrent.atomic.AtomicReference; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.error.GeneralErrorHandlerBase.defaultErrorHandler; | |||
import static org.cobbzilla.util.io.FileUtil.abs; | |||
import static org.cobbzilla.util.io.FileUtil.temp; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.instantiate; | |||
@Slf4j | |||
public class PdfMerger { | |||
@Getter private static final AtomicReference<GeneralErrorHandler> errorHandler = defaultErrorHandler(); | |||
public static void setErrorHandler (GeneralErrorHandler handler) { errorHandler.set(handler); } | |||
public static final String NULL_FORM_VALUE = "þÿ"; | |||
public static final String CTX_IMAGE_INSERTIONS = "imageInsertions"; | |||
public static final String CTX_TEXT_INSERTIONS = "textInsertions"; | |||
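// Expected context layout (illustrative sketch, inferred from the merge code below):
//   context.get("fields")          -> Map<String,String> of PDF field name -> handlebars template for its value
//   context.get("imageInsertions") -> Map<String,Object> of name -> Base64ImageInsertion (or a Map spec)
//   context.get("textInsertions")  -> Map<String,Object> of name -> TextImageInsertion (or a Map spec)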
public static void merge(InputStream in, | |||
File outfile, | |||
Map<String, Object> context, | |||
Handlebars handlebars) throws Exception { | |||
final File out = merge(in, context, handlebars); | |||
if (empty(out)) die("merge: no outfiles generated"); | |||
if (!out.renameTo(outfile)) die("merge: error renaming "+abs(out)+"->"+abs(outfile)); | |||
} | |||
public static File merge(InputStream in, | |||
Map<String, Object> context, | |||
Handlebars handlebars) throws Exception { | |||
return merge(in, context, handlebars, new ArrayList<>()); | |||
} | |||
@SuppressWarnings("Duplicates") | |||
public static File merge(InputStream in, | |||
Map<String, Object> context, | |||
Handlebars handlebars, | |||
List<String> validationErrors) throws Exception { | |||
final Map<String, String> fieldMappings = (Map<String, String>) context.get("fields"); | |||
// load the document | |||
@Cleanup final PDDocument pdfDocument = PDDocument.load(in); | |||
// get the AcroForm from the document catalog
final PDAcroForm acroForm = pdfDocument.getDocumentCatalog().getAcroForm(); | |||
// as there might not be an AcroForm entry, a null check is necessary
if (acroForm != null) { | |||
acroForm.setNeedAppearances(false); | |||
// Iterate over the form fields and set their values.
for (PDField field : acroForm.getFields()) { | |||
try { | |||
String fieldValue = fieldMappings == null ? null : fieldMappings.get(field.getFullyQualifiedName()); | |||
if (!empty(fieldValue)) { | |||
fieldValue = safeApply(context, handlebars, fieldValue, validationErrors); | |||
if (fieldValue == null) continue; | |||
} | |||
if (field instanceof PDCheckBox) { | |||
PDCheckBox box = (PDCheckBox) field; | |||
if (!empty(fieldValue)) { | |||
if (Boolean.valueOf(fieldValue)) { | |||
box.check(); | |||
} else { | |||
box.unCheck(); | |||
} | |||
} | |||
} else { | |||
String formValue = field.getValueAsString(); | |||
if (formValue.equals(NULL_FORM_VALUE)) formValue = null; | |||
if (empty(formValue) && field instanceof PDTextField) { | |||
formValue = ((PDTextField) field).getDefaultValue(); | |||
if (formValue.equals(NULL_FORM_VALUE)) formValue = null; | |||
} | |||
if (empty(formValue)) formValue = fieldValue; | |||
if (!empty(formValue)) { | |||
formValue = safeApply(context, handlebars, formValue, validationErrors); | |||
if (formValue == null) continue; | |||
try { | |||
field.setValue(formValue); | |||
} catch (Exception e) { | |||
errorHandler.get().handleError("merge (field="+field+", value="+formValue+"): "+e, e); | |||
} | |||
} | |||
} | |||
} catch (Exception e) { | |||
errorHandler.get().handleError("merge: "+e, e); | |||
} | |||
field.setReadOnly(true); | |||
field.getCOSObject().setInt("Ff", 1); | |||
} | |||
// acroForm.flatten(); | |||
acroForm.setNeedAppearances(false); | |||
} | |||
// add images | |||
final Map<String, Object> imageInsertions = (Map<String, Object>) context.get(CTX_IMAGE_INSERTIONS); | |||
if (!empty(imageInsertions)) { | |||
for (Object insertion : imageInsertions.values()) { | |||
insertImage(pdfDocument, insertion, Base64ImageInsertion.class); | |||
} | |||
} | |||
// add text | |||
final Map<String, Object> textInsertions = (Map<String, Object>) context.get(CTX_TEXT_INSERTIONS); | |||
if (!empty(textInsertions)) { | |||
for (Object insertion : textInsertions.values()) { | |||
insertImage(pdfDocument, insertion, TextImageInsertion.class); | |||
} | |||
} | |||
final File output = temp(".pdf"); | |||
// Save the filled-out form (the document itself is closed via @Cleanup).
pdfDocument.getDocumentCatalog().setPageMode(PageMode.USE_THUMBS); | |||
pdfDocument.save(output); | |||
if (validationErrors != null && !validationErrors.isEmpty()) { | |||
errorHandler.get().handleError(validationErrors); | |||
return null; | |||
} | |||
return output; | |||
} | |||
public static String safeApply(Map<String, Object> context, Handlebars handlebars, String fieldValue, List<String> validationErrors) { | |||
try { | |||
return HandlebarsUtil.apply(handlebars, fieldValue, context); | |||
} catch (Exception e) { | |||
if (validationErrors != null) { | |||
log.warn("safeApply("+fieldValue+"): "+e); | |||
validationErrors.add(fieldValue+"\t"+e.getMessage()); | |||
return null; | |||
} else { | |||
throw e; | |||
} | |||
} | |||
} | |||
protected static void insertImage(PDDocument pdfDocument, Object insert, Class<? extends ImageInsertion> clazz) throws IOException { | |||
final ImageInsertion insertion; | |||
if (insert instanceof ImageInsertion) { | |||
insertion = (ImageInsertion) insert; | |||
} else if (insert instanceof Map) { | |||
insertion = instantiate(clazz); | |||
insertion.init((Map<String, Object>) insert); | |||
} else { | |||
die("insertImage("+clazz.getSimpleName()+"): invalid object: "+insert); | |||
return; | |||
} | |||
// write image to temp file | |||
File imageTemp = null; | |||
try { | |||
imageTemp = insertion.getImageFile(); | |||
if (imageTemp != null) { | |||
// create PD image | |||
final PDImageXObject image = PDImageXObject.createFromFile(abs(imageTemp), pdfDocument); | |||
final PDPageTree pages = pdfDocument.getDocumentCatalog().getPages(); | |||
final float insertionHeight = insertion.getHeight(); | |||
if (insertion.isWatermark()) { | |||
for (PDPage page : pages) { | |||
// set x, y, width and height to center insertion and maximize size on page | |||
final float y = (page.getBBox().getHeight()/2.0f) - insertionHeight; | |||
insertion.setX(20) | |||
.setY(y) | |||
.setWidth(page.getBBox().getWidth()-20) | |||
.setHeight(page.getBBox().getHeight()-10); | |||
insertImageOnPage(image, insertion, pdfDocument, page); | |||
} | |||
} else { | |||
insertImageOnPage(image, insertion, pdfDocument, pages.get(insertion.getPage())); | |||
} | |||
} | |||
} finally { | |||
if (imageTemp != null && !imageTemp.delete()) log.warn("insertImage("+clazz.getSimpleName()+"): error deleting image file: "+abs(imageTemp)); | |||
} | |||
} | |||
private static void insertImageOnPage(PDImageXObject image, ImageInsertion insertion, PDDocument pdfDocument, PDPage page) throws IOException { | |||
// open stream for writing inserted image | |||
final PDPageContentStream contentStream = new PDPageContentStream(pdfDocument, page, PDPageContentStream.AppendMode.APPEND, true); | |||
// draw image on page | |||
contentStream.drawImage(image, insertion.getX(), insertion.getY(), insertion.getWidth(), insertion.getHeight()); | |||
contentStream.close(); | |||
} | |||
public static void concatenate(List infiles, OutputStream out, long maxMemory, long maxDisk) throws IOException { | |||
final PDFMergerUtility merger = new PDFMergerUtility(); | |||
for (Object infile : infiles) { | |||
if (infile instanceof File) { | |||
merger.addSource((File) infile); | |||
} else if (infile instanceof InputStream) { | |||
merger.addSource((InputStream) infile); | |||
} else if (infile instanceof String) { | |||
merger.addSource((String) infile); | |||
} else { | |||
die("concatenate: invalid infile ("+infile.getClass().getName()+"): "+infile); | |||
} | |||
} | |||
merger.setDestinationStream(out); | |||
merger.mergeDocuments(MemoryUsageSetting.setupMixed(maxMemory, maxDisk)); | |||
} | |||
public static void scrubAcroForm(File file, OutputStream output) throws IOException { | |||
@Cleanup final InputStream pdfIn = FileUtils.openInputStream(file); | |||
@Cleanup final PDDocument pdfDoc = PDDocument.load(pdfIn); | |||
final PDAcroForm acroForm = pdfDoc.getDocumentCatalog().getAcroForm(); | |||
if (acroForm == null) { | |||
Files.copy(file.toPath(), output); | |||
} else { | |||
acroForm.setNeedAppearances(false); | |||
File tempFile = temp(".pdf"); | |||
pdfDoc.save(tempFile); | |||
pdfDoc.close(); | |||
Files.copy(tempFile.toPath(), output); | |||
tempFile.delete(); | |||
} | |||
} | |||
} |
@@ -0,0 +1,5 @@ | |||
package org.cobbzilla.util.handlebars; | |||
public class RequiredVariableUndefinedException extends RuntimeException { | |||
public RequiredVariableUndefinedException(String s) { super(s); } | |||
} |
@@ -0,0 +1,22 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
public class SimpleJurisdictionResolver implements JurisdictionResolver { | |||
public static final SimpleJurisdictionResolver instance = new SimpleJurisdictionResolver(); | |||
@Override public String usState(String value) { | |||
return empty(value) || value.length() != 2 ? die("usState: invalid: " + value) : value.toUpperCase(); | |||
} | |||
@Override public String usZip(String value) { | |||
return empty(value) || value.length() != 5 || StringUtil.onlyDigits(value).length() != value.length() | |||
? die("usZip: invalid: " + value) | |||
: value.toUpperCase(); | |||
} | |||
} |
@@ -0,0 +1,170 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import javax.imageio.ImageIO; | |||
import java.awt.*; | |||
import java.awt.image.BufferedImage; | |||
import java.io.File; | |||
import java.io.IOException; | |||
import java.util.ArrayList; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.io.FileUtil.temp; | |||
@NoArgsConstructor @Accessors(chain=true) | |||
public class TextImageInsertion extends ImageInsertion { | |||
public static final TextImageInsertion[] NO_TEXT_INSERTIONS = new TextImageInsertion[0]; | |||
@Getter @Setter private String content; | |||
public void capitalizeContent() { content = content == null ? null : content.toUpperCase(); } | |||
@Getter @Setter private String fontFamily = "Arial"; | |||
@Getter @Setter private String fontStyle = "plain"; | |||
@Getter @Setter private String fontColor = "000000"; | |||
@Getter @Setter private int fontSize = 14; | |||
@Getter @Setter private int alpha = 255; | |||
@Getter @Setter private int maxWidth = -1; | |||
@Getter @Setter private int widthPadding = 10; | |||
@Getter @Setter private int lineSpacing = 4; | |||
public TextImageInsertion(TextImageInsertion other) { super(other); } | |||
public TextImageInsertion(String spec) { super(spec); } | |||
@Override protected void setField(String key, String value) { | |||
switch (key) { | |||
case "content": content = value; break; | |||
case "fontFamily": fontFamily = value; break; | |||
case "fontStyle": fontStyle = value; break; | |||
case "fontColor": fontColor = value; break; | |||
case "fontSize": fontSize = Integer.parseInt(value); break; | |||
case "alpha": alpha = Integer.parseInt(value); break; | |||
case "maxWidth": maxWidth = Integer.parseInt(value); break; | |||
case "widthPadding": widthPadding = Integer.parseInt(value); break; | |||
case "lineSpacing": lineSpacing = Integer.parseInt(value); break; | |||
default: super.setField(key, value); | |||
} | |||
} | |||
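// Illustrative spec string (a sketch, not from the original source), combining base and text keys:
//   "page=0, x=72, y=500, content=APPROVED, fontSize=18, fontColor=ff0000, maxWidth=200"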
@JsonIgnore private int getRed () { return (int) (Long.parseLong(fontColor, 16) & 0xff0000) >> 16; } | |||
@JsonIgnore private int getGreen () { return (int) (Long.parseLong(fontColor, 16) & 0x00ff00) >> 8; } | |||
@JsonIgnore private int getBlue () { return (int) (Long.parseLong(fontColor, 16) & 0x0000ff); } | |||
@JsonIgnore private Color getAwtFontColor() { return new Color(getRed(), getGreen(), getBlue(), getAlpha()); } | |||
@JsonIgnore private int getAwtFontStyle() { | |||
switch (fontStyle.toLowerCase()) { | |||
case "plain": return Font.PLAIN; | |||
case "bold": return Font.BOLD; | |||
case "italic": return Font.ITALIC; | |||
default: return Font.PLAIN; | |||
} | |||
} | |||
// adapted from: https://stackoverflow.com/a/18800845/1251543 | |||
@Override public File getImageFile() { | |||
if (empty(getContent())) return null; | |||
Graphics2D g2d = getGraphics2D(); | |||
final ParsedText txt = getParsedText(g2d); | |||
if (getWidth() == 0) setWidth(txt.width); | |||
if (getHeight() == 0) setHeight(txt.height); | |||
g2d.dispose(); | |||
final BufferedImage img = new BufferedImage(txt.width, txt.height, BufferedImage.TYPE_INT_ARGB); | |||
g2d = img.createGraphics(); | |||
g2d.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY); | |||
g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); | |||
g2d.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY); | |||
g2d.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE); | |||
g2d.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON); | |||
g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); | |||
g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); | |||
g2d.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_PURE); | |||
g2d.setFont(getFont()); | |||
final FontMetrics fm = g2d.getFontMetrics(); | |||
g2d.setColor(getAwtFontColor()); | |||
for (int i=0; i<txt.lines.size(); i++) { | |||
final String line = txt.lines.get(i); | |||
int y = getLineY(fm, i); | |||
g2d.drawString(line, 0, y); | |||
} | |||
g2d.dispose(); | |||
final File temp = temp("."+getFormat()); | |||
try { | |||
ImageIO.write(img, getFormat(), temp); | |||
return temp; | |||
} catch (IOException e) { | |||
return die("getImageStream: "+e, e); | |||
} | |||
} | |||
protected ParsedText getParsedText() { return getParsedText(getGraphics2D()); } | |||
private Graphics2D getGraphics2D() { | |||
final BufferedImage img = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB); | |||
final Graphics2D g2d = img.createGraphics(); | |||
final Font font = getFont(); | |||
g2d.setFont(font); | |||
return g2d; | |||
} | |||
private Font getFont() { | |||
return new Font(getFontFamily(), getAwtFontStyle(), getFontSize()); | |||
} | |||
protected ParsedText getParsedText(Graphics2D g2d) { | |||
FontMetrics fm = g2d.getFontMetrics(); | |||
final ParsedText txt = new ParsedText(); | |||
int widest = -1; | |||
final String[] inLines = getContent().trim().split("\n"); | |||
if (getMaxWidth() == -1) { | |||
for (String inLine : inLines) { | |||
txt.lines.add(inLine); | |||
final int lineWidth = fm.stringWidth(inLine);
if (lineWidth > widest) widest = lineWidth;
} | |||
} else { | |||
for (String inLine : inLines) { | |||
final String[] words = inLine.split("\\s+"); | |||
StringBuilder b = new StringBuilder(); | |||
for (String word : words) { | |||
int stringWidth = fm.stringWidth(b.toString() + " " + word); | |||
if (stringWidth + getWidthPadding() > getMaxWidth()) { | |||
if (b.length() == 0) die("getImageFile: word too long for maxWidth=" + maxWidth + ": " + word); | |||
txt.lines.add(b.toString()); | |||
b = new StringBuilder(word); | |||
} else { | |||
if (b.length() > 0) b.append(" "); | |||
b.append(word); | |||
if (stringWidth > widest) widest = stringWidth; | |||
} | |||
} | |||
txt.lines.add(b.toString()); | |||
} | |||
} | |||
txt.width = widest + getWidthPadding(); | |||
txt.height = getLineY(fm, txt.lines.size()); | |||
return txt; | |||
} | |||
protected int getLineY(FontMetrics fm, int i) { return (i+1) * (fm.getAscent() + getLineSpacing()); } | |||
public int determineHeight() { return getParsedText().height; } | |||
private class ParsedText { | |||
public java.util.List<String> lines = new ArrayList<>(); | |||
public int width; | |||
public int height; | |||
} | |||
} |
@@ -0,0 +1,52 @@ | |||
package org.cobbzilla.util.handlebars; | |||
import com.github.jknack.handlebars.Handlebars; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.apache.poi.xwpf.converter.xhtml.XHTMLConverter; | |||
import org.apache.poi.xwpf.converter.xhtml.XHTMLOptions; | |||
import org.apache.poi.xwpf.usermodel.XWPFDocument; | |||
import org.cobbzilla.util.http.HtmlScreenCapture; | |||
import org.cobbzilla.util.io.FileUtil; | |||
import org.cobbzilla.util.xml.TidyHandlebarsSpanMerger; | |||
import java.io.File; | |||
import java.io.FileOutputStream; | |||
import java.io.InputStream; | |||
import java.io.OutputStream; | |||
import java.util.Map; | |||
import static org.cobbzilla.util.io.FileUtil.temp; | |||
import static org.cobbzilla.util.xml.TidyUtil.tidy; | |||
@Slf4j | |||
public class WordDocxMerger { | |||
public static File merge(InputStream in, | |||
Map<String, Object> context, | |||
HtmlScreenCapture capture, | |||
Handlebars handlebars) throws Exception { | |||
// convert to HTML | |||
final XWPFDocument document = new XWPFDocument(in); | |||
final File mergedHtml = temp(".html"); | |||
try (OutputStream out = new FileOutputStream(mergedHtml)) { | |||
final XHTMLOptions options = XHTMLOptions.create().setIgnoreStylesIfUnused(true); | |||
XHTMLConverter.getInstance().convert(document, out, options); | |||
} | |||
// - tidy HTML file | |||
// - merge consecutive <span> tags (which might occur in the middle of a {{variable}}) | |||
// - replace HTML entities that were encoded inside handlebars expressions (for example, convert ‘ and ’ to a plain single-quote char)
// - apply Handlebars | |||
String tidyHtml = tidy(mergedHtml, TidyHandlebarsSpanMerger.instance); | |||
tidyHtml = TidyHandlebarsSpanMerger.scrubHandlebars(tidyHtml); | |||
FileUtil.toFile(mergedHtml, HandlebarsUtil.apply(handlebars, tidyHtml, context)); | |||
// convert HTML -> PDF | |||
final File pdfOutput = temp(".pdf"); | |||
capture.capture(mergedHtml, pdfOutput); | |||
return pdfOutput; | |||
} | |||
} |
@@ -0,0 +1,20 @@ | |||
package org.cobbzilla.util.handlebars.main; | |||
import lombok.Cleanup; | |||
import org.cobbzilla.util.handlebars.PdfMerger; | |||
import org.cobbzilla.util.main.BaseMain; | |||
import java.io.OutputStream; | |||
public class PdfConcatMain extends BaseMain<PdfConcatOptions> { | |||
public static void main (String[] args) { main(PdfConcatMain.class, args); } | |||
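// Illustrative CLI invocation (a sketch; the launcher/classpath setup is an assumption):
//   java -cp ... org.cobbzilla.util.handlebars.main.PdfConcatMain -o merged.pdf a.pdf b.pdf c.pdf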
@Override protected void run() throws Exception { | |||
final PdfConcatOptions options = getOptions(); | |||
@Cleanup final OutputStream out = options.getOut(); | |||
PdfMerger.concatenate(options.getInfiles(), out, options.getMaxMemory(), options.getMaxDisk()); | |||
out("success"); | |||
} | |||
} |
@@ -0,0 +1,39 @@ | |||
package org.cobbzilla.util.handlebars.main; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import org.cobbzilla.util.main.BaseMainOptions; | |||
import org.kohsuke.args4j.Argument; | |||
import org.kohsuke.args4j.Option; | |||
import java.io.File; | |||
import java.io.OutputStream; | |||
import java.util.List; | |||
public class PdfConcatOptions extends BaseMainOptions { | |||
public static final String USAGE_OUTFILE = "Output file. Default is stdout."; | |||
public static final String OPT_OUTFILE = "-o"; | |||
public static final String LONGOPT_OUTFILE= "--output"; | |||
@Option(name=OPT_OUTFILE, aliases=LONGOPT_OUTFILE, usage=USAGE_OUTFILE) | |||
@Getter @Setter private File outfile; | |||
public OutputStream getOut () { return outStream(outfile); } | |||
public static final String USAGE_INFILES = "Input PDF files to concatenate";
@Argument(usage=USAGE_INFILES) | |||
@Getter @Setter private List<String> infiles; | |||
public static final String USAGE_MAX_MEMORY = "Max memory to use. Default is unlimited"; | |||
public static final String OPT_MAX_MEMORY = "-m"; | |||
public static final String LONGOPT_MAX_MEMORY= "--max-memory"; | |||
@Option(name=OPT_MAX_MEMORY, aliases=LONGOPT_MAX_MEMORY, usage=USAGE_MAX_MEMORY) | |||
@Getter @Setter private long maxMemory = -1; | |||
public static final String USAGE_MAX_DISK = "Max disk to use. Default is unlimited"; | |||
public static final String OPT_MAX_DISK = "-d"; | |||
public static final String LONGOPT_MAX_DISK= "--max-disk"; | |||
@Option(name=OPT_MAX_DISK, aliases=LONGOPT_MAX_DISK, usage=USAGE_MAX_DISK) | |||
@Getter @Setter private long maxDisk = -1; | |||
} |
@@ -0,0 +1,53 @@ | |||
package org.cobbzilla.util.handlebars.main; | |||
import com.github.jknack.handlebars.Handlebars; | |||
import lombok.Cleanup; | |||
import lombok.Getter; | |||
import org.cobbzilla.util.error.GeneralErrorHandler; | |||
import org.cobbzilla.util.handlebars.PdfMerger; | |||
import org.cobbzilla.util.main.BaseMain; | |||
import org.cobbzilla.util.string.StringUtil; | |||
import java.io.File; | |||
import java.io.InputStream; | |||
import java.util.ArrayList; | |||
import java.util.List; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.io.FileUtil.abs; | |||
public class PdfMergeMain extends BaseMain<PdfMergeOptions> { | |||
public static void main (String[] args) { main(PdfMergeMain.class, args); } | |||
@Getter protected Handlebars handlebars; | |||
@Override protected void run() throws Exception { | |||
final PdfMergeOptions options = getOptions(); | |||
final List<String> errors = new ArrayList<>(); | |||
PdfMerger.setErrorHandler(new GeneralErrorHandler() { | |||
@Override public <T> T handleError(String message) { errors.add(message); return null; } | |||
@Override public <T> T handleError(String message, Exception e) { return handleError(message+": "+e.getClass().getSimpleName()+": "+e.getMessage()); } | |||
@Override public <T> T handleError(List<String> validationErrors) { errors.addAll(validationErrors); return null; } | |||
}); | |||
@Cleanup final InputStream in = options.getInputStream(); | |||
try { | |||
if (options.hasOutfile()) { | |||
final File outfile = options.getOutfile(); | |||
PdfMerger.merge(in, outfile, options.getContext(), getHandlebars()); | |||
out(abs(outfile)); | |||
} else { | |||
final File output = PdfMerger.merge(in, options.getContext(), getHandlebars()); | |||
out(abs(output)); | |||
} | |||
} catch (Exception e) { | |||
err("Unexpected exception merging PDF: "+e.getClass().getSimpleName()+": "+e.getMessage()); | |||
} | |||
if (!empty(errors)) { | |||
err(errors.size()+" error"+(errors.size() > 1 ? "s" : "")+" found when merging PDF:\n"+ StringUtil.toString(errors, "\n")); | |||
} | |||
} | |||
} |
@@ -0,0 +1,42 @@ | |||
package org.cobbzilla.util.handlebars.main; | |||
import lombok.Getter; | |||
import lombok.Setter; | |||
import org.cobbzilla.util.json.JsonUtil; | |||
import org.cobbzilla.util.main.BaseMainOptions; | |||
import org.kohsuke.args4j.Option; | |||
import java.io.File; | |||
import java.io.InputStream; | |||
import java.util.HashMap; | |||
import java.util.Map; | |||
public class PdfMergeOptions extends BaseMainOptions { | |||
public static final String USAGE_INFILE = "Input file. Default is stdin"; | |||
public static final String OPT_INFILE = "-i"; | |||
public static final String LONGOPT_INFILE= "--infile"; | |||
@Option(name=OPT_INFILE, aliases=LONGOPT_INFILE, usage=USAGE_INFILE) | |||
@Getter @Setter private File infile; | |||
public InputStream getInputStream() { return inStream(getInfile()); } | |||
public static final String USAGE_CTXFILE = "Context file, must be a JSON map of String->Object"; | |||
public static final String OPT_CTXFILE = "-c"; | |||
public static final String LONGOPT_CTXFILE= "--context"; | |||
@Option(name=OPT_CTXFILE, aliases=LONGOPT_CTXFILE, usage=USAGE_CTXFILE) | |||
@Getter @Setter private File contextFile; | |||
public Map<String, Object> getContext() throws Exception { | |||
if (contextFile == null) return new HashMap<>(); | |||
return JsonUtil.fromJson(contextFile, Map.class); | |||
} | |||
public static final String USAGE_OUTFILE = "Output file. Default is a random temp file"; | |||
public static final String OPT_OUTFILE = "-o"; | |||
public static final String LONGOPT_OUTFILE= "--outfile"; | |||
@Option(name=OPT_OUTFILE, aliases=LONGOPT_OUTFILE, usage=USAGE_OUTFILE) | |||
@Getter @Setter private File outfile; | |||
public boolean hasOutfile () { return outfile != null; } | |||
} |
@@ -0,0 +1,27 @@ | |||
package org.cobbzilla.util.http; | |||
import lombok.*; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.copy; | |||
@NoArgsConstructor @AllArgsConstructor @ToString(of={"baseUri", "user"}) | |||
@EqualsAndHashCode(of={"baseUri", "user", "password"}) | |||
public class ApiConnectionInfo { | |||
@Getter @Setter private String baseUri; | |||
public boolean hasBaseUri () { return baseUri != null; } | |||
@Getter @Setter private String user; | |||
public boolean hasUser () { return user != null; } | |||
@Getter @Setter private String password; | |||
public ApiConnectionInfo (String baseUri) { this.baseUri = baseUri; } | |||
public ApiConnectionInfo (ApiConnectionInfo other) { copy(this, other); } | |||
// alias for when this is used in json with snake_case naming conventions | |||
public String getBase_uri () { return getBaseUri(); } | |||
public void setBase_uri (String uri) { setBaseUri(uri); } | |||
} |
@@ -0,0 +1,57 @@ | |||
package org.cobbzilla.util.http; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.NoArgsConstructor; | |||
import org.apache.http.client.CookieStore; | |||
import org.apache.http.cookie.Cookie; | |||
import org.cobbzilla.util.collection.CaseInsensitiveStringKeyMap; | |||
import java.util.*; | |||
@NoArgsConstructor | |||
public class CookieJar extends CaseInsensitiveStringKeyMap<HttpCookieBean> implements CookieStore { | |||
public CookieJar(List<HttpCookieBean> cookies) { for (HttpCookieBean cookie : cookies) add(cookie); } | |||
public CookieJar(HttpCookieBean cookie) { add(cookie); } | |||
public void add (HttpCookieBean cookie) { | |||
if (cookie.expired()) { | |||
remove(cookie.getName()); | |||
} else { | |||
put(cookie.getName(), cookie); | |||
} | |||
} | |||
@JsonIgnore | |||
public String getRequestValue() { | |||
final StringBuilder sb = new StringBuilder(); | |||
for (String name : keySet()) { | |||
if (sb.length() > 0) sb.append("; "); | |||
sb.append(name).append("=").append(get(name).getValue()); | |||
} | |||
return sb.toString(); | |||
} | |||
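// Illustrative result (not from the original source): a jar holding cookies "sid" and "theme"
// would produce a Cookie header value like "sid=abc123; theme=dark".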
public List<HttpCookieBean> getCookiesList () { return new ArrayList<>(values()); } | |||
@Override public void addCookie(Cookie cookie) { add(new HttpCookieBean(cookie)); } | |||
@Override public List<Cookie> getCookies() { | |||
final List<Cookie> cookies = new ArrayList<>(size()); | |||
for (HttpCookieBean cookie : values()) { | |||
cookies.add(cookie.toHttpClientCookie()); | |||
} | |||
return cookies; | |||
} | |||
@Override public boolean clearExpired(Date date) { | |||
final long expiration = date.getTime(); | |||
final Set<String> toRemove = new HashSet<>(); | |||
for (HttpCookieBean cookie : values()) { | |||
if (cookie.expired(expiration)) toRemove.add(cookie.getName()); | |||
} | |||
        for (String name : toRemove) remove(name);
        // CookieStore contract: return true if any cookies were purged
        return !toRemove.isEmpty();
} | |||
} |
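/*
 * Usage sketch (illustrative): cookies are keyed by name (case-insensitively), so adding a
 * cookie with an existing name replaces it, and adding an already-expired cookie removes it.
 *
 *   CookieJar jar = new CookieJar();
 *   jar.add(HttpCookieBean.parse("session=abc123; Path=/; HttpOnly"));
 *   String header = jar.getRequestValue();   // "session=abc123"
 */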
@@ -0,0 +1,49 @@ | |||
package org.cobbzilla.util.http; | |||
import lombok.Cleanup; | |||
import lombok.extern.slf4j.Slf4j; | |||
import java.io.File; | |||
import static java.util.concurrent.TimeUnit.SECONDS; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
import static org.cobbzilla.util.io.FileUtil.abs; | |||
import static org.cobbzilla.util.io.StreamUtil.loadResourceAsStringOrDie; | |||
import static org.cobbzilla.util.string.StringUtil.getPackagePath; | |||
import static org.cobbzilla.util.system.Sleep.sleep; | |||
import static org.cobbzilla.util.time.TimeUtil.formatDuration; | |||
@Slf4j | |||
public class HtmlScreenCapture extends PhantomUtil { | |||
private static final long TIMEOUT = SECONDS.toMillis(60); | |||
public static final String SCRIPT = loadResourceAsStringOrDie(getPackagePath(HtmlScreenCapture.class)+"/html_screen_capture.js"); | |||
public synchronized void capture (String url, File file) { capture(url, file, TIMEOUT); } | |||
public synchronized void capture (String url, File file, long timeout) { | |||
final String script = SCRIPT.replace("@@URL@@", url).replace("@@FILE@@", abs(file)); | |||
try { | |||
@Cleanup final PhantomJSHandle handle = execJs(script); | |||
long start = now(); | |||
while (file.length() == 0 && now() - start < timeout) sleep(200); | |||
if (file.length() == 0 && now() - start >= timeout) { | |||
sleep(5000); | |||
if (file.length() == 0) die("capture: after " + formatDuration(timeout) + " file was never written to: " + abs(file)+", handle="+handle); | |||
} | |||
} catch (Exception e) { | |||
die("capture: unexpected exception: "+e, e); | |||
} | |||
} | |||
public void capture (File in, File out) { | |||
try { | |||
capture(in.toURI().toString(), out); | |||
} catch (Exception e) { | |||
die("capture("+abs(in)+"): "+e, e); | |||
} | |||
} | |||
} |
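/*
 * Usage sketch (illustrative; assumes PhantomUtil provides a usable no-arg constructor and
 * that PhantomJS is installed wherever PhantomUtil expects it):
 *
 *   HtmlScreenCapture screenshots = new HtmlScreenCapture();
 *   screenshots.capture("https://example.com/", new File("/tmp/example.png"));   // waits up to 60s
 *
 * capture() blocks, polling the output file until it is non-empty or the timeout elapses,
 * and die()s if nothing was ever written.
 */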
@@ -0,0 +1,24 @@ | |||
package org.cobbzilla.util.http; | |||
import com.fasterxml.jackson.annotation.JsonCreator; | |||
import org.apache.http.auth.AuthScheme; | |||
import org.apache.http.impl.auth.BasicScheme; | |||
import org.apache.http.impl.auth.DigestScheme; | |||
import org.apache.http.impl.auth.KerberosScheme; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.instantiate; | |||
public enum HttpAuthType { | |||
basic (BasicScheme.class), | |||
digest (DigestScheme.class), | |||
kerberos (KerberosScheme.class); | |||
private final Class<? extends AuthScheme> scheme; | |||
HttpAuthType(Class<? extends AuthScheme> scheme) { this.scheme = scheme; } | |||
public AuthScheme newScheme () { return instantiate(scheme); } | |||
@JsonCreator public static HttpAuthType create(String value) { return valueOf(value.toLowerCase()); } | |||
} |
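/*
 * Usage sketch (illustrative): create() lower-cases its input, so JSON values like "Basic"
 * or "DIGEST" bind to the matching constant, and newScheme() reflectively instantiates a
 * fresh AuthScheme of that type.
 *
 *   AuthScheme scheme = HttpAuthType.create("Basic").newScheme();   // a new BasicScheme
 */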
@@ -0,0 +1,11 @@ | |||
package org.cobbzilla.util.http; | |||
import com.fasterxml.jackson.annotation.JsonCreator; | |||
public enum HttpCallStatus { | |||
initialized, pending, requested, received_response, success, error, timeout; | |||
@JsonCreator public static HttpCallStatus fromString (String val) { return valueOf(val.toLowerCase()); } | |||
} |
@@ -0,0 +1,30 @@ | |||
package org.cobbzilla.util.http; | |||
import org.apache.http.client.methods.CloseableHttpResponse; | |||
import org.apache.http.impl.client.CloseableHttpClient; | |||
import java.io.FilterInputStream; | |||
import java.io.IOException; | |||
public class HttpClosingFilterInputStream extends FilterInputStream { | |||
private final CloseableHttpClient httpClient; | |||
public HttpClosingFilterInputStream(CloseableHttpClient httpClient, | |||
CloseableHttpResponse response) throws IOException { | |||
super(response.getEntity().getContent()); | |||
this.httpClient = httpClient; | |||
} | |||
@Override public void close() throws IOException { | |||
IOException ioe = null; | |||
try { | |||
super.close(); | |||
} catch (IOException e) { | |||
ioe = e; | |||
} | |||
httpClient.close(); | |||
if (ioe != null) throw ioe; | |||
} | |||
} |
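/*
 * Usage sketch (illustrative): wrapping the response entity stream this way lets a method
 * hand back a plain InputStream whose close() also closes the CloseableHttpClient that
 * produced it, so the caller never has to know about the client.
 *
 *   CloseableHttpClient client = org.apache.http.impl.client.HttpClients.createDefault();
 *   CloseableHttpResponse response = client.execute(new org.apache.http.client.methods.HttpGet("https://example.com/"));
 *   InputStream in = new HttpClosingFilterInputStream(client, response);
 *   // ... read from 'in' ...; in.close() closes both the stream and the client
 */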
@@ -0,0 +1,125 @@ | |||
package org.cobbzilla.util.http; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.cobbzilla.util.collection.MapBuilder; | |||
import org.cobbzilla.util.collection.NameAndValue; | |||
import java.util.Map; | |||
import static org.apache.commons.lang3.StringEscapeUtils.*; | |||
import static org.apache.http.HttpHeaders.CONTENT_TYPE; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
@Slf4j | |||
public class HttpContentTypes { | |||
public static final String TEXT_HTML = "text/html"; | |||
public static final String TEXT_PLAIN = "text/plain"; | |||
public static final String TEXT_CSV = "text/csv"; | |||
public static final String APPLICATION_JSON = "application/json"; | |||
public static final String APPLICATION_XML = "application/xml"; | |||
public static final String APPLICATION_PDF = "application/pdf"; | |||
public static final String IMAGE_PNG = "image/png"; | |||
    public static final String IMAGE_JPEG = "image/jpeg";
public static final String IMAGE_GIF = "image/gif"; | |||
public static final String APPLICATION_PEM_FILE = "application/x-pem-file"; | |||
public static final String APPLICATION_PKCS12_FILE = "application/x-pkcs12"; | |||
public static final String APPLICATION_CER_FILE = "application/x-x509-user-cert"; | |||
public static final String APPLICATION_CRT_FILE = "application/x-x509-ca-cert"; | |||
public static final String APPLICATION_OCTET_STREAM = "application/octet-stream"; | |||
public static final String UNKNOWN = APPLICATION_OCTET_STREAM; | |||
public static final String APPLICATION_ZIP = "application/zip"; | |||
public static final String APPLICATION_JAR = "application/java-archive"; | |||
public static final String APPLICATION_GZIP = "application/gzip"; | |||
public static final String MULTIPART_FORM_DATA = "multipart/form-data"; | |||
public static final String APPLICATION_FORM_URL_ENCODED = "application/x-www-form-urlencoded"; | |||
// useful when constructing HttpRequestBeans that will be used against a JSON API | |||
private static NameAndValue[] nvHttp(String type) { return new NameAndValue[]{new NameAndValue(CONTENT_TYPE, type)}; } | |||
public static final NameAndValue[] NV_HTTP_JSON = nvHttp(APPLICATION_JSON); | |||
public static final NameAndValue[] NV_HTTP_XML = nvHttp(APPLICATION_XML); | |||
public static final Map<String, NameAndValue[]> HTTP_CONTENT_TYPES = MapBuilder.build(new Object[][] { | |||
{ APPLICATION_JSON, NV_HTTP_JSON }, | |||
{ APPLICATION_XML, NV_HTTP_XML }, | |||
}); | |||
public static final String CONTENT_TYPE_ANY = "*/*"; | |||
public static String contentType(String name) { | |||
if (empty(name)) { | |||
log.warn("contentType: no content-type could be determined for name (empty)"); | |||
return APPLICATION_OCTET_STREAM; | |||
} | |||
final int dot = name.lastIndexOf('.'); | |||
        // compare extensions case-insensitively
        final String ext = (dot != -1 && dot != name.length()-1) ? name.substring(dot+1).toLowerCase() : name;
switch (ext) { | |||
case "htm": case "html": return TEXT_HTML; | |||
case "png": return IMAGE_PNG; | |||
case "jpg": case "jpeg": return IMAGE_JPEG; | |||
case "gif": return IMAGE_GIF; | |||
case "xml": return APPLICATION_XML; | |||
case "pdf": return APPLICATION_PDF; | |||
case "json": return APPLICATION_JSON; | |||
case "gz": case "tgz": return APPLICATION_GZIP; | |||
case "zip": return APPLICATION_ZIP; | |||
case "jar": return APPLICATION_JAR; | |||
case "txt": return TEXT_PLAIN; | |||
case "csv": return TEXT_CSV; | |||
case "pem": return APPLICATION_PEM_FILE; | |||
case "p12": return APPLICATION_PKCS12_FILE; | |||
case "cer": return APPLICATION_CER_FILE; | |||
case "crt": return APPLICATION_CRT_FILE; | |||
default: | |||
log.warn("contentType: no content-type could be determined for name: "+name); | |||
return APPLICATION_OCTET_STREAM; | |||
} | |||
} | |||
public static String fileExt (String contentType) { | |||
switch (contentType) { | |||
case TEXT_HTML: return ".html"; | |||
case TEXT_PLAIN: return ".txt"; | |||
case TEXT_CSV: return ".csv"; | |||
case IMAGE_PNG: return ".png"; | |||
case IMAGE_JPEG: return ".jpeg"; | |||
case IMAGE_GIF: return ".gif"; | |||
case APPLICATION_XML: return ".xml"; | |||
case APPLICATION_PDF: return ".pdf"; | |||
case APPLICATION_JSON: return ".json"; | |||
case APPLICATION_ZIP: return ".zip"; | |||
case APPLICATION_GZIP: return ".tar.gz"; | |||
case APPLICATION_PEM_FILE: return ".pem"; | |||
case APPLICATION_PKCS12_FILE: return ".p12"; | |||
case APPLICATION_CER_FILE: return ".cer"; | |||
case APPLICATION_CRT_FILE: return ".crt"; | |||
default: return die("fileExt: no file extension could be determined for content-type: "+contentType); | |||
} | |||
} | |||
public static String fileExtNoDot (String contentType) { | |||
return fileExt(contentType).substring(1); | |||
} | |||
public static String escape(String mime, String data) { | |||
switch (mime) { | |||
case APPLICATION_XML: return escapeXml10(data); | |||
case TEXT_HTML: return escapeHtml4(data); | |||
} | |||
return data; | |||
} | |||
public static String unescape(String mime, String data) { | |||
if (empty(data)) return data; | |||
switch (mime) { | |||
case APPLICATION_XML: return unescapeXml(data); | |||
case TEXT_HTML: return unescapeHtml4(data); | |||
} | |||
return data; | |||
} | |||
public static String multipartWithBoundary(String boundary) { return "multipart/form-data; boundary=" + boundary; } | |||
} |
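/*
 * Usage sketch (illustrative): contentType() maps a file name to a MIME type by extension,
 * fileExt() goes the other way, and escape()/unescape() apply XML or HTML escaping only for
 * those two MIME types. Unknown names fall back to application/octet-stream; an unknown
 * content type passed to fileExt() is treated as an error (die).
 *
 *   HttpContentTypes.contentType("report.pdf");           // "application/pdf"
 *   HttpContentTypes.contentType("archive.tgz");          // "application/gzip"
 *   HttpContentTypes.fileExt(APPLICATION_JSON);           // ".json"
 *   HttpContentTypes.escape(APPLICATION_XML, "a < b");    // "a &lt; b"
 */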
@@ -0,0 +1,145 @@ | |||
package org.cobbzilla.util.http; | |||
import com.fasterxml.jackson.annotation.JsonIgnore; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.apache.http.cookie.Cookie; | |||
import org.apache.http.impl.cookie.BasicClientCookie; | |||
import org.joda.time.DateTime; | |||
import org.joda.time.format.DateTimeFormat; | |||
import org.joda.time.format.DateTimeFormatter; | |||
import java.util.Date; | |||
import java.util.StringTokenizer; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.die; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.empty; | |||
import static org.cobbzilla.util.daemon.ZillaRuntime.now; | |||
import static org.cobbzilla.util.reflect.ReflectionUtil.copy; | |||
@NoArgsConstructor @Accessors(chain=true) @Slf4j | |||
public class HttpCookieBean { | |||
public static final DateTimeFormatter[] EXPIRES_PATTERNS = { | |||
DateTimeFormat.forPattern("E, dd MMM yyyy HH:mm:ss Z"), | |||
DateTimeFormat.forPattern("E, dd-MMM-yyyy HH:mm:ss Z"), | |||
DateTimeFormat.forPattern("E, dd MMM yyyy HH:mm:ss z"), | |||
DateTimeFormat.forPattern("E, dd-MMM-yyyy HH:mm:ss z") | |||
}; | |||
@Getter @Setter private String name; | |||
@Getter @Setter private String value; | |||
@Getter @Setter private String domain; | |||
public boolean hasDomain () { return !empty(domain); } | |||
@Getter @Setter private String path; | |||
@Getter @Setter private String expires; | |||
@Getter @Setter private Long maxAge; | |||
@Getter @Setter private boolean secure; | |||
@Getter @Setter private boolean httpOnly; | |||
public HttpCookieBean(String name, String value) { this(name, value, null); } | |||
public HttpCookieBean(String name, String value, String domain) { | |||
this.name = name; | |||
this.value = value; | |||
this.domain = domain; | |||
} | |||
public HttpCookieBean (HttpCookieBean other) { copy(this, other); } | |||
public HttpCookieBean(Cookie cookie) { | |||
this(cookie.getName(), cookie.getValue(), cookie.getDomain()); | |||
path = cookie.getPath(); | |||
secure = cookie.isSecure(); | |||
final Date expiryDate = cookie.getExpiryDate(); | |||
if (expiryDate != null) { | |||
expires = EXPIRES_PATTERNS[0].print(expiryDate.getTime()); | |||
} | |||
} | |||
public static HttpCookieBean parse (String setCookie) { | |||
final HttpCookieBean cookie = new HttpCookieBean(); | |||
final StringTokenizer st = new StringTokenizer(setCookie, ";"); | |||
while (st.hasMoreTokens()) { | |||
final String token = st.nextToken().trim(); | |||
if (cookie.name == null) { | |||
                // first token is name=value; split on the first '=' only so values containing '=' survive intact
                final String[] parts = token.split("=", 2);
                cookie.name = parts[0];
                cookie.value = parts.length == 1 ? "" : parts[1];
} else if (token.contains("=")) { | |||
final String[] parts = token.split("="); | |||
switch (parts[0].toLowerCase()) { | |||
case "path": cookie.path = parts[1]; break; | |||
case "domain": cookie.domain = parts[1]; break; | |||
case "expires": cookie.expires = parts[1]; break; | |||
case "max-age": cookie.maxAge = Long.valueOf(parts[1]); break; | |||
default: log.warn("Unrecognized cookie attribute: "+parts[0]); | |||
} | |||
} else { | |||
switch (token.toLowerCase()) { | |||
case "httponly": cookie.httpOnly = true; break; | |||
case "secure": cookie.secure = true; break; | |||
default: log.warn("Unrecognized cookie attribute: "+token); | |||
} | |||
} | |||
} | |||
return cookie; | |||
} | |||
public String toHeaderValue () { | |||
StringBuilder sb = new StringBuilder(); | |||
sb.append(name).append("=").append(value); | |||
if (!empty(expires)) sb.append("; Expires=").append(expires); | |||
if (maxAge != null) sb.append("; Max-Age=").append(maxAge); | |||
if (!empty(path)) sb.append("; Path=").append(path); | |||
if (!empty(domain)) sb.append("; Domain=").append(domain); | |||
if (httpOnly) sb.append("; HttpOnly"); | |||
if (secure) sb.append("; Secure"); | |||
return sb.toString(); | |||
} | |||
public String toRequestHeader () { return name + "=" + value; } | |||
    public boolean expired () {
        return (maxAge != null && maxAge <= 0)
                || (!empty(expires) && getExpiredDateTime().isBeforeNow());
    }
    public boolean expired (long expiration) {
        // Max-Age is specified in seconds (RFC 6265), so convert to millis before comparing
        return (maxAge != null && now() + (maxAge * 1000) < expiration)
                || (!empty(expires) && getExpiredDateTime().isBefore(expiration));
    }
    @JsonIgnore public Date getExpiryDate () {
        if (maxAge != null) return new Date(now() + (maxAge * 1000)); // Max-Age is in seconds
        if (!empty(expires)) return getExpiredDateTime().toDate();
        return null;
    }
protected DateTime getExpiredDateTime() { | |||
if (empty(expires)) { | |||
return null; | |||
} | |||
for (DateTimeFormatter formatter : EXPIRES_PATTERNS) { | |||
try { | |||
return formatter.parseDateTime(expires); | |||
} catch (Exception ignored) {} | |||
} | |||
return die("getExpiredDateTime: unparseable 'expires' value for cookie "+name+": '"+expires+"'"); | |||
} | |||
public Cookie toHttpClientCookie() { | |||
final BasicClientCookie cookie = new BasicClientCookie(name, value); | |||
cookie.setExpiryDate(getExpiryDate()); | |||
cookie.setPath(path); | |||
cookie.setDomain(domain); | |||
cookie.setSecure(secure); | |||
return cookie; | |||
} | |||
} |
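/*
 * Usage sketch (illustrative): parse() understands a Set-Cookie header value, while
 * toHeaderValue() and toRequestHeader() render the cookie back out for responses and
 * requests respectively.
 *
 *   HttpCookieBean cookie = HttpCookieBean.parse("id=a3fWa; Max-Age=2592000; Path=/; Secure; HttpOnly");
 *   cookie.getName();           // "id"
 *   cookie.toRequestHeader();   // "id=a3fWa"
 *   cookie.expired();           // false, since Max-Age is positive
 */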
@@ -0,0 +1,42 @@ | |||
package org.cobbzilla.util.http; | |||
import lombok.Getter; | |||
import lombok.NoArgsConstructor; | |||
import lombok.Setter; | |||
import lombok.experimental.Accessors; | |||
import lombok.extern.slf4j.Slf4j; | |||
import org.cobbzilla.util.io.FileUtil; | |||
import java.io.File; | |||
import static org.cobbzilla.util.io.FileUtil.abs; | |||
@NoArgsConstructor @Accessors(chain=true) @Slf4j | |||
public class HttpMeta { | |||
public HttpMeta (String url) { this.url = url; } | |||
@Getter @Setter private String url; | |||
@Getter @Setter private Long lastModified; | |||
public boolean hasLastModified () { return lastModified != null; } | |||
@Getter @Setter private String etag; | |||
public boolean hasEtag () { return etag != null; } | |||
public boolean shouldRefresh(File file) { | |||
if (file == null) return true; | |||
if (hasLastModified()) return getLastModified() > file.lastModified(); | |||
if (hasEtag()) { | |||
final File etagFile = new File(abs(file)+".etag"); | |||
if (etagFile.exists()) { | |||
try { | |||
return !FileUtil.toString(etagFile).equals(etag); | |||
} catch (Exception e) { | |||
log.warn("shouldRefresh: "+e); | |||
return true; | |||
} | |||
} | |||
} | |||
return true; | |||
} | |||
} |
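/*
 * Usage sketch (illustrative; the URL, file path, and remoteLastModified value are made up):
 * shouldRefresh() compares server-side metadata against a locally cached file, preferring
 * Last-Modified when present and falling back to a sibling ".etag" file; with neither
 * available it errs on the side of refreshing.
 *
 *   HttpMeta meta = new HttpMeta("https://example.com/data.json");
 *   meta.setLastModified(remoteLastModified);   // e.g. a timestamp taken from a HEAD request
 *   if (meta.shouldRefresh(new File("/tmp/data.json"))) {
 *       // re-download and overwrite the cached file
 *   }
 */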